HTML

<label for="mask">Mask:</label>
<input type="file" id="mask" name="file" accept="image/*" />
<img src="https://s3-us-west-2.amazonaws.com/s.cdpn.io/697675/GAN_mask.png" id="maskImg" crossOrigin="anonymous" />
<br />
<label for="photo">Photo:</label>
<input type="file" id="photo" name="file" accept="image/*" />
<br clear="all" />
<img id="chromaImg" src="https://images-na.ssl-images-amazon.com/images/I/51Hykj55a2L._UX320_.jpg" crossOrigin="anonymous" />
<canvas id="chroma"></canvas>
<div class="pixi">
  <div id="pixi"></div>
</div>

<details>
  <ol>
    <li><a href="//github.com/blueimp/JavaScript-Load-Image#demo">camera orient</a></li>
    <li>pad/<a href="https://docs.opencv.org/3.0-beta/doc/py_tutorials/py_imgproc/py_contours/py_contour_features/py_contour_features.html#b-rotated-rectangle">orient face</a> => <a href="//github.com/opencv/opencv/tree/master/data/haarcascades">feature detection</a> => <a href="https://docs.opencv.org/2.4/modules/imgproc/doc/miscellaneous_transformations.html?highlight=grabcut#cv2.grabCut">GrabCut mask prep</a></li>
    <li>GreenScreen/GrabCut</li>
    <li><a href="https://docs.opencv.org/3.0-beta/modules/ximgproc/doc/superpixels.html">superpixels</a>/<a href="https://docs.opencv.org/3.0-beta/doc/py_tutorials/py_imgproc/py_contours/py_contours_more_functions/py_contours_more_functions.html#convexity-defects">segment bones</a></li>
    <li><a href="https://bl.ocks.org/mbostock/3750558">mesh</a> or <a href="https://futurism.com/google-ai-surroundings-3d-model">mesh</a></li>
    <li><a href="https://www.theverge.com/2016/12/20/14022958/ai-image-manipulation-creation-fakes-audio-video">mesh action (i.e. nod)</a></li>
  </ol>

  <pre><img style="float:left;height:6em;" src="https://s3-us-west-2.amazonaws.com/s.cdpn.io/697675/superpixel.png" />COLOR	WHAT	WHY
green	top	gravity
gray	area	group
yellow	convex	group
orange	remove	ux
</pre>

  <pre>
STEP		WHAT
+pad 1.5x	
faceDetect	faces
		=> probable grabCut mask/Mat
grabCut		alpha
==========	==========
bones		findContours => convexHull (GROUPING)
		?=> convexityDefects
		?=> invert alpha mask/Mat
		?=> threshold => fitLine cross-sections bitwise_not
==========	==========
facemask	copyTo roi (seamlessClone)
-pad 1.5x	=> canvas => Pixi
</pre>
</details>

<p class="err" id="errorMessage"></p>

CSS

html,
body,
#pixi {
  margin: 0;
  padding: 0;
  font-family: sans-serif;
}

.loading:before {
  content: '◌';
  font-size: 20rem;
  text-align: center;
  position: fixed;
  z-index: 1;
  top: 0;
  left: 0;
  width: 100%;
  height: 100%;
  animation: wait 1s infinite;
  @keyframes wait {
    50% {
      opacity: 0;
    }
  }
}

label {
  min-width: 3em;
  display: inline-block;
  margin: .25em;
}

#maskImg {
  height: 3em;
  float: left;
}

canvas {
  background-image: url("data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAAQAAAAECAYAAACp8Z5+AAAAIElEQVQYV2Nk+M/QwMDI0MAABYxgGkkQIoAkiBCACgIABm4HhEEa4PgAAAAASUVORK5CYII=");
}

#chroma {
  cursor: pointer;
}

#chromaImg,
#chroma,
.pixi {
  width: 33.3%;
  height: auto;
  float: left;
  position: relative;
}

#pixi canvas {
  position: absolute;
  top: 0;
  left: 0;
  & + canvas {
    opacity: .25;
  }
}

details {
  clear: both;
}

JS

let utils = new Utils('errorMessage');
utils.loadOpenCv(() => {
  utils.createFileFromUrl('haarcascade_frontalface_default.xml', 'https://raw.githubusercontent.com/opencv/opencv/master/data/haarcascades/haarcascade_frontalface_default.xml', faceDetect);
});
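// Utils, loadOpenCv and createFileFromUrl are assumed to come from the OpenCV.js tutorial helper script (utils.js) loaded as an external resource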
utils.loadImageToCanvas = function(url, canvasId) {
  let img = document.getElementById(canvasId + "Img");
  img.crossOrigin = "anonymous";
  img.onload = function() {
    if (canvasId == "mask") {
      // the mask decal is read straight from the <img>, so there is nothing to draw
      return;
    }
    let canvas = document.getElementById(canvasId);
    let ctx = canvas.getContext("2d");
    ctx.drawImage(img, 0, 0, img.width, img.height);
    faceDetect();
  };
  img.src = url;
};

utils.addFileInputHandler('photo', 'chroma');
utils.addFileInputHandler('mask', 'mask');


//cloth/mesh demo: codepen.io/shshaw/details/JbrQrW
//note: the animated face zones scale with the overall grid (pointsX/pointsY) rather than independently
let opts = {
	image: document.getElementById('chroma').toDataURL("image/png"),
	gravity: 0,
	friction: 0.25,
	bounce: 0.66,
	pointsX: 80,
	pointsY: 80,
	renderCloth: true,
	mouseInfluence: 25,
	pinCorners: true,
	OpenCV: {
		faceSet: [],
		source: document.getElementById('chromaImg'),
		chroma: []
	}
};


function faceDetect() {
	console.log('faceDetect');
	opts.OpenCV.faceSet = []; //reset storage
	document.body.className = 'loading';

	let src = cv.imread('chromaImg');

	//search area expand (1/3)
	const size = src.size(),
				R = 0.5;
	cv.copyMakeBorder(src, src, size.height*R, size.height*R, size.width*R, size.width*R, cv.BORDER_ISOLATED);
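	// (padding every side by R of the frame keeps faces near the border detectable; the padding is cropped back off in step 3/3)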

	let gray = new cv.Mat();
	cv.cvtColor(src, gray, cv.COLOR_RGBA2GRAY, 0);
	let faces = new cv.RectVector();
	let faceCascade = new cv.CascadeClassifier();
	// load pre-trained classifiers (face detect)
	faceCascade.load('haarcascade_frontalface_default.xml');
	let minSize = new cv.Size(size.width/16, size.height/16),
		maxSize = new cv.Size(size.width/2, size.height/2);//divide by R for total canvas

	faceCascade.detectMultiScale(gray, faces, 1.1, 4, cv.CASCADE_DO_CANNY_PRUNING, minSize, maxSize);
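	// (scaleFactor 1.1, minNeighbors 4; flags is a legacy parameter, and the size bounds keep detections between 1/16 and 1/2 of the unpadded frame)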

	let decal = cv.imread('maskImg');
	for (let i = 0; i < faces.size(); ++i) {
		let face = faces.get(i);
		//search area expand 2/3
		let faceUnBorder = faces.get(i);
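		//faces.get(i) hands back a copy of the rect, so shifting it into un-padded coordinates below leaves `face` untouched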
		faceUnBorder.x -= size.width*R;
		faceUnBorder.y -= size.height*R;
		faceUnBorder.rowColBind = [];
		opts.OpenCV.faceSet.push(faceUnBorder);

		let roiGray = gray.roi(face);
		let roiSrc = src.roi(face);
		let point1 = new cv.Point(face.x, face.y);
		let point2 = new cv.Point(face.x + face.width, face.y + face.height);
		//cv.rectangle(src, point1, point2, [255, 0, 0, 255]);

		//facemask
		let mskWH = [point2.x - point1.x, point2.y - point1.y];
		cv.resize(decal, decal, new cv.Size(mskWH[0], mskWH[1]),0,0,cv.INTER_NEAREST);
		let mskRoi = src.roi(new cv.Rect(point1.x, point1.y, mskWH[0], mskWH[1]));

		//mask alpha
		let alpha = new cv.Mat();
		cv.cvtColor(decal, alpha, cv.COLOR_RGBA2GRAY); //decal comes from cv.imread, so it is RGBA
		cv.threshold(alpha, alpha, 0, 255, cv.THRESH_BINARY);
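		//any non-black decal pixel becomes 255 in alpha, so copyTo below only stamps the visible part of the decal onto the face ROI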

		decal.copyTo(mskRoi, alpha);
		alpha.delete();
		roiGray.delete();
		roiSrc.delete();
	}

	//search area contract (3/3): crop the padding back off
	let rect = new cv.Rect(size.width*R, size.height*R, size.width, size.height);
	let cropped = src.roi(rect);

	cv.imshow('chroma', cropped);
	cropped.delete();
	src.delete();
	decal.delete();
	gray.delete();
	faceCascade.delete();
	faces.delete();

	grabCut();
}


function grabCut() {
	console.log('grabCut');
	let hits = [];

	let src = cv.imread('chroma');
	// probable mask from face zones
	//answers.opencv.org/question/132163/grabcut-mask-values/
	cv.cvtColor(src, src, cv.COLOR_RGBA2RGB, 0);
	let mask = cv.Mat.zeros(src.rows, src.cols, cv.CV_8UC1);
	const srcW = src.size().width,
		srcH = src.size().height,
		D = canvas.width * 0.1;

		let GC = {
		BGD: new cv.Scalar(cv.GC_BGD),
		FGD: new cv.Scalar(cv.GC_FGD),
		PR_BGD: new cv.Scalar(cv.GC_PR_BGD),
		PR_FGD: new cv.Scalar(cv.GC_PR_FGD),
		GreenScreen: function(i,j){
			if (src.ucharPtr(i, j)[0] < 48 &&
				src.ucharPtr(i, j)[1] > 224 &&
				src.ucharPtr(i, j)[2] < 48) {
				return true;
			}
		}
	};
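	//numeric GrabCut labels stored in the mask: cv.GC_BGD = 0, cv.GC_FGD = 1, cv.GC_PR_BGD = 2, cv.GC_PR_FGD = 3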
	
	//helper rects
	let GC_PR = [new cv.Point(0, 0), new cv.Point(srcW, srcH)];
	cv.rectangle(mask, GC_PR[0], GC_PR[1], GC.PR_FGD, -1, 4, 0);
	cv.rectangle(mask, GC_PR[0], GC_PR[1], GC.PR_BGD, D * 2, 4, 0);
	//corners background?
	cv.circle(mask, new cv.Point(0, 0), D*2, GC.PR_BGD, -1, 4, 0);
	cv.circle(mask, new cv.Point(srcW, 0), D*2, GC.PR_BGD, -1, 4, 0);
	cv.circle(mask, new cv.Point(0, srcH), D*2, GC.PR_BGD, -1, 4, 0);
	cv.circle(mask, new cv.Point(srcW, srcH), D*2, GC.PR_BGD, -1, 4, 0);
	//greenscreen?
	for (let i = 0; i < src.rows; i+=3) {
		for (let j = 0; j < src.cols; j+=3) {
			if (GC.GreenScreen(i,j)) {
				mask.ucharPtr(i, j)[0] = cv.GC_PR_BGD; //numeric label (2); ucharPtr expects a number, not a cv.Scalar
			}
		}
	}
		

	let faces = opts.OpenCV.faceSet;
	for (let i = 0; i < faces.length; ++i) {
		//face zone classify (these locals shadow the outer GC / GC_PR helpers: per-face definite / probable foreground boxes)
		let pt = faces[i];
		let GC_PR = [new cv.Point(pt.x, pt.y - (pt.height / 3)),
			new cv.Point(pt.x + pt.width, pt.y + (pt.height * 3))];
		let GC = [new cv.Point(pt.x + (pt.width / 3), pt.y),
			new cv.Point(pt.x + pt.width - (pt.width / 3), pt.y + (pt.height * 3) /*3 head-heights, unless no body*/)];

		cv.rectangle(mask, GC_PR[0], GC_PR[1], new cv.Scalar(cv.GC_PR_FGD), -1, 4, 0);
		cv.rectangle(mask, GC[0], GC[1], new cv.Scalar(cv.GC_FGD), -1, 4, 0);
	}

	let bgdModel = new cv.Mat();
	let fgdModel = new cv.Mat();
	let rect = new cv.Rect(D, D, srcW, srcH);
	//with GC_INIT_WITH_MASK the rect argument is ignored; the hand-built mask above drives initialization
	cv.grabCut(src, mask, rect, bgdModel, fgdModel, 2, cv.GC_INIT_WITH_MASK);

	// draw grab rect
	//let point1 = new cv.Point(rect.x, rect.y);
	//let point2 = new cv.Point(rect.x + rect.width, rect.y + rect.height);
	//cv.rectangle(src, point1, point2, new cv.Scalar(0, 0, 255));

	//paint every pixel GrabCut labeled background (GC_BGD or GC_PR_BGD) pure green
	cv.cvtColor(src, src, cv.COLOR_RGB2RGBA);
	for (let i = 0; i < src.rows; i++) {
		hits[i] = [];
		for (let j = 0; j < src.cols; j++) {
			if (mask.ucharPtr(i, j)[0] === 0 || mask.ucharPtr(i, j)[0] === 2) {
				src.ucharPtr(i, j)[0] = 0;
				src.ucharPtr(i, j)[1] = 255;
				src.ucharPtr(i, j)[2] = 0;
			}
		}
	}

	//spaceX strides rows (i) and spaceY strides columns (j), matching alphaPointTest below
	let spaceX = src.rows / opts.pointsY;
	let spaceY = src.cols / opts.pointsX;

	//sample one 0/1 opacity value per cloth grid point
	function alphaPointTest(i, j, opacity) {
		if (i % spaceX < 1 && j % spaceY < 1) {
			hits[i].push(opacity);
		}
	}

	let promise = new Promise(function(resolve, reject) {

		// background chroma to transparent
		for (let i = 0; i < src.rows; i++) {
			for (let j = 0; j < src.cols; j++) {
				if (src.ucharPtr(i, j)[1] == 255) {
					src.ucharPtr(i, j)[3] = 0;
					alphaPointTest(i, j, 0);
				} else {
					alphaPointTest(i, j, 1);
				}
			}
		}

		cv.imshow('chroma', src);

		src.delete();
		mask.delete();
		bgdModel.delete();
		fgdModel.delete();

		opts.OpenCV.chroma = hits.filter(function(el) {
			return el.length != 0;
		});

		resolve('GrabCut => Pixi');
	});

	promise.then(function(value) {
		pointActive();
		loadTexture();
		console.log(value);
		// expected output: "GrabCut => Pixi"
	});

}

function pinEdge(rc) {
	if (rc[0] <= 1 || rc[1] <= 1 ||
		rc[0] >= opts.pointsX - 2 || rc[1] >= opts.pointsY - 2) {
		console.log('edge');
		return true;
	}
	return false;
}


//PIXI.js
var PixiDiv = document.getElementById("pixi");

let mesh;
let cloth;
let spacingX = 1;
let spacingY = 1;
let accuracy = 1;

let canvas = document.createElement('canvas');
let ctx = canvas.getContext('2d');
PixiDiv.appendChild(canvas);

let mouse = {
	down: false,
	x: 0,
	y: 0,
	px: 0,
	py: 1
};

/*////////////////////////////////////////*/

let stage = new PIXI.Container();

let renderer = PIXI.autoDetectRenderer(opts.OpenCV.source.width, opts.OpenCV.source.height, {
	transparent: true
});
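// assumes the older PIXI (v4-style) autoDetectRenderer(width, height, options) signature, and that #chromaImg has dimensions by the time this runs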

PixiDiv.insertBefore(renderer.view, canvas);
renderer.render(stage);
canvas.width = renderer.width;
canvas.height = renderer.height;

/*////////////////////////////////////////*/

function loadTexture() {
	opts.image = document.getElementById('chroma').toDataURL("image/png");

	if (cloth != undefined && mesh != undefined) {
		mesh.destroy(true);
		delete cloth.points;
		renderer.resize(opts.OpenCV.source.width, opts.OpenCV.source.height);
		canvas.width = opts.OpenCV.source.width;
		canvas.height = opts.OpenCV.source.height;
		console.log(opts.OpenCV.faceSet);
	}

	console.log('loading texture', opts.image);
	document.body.className = 'loading';

	let texture = PIXI.Texture.fromImage(opts.image);
	if (!texture.requiresUpdate) {
		texture.update();
	}

	texture.on('error', function() {
		console.error('AGH!');
	});

	texture.on('update', function() {
		document.body.className = '';
		console.log('texture loaded');

		if (mesh) {
			stage.removeChild(mesh);
		}

		mesh = new PIXI.mesh.Plane(this, opts.pointsX, opts.pointsY);
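		// PIXI.mesh.Plane(texture, verticesX, verticesY) is the v4 API; later PIXI versions renamed it PIXI.SimplePlane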
		mesh.width = this.width;
		mesh.height = this.height;

		spacingX = mesh.width / (opts.pointsX - 1);
		spacingY = mesh.height / (opts.pointsY - 1);

		cloth = new Cloth(opts.pointsX - 1, opts.pointsY - 1, !opts.pinCorners);

		stage.addChild(mesh);

		var clothPoints = new Promise(function(resolve, reject) {

			for (var i = 0; i < cloth.points.length; i++) {
				let point = cloth.points[i];
				let rc = point.rowCol;

				if (point && point.chroma === 0) {
					//dont pin for freefloating
					//point.pinX = point.x;
					//point.pinY = point.y;

					//expand img zone 2pt for less pinching
					if (pinEdge(rc) ||
						!point.pinX && !point.pinY &&
						opts.OpenCV.chroma[rc[1]][rc[0] + 2] === 0 &&
						opts.OpenCV.chroma[rc[1]][rc[0] - 2] === 0 &&
						opts.OpenCV.chroma[rc[1] + 2][rc[0]] === 0 &&
						opts.OpenCV.chroma[rc[1] - 2][rc[0]] === 0) {
						delete cloth.points[i];
					}
				}

			}
			console.log(opts.OpenCV.chroma);

			resolve('Pixi => animate');
		});

		clothPoints.then(function(value) {
			update();
			pointMove();
			console.log(value);
			// expected output: "Pixi => animate"
		});

	});

}


function update() {
	requestAnimationFrame(update);

	ctx.clearRect(0, 0, canvas.width, canvas.height);

	if (cloth) {
		cloth.update(0.016);
	}

	renderer.render(stage);
}

/*////////////////////////////////////////*/

class Point {
	constructor(x, y) {
		this.x = x;
		this.y = y;
		this.px = x;
		this.py = y;
		this.vx = 0;
		this.vy = 0;
		this.pinX = null;
		this.pinY = null;

		this.constraints = [];
	}

	update(delta) {
		if (this.pinX && this.pinY) return this;

		if (mouse.down) {
			let dx = this.x - mouse.x;
			let dy = this.y - mouse.y;
			let dist = Math.sqrt(dx * dx + dy * dy);

			if (mouse.button === 1 && dist < opts.mouseInfluence) {
				this.px = this.x - (mouse.x - mouse.px);
				this.py = this.y - (mouse.y - mouse.py);
			} else if (dist < mouse.cut) { //mouse.cut is never set in this pen, so constraint cutting stays disabled
				this.constraints = [];
			}
		}

		this.addForce(0, opts.gravity);

		let nx = this.x + (this.x - this.px) * opts.friction + this.vx * delta;
		let ny = this.y + (this.y - this.py) * opts.friction + this.vy * delta;

		this.px = this.x;
		this.py = this.y;

		this.x = nx;
		this.y = ny;

		this.vy = this.vx = 0;

		if (this.x >= canvas.width) {
			this.px = canvas.width + (canvas.width - this.px) * opts.bounce;
			this.x = canvas.width;
		} else if (this.x <= 0) {
			this.px *= -1 * opts.bounce;
			this.x = 0;
		}

		if (this.y >= canvas.height) {
			this.py = canvas.height + (canvas.height - this.py) * opts.bounce;
			this.y = canvas.height;
		} else if (this.y <= 0) {
			this.py *= -1 * opts.bounce;
			this.y = 0;
		}

		return this;
	}

	draw() {
		let i = this.constraints.length;
		while (i--) this.constraints[i].draw();
	}

	resolve() {
		if (this.pinX && this.pinY) {
			this.x = this.pinX;
			this.y = this.pinY;
			return;
		}

		this.constraints.forEach((constraint) => constraint.resolve());
	}

	attach(point) {
		this.constraints.push(new Constraint(this, point));
	}

	free(constraint) {
		this.constraints.splice(this.constraints.indexOf(constraint), 1);
	}

	addForce(x, y) {
		this.vx += x;
		this.vy += y;
	}

	pin(pinx, piny) {
		this.pinX = pinx;
		this.pinY = piny;
	}

	unpin() {
		this.pinX = null;
		this.pinY = null;
	}
}

/*////////////////////////////////////////*/

class Constraint {
	constructor(p1, p2, length) {
		this.p1 = p1;
		this.p2 = p2;
		this.length = length || spacingX;
	}

	resolve() {
		let dx = this.p1.x - this.p2.x;
		let dy = this.p1.y - this.p2.y;
		let dist = Math.sqrt(dx * dx + dy * dy);

		if (dist < this.length) return;

		let diff = (this.length - dist) / dist;

		//if (dist > tearDist) this.p1.free(this)

		let mul = diff * 0.025 * (1 - this.length / dist); //gentle correction, scaled by the relative stretch, so unpinned/deleted regions settle instead of snapping

		let px = dx * mul;
		let py = dy * mul;

		!this.p1.pinX && (this.p1.x += px);
		!this.p1.pinY && (this.p1.y += py);
		!this.p2.pinX && (this.p2.x -= px);
		!this.p2.pinY && (this.p2.y -= py);

		return this;
	}

	draw() {
		ctx.moveTo(this.p1.x, this.p1.y);
		ctx.lineTo(this.p2.x, this.p2.y);
	}

}

/*////////////////////////////////////////*/

class Cloth {
	constructor(clothX, clothY, free) {
		this.points = [];

		let startX = canvas.width / 2 - clothX * spacingX / 2;
		let startY = 1;

		for (let y = 0; y <= clothY; y++) {
			for (let x = 0; x <= clothX; x++) {

				let point = new Point(
					startX + x * spacingX /* - (spacingX * Math.sin(y) )*/ ,
					y * spacingY + startY /*+ ( y !== 0 ? 5 * Math.cos(x) : 0 )*/
				);

				//opencv chroma for grid cleanup
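				//(opts.OpenCV.chroma is a row-major grid of 0/1 samples built in grabCut(): 0 = transparent background, 1 = opaque foreground)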
				point.chroma = opts.OpenCV.chroma[y] ? opts.OpenCV.chroma[y][x] : 0;
				point.rowCol = [x, y];

				for (var i = 0; i < opts.OpenCV.faceSet.length; i++) {
					let rcF = opts.OpenCV.faceSet[i].rowCol,
						rcP = point.rowCol;

					//expand mesh range to animate
					if (((rcP[0] >= rcF[0] - 6 && rcP[0] <= rcF[0] - 3) ||
						 (rcP[0] >= rcF[0] + 3 && rcP[0] <= rcF[0] + 6))
						&&
						(rcP[1] >= rcF[1] - 4 && rcP[1] <= rcF[1] - 2)) {
						point.active = true;
						opts.OpenCV.faceSet[i].rowColBind.push(point);
					}
				}

				!free && /* y === 0 */ pinEdge(point.rowCol) && point.pin(point.x, point.y);
				x !== 0 && point.attach(this.points[this.points.length - 1]);
				y !== 0 && point.attach(this.points[x + (y - 1) * (clothX + 1)]);

				this.points.push(point);

			}

		}
		console.log(this.points);

	}


	update(delta) {
		let i = accuracy;

		while (i--) {
			this.points.forEach((point) => {
				point.resolve();
			});
		}

		ctx.beginPath();

		this.points.forEach((point, i) => {
			point.update(delta * delta);

			if (opts.renderCloth) {
				point.draw();
			}

			if (mesh) {
				i *= 2;
				mesh.vertices[i] = point.x;
				mesh.vertices[i + 1] = point.y;
			}

		});

		ctx.stroke();
	}

}

function pointerMove(e) {
	e.preventDefault();
	var elRect = e.target.getBoundingClientRect();
	var offX = elRect.left,
		offY = elRect.top;

	let pointer = e.touches ? e.touches[0] : e;
	mouse.px = mouse.x || pointer.clientX;
	mouse.py = mouse.y || pointer.clientY;
	mouse.x = pointer.clientX - offX;
	mouse.y = pointer.clientY - offY;
}

function pointerDown(e) {
	e.preventDefault();
	mouse.down = true;
	mouse.button = 1;
	pointerMove(e);
}

function pointerUp(e) {
	mouse.down = false;
	mouse.px = null;
	mouse.py = null;
	console.log('pointer up');
}


PixiDiv.addEventListener('mousedown', pointerDown);
PixiDiv.addEventListener('touchstart', pointerDown);

PixiDiv.addEventListener('mousemove', pointerMove);
PixiDiv.addEventListener('touchmove', pointerMove);

PixiDiv.addEventListener('mouseup', pointerUp);
PixiDiv.addEventListener('touchend', pointerUp);
PixiDiv.addEventListener('mouseleave', pointerUp);


function pointActive() {
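	//map each detected face's center to the nearest cloth grid coordinate (rowCol)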
	Object.keys(opts.OpenCV.faceSet).forEach(function(key) {
		var face = opts.OpenCV.faceSet[key],
			midXY = [face.x + (face.width / 2),
				face.y + (face.height / 2)
			],
			midPrc = [midXY[0] / canvas.width,
				midXY[1] / canvas.height
			];
		opts.OpenCV.faceSet[key].rowCol =
			[Math.round(opts.pointsX * midPrc[0]),
			Math.round(opts.pointsY * midPrc[1])];
	});
}

function pointMove() {
	let faceSet = opts.OpenCV.faceSet;
	//one-shot: after a second, nudge the mesh points bound to each face up by one grid row
	setTimeout(function() {
		for (let i = 0; i < faceSet.length; i++) {
			let bindPoint = faceSet[i].rowColBind;
			//console.log(bindPoint);
			for (let j = 0; j < bindPoint.length; j++) {
				bindPoint[j].y -= (canvas.height / opts.pointsY);
			}
		}
	}, 1000);
}