Rewrite code to render video stream on canvas
Valkryst committed Dec 5, 2023
1 parent 3ea4b9d commit cf6ed11
Showing 5 changed files with 47 additions and 105 deletions.
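In short: the camera feed is no longer shown by overlaying a CSS-sized <video> element; the <video> is hidden and fed into a Three.js VideoTexture that MeshRenderer draws on a plane behind the keypoints, so everything renders on the one canvas. A minimal, self-contained sketch of that approach (element lookups, sizes, and camera setup here are illustrative, not the project's exact code):

import {
    Mesh, MeshBasicMaterial, OrthographicCamera, PlaneGeometry,
    Scene, SRGBColorSpace, VideoTexture, WebGLRenderer
} from "three";

// The <video> element keeps playing the camera stream but is hidden via CSS;
// it only serves as the source for the texture.
const video = document.querySelector("video");
const canvas = document.querySelector("canvas");

const renderer = new WebGLRenderer({ canvas, alpha: true });
renderer.setSize(video.width, video.height, false);

// Orthographic camera with the origin at the top-left of the canvas.
const camera = new OrthographicCamera(0, video.width, 0, -video.height, 0.1, 1000);
camera.position.z = 1;

const texture = new VideoTexture(video);
texture.colorSpace = SRGBColorSpace;

// Full-size plane showing the video, pushed back so keypoints render in front of it.
const plane = new Mesh(
    new PlaneGeometry(video.width, video.height),
    new MeshBasicMaterial({ map: texture, toneMapped: false })
);
plane.position.set(video.width / 2, -video.height / 2, -100);

const scene = new Scene();
scene.add(plane);
renderer.setAnimationLoop(() => renderer.render(scene, camera));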
62 changes: 4 additions & 58 deletions css/styles.css
@@ -3,12 +3,6 @@ html {

& > body {
height: 100%;

& > main {
& > canvas, & > video {
position: absolute;
}
}
}
}

@@ -28,61 +22,13 @@ table, th, td {
#jellron-display {
& > canvas, & > video {
position: absolute;

transform-origin: top left;
transform: scale(-1, 1) translateX(-100%);
}
}

@media only screen and (min-width: 320px) {
#jellron-display {
& > video {
height: 240px;
width: 320px;
}
}
}

@media only screen and (min-width: 640px) {
#jellron-display {
& > video {
height: 480px;
width: 640px;
}
& > canvas {
transform: scaleX(-1);
}
}

@media only screen and (min-width: 800px) {
#jellron-display > video {
height: 600px;
width: 800px;
}
}

@media only screen and (min-width: 1024px) {
#jellron-display > video {
height: 768px;
width: 1024px;
}
}

@media only screen and (min-width: 1280px) {
#jellron-display > video {
height: 960px;
width: 1280px;
}
}

@media only screen and (min-width: 1600px) {
#jellron-display > video {
height: 1200px;
width: 1600px;
}
}

@media only screen and (min-width: 1920px) {
#jellron-display > video {
height: 1440px;
width: 1920px;
& > video {
visibility: hidden;
}
}
21 changes: 8 additions & 13 deletions index.html
@@ -4,7 +4,7 @@
<title>Jellron</title>

<meta charset="utf-8">
<meta name="viewport" content="width=device-width">
<meta name="viewport" content="width=device-width,initial-scale=1,maximum-scale=1.0, user-scalable=no">

<link rel="stylesheet" href="css/styles.css">
</head>
@@ -224,11 +224,10 @@
</form>

<div id="jellron-display">
<canvas></canvas>
<video autoplay playsinline>
This page requires a browser that supports the <em>video</em> element.
</video>

<canvas></canvas>
</div>
</main>
</body>
@@ -274,6 +273,9 @@
});
glContext.autoClear = true;

const deviceSelect = await Camera.getSelectElement();
document.getElementById("video-input-device-select-container").appendChild(deviceSelect);

const mesh = new Mesh();
const faceDetector = new FaceDetector();
const bodyDetector = new BodyDetector();
@@ -336,7 +338,7 @@
await camera.setVideoElement(document.getElementsByTagName("video")[0]);

const videoElement = await camera.getVideoElement();
glContext.setSize(videoElement.scrollWidth, videoElement.scrollHeight, false);
glContext.setSize(videoElement.width, videoElement.height, false);

if (!bodyDetector.isRunning()) {
bodyDetector.start(15, videoElement, mesh);
@@ -367,20 +369,13 @@

window.onresize = () => updateDisplay();

const deviceSelect = await Camera.getSelectElement(() => {
deviceSelect.onchange = () => {
try {
updateDisplay();
} catch (error) {
console.error(error);

bodyDetector.stop();
faceDetector.stop();
handDetector.stop();
meshRenderer.stop();
}
});
document.getElementById("video-input-device-select-container").appendChild(deviceSelect);

};


// Register Listeners for Body Settings
10 changes: 3 additions & 7 deletions js/camera.js
@@ -20,21 +20,17 @@ export class Camera {
* The select element is configured to automatically update as the set of available devices changes, and when the
* camera permissions are changed.
*
* @param {function} onChange Function to call when the value of the select element changes. This will replace the existing onChange function.
*
* @returns {Promise<HTMLSelectElement>} A promise that resolves to a select element.
*/
static async getSelectElement(onChange = null) {
static async getSelectElement() {
if (Camera.selectElement) {
Camera.selectElement.onchange = () => onChange?.();
await Camera.updateSelectElement();
return Camera.selectElement;
}

const select = document.createElement("select");
select.disabled = true;
select.id = "video-input-device-select";
select.onchange = () => onChange?.();
Camera.selectElement = select;

await Camera.updateSelectElement();
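The doc comment above says the element updates itself when the available devices or the camera permissions change, but Camera.updateSelectElement() is outside this diff, so the sketch below is only a guess at how that wiring could look, using the standard devicechange event and the Permissions API (which not every browser supports for "camera"):

// Hypothetical wiring for the "automatically update" behaviour; not taken from
// this repository.
navigator.mediaDevices.addEventListener("devicechange", () => {
    Camera.updateSelectElement();
});

navigator.permissions?.query({ name: "camera" })
    .then((status) => {
        // Re-populate the device list when access is granted or revoked.
        status.addEventListener("change", () => Camera.updateSelectElement());
    })
    .catch(() => {
        // Some browsers reject "camera" as a permission name; ignore.
    });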
@@ -129,8 +125,8 @@
* Both FaceDetector and HandDetector use the video element's height and width properties to scale the input
* MediaStream before processing it. This is why we need to set them.
*/
this.videoElement.height = this.videoElement.scrollHeight;
this.videoElement.width = this.videoElement.scrollWidth;
this.videoElement.height = await this.getMediaStreamHeight();
this.videoElement.width = await this.getMediaStreamWidth();
}
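getMediaStreamHeight() and getMediaStreamWidth() are not shown in this diff; presumably they read the real frame dimensions from the stream's video track, roughly along these lines (a sketch under that assumption, inside the Camera class):

// Hypothetical implementations: read the frame size from the active video
// track's settings instead of the element's CSS-driven scroll size.
async getMediaStreamHeight() {
    const [track] = this.videoElement.srcObject.getVideoTracks();
    return track.getSettings().height;
}

async getMediaStreamWidth() {
    const [track] = this.videoElement.srcObject.getVideoTracks();
    return track.getSettings().width;
}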

/**
8 changes: 1 addition & 7 deletions js/detector/body_detector.js
@@ -40,20 +40,14 @@ export class BodyDetector extends Detector {
this.intervalId = setInterval(async () => {
const currentTime = performance.now();

let frame = null;
let rawBodies = [];
try {
frame = tf.browser.fromPixels(videoElement);
frame = tf.image.resizeBilinear(frame, [videoElement.height, videoElement.width]);

rawBodies = await this.detector.estimatePoses(frame);
rawBodies = await this.detector.estimatePoses(videoElement);
} catch (e) {
/*
* Depending on the state of the video element, this can throw a "Requested texture size [0x0] is
* invalid." error. It doesn't seem to cause any issues, so we ignore it.
*/
} finally {
frame?.dispose();
}

if (rawBodies.length === 0) {
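The tf.browser.fromPixels / resizeBilinear round-trip could be dropped because the pose-detection API accepts image-like inputs, including an HTMLVideoElement, directly. A minimal usage sketch (MoveNet is chosen here purely for illustration; the model this detector actually wraps isn't visible in the hunk):

import "@tensorflow/tfjs-backend-webgl";
import * as poseDetection from "@tensorflow-models/pose-detection";

// estimatePoses() accepts an HTMLVideoElement (as well as canvases, ImageData,
// and tensors), so no manual conversion or resize is required.
const detector = await poseDetection.createDetector(poseDetection.SupportedModels.MoveNet);
const poses = await detector.estimatePoses(document.querySelector("video"));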
51 changes: 31 additions & 20 deletions js/mesh_renderer.js
@@ -1,6 +1,7 @@
import {Keypoint} from "./keypoint.js";
import {Mesh} from "./mesh.js";
import {OrthographicCamera, Scene, WebGLRenderer} from "three";
import {FrontSide, MeshBasicMaterial, OrthographicCamera, PlaneGeometry, Scene, SRGBColorSpace, VideoTexture, WebGLRenderer} from "three";
import {Mesh as ThreeMesh} from "three";
import {
validateBoolean,
validateInstanceOf, validateNonEmptyString,
@@ -40,8 +41,9 @@ export class MeshRenderer {

const camera = this.createCamera(glContext);
const scene = new Scene();
scene.add(this.createVideoMesh());

this.intervalId = setInterval(() => {
this.intervalId = setInterval(async () => {
const currentTime = performance.now();

for (const keypoint of mesh.getBodyKeypoints()) {
@@ -160,6 +162,33 @@
return camera;
}

// todo Cleanup & Document, also pass in params.
createVideoMesh() {
const canvasElement = document.getElementsByTagName("canvas")[0];
const videoElement = document.getElementsByTagName("video")[0];

const videoTexture = new VideoTexture(videoElement);
videoTexture.colorSpace = SRGBColorSpace;
videoTexture.needsUpdate = true;

const videoMaterial = new MeshBasicMaterial({
map: videoTexture,
side: FrontSide,
toneMapped: false
});
videoMaterial.needsUpdate = true;

const videoMesh = new ThreeMesh(
new PlaneGeometry(canvasElement.scrollWidth, canvasElement.scrollHeight),
videoMaterial
);
videoMesh.position.x += canvasElement.scrollWidth / 2;
videoMesh.position.y -= canvasElement.scrollHeight / 2;
videoMesh.position.z -= 100;

return videoMesh;
}
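The half-width/half-height offsets and the z -= 100 push assume an orthographic camera whose origin sits at the canvas's top-left corner, with positions further down the screen mapped to more negative y. createCamera() isn't part of this hunk; a camera consistent with that positioning would look roughly like this (an assumption, not the project's code):

// Hypothetical createCamera() matching the plane placement above; not taken
// from this repository.
createCamera(glContext) {
    const canvas = glContext.domElement;
    const camera = new OrthographicCamera(
        0, canvas.scrollWidth,   // left, right
        0, -canvas.scrollHeight, // top, bottom
        0.1, 1000                // near, far
    );
    camera.position.z = 1;
    return camera;
}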

/**
* Displays a 2D necklace on the necklace Keypoint.
*
@@ -188,24 +217,6 @@
mesh.getEarlobeKeypoints()[isLeft ? 0 : 1].display2DAsset(url);
}

/**
* Calculates the distance between two Keypoints.
*
* @param {Keypoint} keypoint1 First Keypoint.
* @param {Keypoint} keypoint2 Second Keypoint.
*
* @returns {number} Distance between the Keypoints.
*/
distanceBetweenKeypoints(keypoint1, keypoint2) {
validateInstanceOf(keypoint1, Keypoint);
validateInstanceOf(keypoint2, Keypoint);

return Math.sqrt(
Math.pow(keypoint1.getX() - keypoint2.getX(), 2) +
Math.pow(keypoint1.getY() - keypoint2.getY(), 2)
);
}

/**
* Places a Keypoint within a Scene.
*
