Skip to content

Commit

Permalink
Make the AR avatar example behave like a mirror
Browse files — browse the repository at this point in the history
  • Loading branch information
willeastcott committed Feb 10, 2025
1 parent 952132c commit a1a9dbd
Show file tree
Hide file tree
Showing 2 changed files with 51 additions and 4 deletions.
2 changes: 1 addition & 1 deletion examples/assets/scripts/camera-feed.mjs
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,7 @@ export class CameraFeed extends Script {
* @type {boolean}
* @attribute
*/
mirror = false;
mirror = true;

/**
* @type {HTMLVideoElement|null}
Expand Down
53 changes: 50 additions & 3 deletions examples/assets/scripts/face-detection.mjs
Original file line number Diff line number Diff line change
Expand Up @@ -2,9 +2,30 @@ import { FaceLandmarker, FilesetResolver } from '@mediapipe/tasks-vision';
import { Mat4, Script } from 'playcanvas';

export class FaceDetection extends Script {
/** @type {FaceLandmarker} */
/**
 * Handle to the MediaPipe face landmark detector. Populated asynchronously
 * (presumably during initialize() — TODO confirm, assignment not visible
 * here); update() guards on its presence before use.
 *
 * @type {FaceLandmarker}
 * @private
 */
faceLandmarker;

/**
 * When true, each video frame is flipped horizontally before being fed to
 * the detector so that detection happens in the mirrored space.
 *
 * @type {boolean}
 * @private
 */
mirror = true;

/**
 * Lazily created off-screen canvas used to hold the horizontally flipped
 * video frame. Remains null until mirroring is first applied in update().
 *
 * @type {HTMLCanvasElement|null}
 * @private
 */
offscreenCanvas = null;

/**
 * 2D rendering context of offscreenCanvas. Created together with the
 * canvas; null until mirroring is first applied in update().
 *
 * @type {CanvasRenderingContext2D|null}
 * @private
 */
offscreenCtx = null;

async initialize() {
const wasmFileset = await FilesetResolver.forVisionTasks(
'../node_modules/@mediapipe/tasks-vision/wasm'
Expand All @@ -24,9 +45,35 @@ export class FaceDetection extends Script {
update(dt) {
if (this.faceLandmarker) {
const video = document.querySelector('video');
// Only process if the video has enough data.
if (video && video.readyState >= HTMLMediaElement.HAVE_ENOUGH_DATA) {
const detections = this.faceLandmarker.detectForVideo(video, Date.now());
if (detections && detections.faceBlendshapes) {
let inputElement = video;

// If we want the detection to work in the mirrored space,
// draw the video frame into an off-screen canvas that flips it.
if (this.mirror) {
if (!this.offscreenCanvas) {
this.offscreenCanvas = document.createElement('canvas');
this.offscreenCtx = this.offscreenCanvas.getContext('2d');
}
// Update canvas dimensions (in case they change).
this.offscreenCanvas.width = video.videoWidth;
this.offscreenCanvas.height = video.videoHeight;

// Draw the video frame flipped horizontally:
this.offscreenCtx.save();
this.offscreenCtx.scale(-1, 1);
// Drawing at negative width flips the image.
this.offscreenCtx.drawImage(video, -video.videoWidth, 0, video.videoWidth, video.videoHeight);
this.offscreenCtx.restore();

// Feed the flipped image to MediaPipe.
inputElement = this.offscreenCanvas;
}

const detections = this.faceLandmarker.detectForVideo(inputElement, Date.now());
if (detections) {
// Example: apply head transform using facial transformation matrix
if (detections.facialTransformationMatrixes.length > 0) {
const { data } = detections.facialTransformationMatrixes[0];
const matrix = new Mat4();
Expand Down

0 comments on commit a1a9dbd

Please sign in to comment.