I'm trying to use pose estimation coordinates to animate a rigged model in three.js. The pose estimation tech I'm using provides real-time x, y, z coordinates for a person in a video feed, and I'm trying to use those to move the 3D model accordingly. I used the code below (some of which I found in an answer to a related question) as a starting point...
let camera, scene, renderer, clock, rightArm;
init();
animate();
function init() {
  camera = new THREE.PerspectiveCamera(45, window.innerWidth / window.innerHeight, 0.01, 10);
  camera.position.set(2, 2, -2);

  clock = new THREE.Clock();

  scene = new THREE.Scene();
  scene.background = new THREE.Color(0xffffff);

  const light = new THREE.HemisphereLight(0xbbbbff, 0x444422);
  light.position.set(0, 1, 0);
  scene.add(light);

  // model
  const loader = new THREE.GLTFLoader();
  loader.load('https://threejs.org/examples/models/gltf/Soldier.glb', function(gltf) {
    const model = gltf.scene;
    rightArm = model.getObjectByName('mixamorigRightArm');
    scene.add(model);
  });

  renderer = new THREE.WebGLRenderer({
    antialias: true
  });
  renderer.setPixelRatio(window.devicePixelRatio);
  renderer.setSize(window.innerWidth, window.innerHeight);
  renderer.outputEncoding = THREE.sRGBEncoding;
  document.body.appendChild(renderer.domElement);

  window.addEventListener('resize', onWindowResize, false);

  const controls = new THREE.OrbitControls(camera, renderer.domElement);
  controls.target.set(0, 1, 0);
  controls.update();
}
function onWindowResize() {
  camera.aspect = window.innerWidth / window.innerHeight;
  camera.updateProjectionMatrix();
  renderer.setSize(window.innerWidth, window.innerHeight);
}
// This was my attempt at deriving the rotation from two Vector3s and applying it to the bone.
// storedresults is simply an array where I store the pose estimation data for each position.
// getPosition is just a helper function that returns the Vector3 for a specific position.
// vectorarray holds the last two sampled Vector3s for the position being tracked.
function setRightArmRotation() {
  if (rightArm) {
    if (storedresults === undefined || storedresults.length == 0) {
      return;
    } else {
      if (vectorarray.length < 2) {
        vectorarray.push(getPosition(12));
      } else {
        vectorarray.pop();
        vectorarray.push(getPosition(12));
        var quaternion = new THREE.Quaternion();
        quaternion.setFromUnitVectors(vectorarray[0], vectorarray[1]);
        var matrix = new THREE.Matrix4();
        matrix.makeRotationFromQuaternion(quaternion);
        rightArm.applyMatrix4(matrix);
      }
    }
  }
}
function animate() {
  requestAnimationFrame(animate);
  const t = clock.getElapsedTime();
  if (rightArm) {
    rightArm.rotation.z += Math.sin(t) * 0.005;
    //setRightArmRotation()
  }
  renderer.render(scene, camera);
}
<script src="https://cdn.jsdelivr.net/npm/three@0.125.2/build/three.js"></script>
<script src="https://cdn.jsdelivr.net/npm/three@0.125.2/examples/js/loaders/GLTFLoader.js"></script>
<script src="https://cdn.jsdelivr.net/npm/three@0.125.2/examples/js/controls/OrbitControls.js"></script>
I also referred to this answer on finding the rotation between two vectors, but I haven't been successful in implementing it to achieve the desired results: How to find rotation matrix between two vectors
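For reference, my understanding of that answer, translated into three.js, is roughly the following. The two direction vectors here are just placeholders; the part I think matters is that setFromUnitVectors expects both inputs to be normalized:

// Quaternion that rotates directionA onto directionB.
// Both vectors must be unit length before calling setFromUnitVectors.
const directionA = new THREE.Vector3(1, 0, 0).normalize();
const directionB = new THREE.Vector3(0, 1, 0).normalize();
const q = new THREE.Quaternion().setFromUnitVectors(directionA, directionB);

// Sanity check: applying q to directionA should give (roughly) directionB.
const rotated = directionA.clone().applyQuaternion(q);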
I can get the Vector3s from the pose estimation tech easily, and I understand how most of what is in the jsfiddle works, but I can't seem to put it all together to get the desired result of having my 3D model 'mirror' the movement of the person in my video using the pose estimation coords. At the moment I can pretty much only get the model to 'thrash around'.
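In case it matters, getPosition boils down to something like this (the exact property names and landmark indexing depend on the pose estimation library, so treat the details as placeholders):

// storedresults[i] holds the latest { x, y, z } for pose landmark i
function getPosition(index) {
  const p = storedresults[index];
  return new THREE.Vector3(p.x, p.y, p.z);
}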
As I understand it, I need to manipulate the rotations of the bones to achieve the desired result, and to do that I need to compute those rotations from two vectors, but after much research and trial and error I still can't put it all together.
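For what it's worth, this is the kind of per-frame update I think I should be aiming for, based on everything above. The landmark indices (12 for the right shoulder, 14 for the right elbow) and the bone's rest direction are assumptions on my part, and I suspect the parent bone's world rotation needs to be accounted for too, which is where I get lost:

function setRightArmRotationFromPose() {
  if (!rightArm || !storedresults || storedresults.length === 0) return;

  // World-space direction from the right shoulder (12) to the right elbow (14).
  const shoulder = getPosition(12);
  const elbow = getPosition(14);
  const targetDir = elbow.sub(shoulder).normalize();

  // Direction the upper-arm bone points in its rest pose (a guess for this rig).
  const restDir = new THREE.Vector3(0, 1, 0);

  // Set the bone's rotation absolutely each frame instead of accumulating it,
  // which I assume is part of what was making the model thrash around.
  rightArm.quaternion.setFromUnitVectors(restDir, targetDir);
  // (Presumably targetDir also needs to be transformed into the parent bone's local space here.)
}

Any help pointing out where I'm going wrong would be appreciated.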