
What does Google Play Music use to create the "particles" visualiser on their website? What 3D graphics software was used? My original guess was Unity exported via WebGL, or perhaps Three.js or UE4?

I don't understand how they get the audio web player to stream the audio while the 3D visualiser reacts to the audio frequencies.

I want to re-create the same thing but I'm not sure where to start. I lack the knowledge of how it is done and couldn't find any answers on the web.

Most importantly, are there different methods of doing what Google has done? What are the main differences?

Link to visualiser demonstration: https://www.youtube.com/watch?v=mjfKCSPFdGI

Thanks.

Hedi
  • I guess your question may be closed due to being too broad. As a side note, you could have added a link to what you're mentioning. Answering your question, I'd say they didn't use Unity or UE4 to create it but rather JS (not keen on this, sorry...). – Kardux Jan 03 '17 at 09:14

1 Answer


It would be nice if you'd post a gif or something to show what you're referring to.

Making something audio reactive is pretty simple though. Here's an open source site with lots of audio reactive examples.

As for how to do it, you basically use the Web Audio API to stream the music and use its AnalyserNode to get audio data out.

"use strict";

const ctx = document.querySelector("canvas").getContext("2d");
ctx.fillText("click to start", 100, 75);
ctx.canvas.addEventListener('click', start);

function start() {
  ctx.canvas.removeEventListener('click', start);

  // make a Web Audio Context
  const context = new AudioContext();
  const analyser = context.createAnalyser();

  // Make a buffer to receive the audio data
  const numPoints = analyser.frequencyBinCount;
  const audioDataArray = new Uint8Array(numPoints);

  function render() {
    ctx.clearRect(0, 0, ctx.canvas.width, ctx.canvas.height);

    // get the current audio data
    analyser.getByteFrequencyData(audioDataArray);

    const width = ctx.canvas.width;
    const height = ctx.canvas.height;
    const size = 5;

    // draw a point every size pixels
    for (let x = 0; x < width; x += size) {
      // compute the audio data for this point
      const ndx = x * numPoints / width | 0;
      // get the audio data and make it go from 0 to 1
      const audioValue = audioDataArray[ndx] / 255;
      // draw a rect size by size big
      const y = audioValue * height;
      ctx.fillRect(x, y, size, size);
    }
    requestAnimationFrame(render);
  }
  requestAnimationFrame(render);

  // Make an audio element
  var audio = new Audio();
  audio.loop = true;
  audio.autoplay = true;

  // this line is only needed if the music you are trying to play is on a
  // different server than the page trying to play it.
  // It asks the server for permission to use the music. If the server says "no"
  // then you will not be able to play the music
  audio.crossOrigin = "anonymous";

  // call `handleCanplay` when the music can be played
  audio.addEventListener('canplay', handleCanplay);
  audio.src = "https://twgljs.org/examples/sounds/DOCTOR%20VOX%20-%20Level%20Up.mp3";
  audio.load();


  function handleCanplay() {
    // connect the audio element to the analyser node and the analyser node
    // to the main Web Audio context
    const source = context.createMediaElementSource(audio);
    source.connect(analyser);
    analyser.connect(context.destination);
  }
}
canvas { border: 1px solid black; display: block; }
<canvas></canvas>

Then it's just up to you to do something creative. For example, instead of drawing a bunch of black dots across the screen like the first example, we could scale randomly colored circles and adjust their color and velocity, something like this:

"use strict";

// make a Web Audio context and an analyser, as in the first example
var context = new AudioContext();
var analyser = context.createAnalyser();

// Make a buffer to receive the audio data
var numPoints = analyser.frequencyBinCount;
var audioDataArray = new Uint8Array(numPoints);

// one visible canvas and one offscreen canvas to swap between each frame
var ctx = document.querySelector("canvas").getContext("2d");
var ctx2 = document.createElement("canvas").getContext("2d");

// make some randomly placed, randomly colored spots
var numSpots = 5;
var spots = [];
for (var ii = 0; ii < numSpots; ++ii) {
  spots.push({
    x: Math.random(), 
    y: Math.random(), 
    velocity: 0.01,
    direction: Math.random(),
    hue: Math.random() * 360 | 0,
  });
}

// random number between min and max (or between 0 and min if max is omitted)
function rnd(min, max) {
  if (max === undefined) {
    max = min;
    min = 0;
  }
  return Math.random() * (max - min) + min;
}

function render() {
  ctx.clearRect(0, 0, ctx.canvas.width, ctx.canvas.height);

  // draw the previous frame back in, slightly faded, scaled, and rotated,
  // which is what produces the swirling trails
  ctx.save();
  ctx.globalAlpha = .97;
  ctx.globalCompositeOperation = "source-out";
  ctx.translate(ctx.canvas.width / 2, ctx.canvas.height / 2);
  ctx.scale(1.001, 1.001);
  ctx.rotate(0.003);
  ctx.translate(-ctx.canvas.width / 2, -ctx.canvas.height / 2);
  ctx.drawImage(ctx2.canvas, 0, 0, ctx.canvas.width, ctx.canvas.height);
  ctx.restore();

  // get the current audio data
  analyser.getByteFrequencyData(audioDataArray);
  
  const width = ctx.canvas.width;
  const height = ctx.canvas.height;

  spots.forEach((spot, n) => {
    // pick a frequency bin for this spot and normalize it to 0 to 1
    const ndx = n * numPoints / numSpots | 0;
    const audioValue = audioDataArray[ndx] / 255;
    // louder = more saturated
    const sat = Math.pow(audioValue, 2) * 100;

    // wander in a slowly changing direction at an audio-driven speed,
    // wrapping around the edges of the canvas
    spot.velocity = audioValue * 0.02;
    spot.direction = (spot.direction + 1 + rnd(-.01, 0.01)) % 1;
    const angle = spot.direction * Math.PI * 2;
    spot.x = (spot.x + Math.cos(angle) * spot.velocity + 1) % 1;
    spot.y = (spot.y + Math.sin(angle) * spot.velocity + 1) % 1;
    
    ctx.fillStyle = "hsl(" + spot.hue + "," + sat + "%,50%)";
    ctx.beginPath();
    ctx.arc(spot.x * width, spot.y * height, 50 * audioValue, 0, Math.PI * 2, false);
    ctx.fill();
  });
  
  // swap the visible context and the offscreen context so the next frame
  // can feed this frame back into itself
  var temp = ctx;
  ctx = ctx2;
  ctx2 = temp;
  
  requestAnimationFrame(render);
}
requestAnimationFrame(render);


// Make an audio element
var audio = new Audio();
audio.loop = true;
audio.autoplay = true;

// this line is only needed if the music you are trying to play is on a
// different server than the page trying to play it.
// It asks the server for permission to use the music. If the server says "no"
// then you will not be able to play the music
audio.crossOrigin = "anonymous";

// call `handleCanplay` when the music can be played
audio.addEventListener('canplay', handleCanplay);
audio.src = "https://twgljs.org/examples/sounds/DOCTOR%20VOX%20-%20Level%20Up.mp3";
audio.load();


function handleCanplay() {
  // connect the audio element to the analyser node and the analyser node
  // to the main Web Audio context, same as the first example
  const source = context.createMediaElementSource(audio);
  source.connect(analyser);
  analyser.connect(context.destination);
}
canvas { border: 1px solid black; display: block; }
<canvas></canvas>

music: DOCTOR VOX - Level Up

gman
  • didn't realise I had to be more specific than actually mentioning Google Play Music's feature. Google Play has a visualiser. – Hedi Jan 03 '17 at 17:07
  • So for that particular effect AFAICT you basically take the code above, use the sound to move 2 dots in semi random directions adjusting their velocity and size by the sound (the code above already adjusts their size). Then you'd run it in a smoke simulation in WebGL. [Here's one](https://gamedevelopment.tutsplus.com/tutorials/how-to-write-a-smoke-shader--cms-25587), [here's another](http://stackoverflow.com/questions/27646383/glsl-shader-for-texture-smoke-effect) or maybe use a glsl 3d particle processor – gman Jan 04 '17 at 08:38
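Following up on that last comment, here is a minimal sketch of the emitter half of that idea. It reuses the `analyser` and `audioDataArray` set up in the answer above; the smoke simulation the emitters would drive is left to the linked tutorials, and the names `emitters` and `updateEmitters` are just placeholders for illustration, not anything from Google's actual implementation.

"use strict";

// two emitters that wander semi-randomly, driven by the music
const emitters = [
  { x: 0.3, y: 0.5, direction: Math.random(), size: 0 },
  { x: 0.7, y: 0.5, direction: Math.random(), size: 0 },
];

function updateEmitters() {
  // get the current audio data (analyser/audioDataArray from the answer above)
  analyser.getByteFrequencyData(audioDataArray);
  emitters.forEach((e, i) => {
    // sample a different part of the spectrum for each emitter
    const ndx = i * audioDataArray.length / emitters.length | 0;
    const loudness = audioDataArray[ndx] / 255;  // 0 to 1
    // wander in a slowly changing direction, moving faster when the music
    // is loud, wrapping around the edges
    e.direction = (e.direction + (Math.random() - 0.5) * 0.02 + 1) % 1;
    const angle = e.direction * Math.PI * 2;
    e.x = (e.x + Math.cos(angle) * loudness * 0.02 + 1) % 1;
    e.y = (e.y + Math.sin(angle) * loudness * 0.02 + 1) % 1;
    // pass this to the smoke/particle shader as the emitter radius
    e.size = loudness;
  });
}

Each frame you'd call updateEmitters() and feed the emitter positions and sizes into whichever WebGL smoke or particle simulation you pick from the links above.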