Jason Zhu & Miranda Luong – Final Project

****Note: Project does not properly run on Safari. Run on Chrome.****
If you’d like, we’ve uploaded a zip file containing all of the assets for our project. Open this program like any other p5.js sound sketch using a local server. To do so, follow the instructions at https://courses.ideate.cmu.edu/15-104/f2018/lab-week-11/ under Task B: Triggering sound file playback.

sketch

/*
Issho, an audio visualizer by Miranda Luong and Jason Zhu.

Our final project is an audio visualizer named Issho, derived from the Sino-Japanese
word for "impression". To say that Issho is just an audio visualizer would be an
understatement. Our high-level goal was to give music its own unique visual identity,
similar to some of the work of Neil Harbisson, who has created color portraits of
various songs. Our secondary goal was to create this unique visual identity in real time.
*/

// Global audio variables.
var PREFIX = "https://courses.ideate.cmu.edu/15-104/f2018/wp-content/uploads/2018/12/moi-je-bouge.mp3";
var song;
var amplitude;
var fft;
var peakDetect;

// Global line variables.
var nLines = 88;
var linesArray = [];
var nforces = 4;
var nParticles = 88;
var forcesArray = [];
var fillScreen = true;
var strokeW = 1;

// Preload song.
function preload(){
  song = loadSound(PREFIX);
}

function setup() {
  createCanvas(500, 500);
  frameRate(60);

  // Audio setup.
  amplitude = new p5.Amplitude();
  amplitude.setInput(song);
  fft = new p5.FFT();
  fft.setInput(song);
  peakDetect = new p5.PeakDetect(20, 20000, 0.06, 0.00694444444);
  song.play();

  // Setup line and force particles.
  initialize();
}

function draw() {
  if (song.isPlaying()){

    // Start display with noFill(). Toggle between black and white backgrounds
    // by pressing the spacebar, which flips fillScreen's boolean value.
    noFill();
    if (fillScreen){
      background(0);
    } else {
      background(255);
    }

    // Update audio analyzer.
    fft.analyze();
    peakDetect.update(fft);

    for (var i = 0; i < nforces; i++) {
      forcesArray[i].move();
    }

    // Standard radius.
    var radius = 75 * cos(frameCount / 80);

    // If a beat is detected, enlarge the radius based on the amplitude level.
    if (peakDetect.isDetected) {
      radius = map(amplitude.getLevel(), 0.06, 0.3, 10, 150) * cos(frameCount / 80);
    }

    // Setup a range of two colors for the gradient coloring of lines 
    // and have the gradient change as the song and animation progress.
    var highRed = map(song.currentTime() * 3.5, 0, song.duration() * 3, 255, 0);
    var highGreen = map(song.currentTime() * 3.5, 0, song.duration() * 3, 0, 255);

    // Setup rate of gradient change between colors depending on amplitude 
    // of the song at that current time.
    var low = 30;
    var high = map(amplitude.getLevel(), 0, .125, 0, 255);
    
    for (var i = 0; i < linesArray.length; i++) {
      // Create interaction for every line with every force particle.
      linesArray[i].interact(radius, forcesArray[0].position.x, forcesArray[0].position.y);
      linesArray[i].interact(radius, forcesArray[1].position.x, forcesArray[1].position.y);
      linesArray[i].interact(-radius, forcesArray[2].position.x, forcesArray[2].position.y);
      linesArray[i].interact(-radius, forcesArray[3].position.x, forcesArray[3].position.y);
    
      // Color lines using a gradient.
      var col = lerp(low, high, i / linesArray.length);
      stroke(highRed, highGreen, col);

      // Reset the stroke weight once it exceeds 5. It is increased by
      // pressing 's' (see keyPressed()).
      if (strokeW >= 5) {
        strokeW = 1;
      }
      strokeWeight(strokeW);

      linesArray[i].draw();
    }
  }
}

function initialize() {
  // Create and store Lines into linesArray.
  for (var i = 0; i < nLines; i++) {
    linesArray[i] = new Line(42 + 4.8 * i);
    linesArray[i].addParticles();
  }
  // Create and store force particles in forcesArray.
  for (var i = 0; i < nforces; i++) {
    if (i === 0) {
      forcesArray[i] = new Particle(30 + 1 * 470 / 3, 42 + 1 * 423 / 3);
    }
    if (i === 1) {
      forcesArray[i] = new Particle(30 + 2 * 470 / 3, 42 + 2 * 423 / 3);
    }
    if (i === 2) {
      forcesArray[i] = new Particle(30 + 1 * 470 / 3, 42 + 2 * 423 / 3);
    }
    if (i === 3) {
      forcesArray[i] = new Particle(30 + 2 * 470 / 3, 42 + 1 * 423 / 3);
    }

    // Start force particles with random velocities.
    var angle = random(0, TWO_PI);
    forcesArray[i].velocity.set(cos(angle), sin(angle));
  }
}

// Click to play and pause animation and song.
function mousePressed() {
  if (song.isPlaying()){
    song.pause();
  } else {
    song.play();
  }
}

function keyPressed() {
  // Toggle between black or white backgrounds by pressing 'spacebar'.
  if (key === ' ') {
    fillScreen = !fillScreen;
  }
  // Press 's' to increase the stroke weight (it resets to 1 in draw() once it reaches 5).
  if (key === 's') {
    strokeW += 1;
  }
}

// Line class.
var Line = function(y){
  this.y = y;
  this.particlesArray = [];
}

// Add particles to lines particlesArray.
Line.prototype.addParticles = function(){
  for (var i = 0; i < nParticles; i++){
    this.particlesArray.push(new Particle(30 + 5 * i, this.y));
  }
}

// Connect all particles in line's particleArray to draw line.
Line.prototype.draw = function(){    
  beginShape();
    for (var i = 0; i < this.particlesArray.length; i++) {
      curveVertex(this.particlesArray[i].position.x, this.particlesArray[i].position.y);
    }
  endShape();
}


// Interact line with force particles by having all of 
// line's particles individually interact with force particles.
Line.prototype.interact = function(radius, xpos, ypos) { 
  for (var i = 0; i < this.particlesArray.length; i++) {
    this.particlesArray[i].interact(radius, xpos, ypos);
  }

  // Change size of line when necessary to make for smooth texture.
  for (var i = 0; i < this.particlesArray.length-1; i++) {
    var d = dist(this.particlesArray[i].position.x, this.particlesArray[i].position.y, 
                 this.particlesArray[i+1].position.x, this.particlesArray[i + 1].position.y);
    
    // Add a new Particle to particleArray when two neighbor particles are too far apart.
    if (d > 5) {
      var x = ((this.particlesArray[i].position.x + this.particlesArray[i + 1].position.x) / 2);
      var y = ((this.particlesArray[i].position.y + this.particlesArray[i + 1].position.y) / 2);
      this.particlesArray.splice(i + 1, 0, new Particle(x, y));
    }

    // Remove a particle when 2 neighbor particles are too close.
    if (d < 1) {
      this.particlesArray.splice(i, 1);
    }
  }     
}

// Particle class.
var Particle = function(x, y){
  this.position = createVector(x, y);
  this.velocity = createVector(0, 0);
  this.acceleration = createVector(0, 0);
}

// Updates force particles' positions.
Particle.prototype.move = function(){
  // Change direction of force particles sometimes.
  if (random(1) > .97){
    var angle = random(-PI, PI);
    this.acceleration.set(cos(angle), sin(angle));
    var mod = this.acceleration.angleBetween(this.velocity);
    mod = map(mod, 0, PI, 0.1, 0.001);
    this.acceleration.mult(mod); 
  }

  // Change pace of force particle's position change
  this.velocity.add(this.acceleration);

  // Stop the force particle when the current amplitude reaches 0.675 or
  // higher, which increases the impact of its interaction with the lines.
  if (amplitude.getLevel() > 0.675) {
    this.velocity.set(0, 0);
  }

  // Move force particle
  this.position.add(this.velocity);

  // Check edges.
  this.position.x = (this.position.x + width)%width;
  this.position.y = (this.position.y + height)%height;
}

// Force particle to line particle interaction.
Particle.prototype.interact = function(radius, xpos, ypos) {
  // Separate the direction (sign) of the force from its magnitude.
  var dir = radius / abs(radius);
  radius = abs(radius);

  var r = dist(this.position.x, this.position.y, xpos, ypos);
  var angle = atan2(this.position.y - ypos, this.position.x - xpos);

  // If the line particle is within the radius of the force particle,
  // change its velocity to push it radially.
  if (r <= radius) {
    var strength;
    // If the current amplitude is greater than .05, generate wider,
    // radial movement from the particles to highlight the song's beats.
    if (amplitude.getLevel() > .05) {
      strength = 2 * dir * (radius - r) / radius;
    } else {
      strength = .3 * dir * (radius - r) / radius;
    }
    this.velocity.set(strength * cos(angle), strength * sin(angle));
  } else {
    this.velocity.set(0, 0);
  }
  this.position.add(this.velocity);
}


Preface
Our final project is an audio visualizer we have named Issho, derived from the Sino-Japanese word for “impression”. Our original proposal was to have ripples radiate from a central node and affect geometry in order to create complex forms. While this still holds true to some extent, feedback on our project proposal, along with further research, particularly into the work of the Japanese design studio teamLab, led us to reconsider.

To say that Issho is just an audio visualizer would be an understatement. Our high-level goal was to give music its own unique visual identity, similar to some of the work of Neil Harbisson, who has created color portraits of various songs. Our secondary goal was to create this unique visual identity in real time.

Project Description
We accomplished our high-level and secondary goals by breaking the audio down into its duration, peaks, and amplitude. We used these variables to affect various aspects of our base canvas. The base canvas starts with straight lines that run across the page. These lines are colored with a gradient that changes according to the ratio of the song’s current time to its overall duration. The rate at which the gradient changes from its bottom color to its top is governed by the amplitude of the song at that given time, so impactful beats register visibly on these lines. In addition to this visualization, we used the music to imprint the terrain that our base lines create. From the start, hidden force particles lightly push and interact with the lines, creating minimal impressions on our canvas, but when a beat is detected, the force particles’ effects are magnified. This effect is scaled by the amplitude of that beat and has the potential to create large valleys, dips, and ridges in our canvas terrain.
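To make that mapping concrete, here is a minimal, slightly simplified sketch of the color logic described above (our own restatement with a hypothetical helper name, not code from the project; it assumes the same p5.SoundFile and p5.Amplitude objects used in the full code at the top of this post):

// Sketch of the gradient mapping: song progress drives red/green,
// loudness drives how far the blue channel sweeps across the lines.
// Assumes `song` is a loaded p5.SoundFile and `amplitude` a p5.Amplitude.
function gradientColor(song, amplitude, lineIndex, lineCount) {
  var progress = song.currentTime() / song.duration();
  var r = lerp(255, 0, progress); // red fades out over the song
  var g = lerp(0, 255, progress); // green fades in over the song
  var top = map(amplitude.getLevel(), 0, 0.125, 0, 255);
  var b = lerp(30, top, lineIndex / lineCount); // bottom-to-top gradient
  return color(r, g, b);
}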

Division of Work
As per the guidelines, and to make the project more feasible, we divided the work into what we felt was an equitable 50-50 split. Miranda researched particle systems and from there coded the basis for our audio visualizer. She set up the Line and Particle classes, defined the individual force particles, and created their respective arrays. The update and interaction functions were also built on her math. Jason focused on integrating music into the code and adding interactive elements such as toggling backgrounds and changing stroke weights. Drawing on his research into the sound library, he considered how best to visualize the beats in our song through line movement and negative space.

JasonZhu-Proposal-12

Rudimentary video animation of our project.


Collaborator: Miranda Luong

My partner, Miranda Luong, and I plan on doing an audiovisual performance visualization for our final project. Interested in the dynamic capabilities of static design, we will oscillate simple geometric shapes to visualize the sound waves in music.

We were heavily inspired by Swiss Design, a period in design when strong geometry was used to create interplay between form and content. To break down our proposal: our source audio will be represented by a single circle placed in the midst of triangles and quadrilaterals. This circle will change in scale in accordance with the intensity of the beats, and the outside shapes will change as well. We took the ripples created by water droplets as an inspiration, and we want our visual display to suggest a connection between the source audio and its environment. We will explore the communicative capabilities of flat imagery and test our own abilities to visually communicate music.
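As a rough sketch of the beat-reactive circle idea (our own illustration, not final project code; the file name is a placeholder), the core can be as simple as mapping the current amplitude to a diameter:

// Minimal beat-reactive circle, assuming p5.js with the p5.sound library.
var song; // placeholder sound file
var amp;

function preload() {
  song = loadSound("beat.mp3"); // hypothetical path
}

function setup() {
  createCanvas(400, 400);
  amp = new p5.Amplitude();
  song.play();
}

function draw() {
  background(255);
  noFill();
  // Louder moments swell the circle; quiet ones shrink it.
  var d = map(amp.getLevel(), 0, 0.3, 20, 300);
  ellipse(width / 2, height / 2, d, d);
}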

JasonZhu-LookingOutwards-12

AMATA K.K.’s logo design

AMATA K.K. is a Japanese game development company based in Tokyo. What I want to focus on is the company's logo: a square composition constructed out of slightly varied quadrilaterals with a few triangles thrown into the mix, which the company uses as an opportunity to communicate its strong abilities in animation. Although the logo is merely a flat composition of simple shapes, the proximity of these shapes and their slight differences convey a sense of movement and depth. In relation to our project proposal, this is exactly what we hope to communicate through our own imagery. The difference is that we would actually animate these shapes to generate a series of images that visualize more than just one wave of movement, but multiple, like that of music.

GIFs by Sami Emory.
LEVEL by Paris-based design studio IF

LEVEL is an audiovisual experiment by Paris-based design studio IF for the Paris nightclub Machine du Moulin Rouge’s fifth-anniversary celebration. It utilizes light to “represent the dialogue between space and its limitations.” Set to the ambient tracks of Trespur, LEVEL projects a fast-paced series of dizzying distortions on a transparent material superimposed onto a semi-reflective surface. Each viewer in the room sees something different as the lights shift and change from any given perspective. I found this to be an inspiration for my own final project because of the very nature of the piece, but especially because of its projection of ripples. In addition, after seeing all these other kinds of projections, perhaps I will try other kinds of movement besides radial.

JasonZhu_Project-11-Composition

sketch

/* Jason Zhu
Section E
jlzhu@andrew.cmu.edu
Project 11
*/

var bigturtle;

function setup() {
    createCanvas(450, 450);
    // set color variables for background
    var cx = constrain(mouseX, 0, 480);
    var cy = constrain(mouseY, 0, 480);
    var red = cx * .3;
    var green = cy * .3;
    var blue = 100;
    background(red * .3 - 15, green * .3 - 15, blue * .3 - 15);
    // set stroke settings
    strokeJoin(MITER);
    strokeCap(PROJECT);
    // create turtle and adjust settings
    bigturtle = makeTurtle(width / 2, height / 2);
    bigturtle.setColor(255);
    bigturtle.setWeight(2);
    bigturtle.penDown();
    frameRate(999);
}

function draw() {
    // Create a fresh turtle each frame, starting just off-canvas.
    var turtle = makeTurtle(-25, -25);
    // Set color variables for the turtle based on the mouse position.
    var cx = constrain(mouseX, 0, 450);
    var cy = constrain(mouseY, 0, 450);
    var red = cx * .58;
    var green = cy * .58;
    var blue = 108;
    turtle.setColor(color(red, green, blue));
    turtle.setWeight(mouseY / 20);
    turtle.penDown();
    turtle.forward(mouseY); // move depending on the y position of the mouse
    turtle.right(90);       // turn turtle right
    turtle.forward(mouseX); // move depending on the x position of the mouse
    turtle.left(90);        // turn turtle left
    turtle.back(mouseX);    // move back depending on the x position of the mouse
    turtle.right(90);       // turn turtle right
    turtle.back(mouseX);    // move back depending on the x position of the mouse
    turtle.left(90);        // turn turtle left
    turtle.back(mouseX);    // move back depending on the x position of the mouse
}

// Turtle Code
function turtleLeft(d){this.angle-=d;}function turtleRight(d){this.angle+=d;}
function turtleForward(p){var rad=radians(this.angle);var newx=this.x+cos(rad)*p;
var newy=this.y+sin(rad)*p;this.goto(newx,newy);}function turtleBack(p){
this.forward(-p);}function turtlePenDown(){this.penIsDown=true;}
function turtlePenUp(){this.penIsDown = false;}function turtleGoTo(x,y){
if(this.penIsDown){stroke(this.color);strokeWeight(this.weight);
line(this.x,this.y,x,y);}this.x = x;this.y = y;}function turtleDistTo(x,y){
return sqrt(sq(this.x-x)+sq(this.y-y));}function turtleAngleTo(x,y){
var absAngle=degrees(atan2(y-this.y,x-this.x));
var angle=((absAngle-this.angle)+360)%360.0;return angle;}
function turtleTurnToward(x,y,d){var angle = this.angleTo(x,y);if(angle< 180){
this.angle+=d;}else{this.angle-=d;}}function turtleSetColor(c){this.color=c;}
function turtleSetWeight(w){this.weight=w;}function turtleFace(angle){
this.angle = angle;}function makeTurtle(tx,ty){var turtle={x:tx,y:ty,
angle:0.0,penIsDown:true,color:color(128),weight:1,left:turtleLeft,
right:turtleRight,forward:turtleForward, back:turtleBack,penDown:turtlePenDown,
penUp:turtlePenUp,goto:turtleGoTo, angleto:turtleAngleTo,
turnToward:turtleTurnToward,distanceTo:turtleDistTo, angleTo:turtleAngleTo,
setColor:turtleSetColor, setWeight:turtleSetWeight,face:turtleFace};
return turtle;}

For this project I wanted to try to create hallway compositions with p5.js. I devised the following system to replicate hallways under a complementary color scheme whenever the user draws a line diagonally. Overall, I found the project stimulating and particularly informative about the inner workings of the turtle functions.

Example of a hallway rendered by the code when a line is drawn primarily diagonally towards the left.
Example of a hallway rendered by the code when a line is drawn primarily diagonally towards the right.
Freeform random drawing.

JasonZhu_LookingOutwards11

This week, I chose to write about Ryoji Ikeda, a Japanese computational musician whose work uses computers to create music that conveys emotion differently and expresses more complex concepts. In particular, I want to talk about one of his projects, Superposition, a 2012 collaboration between Ryoji Ikeda, Stephane Garin, and Amélie Grould.

Superposition is a project that aims to help people understand nature on an atomic scale. It was inspired by the mathematics behind quantum mechanics, and it makes use of quantum information. While classical bits take a single binary value (0 or 1), quantum information is carried by qubits (quantum binary digits), in which 0 and 1 are superposed at the same time. This is incredibly difficult conceptually, but it is much more reflective of nature. Using sound as the medium and quantum information as the inspiration, Ikeda also draws significantly on computation in developing the work. It is almost entirely data- and algorithm-driven, and it makes a powerful commentary on the nature of computationally inspired and created music. This is perhaps what I most admire about the piece and the composer.
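For context (this is standard quantum notation, not something drawn from the exhibit itself), a qubit's state is written as a weighted superposition of both classical values at once:

$$|\psi\rangle = \alpha\,|0\rangle + \beta\,|1\rangle, \qquad |\alpha|^2 + |\beta|^2 = 1,$$

where a measurement yields 0 with probability $|\alpha|^2$ and 1 with probability $|\beta|^2$.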

http://www.ryojiikeda.com/project/superposition/

A photo of the piece being performed.

JasonZhu-LookingOutwards-10


Video of Social Soul


An image of the room in which Social Soul was held.

I chose Lauren McCarthy, who created p5.js. However, the project I want to discuss today is Social Soul, an immersive digital experience that puts users inside someone else’s social media stream. It was created in 2014 by Lauren McCarthy, Kyle McDonald, and MKG for Delta Air Lines’ TED2014 summit. I admire it because it poses a really valid question that often goes unanswered, something we rarely think about even though it pervades every aspect of our realities. The piece uses a custom algorithm that matches conference-goers with other attendees’ social streams. The project uses seven coding languages and incorporates generative audio and music as well. Following the viewing session, the viewer is sent a tweet encouraging them to connect off-screen with their soul mate.

Jason Zhu-10-Landscape

I cannot post the updated file from the email I sent over, so I have instead uploaded the archive for this project. Archive

For this project, I created worms that change color and inch along the landscape, with hills in the background. The worms were inspired by nematode references in cartoons.

A nematode from the hit children's cartoon, SpongeBob!

JasonZhu-Project-09-Portrait

sketch

/* Jason Zhu
Section E
jlzhu@andrew.cmu.edu
Project-10
*/

var terrainSpeed = 0.0003;
var terrainDetail = 0.0008;
var flags = [];

function setup() {
    createCanvas(480, 300);
    frameRate(50);
    for (var i = 0; i < 10; i++){
        var rx = random(width);
        flags[i] = makeflag(rx);
    }
}

function draw() {
    background(246, 201, 116);
    push();
    beginShape();
    noStroke();
    fill(104, 176, 247);
    vertex(0, height);
    for (var x = 0; x < width; x++) {
        var t = (x * terrainDetail) + (millis() * terrainSpeed);
        var y = map(noise(t), 0, 1, 0, height);
        vertex(x, y - 50);
    }
    vertex(width, height);
    endShape();
    pop();
    displayHorizon();
    updateAndDisplayflags();
    removeflagsThatHaveSlippedOutOfView();
    addNewflagsWithSomeRandomProbability(); 
}
function updateAndDisplayflags(){
    // Update the flag's positions, and display them.
    for (var i = 0; i < flags.length; i++){
        flags[i].move();
        flags[i].display();
    }
}

function removeflagsThatHaveSlippedOutOfView(){
    var flagsToKeep = [];
    for (var i = 0; i < flags.length; i++){
        if (flags[i].x + flags[i].breadth > 0) {
            flagsToKeep.push(flags[i]);
        }
    }
    flags = flagsToKeep; // remember the surviving flags
}

function addNewflagsWithSomeRandomProbability() {
    // With a very tiny probability, add a new flag to the end.
    var newflagLikelihood = 0.007; 
    if (random(0,1) < newflagLikelihood) {
        flags.push(makeflag(width));
    }
}

// method to update position of flag every frame
function flagMove() {
    this.x += this.speed;
}
    

// draw the flag
function flagDisplay() {
    var floorHeight = 10;
    var bHeight = this.nFloors * floorHeight;
    noStroke();
    // pole
    push();
    translate(this.x, height - 30);
    fill(30, 37, 35);
    rect(0, -bHeight * 1.03, this.breadth, bHeight);
    // flag 
    fill(12, 36, 112);
    triangle(5, -bHeight * 1.03, 40, 20-bHeight, 5, 30 - bHeight);
    pop();
}

function makeflag(birthLocationX) {
    var flag = {x: birthLocationX,
                breadth: 6,
                speed: -.75,
                nFloors: round(random(1, 10)),
                move: flagMove,
                display: flagDisplay};
    return flag;
}


function displayHorizon(){
    noStroke();
    fill(55, 222, 153);
    // Draw a 30px-tall ground strip along the bottom edge.
    rect(0, height - 30, width, 30);
}

For this project, I wanted to look at recreating a scene from an old film. I created flags underwater to capture the look and feel I was going for. It was hard to get exactly what I wanted to happen, so I had to simplify quite a bit. This project was definitely a struggle for me compared to past projects.

JasonZhu-LookingOutwards-09

This week, I reviewed Rachel Lee’s Week 3 assignment on Mushtari. A collaborative work by the MIT Media Lab and Stratasys, Mushtari is a wearable that mimics biological growth patterns. I thought the project was both insightful and intriguing, and I agree with much of Rachel’s commentary. I think she hit the nail on the head when she said that Living Mushtari allows “existing biological structures to take place naturally and respects its processes.” Adding to that thought, I think the piece brings attention not only to the natural elements that permeate our daily lives, but also to the elements that go unseen. In taking such a unique approach at a micro scale, the piece really distinguishes itself from similar projects. The one part of Rachel’s commentary that I took issue with is her discussion of how the project harnesses biological processes and adapts them for greater human use in a non-destructive way. While the same thoughts are reiterated in the official MIT posting, I do not see eye to eye on the non-destructive aspect of the project.


Video of the project by the MIT Media Lab


Closeup of some of the elements in the wearable from Living Mushtari

JasonZhu-LookingOutwards-08

Alexander Chen is an artist at Google who is best known for his personal work combining music and visualization techniques. At Google, he led projects like the Chrome Music Lab and A.I. machine-learning experiments. I think the best way to understand Alexander Chen is to delve into some of his work. One of his visualizations is for a Beach Boys song, in which he uses circles and colors to communicate complex auditory ideas. His code works by drawing a circle for each note of the song, establishing a relationship between the circumference of a circular surface and pitch. While this project was fascinating, I was most intrigued by his visualizations of the MTA. I thought his combination of sound with movement patterns was well done; visualizing MTA patterns and hearing the intersections gave me a lot of insight.
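To illustrate the circumference-pitch relationship (this is our own toy sketch, not Chen's code, and all names and constants here are assumptions): if a circle is treated like a vibrating string, doubling the length halves the frequency, so lower notes get larger circles.

// Toy p5.js sketch: circumference is made inversely proportional to
// frequency, like the length of a vibrating string.
function noteCircle(x, y, freq) {
  var refFreq = 220;  // arbitrary reference pitch (A3)
  var refCirc = 300;  // circumference drawn for the reference pitch, in px
  var circumference = refCirc * (refFreq / freq);
  ellipse(x, y, circumference / PI, circumference / PI);
}

// Usage: an octave up (440 Hz) draws a circle half the size of 220 Hz.
// noteCircle(120, 200, 220);
// noteCircle(280, 200, 440);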


A screenshot of the MTA visualization conducted by Alexander Chen.

http://www.mta.me