Shannon Ha – Final Project

sketch

//Shannon Ha
//sha2@andrew.cmu.edu
//Section D
// Final Project
var soundLogos = [];
var amplitude; //initialize amplitude
var level; //initialize amp level
var rotatePath = 0;
var h;
var dY; // dot y position
var dX; // dot x position

function preload(){ //load sounds
    soundLogos[1] = loadSound('https://courses.ideate.cmu.edu/15-104/f2019/wp-content/uploads/2019/12/Apple-1-2.wav');
    soundLogos[2] = loadSound('https://courses.ideate.cmu.edu/15-104/f2019/wp-content/uploads/2019/12/Apple-2-text-2.wav');
    soundLogos[4] = loadSound('https://courses.ideate.cmu.edu/15-104/f2019/wp-content/uploads/2019/12/Intel-p2.wav');
    soundLogos[5] = loadSound('https://courses.ideate.cmu.edu/15-104/f2019/wp-content/uploads/2019/12/Microsoft-2-1.wav');
    soundLogos[6] = loadSound('https://courses.ideate.cmu.edu/15-104/f2019/wp-content/uploads/2019/12/Microsoft-3-1.wav');
    soundLogos[7] = loadSound('https://courses.ideate.cmu.edu/15-104/f2019/wp-content/uploads/2019/12/Netflix-1.wav');
    soundLogos[8] = loadSound('https://courses.ideate.cmu.edu/15-104/f2019/wp-content/uploads/2019/12/Nokia-1.wav');
    soundLogos[9] = loadSound('https://courses.ideate.cmu.edu/15-104/f2019/wp-content/uploads/2019/12/Skype-1.wav');
}

function setup() {
  createCanvas(300, 300);
  amplitude = new p5.Amplitude(); // p5.sound object that measures output amplitude
  dX = width / 2;  // start the dots at the center of the canvas...
  dY = height / 2; // ...so drawThreeDots() has defined coordinates before key '2' is pressed
}

function draw() {
  background(0);
  push();
  translate(width / 2, height / 2); // centers the epicycloid
  drawEpicy();
  pop();
  drawThreeDots();
  drawIntelEllipse();
  drawMicroGrid();
}

function keyPressed(){ // assigns the number keys to respective sound files and animations.
    if (key == '1'){
        soundLogos[1].play();
        drawEpicy();

    } else if (key == '2'){
        soundLogos[2].play();
        dY = height / 2; // makes starting position of dot center of canvas
        dX = width / 2; // makes starting position of dot center of canvas
        drawThreeDots();

    } else if (key == '3'){
        soundLogos[4].play();
        drawIntelEllipse();

    } else if (key == '4'){
        soundLogos[5].play();
        drawMicroGrid();

    } else if (key == '5'){
        soundLogos[6].play();

    } else if (key == '6'){
        soundLogos[7].play();

    } else if (key == '7'){
        soundLogos[8].play();

    } else if (key == '8'){
        soundLogos[9].play();
    }
}

//animation for apple
function drawEpicy() {
    var a = 60; // variable that controls the size and curvature of shape
    var b = a / 2; // variable that controls the size and curvature of shape
    level = amplitude.getLevel(); //takes the value of measured amp level.
    h = map(level, 0, 3, 20, 900); // maps amplitude to appropriate value
    var ph = h / 2; // links mapped amp value as a variable that controls shape.
    fill(h * 5, h * 10, 200); //fills color according to measured amp.
    noStroke();
    beginShape();
    for (var i = 0; i < 100; i++) { // for loop that places the vertices of the epicycloid
        var t = map(i, 0, 100, 0, TWO_PI); // radian value
        var x = (a + b) * cos(t) - h * cos(ph + t * (a + b) / b); // declared locally to avoid implicit globals
        var y = (a + b) * sin(t) - h * sin(ph + t * (a + b) / b);
        vertex(x, y); // curve line
    }
    endShape();
}

//animation for text
function drawThreeDots(){ //draws the three dots that fall
    fill(255);
    noStroke();
    ellipse(dX - 50, dY, 25, 25);
    ellipse(dX, dY * 1.2, 25, 25);
    ellipse(dX + 50, dY * 1.5, 25, 25);
    dY += 10; // vertical falling
}

function drawIntelEllipse(){
    level = amplitude.getLevel();
    let h = map(level, 1, 8, 5, 50);
    for (var i = 0; i < 15; i++){
        var diam = rotatePath - 30 * i; // creates the spin-out effect of the ellipses

        if (diam > 0){ // only draw an ellipse once it has spun out
            noFill();
            stroke(0, 113, 197); // Intel blue
            strokeWeight(2);
            ellipseMode(CENTER);
            push();
            translate(width / 8 - 90, height / 4 - 10); // position of starting ellipse
            rotate(cos(2.0)); // rotation angle
            ellipse(200, 200, diam / h, 40 / h); // the size of the ellipse is affected by amp
            pop(); // balance the push() so transforms don't accumulate across frames
        }
    }
    rotatePath = rotatePath + 2; // controls the speed of the spin-out effect
}

function drawMicroGrid(){ // supposed to imitate the Microsoft grid
    level = amplitude.getLevel();
    let h = map(level, 1, 8, 20, 200);
    noStroke();
    rectMode(CENTER);
    for (var y = 50; y < height + 50; y += 100) { // nested for loop for tiling
       for (var x = 50; x < width + 50; x += 100) {
           fill(x, y + 100, h * 5); // color is affected by amp
           rect(x , y, h * 3, h * 3); // size is affected by amp
        }
    }
}

How does it work:

Press all the number keys from 1 to 8 and watch the patterns move and change to familiar sound logos!

(For some reason there are problems when Safari is used to view my project, so please use Chrome or Firefox!)
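The Safari problem is most likely the browser's audio autoplay policy: audio output stays locked until the page receives a user gesture. A minimal workaround sketch, assuming the version of p5.sound in use provides the userStartAudio() helper:

function mousePressed() {
    // Safari (and newer Chrome) block audio until a user gesture;
    // userStartAudio() resumes the p5.sound audio context on that gesture
    userStartAudio();
}

After that first click, the keyPressed() handlers above can start sounds normally.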

In my proposal, I wanted to do some sort of sound visualization, either in the form of a game or something similar to Patatap. Sticking to my proposal, I used some iconic sounds that we recognize through our day-to-day usage of technology. I realized how easy it is to overlook how these sounds inform our use of technology, so I ended up creating a sonic art piece that generates patterns according to the amplitude measured from the sound files.

My final project does not quite achieve the level of fidelity I originally intended my sonic art to have: the visuals are a bit too noisy, and I wanted a simple yet sophisticated look. I spent quite a bit of time figuring out how to use the sound libraries in p5.js and how to map the amplitude to each of the various patterns, so debugging the whole code was difficult.
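The core technique behind all of the animations above is the same: sample the current output amplitude once per frame and map it onto a visual parameter. A minimal standalone sketch of that idea, assuming p5.sound is loaded and using a hypothetical file name 'tone.wav':

var snd;
var amp;

function preload() {
    snd = loadSound('tone.wav'); // hypothetical sound file
}

function setup() {
    createCanvas(300, 300);
    amp = new p5.Amplitude(); // follows whatever is currently playing
    snd.loop();
}

function draw() {
    background(0);
    var level = amp.getLevel();          // roughly 0..1
    var d = map(level, 0, 0.3, 10, 250); // louder sound, bigger circle
    fill(255);
    ellipse(width / 2, height / 2, d, d);
}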

Shannon Ha – Looking Outwards – 12

Dot Piano By Alex Chen and Yotam Mann

Dot Piano is a visual musical instrument that displays dots of different colors and sizes when keys on a computer keyboard are pressed. Each note is assigned a color and moves in a certain direction according to how it was programmed. I really like how all the tones sound together, and it is visually beautiful too. For my final project, I wish to do something similar, but with sounds that recur often in our daily lives (from the tech world), where the visual display is associated, through color and shape, with the brand that owns the sound.

https://dotpiano.com/

Sosumi (Apple Startup Chime) By Jim Reekes

The Apple startup sound is one of the most iconic sounds for any Mac user, as it is the first thing that greets you, along with the logo, when you start up your computer. The history of the sound is a bit complex, as it went through several versions, and the name is also a pun referencing the lawsuit between Apple Computer and Apple Corps at the time ("Sosumi" reads as "so sue me"). Jim Reekes, a sound designer at Apple, created it after deciding that the Macintosh II sound was terrible (it used the tritone, historically considered the worst-sounding interval in Christian church music, an opinion Reekes shared), so he tweaked it to create Sosumi. I want to use Sosumi for my game, but I am not sure if I am allowed to since it is trademarked; still, reading about the history that has shaped this iconic sound inspired me to create a project that subtly embodies the age of technology.

Shannon Ha – Project 12 – Proposal

For my final project, I want to do something with sound, more specifically, iconic sounds that we are accustomed to hearing through our daily usage of technology (think the Apple startup sound, the Intel sound, the Netflix sound, the Wii sound, etc.). I want to create some sort of sound remix game that lets players press keys at random to play the iconic sounds while an interactive graphic related to the company that owns the sound is displayed (kind of like Patatap). Another idea is a sound matching game where players match clips of iconic sounds to the brand/company each is associated with, by clicking keys linked to each brand/company. I don't actually know if I am allowed to use these sounds (whether there are copyright issues or not), so my final project will very much depend on which of these sounds I can find freely available sources for.

Shannon Ha – Looking Outwards 11

Video taken from Nova’s website: https://www.novajiang.com/projects/ideogenetic-machine/

Nova Jiang: Ideogenetic Machine

Nova Jiang is a Chinese installation artist who grew up in New Zealand and is currently based in Los Angeles. She holds an MFA in media art from UCLA and has exhibited her work in numerous museums and biennales around the world. Her work encourages tactile and creative engagement between visitors and her artwork. One project that I found particularly interesting was her Ideogenetic Machine, a machine that incorporates portraits of participants captured with a camera and a database of drawings made by Jiang about current news and events. Visitors are prompted to respond to the storyline by adding their own performance and expressions in order to personalize the comic strip. The layout and configuration of images from which new narratives continuously form never repeats. The software also adds blank speech bubbles into the composition which participants can fill with their own dialogue.

What I really admire about this piece, in particular, is that it gives the audience something to take away from the whole experience. Nova allows her art to be used as an ‘open source’ so that everyone can experience it to their own liking and add their own narratives to an incomplete story. This interactive experience not only highlights her own artistic style but also promotes collaboration between people, suggesting that the audience themselves are also artists that hold creative agency in this process of creating.

Shannon Ha – Project 11 – Generative Landscape

sketch

//Shannon Ha
//Section D
//sha2@andrew.cmu.edu
//Project 11 Generative Landscape

var terrainSpeed = 0.0001;
var terrainDetail = 0.006;
var martians = [];
var stars = 300; //number of star points
var sX = []; //x position array of stars
var sY = []; //y position array of stars
var starSize = []; //star size array

function setup() {
    createCanvas(480, 300);

    for (var i = 0; i < 4; i++){ //create initial collection of martians displayed on screen.
        var rx = random(width);
        martians[i] = makeMartian(rx);
    }
    
    for (var i = 0; i < stars; i++) { //randomly generates star size and position.
    sX.push(random(0, width)); // x position of star
    sY.push(random(0, height / 3)); // y position of star
    starSize.push(random(0.1, 1)); // star size
    }
    frameRate(15);
}

function draw() { // calls all the objects
    background(43, 14, 7);
    drawStars();
    drawMountain();
    drawMountainB();

    //calls martian objects
    updateAndDisplayMartians();
    removeMartians();
    addMartians();

    // draw distant planet A
    fill(130, 67, 52);
    noStroke();
    ellipse(400, 20, 30, 30);
    // draw distant planet B
    fill(176, 91, 70);
    ellipse(350, 15, 10, 10);
}

function drawStars() {
    noStroke();
    for (var i = 0; i < stars; i++) { // draws the stars
        stroke(random(100, 255)); // randomize grayscale for stroke to give the twinkle effect
        strokeWeight(starSize[i]);
        point(sX[i], sY[i]); // point() only takes a position; the size comes from strokeWeight above
    }
}

function drawMountain(){ //background terrain
    push();
    fill(79, 16, 0);
    noStroke();
    beginShape();
    vertex(0, height);
    for (var x = 0; x < width; x++) {
        var t = (x * terrainDetail) + (millis() * terrainSpeed);// adjusts flatness of terrain
        var y = map(noise(t), 0,1, height/2.5, height * 0.2);
        vertex(x, y);
    }
    vertex(width, height);
    endShape();
    pop();
}

function drawMountainB(){ //terrain in the front
    push();
    fill(138, 31, 4);
    noStroke();
    beginShape();
    vertex(0, height);
    for (var x = 0; x < width; x++) {
        var t = (x * terrainDetail) + (millis() * terrainSpeed * 2);
        var y = map(noise(t), 0,1, height , height * 0.1);
        vertex(x, y);
    }
    vertex(width, height);
    endShape();
    pop();
}

function updateAndDisplayMartians(){
    // Update the martians' positions, and display them
    for (var i = 0; i < martians.length; i++){
        martians[i].move();
        martians[i].display();
    }
}

function removeMartians(){ // removes all martians that go off the canvas.
    var martiansToKeep = [];
    for (var i = 0; i < martians.length; i++){
        if (martians[i].x + martians[i].breadth > 0) {
            martiansToKeep.push(martians[i]);
        }
    }
    martians = martiansToKeep; // remembers the remaining martians on canvas.
}

function addMartians(){ // adds new martians onto canvas
    var newMartiansLikelihood = 0.017;
    if (random(0, 1) < newMartiansLikelihood) {
        martians.push(makeMartian(0));
    }
}

function martianMove() { // allows the martians to glide across screen
    this.x += this.speed;
}

function displayMartian() { //draws the martian.
    fill(12, 63, 117);
    noStroke();
    push();
    translate(this.x, height - 60);
    // body
    ellipse(20, 30, this.breadth, this.height);
    // white part of eye
    fill(255);
    ellipse(20, 20, this.breadth / 2, this.breadth / 2);
    //blue part of eye
    fill(105, 160, 219);
    ellipse(20, 20, 10, 10);
    //antennas
    stroke(105, 160, 219);
    strokeWeight(4);
    line(10, 10, 5, 5);
    line(30, 10, 35, 5);
    //ends of antenna
    fill(random(255), random(100), random(200));
    noStroke();
    ellipse(5, 5, 10, 10);
    ellipse(35, 5, 10, 10);

    pop();
}

function makeMartian(birthLocationX){ // martian characteristics
    var alien = {x: birthLocationX,
                breadth: 30,
                height: 50,
                speed: random(3, 7),
                move: martianMove,
                display: displayMartian};
    return alien;
}

Rough sketch.
Screenshot of the music video I took inspiration from.

For this project, I wanted to create a fictional landscape of Mars, so I took a bit of inspiration from Britney Spears' iconic "Oops!… I Did It Again" music video (I wanted to add an astronaut, but I didn't know how to remove the background of a picture I found online) and added my own twist by putting in blue aliens that greet you with their antennas as you glance over the Martian landscape. I had a lot of fun making this project, as it helped me understand the use of objects and their properties better!
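The pattern behind makeMartian() is worth distilling: a factory function returns an object literal whose properties hold data plus references to shared methods. The same pattern written with inline anonymous methods (an equivalent style, not the course template used above):

function makeThing(birthX) {
    return {
        x: birthX,
        speed: random(1, 3),
        move: function() { this.x += this.speed; },           // advance each frame
        display: function() { ellipse(this.x, 50, 20, 20); }  // draw at the current position
    };
}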

Shannon Ha – Project 10 – Sonic Sketch

sketch

//Shannon Ha
//sha2@andrew.cmu.edu
// Section D
//Project 10 Sonic-Sketch

var underlyingImage;

function preload() { // call loadImage() and loadSound() for all media files here
    scissorSnd = loadSound("https://courses.ideate.cmu.edu/15-104/f2019/scissors-2/");
    combingSnd = loadSound("https://courses.ideate.cmu.edu/15-104/f2019/combing2-2/");
    hairdryerSnd = loadSound("https://courses.ideate.cmu.edu/15-104/f2019/hairdrying-2/");
    spraySnd = loadSound(" https://courses.ideate.cmu.edu/15-104/f2019/spray-2/");
    tadaSnd = loadSound("https://courses.ideate.cmu.edu/15-104/f2019/mirror-2/");
    spraySndB = loadSound("https://courses.ideate.cmu.edu/15-104/f2019/spray2-2/");
    combingSndB = loadSound("https://courses.ideate.cmu.edu/15-104/f2019/combing-hair-2/");


    var saloonImg = "https://i.imgur.com/PqazkIh.jpg";
    underlyingImage = loadImage(saloonImg);

}


function setup() {
    createCanvas(500, 500);
    useSound();
}

function soundSetup() { // setup for audio generation
    scissorSnd.setVolume(2);
    combingSnd.setVolume(5);
    hairdryerSnd.setVolume(1);
    spraySnd.setVolume(1);
    tadaSnd.setVolume(1);
}

function draw() {
    background(0);
    image(underlyingImage, 0, 0); // stock photo of the saloon

}

function mousePressed() {
    //makes the hair-combing sound play when the mouse is inside the bounding box of the comb image.
    if (mouseX > 80 && mouseX < 160 && mouseY > 255 && mouseY < 360){
        combingSndB.play();
    } else {
        combingSndB.pause(); //pauses the sound when the mouse is clicked outside the bounding box.
    }
    //makes the spray sound play when the mouse is inside the bounding box of the spray image.
    if (mouseX > 10 && mouseX < 160 && mouseY > 130 && mouseY < 240){
        spraySndB.play();
    } else {
        spraySndB.pause();
    }
    //makes the hairdryer sound play when the mouse is inside the bounding box of the hairdryer image.
    if (mouseX > 70 && mouseX < 190 && mouseY > 30 && mouseY < 130){
        hairdryerSnd.play();
    } else {
        hairdryerSnd.pause();
    }
    //makes the combing sound play when the mouse is inside the bounding box of the combing image.
    if (mouseX > 210 && mouseX < 320 && mouseY > 20 && mouseY < 110){
        combingSnd.play();
    } else {
        combingSnd.pause();
    }
    //makes the second spray sound play when the mouse is inside the bounding box of its image.
    if (mouseX > 330 && mouseX < 460 && mouseY > 20 && mouseY < 150){
        spraySnd.play();
    } else {
        spraySnd.pause();
    }
    //scissor sound
    if (mouseX > 330 && mouseX < 470 && mouseY > 150 && mouseY < 200){
        scissorSnd.play();
    } else {
        scissorSnd.pause();
    }
    //"tada" sound for the mirror reveal
    if (mouseX > 330 && mouseX < 450 && mouseY > 205 && mouseY < 325){
        tadaSnd.play();
    } else {
        tadaSnd.pause();
    }
}
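The seven if/else blocks above repeat the same rectangle hit-test. A more data-driven sketch of the same idea, using a hypothetical regions array that pairs each loaded sound with its bounding box (a restructuring for illustration, not the code used above):

// hypothetical: built inside setup(), after preload() has assigned the sounds
var regions = [];

function setup() {
    createCanvas(500, 500);
    useSound();
    regions = [
        {snd: combingSndB,  x1: 80, x2: 160, y1: 255, y2: 360},
        {snd: spraySndB,    x1: 10, x2: 160, y1: 130, y2: 240},
        {snd: hairdryerSnd, x1: 70, x2: 190, y1: 30,  y2: 130}
        // ...the remaining four sounds follow the same shape
    ];
}

function mousePressed() {
    for (var i = 0; i < regions.length; i++) {
        var r = regions[i];
        if (mouseX > r.x1 && mouseX < r.x2 && mouseY > r.y1 && mouseY < r.y2) {
            r.snd.play();  // clicked inside this tool's picture
        } else {
            r.snd.pause(); // clicking anywhere else silences it
        }
    }
}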

This week I decided to base my project on the sounds you would hear at a hair salon. I found a copyright-free image that visualizes the steps one goes through at a hair salon, and I think adding the audio was a good way to show the procedure of getting your hair cut. I don't know why my sketch won't load on WordPress, but it works on my localhost. Attached below is the image that I used for this project.

*1 grace day used for this project.

Shannon Ha – Looking Outwards – 10

Photo taken from https://itp.nyu.edu/classes/cc-s18/gesture-controlled-musical-gloves/

The mi.mu gloves can be defined as a wearable musical instrument for expressive creation, composition, and performance. These gloves were the creation of music composer and songwriter Imogen Heap, who wrote the musical score for Harry Potter and the Cursed Child. Her aim in creating these gloves is to push innovation and share resources. She believes that these gloves will allow musicians and performers to better engage fans with more dynamic and visual performances, simplify the hardware that controls music (laptops, keyboards, controllers) and connect movement with sound.

Flex sensors embedded in the gloves measure the bend of the fingers, while an IMU measures the orientation of the wrist. All the information from these sensors is communicated over Wi-Fi, and a vibration motor provides haptic feedback. The gloves come with software that allows the user to map movements to different sounds; movements can be coordinated with MIDI and OSC.

I believe this piece of technology really pushes the boundaries of computational music, as it allows musicians to have complete agency over electronically generated sounds through curated body movement, without having to control sounds through a stationary piece of hardware. Performers, in particular, could benefit heavily from these gloves, as their artistry moves beyond music and into how body language is incorporated with sound. As a designer, I personally admire how she was able to use advanced technology to create these novel experiences, not only for the performer but also for the audience. There are instances where the use of technology can hinder artistry, especially when it is overused, but I think these gloves allow musicians to connect more with the music and with how it is presented to the audience.

Shannon Ha – Looking Outwards – 09

Landing page of the online visualization of this project showing aggregated statistics of Twitter activity.

Taken from https://nand.io/projects/emoto

For this week's post, I will be looking at Jina Lee's Week 07 Looking Outwards post on data visualization. The Emoto project for the London 2012 Olympics assessed the global audience's real-time emotional response to the different events throughout the games and visualized it physically through a generative sculpture. I agree with Jina's point about how innovative and well executed this project was, as it was able to synthesize everyone's opinions within a small amount of space. This allows people to unify and gain new perspectives on the general response to this global event. One thing that Jina did not touch upon was how this information can help improve future games, as the Olympic Committee can use this data to improve parts of the event that weren't well received by the general public. I think that's a very effective way to utilize this data beyond an observation tool for visitors of the exhibition.

As a designer, I also admire the simplicity of the control's design. The singular knob/button really invites the user to interact with it, as there isn't much complication beyond that one button. This is important, as the intent behind this project is to make this information as accessible and inclusive to all people as possible.

Shannon Ha – Project 09 – Portrait

sketch

//Shannon Ha
//Section D
//sha2@andrew.cmu.edu
//Project 09 - Portrait

//pre load my underlying image
var underlyingImage;
var frameSpeed = 15;
function preload() {
    var myImageURL = "https://i.imgur.com/AAXi1mS.jpg";
    underlyingImage = loadImage(myImageURL);
}

function setup() {
    createCanvas(344, 480);
    background(0);
//load the pixels of the image so they can be referenced later
    underlyingImage.loadPixels();
    frameRate(frameSpeed); // start at the default; updated each frame in draw()
}

function draw() {
//map the mouse position to a frame rate so moving the mouse changes how fast pixels accumulate
    frameSpeed = map(mouseX, 10, width, 2, 150);
    frameRate(frameSpeed);
//randomly select a pixel on the canvas
    var randomX = random(width); //x-coordinate
    var randomY = random(height); //y-coordinate
    var ix = constrain(floor(randomX), 0, width-1);
    var iy = constrain(floor(randomY), 0, height-1);
//loads the color from the base image so the rectangles coordinate with the colors of the base image
    var theColorAtLocationXY = underlyingImage.get(ix, iy);

//creates rectangles at different sizes.
    noFill();
    stroke(theColorAtLocationXY);
    strokeWeight(random(1,3));
    rect(randomX, randomY, random(5,20), random(5,20));

//creates circles according to position of mouse.
    var theColorAtTheMouse = underlyingImage.get(mouseX, mouseY);
    strokeWeight(1);
    stroke(theColorAtTheMouse);
    ellipse(pmouseX, pmouseY, 15, 15);
}

(20 seconds)
(30 seconds)
Nearly finished render (1 – 1:30 minutes)
The actual photo!

For this project, I chose a portrait of my sister to recreate, and explored how to randomize the dimensions of a rectangle to create a variable portrait. I tested different shapes, both filled and unfilled, to see which had a better effect, and I realized that filled shapes sometimes make the face look too blurred out, so I used stroke instead to retain some texture in the image and to distinguish facial features better. It was a fun project because it was interesting to see how different shapes produce different textures and effects on the image.
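One caveat about underlyingImage.get(x, y): it is convenient but slow when called many times per frame. A sketch of a faster lookup that reads the same color out of the pixels array directly, assuming loadPixels() has already been called on the image (as it is in setup() above) and the image is at the default pixel density of 1:

// equivalent to img.get(x, y) for a density-1 image:
// each pixel occupies four consecutive entries (R, G, B, A)
function colorAt(img, x, y) {
    var i = 4 * (y * img.width + x);
    return color(img.pixels[i], img.pixels[i + 1],
                 img.pixels[i + 2], img.pixels[i + 3]);
}

Here only a couple of pixels are sampled per frame, so get() is fine; the direct lookup starts to matter once you sample hundreds of pixels per frame.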

Shannon Ha – Looking Outwards 08

Mimi Son is a Korean interaction designer who studied at the Copenhagen Institute of Interaction Design and the Lansdown Centre at Middlesex University in London. She currently teaches interactive storytelling at Kaywon School of Art and Design and directs her own studio, Kimchi and Chips, alongside Elliot Woods. Their studio focuses on novel interactions involving people and media materials, discovering new technical and artistic paradigms. Coming from an artistic background, she enjoys observing her surroundings and uses those observations as inspiration for creating something interactive and funny.

Link (2010)
Photo taken from kimchiandchips.com
Line Segment Space (2013)
Photo taken from kimchiandchips.com

Her current work studies emotional and tangible interaction for future life and the effects of technology from a creative approach. In particular, she creates speculative visual objects that have unpredictable reactions when touched by technology. She mentions in her talk that she is quite experimental in the way she works and that there is not that much 'meaning' behind her artistic choices; I can definitely relate, in that my own creative process is also more random and less technical. I think it's interesting to compare the balance between storytelling and randomness across her body of work. In Link (2010), the installation is much more about user interaction, as it invites users to record their stories into a cityscape of cardboard boxes (see image), exploring the agency that individuals have over technology. In contrast, Line Segment Space (2013) is far more abstract and undefined, focusing on the viewer's emotional connection with a space filled with dynamic forms created by light and lines.

I used a grace day for this project.