Looking Outwards 10

Since my LO-4 assignment focused on a form of sound art, I chose the option to investigate a piece of music you could hear in a concert hall, which in this case actually took place in one. I was drawn to this project because it wasn’t just auditory; it was a beautiful visual experience as well. The Stanford Laptop Orchestra (SLOrk) used musical lanterns that communicate with ChucK over WiFi, translating the movements of the lanterns into sound and light.

The performance was called “Aura” because it used multiple performers, each with a different light color and sound, harmonizing with one another much the way people do in society. I thought it was a very clever and interesting way to create music through movement.
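
The piece itself runs on ChucK and WiFi-networked lanterns, so the following is only a rough, hypothetical p5.js (p5.sound) analogue of the idea, not how SLOrk actually implements it: mouse movement stands in for a lantern’s motion, and its speed is mapped to both pitch and the lantern’s color.

//Hypothetical sketch: mouse movement stands in for a lantern's motion;
//its speed drives both the pitch of a tone and the color of the "lantern".
var osc;

function setup() {
    createCanvas(300, 300);
    osc = new p5.Oscillator();
    osc.setType('sine');
    osc.amp(0.2);
    osc.start();
}

function draw() {
    background(30);
    var speed = dist(mouseX, mouseY, pmouseX, pmouseY);    //how far the "lantern" moved this frame
    var c = constrain(map(speed, 0, 40, 0, 255), 0, 255);
    osc.freq(constrain(map(speed, 0, 40, 200, 800), 200, 800), 0.1);    //faster motion -> higher pitch
    noStroke();
    fill(c, 150, 255 - c);    //...and a different lantern color
    ellipse(mouseX, mouseY, 60, 60);    //the "lantern"
}

function mousePressed() {
    userStartAudio();    //browsers require a user gesture before audio can start
}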

LO-10 Computer Sound

Star Wars Blaster Sound Effect

Ben Burtt, who did sound design for the original Star Wars movies, also made the classic blaster sound effect. A combination of digital and analog techniques, it has been remade and remixed countless times over the years as the franchise is adapted and updated.

As demonstrated in the video, the sound was originally recorded by striking steel cable under tension (or, in this case, a Slinky) with a wrench. The classic PEW PEW was then saved, isolated from background noise, layered with other sounds, and edited into the movie. In the same way that Star Wars has defined what a good sci-fantasy movie SHOULD be, its iconic blaster sound has cemented in our cultural consciousness what a laser gun SHOULD sound like. That iconic sound would not be possible without a combination of analog and digital processes.
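
Purely as an illustration of the digital half of that process (and definitely not Burtt’s actual technique), a downward pitch sweep shaped by a fast amplitude envelope gets you a crude “pew” in p5.js, assuming the p5.sound library is loaded:

//Hypothetical sketch: a crude all-digital "pew" from a downward pitch sweep
//and a fast amplitude envelope. Click the canvas to fire.
var osc;    //sawtooth oscillator standing in for the raw metallic twang
var env;    //fast attack/decay envelope shapes each shot

function setup() {
    createCanvas(200, 200);
    osc = new p5.Oscillator();
    osc.setType('sawtooth');
    osc.amp(0);    //silent until the envelope opens it up
    osc.start();
    env = new p5.Envelope();
    env.setADSR(0.001, 0.15, 0.0, 0.1);    //sharp attack, quick decay
    env.setRange(0.5, 0);
}

function mousePressed() {
    userStartAudio();    //audio needs a user gesture in the browser
    osc.freq(1800);      //start high...
    osc.freq(200, 0.2);  //...and sweep down over 0.2 seconds
    env.play(osc);       //trigger the amplitude envelope on the oscillator
}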

-Robert

Project-10 Sound Story

This is a story about a duck, a duckling, a cloud, and a lightning bolt.

sketch
//Robert Rice
//rdrice
//Section C


// sketch.js template for sound and DOM
//
// This is the 15104 Version 1 template for sound and DOM.
// This template prompts the user to click on the web page
// when it is first loaded.
// The function useSound() must be called in setup() if you
// use sound functions.
// The function soundSetup() is called when it is safe
// to call sound functions, so put sound initialization there.
// (But loadSound() should still be called in preload().)

var mama = {filename:'https://i.imgur.com/z44s88k.png', //https://images.dailykos.com/images/214263/story_image/Duck-37.png?1456291242
            x:0,
            y:0,
            playFunc: playOsc,
            stopFunc: stopOsc,
            drawFunc: drawImg}
var duckling = {filename:'https://i.imgur.com/X5iYcio.png', //https://purepng.com/public/uploads/large/91508076238ploll99zx4ifi35p6b1qrontiecfaivclrqbiz0gfg0rru6qtj7qmlw2qmvrthjbk3sj2wgiwa12pz4n00nufufllybyth2akpcx.png
            x:0,
            y:0,
            playFunc: playOsc,
            stopFunc: stopOsc,
            drawFunc: drawImg}
var cloud = {filename:'https://i.imgur.com/igVfind.png', //https://clipground.com/images/clipart-cloud-png-10.png
            x:-50,
            y:100,
            playFunc: playOsc,
            stopFunc: stopOsc,
            drawFunc: drawImg}
var lightning = {filename:'https://i.imgur.com/9RODxMu.png', //https://asr4u.files.wordpress.com/2013/06/lightning-bolt-hi1.png
            x:150,
            y:150,
            playFunc: playOsc,
            stopFunc: stopOsc,
            drawFunc: drawImg}
var tScale = 1; //used later for scaling stuff down. 1 == 100%


function preload() {
    // call loadImage() and loadSound() for all media files here

    mama.image = loadImage(mama.filename);
    duckling.image = loadImage(duckling.filename);
    cloud.image = loadImage(cloud.filename);
    lightning.image = loadImage(lightning.filename);
    //loadSound();
}


function setup() {
    // you can change the next 2 lines:
    createCanvas(300, 300);
    createDiv("p5.dom.js library is loaded.");
    frameRate(30);
    imageMode(CENTER);
    //======== call the following to use sound =========
    useSound();
}


function soundSetup() { // setup for audio generation
    // you can replace any of this with your own audio code:
    mama.osc = new p5.Oscillator();
    mama.trem = new p5.Oscillator();    //mama duck's voice
    mama.trem.freq(10);
    mama.osc.setType('sawtooth');
    mama.osc.freq(midiToFreq(60));
    mama.osc.amp(mama.trem);

    duckling.osc = new p5.Oscillator();
    duckling.trem = new p5.Oscillator();    //baby duck's voice
    duckling.trem.freq(30);
    duckling.osc.setType('sawtooth');
    duckling.osc.freq(midiToFreq(70));
    duckling.osc.amp(duckling.trem);

    cloud.osc = new p5.Oscillator();
    cloud.trem = new p5.Oscillator();   //makes cloud go brrrrrrr
    cloud.trem.freq(10);
    cloud.osc.setType('sawtooth');
    cloud.osc.freq(midiToFreq(31));
    cloud.osc.amp(cloud.trem);

    lightning.osc = new p5.Oscillator();    //lightning sound
    lightning.trem = new p5.Oscillator();  //makes it go pew pew
    lightning.trem.freq(10000);
    lightning.osc.setType('square');
    lightning.osc.amp(lightning.trem);
    lightning.osc.freq(midiToFreq(90));
}


function draw() {
    // you can replace any of this with your own code:
    background(200);    
    
    if (frameCount >= 0 && frameCount <= 150) {     //act I the status quo
        mama.x = 50;
        mama.y = 250;
        mama.drawFunc(100, 100);

        duckling.x = 100;
        duckling.y = 275;
        duckling.drawFunc(40, 50);

        if (frameCount == 30) {mama.playFunc();
            mama.drawFunc(200, 200);}
        if (frameCount == 50) {mama.stopFunc();}

        if (frameCount == 60) {duckling.playFunc();
            duckling.drawFunc(80, 100);}
        if (frameCount == 70) {duckling.stopFunc();}

        if (frameCount == 90) {mama.playFunc();
            mama.drawFunc(200, 200);}
        if (frameCount == 150) {mama.stopFunc();}

        if (frameCount == 120) {duckling.playFunc();}
        if (frameCount > 120 && frameCount < 150) {duckling.drawFunc(200, 200);}
        if (frameCount == 150) {duckling.stopFunc();}
    }

    if (frameCount >= 150 && frameCount <= 300) {   //act II a cloud arrives
        mama.drawFunc(100, 100);
        duckling.drawFunc(40, 50);

        var cDX = 2;  //the speed at which the cloud will move across the screen


        cloud.drawFunc(100, 50);

        cloud.x += cDX;
        if (cloud.x > 150) {cloud.x = 150;} //will move across the screen, before settling in the middle

        if (frameCount == 250) {cloud.playFunc();}
        if (frameCount > 250 && frameCount < 300) {cloud.drawFunc(300, 150);}
        if (frameCount == 300) {cloud.stopFunc();}
    }

    if (frameCount >= 300 && frameCount <= 450) {   //act III the cloud brings forth lightning
        mama.drawFunc(100, 100);
        duckling.drawFunc(40, 50);
        cloud.drawFunc(300, 150);

        if (frameCount == 325) {lightning.playFunc(); lightning.drawFunc(100, 100);}
        if (frameCount == 330) {lightning.stopFunc(); lightning.drawFunc(50, 50);}

        if (frameCount == 355) {lightning.playFunc(); lightning.drawFunc(100, 100);}
        if (frameCount == 360) {lightning.stopFunc(); lightning.drawFunc(50, 50);}

        if (frameCount == 385) {lightning.playFunc(); lightning.drawFunc(100, 100);}
        if (frameCount == 390) {lightning.stopFunc(); lightning.drawFunc(50, 50);}

        if (frameCount == 415) {lightning.playFunc(); lightning.drawFunc(100, 100);}
        if (frameCount == 420) {lightning.stopFunc(); lightning.drawFunc(50, 50);}
    }

    if (frameCount >= 450 && frameCount <= 600) {   //act IV mama duck defends her child
        mama.drawFunc(100, 100);
        duckling.drawFunc(40, 50);
        cloud.drawFunc(200, 100);

        if (frameCount == 510) {mama.playFunc();}
        if (frameCount > 510 && frameCount < 600) {
            mama.drawFunc(300, 300);
            mama.x += random(-10, 10);
            mama.y += random(-10, 10);
        }
        if (frameCount == 600) {mama.stopFunc(); mama.x = 50; mama.y = 250;}
    }

    if (frameCount >= 600 && frameCount <= 750) {   //act V the attackers are rejected
        if (frameCount == 600) {
            lightning.x = 250;
        }

        mama.drawFunc(100, 100);
        duckling.drawFunc(40, 50);

        push();
        scale(tScale, tScale);
        cloud.drawFunc(200, 100);
        lightning.drawFunc(75, 75);
        pop();

        tScale = tScale * 0.95;
    }

    if (frameCount >= 750 && frameCount <= 900) {   //act VI return to status quo
        mama.x = 50;
        mama.y = 250;
        mama.drawFunc(100, 100);

        duckling.x = 100;
        duckling.y = 275;
        duckling.drawFunc(40, 50);

        if (frameCount == 780) {mama.playFunc();
            mama.drawFunc(200, 200);}
        if (frameCount == 800) {mama.stopFunc();}

        if (frameCount == 810) {duckling.playFunc();
            duckling.drawFunc(80, 100);}
        if (frameCount == 820) {duckling.stopFunc();}

        if (frameCount == 840) {mama.playFunc();
            mama.drawFunc(200, 200);}
        if (frameCount == 900) {mama.stopFunc();}

        if (frameCount == 870) {duckling.playFunc();}
        if (frameCount > 870 && frameCount < 900) {duckling.drawFunc(200, 200);}
        if (frameCount == 900) {duckling.stopFunc();}
    }
}

function playOsc() {
    this.trem.start();
    this.osc.start();//plays the sound
}

function stopOsc() {
    this.osc.stop();
    this.trem.stop();//stops the sound
}

function drawImg(w, h) {    //draws the picture at the specified scale
    image(this.image, this.x, this.y, w, h);
}

LO 10-Computational Music

The computational music project I looked at was begun by the American bandleader, engineer, and inventor Raymond Scott and reimagined by the Japanese designer Yuri Suzuki. The machine produces an instantaneous performance-composition using Google Magenta’s AI software, whose neural networks, trained on the Bach chorales, learn harmonic relationships between sounds and generate new musical material. The machine itself displays the sequence and rhythm as it plays, adding a layer of visualization to the performance. I find it fascinating to see music composed and presented in this way, with all of its components lighting up on screen to curate the performance.
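
The project’s actual generation relies on Magenta’s neural networks; as a much simpler stand-in for the same idea (learn transitions from existing music, then generate new sequences from them), here is a tiny first-order Markov chain in p5.js with a made-up seed melody rather than real chorale data:

//Deliberately simple stand-in for the project's idea: learn note-to-note
//transitions from an existing melody, then generate new material from them.
//(The real piece uses neural networks; this is only a Markov chain.)
var seed = [60, 62, 64, 65, 64, 62, 60, 67, 65, 64, 62, 60];  //made-up MIDI melody
var transitions = {};   //pitch -> list of pitches that followed it in the seed
var current = 60;
var osc;

function setup() {
    createCanvas(300, 100);
    frameRate(4);    //four generated notes per second
    //build the transition table from the seed melody
    for (var i = 0; i < seed.length - 1; i++) {
        if (transitions[seed[i]] === undefined) {
            transitions[seed[i]] = [];
        }
        transitions[seed[i]].push(seed[i + 1]);
    }
    osc = new p5.Oscillator();
    osc.setType('sine');
    osc.amp(0.3);
    osc.start();
}

function draw() {
    background(250);
    var options = transitions[current];
    if (options === undefined) {
        current = seed[0];              //dead end: restart from the first note
    } else {
        current = random(options);      //pick a learned continuation at random
    }
    osc.freq(midiToFreq(current));
    text("playing MIDI note " + current, 10, 50);
}

function mousePressed() {
    userStartAudio();    //enable audio after a user gesture
}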

LO 10 – Computer Music

For this week’s Looking Outwards, I decided to take a look at the work of British singer-songwriter and audio engineer Imogen Heap, specifically her “MI.MU gloves” (2014) and the various ways she uses this innovative computational instrument to compose and perform her music. The MI.MU gloves are a wearable instrument that uses mapping technology to translate hand movements into musical compositions; specific hand movements trigger changes in pitch, filters, and scales. The technical elements of the gloves include flex sensors, orientation sensors, a WiFi device, and software that uses MIDI and OSC to coordinate movements and sounds.

I find this project fascinating and admire Heap’s work because of how revolutionary it is in bridging the gap between the analog and the digital by creating a more natural relationship between the artist and the computer. The gloves completely transform musical performance and experience, allowing artists to incorporate sound and movement seamlessly. They are also somewhat accessible, as they are available for purchase online, and many musical artists have used the gloves in their music and performances.

MI.MU gloves
how MI.MU gloves were developed
Imogen Heap performance; at 9:05, she describes and gives a demo with the gloves
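
The gloves themselves are proprietary hardware, but the core mapping idea, a continuous sensor value steering pitch and filter settings, can be sketched in a few lines of p5.js. In this hypothetical example the mouse stands in for a flex or orientation sensor; it is not MI.MU’s actual software or MIDI/OSC pipeline.

//Hypothetical sketch: mouse position stands in for glove sensor data.
//Vertical position maps to pitch, horizontal position to a low-pass filter
//cutoff, loosely mirroring the "movement -> parameter" idea.
var osc;
var filter;

function setup() {
    createCanvas(400, 200);
    filter = new p5.LowPass();
    osc = new p5.Oscillator();
    osc.setType('sawtooth');
    osc.disconnect();        //route the oscillator through the filter only
    osc.connect(filter);
    osc.amp(0.3);
    osc.start();
}

function draw() {
    background(220);
    //"sensor" readings: map mouse to a MIDI-ish pitch range and a cutoff
    var pitch = map(mouseY, height, 0, 48, 84);        //low at bottom, high at top
    var cutoff = map(mouseX, 0, width, 200, 8000);     //darker left, brighter right
    osc.freq(midiToFreq(round(pitch)), 0.05);
    filter.freq(cutoff);
    text("move the mouse to 'play'", 10, 20);
}

function mousePressed() {
    userStartAudio();    //browsers need a user gesture to enable audio
}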

LO-10

Badlands by Don Ritter is a sound art project that pairs sound with images of the Canadian Badlands. The image component of the project is controlled by live music: the speed at which the image moves is based on the tempo and pitch of the music being played. I was very interested in this project because the music controls the image, not the other way around; most projects I have looked at up to this point are built around the image first. To do this, Ritter uses software called o8, which is based on Orpheus and is designed to interpret and analyze the music being played in order to control the image. This also allows for artistic interpretation: Ritter (or anyone with the proper setup) could set the image to any music he wanted, and the piece would be different with every musical selection.

A link to a diagram that shows the technical requirements and setup, along with other information about Don Ritter’s Badlands project (2001)

Don Ritter’s website
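
Ritter’s o8 software isn’t something I can reproduce here, but the underlying idea, sound analysis driving the motion of an image, can be approximated in p5.js with a microphone and an amplitude reading. In this hypothetical sketch, "landscape.jpg" is a placeholder image and only loudness is used, not the tempo and pitch analysis o8 performs.

//Hypothetical approximation of "music drives the image": the louder the
//microphone input, the faster a landscape image scrolls.
var mic;
var img;
var offsetX = 0;

function preload() {
    img = loadImage("landscape.jpg");   //placeholder image, not from the project
}

function setup() {
    createCanvas(480, 270);
    mic = new p5.AudioIn();   //live input stands in for the performed music
    mic.start();
}

function draw() {
    background(0);
    var level = mic.getLevel();              //roughly 0.0 (silence) to 1.0 (loud)
    offsetX += map(level, 0, 0.3, 0, 15);    //louder music -> faster scroll
    offsetX %= img.width;
    image(img, -offsetX, 0);                 //draw twice for a seamless wrap
    image(img, img.width - offsetX, 0);
}

function mousePressed() {
    userStartAudio();    //enable the mic after a user gesture
}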

LO 10: Computer Music

American Folk Songs Album

Benoit Carré is a French musician who created SKYGGE, his avatar alias, for AI-generated music. The album “American Folk Songs” was released in 2019 using Flow Machines tools developed by Sony CSL. In this playlist, Carré “revisits American traditional folk songs with a prototype of an AI harmonization tool.” It takes a cappella recordings from many classic American folk singers and uses AI “to flesh out the melodies and the lyrics of the songs, enriching them with lush harmonies and sounds generated by AI that have never been heard before.” I really admire Carré’s work as an intriguing marriage of old folk music and modern electronic music. One song I really enjoyed from the playlist is “Black Is the Color,” featuring the voice of Pete Seeger, a legendary folk singer. I was first introduced to Seeger during high school, as he was a proud alumnus of the school. Hearing his voice with a new twist is therefore very cool and interesting to me.

Creator: Benoit Carré

Year: 2019

Link: https://open.spotify.com/album/6NbX54oOpEZhSOjfdSYepw?si=qh6e45bQTMSh1LC4IX-R6w

Looking Outwards 10 : Computer Music

Charli XCX in performance

Charli XCX, also known as Charlotte Emma Aitchison, is a professional singer, songwriter, music video director, and record producer. She was born in Cambridge, and her music draws on gothic pop, synth-pop, dance-pop, electropop, pop-punk, and alternative pop. During her early career, her music mixed dark and witch-house styles. Most of her songs contain a technical or computational aspect, and her work remains very consistent. I admire how consistent Charli XCX has been with her musical style; her work presents a clear idea of how passionate and interested she is in computational music. Even in the music industry, technology has a very powerful role, and I am curious about what is to come as technology continues to advance and how that advancement will shape its role in many fields.

Looking Outwards-10

The project I am discussing this week is called “Weather Thingy – Real Time Climate Sound Controller,” created by Adrien Kaeser. Weather Thingy is a sound controller that uses real-time, climate-related events to control the settings of musical instruments. The device has two main parts: a weather station mounted on a tripod microphone stand and a controller connected to the station. The station carries three climate sensors, a rain gauge, a wind vane, and an anemometer, and the controller’s interface displays the data coming from them. This project is super interesting to me because it takes real-life climate conditions and translates them into sound. I like how the music can change based on the data the device collects, and it was really nice to see the creator take a musical approach to this topic.
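
A browser sketch has no weather station attached, so the hypothetical p5.js example below fakes wind and rain readings with Perlin noise and maps them onto an oscillator’s pitch and tremolo rate, in the spirit of climate data steering instrument settings (not Kaeser’s actual hardware or software).

//Hypothetical sketch in the spirit of Weather Thingy: simulated "wind" and
//"rain" values (Perlin noise stands in for real sensors) continuously adjust
//an instrument's pitch and tremolo rate.
var osc;    //the "instrument"
var trem;   //low-frequency oscillator used as a tremolo
var t = 0;

function setup() {
    createCanvas(300, 200);
    trem = new p5.Oscillator();
    trem.setType('sine');
    osc = new p5.Oscillator();
    osc.setType('triangle');
    osc.amp(trem);      //tremolo oscillator controls the loudness wobble
    trem.start();
    osc.start();
}

function draw() {
    background(240);
    t += 0.01;
    var wind = noise(t);            //fake anemometer reading, 0..1
    var rain = noise(t + 100);      //fake rain-gauge reading, 0..1
    osc.freq(map(wind, 0, 1, 110, 440), 0.1);   //windier -> higher pitch
    trem.freq(map(rain, 0, 1, 1, 12));          //rainier -> faster tremolo
    text("wind: " + nf(wind, 1, 2) + "  rain: " + nf(rain, 1, 2), 10, 20);
}

function mousePressed() {
    userStartAudio();    //enable audio after a user gesture
}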

Project 10 Sonic Story

Story: it rains (rain sound plays), the sprout grows (grow sound plays), and the flower blooms (bloom sound plays). Then the cloud clears up and a bird goes by (bird sound plays). At the end, the sun gets bigger (ending sound plays).

sketch

//Jae Son
//Section C
//story: it rains, the sprout grows, and the flower blooms. 
// Then, the cloud clears up and the bird goes by. 
// At the end, the sun gets bigger

var rain;
var grow;
var bloom;
var bird;
var sun;
var sprout;
var flower;
var birdimg;
var sunimg;

function preload() {
  //sounds
  rain = loadSound("https://courses.ideate.cmu.edu/15-104/f2020/wp-content/uploads/2020/11/rain.wav");
  grow = loadSound("https://courses.ideate.cmu.edu/15-104/f2020/wp-content/uploads/2020/11/grow.wav");
  bloom = loadSound("https://courses.ideate.cmu.edu/15-104/f2020/wp-content/uploads/2020/11/bloom.mp3");
  bird = loadSound("https://courses.ideate.cmu.edu/15-104/f2020/wp-content/uploads/2020/11/bird.wav");
  sun = loadSound("https://courses.ideate.cmu.edu/15-104/f2020/wp-content/uploads/2020/11/sun.wav");
  //images
  sprout = loadImage("https://i.imgur.com/jhYJcR1.png");
  flower = loadImage("https://i.imgur.com/o6nzV38.png");
  birdimg = loadImage("https://i.imgur.com/amZXis1.png");
  sunimg = loadImage("https://i.imgur.com/2W1rzB0.png");
}

function setup() {
  createCanvas(600, 400);
  useSound();
  frameRate(20); 
}

function soundSetup() { // setup for audio generation
  rain.setVolume(0.6);
}

function draw() {
  //blue background
  background(189,209,255); 
  noStroke();
  imageMode(CENTER);
  
  //animation
    if (frameCount < 5) {
        cloud(200,100);
        image(sprout,width/2,height-20,67,104);
    } else if (frameCount >= 5 && frameCount < 10) {
        raindrop(250,150,0);
        cloud(200,100);
        image(sprout,width/2,height-20,67,104);
    } else if (frameCount >= 10 && frameCount < 15) {
        raindrop(250,150,100);
        cloud(200,100);
        image(sprout,width/2,height-20,67,104);
    } else if (frameCount >= 15 && frameCount < 50) {
        raindrop(250,150+frameCount*3,255);
        cloud(200,100);
        image(sprout,width/2,height-20,67,104);
    } else if (frameCount >= 50 && frameCount < 100) {
        cloud(200,100);
        image(sprout,width/2,height-50-frameCount/3,67,104);
    } else if (frameCount >= 100 && frameCount < 110) {
        cloud(200,100);
        image(flower,width/2,height-80,67,104);
    } else if (frameCount >= 110 && frameCount < 270) {
        image(sunimg,width/2,100,90,90);
        cloud(400-frameCount*2,100);
        image(flower,width/2,height-80,67,104);
        image(birdimg,-200+frameCount*3,200,84,57);
    } else if (frameCount >= 270 && frameCount < 300) {
        image(sunimg,width/2,100,70+frameCount/5,70+frameCount/5);
        image(flower,width/2,height-80,67,104);
        image(flower,width/2,height-80,67,104);
    } else {
        image(sunimg,width/2,100,130,130);
        image(flower,width/2,height-80,67,104);
    }
    
  //brown ground
    fill(165,85,85);
    rect(0,height-40,600,40);
    
  //sound play
    if (frameCount == 2) {
      rain.play();
    } else if (frameCount == 50) {
      grow.play();
    } else if (frameCount == 102) {
      bloom.play();
    } else if (frameCount == 110) {
      bird.play();
    } else if (frameCount == 250) {
      bird.stop();
    } else if (frameCount == 270) {
      sun.play();
    }
    
}

function cloud(x,y) { //cloud shape draw
  push();
  translate(x,y);
  noStroke();
  fill(235,242,255);
  ellipse(0,0,100);
  ellipse(87,0,115);
  ellipse(160,0,95);
  pop();
}

function raindrop(x,y,t) { //rain drops shape draw
  push();
  rectMode(CENTER);
  translate(x,y);
  noStroke();
  fill(100,178,255,t);
  rect(0,10,10,50);
  rect(40,0,10,50);
  rect(80,15,10,50);
  pop();
}