Emily Zhou – Composition

tortle

var ttl;

function setup() {
    createCanvas(400, 400);
}

function coolShape(ttl) {
    // walk forward while turning by an amount that oscillates
    // with i and scales with the horizontal mouse position
    for (var i = 0; i < width; i++) {
        ttl.forward(10);
        var mx = map(mouseX, 50, 400, 200, 400);
        ttl.left(mx * 0.01 * sin(radians(i)));
    }
}

function draw() {
    background("Lavender");
    // spin 20 turtles evenly around the center, each tracing one arm
    for (var i = 0; i < 20; i++) {
        ttl = makeTurtle(width / 2, height / 2);
        ttl.left((360 / 20) * i);
        ttl.penDown();
        // the red channel follows the mouse for an interactive color shift
        var R = map(mouseX, 0, width, 200, 255);
        ttl.setColor(color(R, 255, 255));
        ttl.setWeight(3);
        coolShape(ttl);
    }
}

//////////////////////////////


function turtleLeft(d) { this.angle -= d; }
function turtleRight(d) { this.angle += d; }
function turtleForward(p) {
    var rad = radians(this.angle);
    var newx = this.x + cos(rad) * p;
    var newy = this.y + sin(rad) * p;
    this.goto(newx, newy);
}
function turtleBack(p) { this.forward(-p); }
function turtlePenDown() { this.penIsDown = true; }
function turtlePenUp() { this.penIsDown = false; }
function turtleGoTo(x, y) {
    if (this.penIsDown) {
        stroke(this.color);
        strokeWeight(this.weight);
        line(this.x, this.y, x, y);
    }
    this.x = x;
    this.y = y;
}
function turtleDistTo(x, y) {
    return sqrt(sq(this.x - x) + sq(this.y - y));
}
function turtleAngleTo(x, y) {
    var absAngle = degrees(atan2(y - this.y, x - this.x));
    var angle = ((absAngle - this.angle) + 360) % 360.0;
    return angle;
}
function turtleTurnToward(x, y, d) {
    var angle = this.angleTo(x, y);
    if (angle < 180) this.angle += d;
    else this.angle -= d;
}
function turtleSetColor(c) { this.color = c; }
function turtleSetWeight(w) { this.weight = w; }
function turtleFace(angle) { this.angle = angle; }
function makeTurtle(tx, ty) {
    var turtle = {x: tx, y: ty, angle: 0.0, penIsDown: true,
                  color: color(128), weight: 1,
                  left: turtleLeft, right: turtleRight,
                  forward: turtleForward, back: turtleBack,
                  penDown: turtlePenDown, penUp: turtlePenUp,
                  goto: turtleGoTo, angleto: turtleAngleTo,
                  turnToward: turtleTurnToward,
                  distanceTo: turtleDistTo, angleTo: turtleAngleTo,
                  setColor: turtleSetColor, setWeight: turtleSetWeight,
                  face: turtleFace};
    return turtle;
}

I wanted to use turtle graphics to create an interesting line composition whose shape and colour respond to the mouse. I had fun experimenting with slight differences in numerical values that drastically changed the result.

Screenshots of the composition at varying mouse positions:

Dani Delgado Looking Outwards – 11

The laptop orchestra during a performance

The project I chose to research this week was the Stanford Laptop Orchestra (or SLOrk for short). This orchestra, which creates full performances by having performers use physical controllers to generate music from their laptops, was founded in 2008 by Ge Wang and other students, faculty, and staff at Stanford University’s Center for Computer Research in Music and Acoustics. They use the ChucK programming language to create and synthesize sounds and to design the instruments.
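To get a feel for what a “laptop instrument” involves, here is a minimal sketch in p5.js with the p5.sound library. SLOrk actually writes its instruments in ChucK, so this is only a hypothetical analogue, not their code; the mouse stands in for one of their physical controllers, steering the pitch and loudness of a live oscillator.

var osc; // requires the p5.sound library

function setup() {
    createCanvas(400, 400);
    osc = new p5.Oscillator('sine');
    osc.amp(0);  // silent until the performer "plays" it
    osc.start(); // note: newer browsers require a click before audio starts
}

function draw() {
    background(220);
    // map the controller axes to pitch and loudness
    var freq = map(mouseX, 0, width, 220, 880); // A3 up to A5
    var vol = map(mouseY, height, 0, 0, 0.5);   // louder toward the top
    osc.freq(freq);
    osc.amp(vol, 0.1); // 0.1 s ramp to avoid clicks
}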

The controllers used to generate music. The knobs at the end can be pulled and twisted by the performers, sending input to the laptops.

I find this project fascinating, as it requires skills similar to those of a traditional orchestra (in terms of cohesion, practice, and performance) but utilizes a completely new medium. I was honestly very excited to stumble across this, because I was expecting to find a sole person composing music; instead, I found an entire group working to synthesize music live, which is impressive.

A video of one of their performances

Their website

Eliza Pratt – Project 11

click to redraw!

/*
Eliza Pratt
Section E
elpratt@andrew.cmu.edu
Project 11
*/

var face = 5; //face size
var faceY = 100; //starting y coordinate for face
var w = 2; //stroke weight
var eye = {LX: 0, LY: 0, RX: 0, RY: 0}; // eye coordinates
var lipX; // lip x position
var lipY; //lip y position

function setup() {
    createCanvas(480, 360);
    frameRate(10);
}

function draw() {
    background("beige");

    for (var i = 0; i < 3; i++) {
      var turt = makeTurtle(90 + i * 150, faceY);
      turt.penDown();
      turt.setColor("black");

      //draw face features
      faceBrowsNose(turt);
      eyes(turt);
      glasses(turt);
      lips(turt);
    }

    noLoop();

}

//redraw face when the mouse is pressed
function mousePressed() {
  draw();
}


//draws face, eyebrows and nose
function faceBrowsNose(ttl) {
  //FACE
  for (var i = 0; i < 180; i++) {
    turtlepressure(ttl);
    ttl.right(360 / 100 + random(-2.5, 2.5));
    ttl.forward(face + random(-face / 3, face / 3));
  }
  ttl.left(20);

  //LEFT EYEBROW
  for (var i = 0; i < 50; i++) {
    turtlepressure(ttl);
    ttl.right(360 / 100);
    ttl.forward(2 + random(-2, 2));
    //save coordinates at top of brow to assign eye position
    if (i == 25) {
      eye.LX = ttl.x;
      eye.LY = ttl.y + random(10, 25);
    }
  }
  //LEFT NOSE
  for (var i = 0; i < 10; i++) {
    turtlepressure(ttl);
    ttl.right(random(-0.5, .5));
    ttl.forward(1);
  }
  //BOTTOM NOSE
  for (var i = 0; i < 50; i++) {
    turtlepressure(ttl);
    ttl.left(360 / 100);
    ttl.forward(0.5 + random(-1, 1));
    //save bottom of nose coordinates for lip position
    if (i == 25) {
      lipY = ttl.y + random(20, 30);
      lipX = ttl.x + random(-30, 10);
    }
  }
  //RIGHT NOSE
  for (var i = 0; i < 10; i++) {
    turtlepressure(ttl);
    ttl.right(random(-0.5, .5));
    ttl.forward(1);
  }
  //RIGHT EYEBROW
  for (var i = 0; i < 50; i++) {
    turtlepressure(ttl);
    ttl.right(360 / 100);
    ttl.forward(2 + random(-2, 2));
    if (i == 25) {
      eye.RX = ttl.x;
      eye.RY = ttl.y  + random(10, 25);
    }
  }
}


//draws eyes
function eyes(ttl) {
    ttl.penUp();
    ttl.goto(eye.LX, eye.LY);
    ttl.penDown();

    //left eye
    for (var i = 0; i < 100; i++) {
      turtlepressure(ttl);
      ttl.right(360 / 50);
      ttl.forward(.5 + random(-0.75, 0.75));
    }

    ttl.penUp();
    ttl.goto(eye.RX, eye.RY);
    ttl.penDown();

    //right eye
    for (var i = 0; i < 100; i++) {
      turtlepressure(ttl);
      ttl.right(360 / 50);
      ttl.forward(.5 + random(-0.75, 0.75));
    }
}


//draws glasses
function glasses(ttl) {
  ttl.penUp();
  ttl.goto(eye.LX + random(10, 18), eye.LY);
  ttl.penDown();

  //lens 1
  ttl.face(90);
  for (var i = 0; i < 100; i++) {
    turtlepressure(ttl);
    ttl.right(360 / 50 + random(-4, 4));
    ttl.forward(2 + random(-0.75, 0.75));
  }

  ttl.penUp();
  ttl.goto(eye.RX - random(10, 18), eye.RY);
  ttl.penDown();

  //lens 2
  ttl.face(270);
  for (var i = 0; i < 100; i++) {
    turtlepressure(ttl);
    ttl.right(360 / 50 + random(-4, 4));
    ttl.forward(2 + random(-0.75, 0.75));
  }
}


//draws lips
function lips(ttl) {

  ttl.penUp();
  ttl.goto(lipX, lipY);
  ttl.penDown();
  ttl.face(310);

  //TOP LEFT
  for (var i = 0; i < 20; i++) {
    turtlepressure(ttl);
    ttl.right(360 / 100);
    ttl.forward(1 + random(-1, 1));
  }
  ttl.left(50);

  //TOP RIGHT
  for (var i = 0; i < 20; i++) {
    turtlepressure(ttl);
    ttl.right(360 / 100);
    ttl.forward(1 + random(-1, 1));
  }
  ttl.face(180);

  //LINE
  for (var i = 0; i < 30; i++) {
    turtlepressure(ttl);
    ttl.forward(1);
    ttl.right(random(-2, 2));
  }
  ttl.face(90);

  //BOTTOM LIP
  for (var i = 0; i < 50; i++) {
    turtlepressure(ttl);
    ttl.left(360 / 100);
    ttl.forward(1 + random(-1, 1));
  } 
}


//varies stroke weight to create "hand drawn" effect
function turtlepressure(turtle) {
  w += random(-0.4, 0.4);
  if (w <= 0) w = 0.4;
  else if (w >= 3) w = 2.7;
  turtle.setWeight(w);
}

/////////////////////////////////////////////////////////////////
function turtleLeft(d) {
    this.angle -= d;
}


function turtleRight(d) {
    this.angle += d;
}


function turtleForward(p) {
    var rad = radians(this.angle);
    var newx = this.x + cos(rad) * p;
    var newy = this.y + sin(rad) * p;
    this.goto(newx, newy);
}


function turtleBack(p) {
    this.forward(-p);
}


function turtlePenDown() {
    this.penIsDown = true;
}


function turtlePenUp() {
    this.penIsDown = false;
}


function turtleGoTo(x, y) {
    if (this.penIsDown) {
      stroke(this.color);
      strokeWeight(this.weight);
      line(this.x, this.y, x, y);
    }
    this.x = x;
    this.y = y;
}


function turtleDistTo(x, y) {
    return sqrt(sq(this.x - x) + sq(this.y - y));
}


function turtleAngleTo(x, y) {
    var absAngle = degrees(atan2(y - this.y, x - this.x));
    var angle = ((absAngle - this.angle) + 360) % 360.0;
    return angle;
}


function turtleTurnToward(x, y, d) {
    var angle = this.angleTo(x, y);
    if (angle < 180) {
        this.angle += d;
    } else {
        this.angle -= d;
    }
}


function turtleSetColor(c) {
    this.color = c;
}


function turtleSetWeight(w) {
    this.weight = w;
}


function turtleFace(angle) {
    this.angle = angle;
}


function makeTurtle(tx, ty) {
    var turtle = {x: tx, y: ty,
                  angle: 0.0, 
                  penIsDown: true,
                  color: color(128),
                  weight: 1,
                  left: turtleLeft, right: turtleRight,
                  forward: turtleForward, back: turtleBack,
                  penDown: turtlePenDown, penUp: turtlePenUp,
                  goto: turtleGoTo, angleto: turtleAngleTo,
                  turnToward: turtleTurnToward,
                  distanceTo: turtleDistTo, angleTo: turtleAngleTo,
                  setColor: turtleSetColor, setWeight: turtleSetWeight,
                  face: turtleFace};
    return turtle;
}

Ever since our second variable face assignment, where we saw examples of Moka’s generative faces, I’ve wanted to code doodles that look like blind contours! I had a lot of fun playing with the turtles and got some pretty great results from randomizing different things. Also, by randomizing the stroke weight at each point, I was able to make my own “brush” of sorts. It was challenging to have any control over the randomness factor, but here are some of the beautiful creations that came out of it before I decided to do multiple faces:

friend with hair
derp
sleepy friend
abstract friend

Vicky Zhou – Looking Outwards 11

4G Network into Musical Sounds

For this week’s Looking Outwards centered on computational music, I decided to focus on a project by Andrius Sarapovas: a kinetic generative music installation. The installation turns 4G network data into a series of musical sounds, based on an algorithm and played through 77 segments distributed throughout the physical exhibition space. Each segment consists of a metal bar, a sound activator, a sound damper, a resonator, and mechatronics that pick up on signals in the 4G network. The placement of these segments was optimized for the actual physical exhibition space.
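The post does not describe Sarapovas’s actual note-selection algorithm, so the following is only a guessed illustration of the general idea: quantizing a stream of raw network values onto a fixed musical scale, one strike per segment. The scale and names here are my own choices.

// Hypothetical illustration, not Sarapovas's algorithm
var SCALE = [60, 62, 64, 67, 69]; // MIDI notes of a C pentatonic scale
var NUM_SEGMENTS = 77;            // metal-bar segments in the installation

// map one raw network value (say, a packet size in bytes)
// to a segment index and a note on the scale
function valueToStrike(packetSize) {
    var segment = packetSize % NUM_SEGMENTS;
    var note = SCALE[packetSize % SCALE.length];
    return {segment: segment, note: note};
}

// e.g. valueToStrike(1432) returns {segment: 46, note: 64}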

I appreciate this project because it takes very impersonal data that we commonly discard, or don’t understand, and transforms it into a medium that we are more familiar with and can enjoy.

Rachel Lee Looking Outwards 11 Section E

Screenshot from the anti-game, with machine intelligence posing and answering questions to create a generative song.

Atlås from binaura on Vimeo (2017).

This week, I decided to investigate Atlås, an anti-game environment that creates generative music, built by the creative computing collective binaura. Atlås was made with the p5.js library and simultaneously investigates the autonomy of algorithms and machine intelligence. At its crux, the project generates sounds that correspond to qualities of the answers to questions solved by machine intelligence. While this concept is fascinating in itself, what I find most interesting is how the collective generated an appropriate soundscape for the kinds of questions asked, as the questions are often quite lofty, dealing with cognitive processes, human communication, and environmental factors (not the easiest things to talk about). Further, I was impressed by how binaura created a compelling visual narrative to complement the experience, guided by the tempo and feeling of the generated music. Overall, I really admire how well the project was executed, especially with respect to the sensitivity of the relationship between the user, the computer, the music, and the visual elements of the anti-game environment.
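binaura’s exact mapping is not published in this post, but the general idea, deriving a tone from measurable qualities of a generated answer, could be sketched in p5.js roughly like this (the specific properties and names are my guesses, not theirs):

// Hypothetical sketch of the idea, not binaura's code:
// derive a tone's pitch and duration from an answer string.
function answerToTone(answer) {
    var words = answer.split(' ');
    // longer answers give lower, longer tones
    var midi = 84 - min(words.length, 24);            // pitch from word count
    var freq = midiToFreq(midi);                      // p5.sound helper
    var dur = constrain(answer.length / 100, 0.2, 2); // seconds, from characters
    return {freq: freq, dur: dur};
}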

Christine Chen-Looking Outwards-11

Above is a video of Adrien Kaeser’s real-time climate sound controller, which he developed in 2018. Link: https://vimeo.com/292088058

While scrolling through the various projects related to computational sound art, I came across one that grabbed my attention: a real-time climate sound controller. Adrien Kaeser from ECAL, a university of art and design based in Renens, designed this sound controller. The device uses aspects of real-time weather to alter and control designed music instruments. It is composed of two major parts: a weather station on a tripod microphone stand, and a custom-built controller connected to the weather station. The weather station receives and senses weather information and passes it on to the controller, which transforms the data into signals that can be interpreted by instruments. The user can also adjust the device, by modifying qualities such as amplitude, to control the sound output. The device is made with an Arduino Mega, an Arduino Leonardo, Weather Meters, a SparkFun ESP32 Thing Environment Sensor Shield, an ESP32, a SparkFun MIDI Shield, a High Speed Optocoupler 6N138, rotary encoder knobs, colored buttons, and an Arduino TFT touch screen. The software includes Arduino, C++, and the MIDI protocol.
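Kaeser’s firmware is not public here, so the following is only a sketch of the conversion step described above: scaling sensor readings into the 0 to 127 range of MIDI control-change values. The controller numbers and function names are arbitrary choices of mine.

// Hypothetical sketch, not Kaeser's code: one MIDI controller per sensor
var CC_WIND_SPEED = 20; // arbitrary control-change numbers
var CC_RAIN = 21;
var CC_WIND_DIR = 22;
var CC_UV = 23;

// scale a sensor reading into the 0-127 MIDI range
function toMidiValue(reading, lo, hi) {
    return round(constrain(map(reading, lo, hi, 0, 127), 0, 127));
}

// e.g. a wind speed of 12 m/s on a 0-30 m/s scale becomes the message
// [0xB0, CC_WIND_SPEED, toMidiValue(12, 0, 30)], a control change of value 51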

What I love most about this project is how the creator transforms weather, the state of the atmosphere at a place and time, into another, abstract form that gives people a different experience. It is as if the weather were compressed into audio form. By allowing users to alter the sound through the controller, the creator makes sure that the generated sound is not entirely random and does not become a muddled composition; the controller helps users shape the output into sound that is pleasing to them. It is inspiring to see how such large, uncontrollable data can be transferred and transformed.

The controller allows users to alter the sound output.

Yingying Yan- Project 10- Landscape

sketch

/*
Yingying Yan
Section E
yingyiny@andrew.cmu.edu
Project - 10
*/

var snowman = [];

function setup() {
	createCanvas(480, 240);
	for (var i = 0; i < 4; i++) {
		var rx = random(width);
		snowman[i] = makeSnowman(rx);
	}
	frameRate(10);
}

function draw() {
	background("green");
	//background
	displayHorizon();
	//snowman
	updateAndDisplaySnowman();
	removeSnowmanThatHaveSlippedOutOfView();
	addNewSnowmanWithSomeRandomProbability();
}

function updateAndDisplaySnowman() {
	for(var i = 0; i < snowman.length; i++) {
		snowman[i].move();
		snowman[i].display();
	}
}

function removeSnowmanThatHaveSlippedOutOfView() {
	var snowmanKeep = [];
	for (var i = 0; i < snowman.length; i++) {
		if(snowman[i].x + 50 > 0) {
			snowmanKeep.push(snowman[i]);
		}
	}
	snowman = snowmanKeep;
}

function addNewSnowmanWithSomeRandomProbability() {
	var newSnowmanPercent = 0.006;
	if (random(0,1) < newSnowmanPercent) {
		snowman.push(makeSnowman(width));
	}
}
//move towards the left 

function snowmanMove() {
	this.x += this.speed;
}
//function that draws the snowman

function snowmanDisplay() {
	push();
	fill(255);
	noStroke();
	var sizeBottom = 35;
	var sizeMiddle = 25;
	var sizeTop = 20;
	var yy = height-35;
	//translate(this.x, height - 35);
	translate(this.x, 0);
	//bottom circle
	ellipse(0, yy - sizeBottom / 2, sizeBottom, sizeBottom);
	//middle circle
	ellipse(0, yy - sizeBottom - sizeMiddle / 2 +5 , sizeMiddle, sizeMiddle);
	// //top circle
	// ellipse(0, yy - sizeBottom - sizeMiddle - sizeTop / 2 + 10, sizeTop, sizeTop);
	push();
	fill(0);
	ellipse(0 - 5, yy - sizeBottom - sizeMiddle / 2 + 2, 2, 2);
	ellipse(0 + 5, yy - sizeBottom - sizeMiddle / 2 + 2, 2, 2);
	noFill();
	stroke(0);
	ellipse(0, yy - sizeBottom - sizeMiddle / 2 + 5, 4, 4);
	line(15, yy - sizeBottom / 2, 30, yy - 40);
	line(-15, yy - sizeBottom / 2, -30, yy - 40);
	pop();	
	pop();
}

//define all the objects and variables

function makeSnowman(positionOg) {
	var sman = {
		x: positionOg,
		//y: 380,
		speed: -1.0,
		move: snowmanMove,
		display: snowmanDisplay
	}
	return sman;
}

//background
function displayHorizon() {
	fill("lightblue");
	rect(-1,-1, width + 1, height - 39);
}


I wanted to render a snow scene because I love the snow. Unfortunately, I could barely finish the project. But I have a snowman! I mean, lots of snowmen. I think this project is hard and really made me think about “objects.” I am still in the process of understanding the code. If I do something similar for my final project, it will be much better than this.

Carley Johnson Looking Outwards 11

The group I came across, iii, is “an artist-run platform supporting radical interdisciplinary practices engaging with image, sound, space and the body.” They host residencies and support artists, but the specific project I’ll be looking at is a totally immersive installation called “The Intimate Earthquake Archive.” The piece engages almost every sense through tactile vests, compositions derived from seismic recordings, interactive radio broadcasts, and sandstone earth core samples on wooden scaffolding set up around the people inside.

In this photo you can see the scaffolding and the vests worn by participants.

This project is really interesting because it plays with sound in so many ways. There are radio broadcasts as well as recordings of an earthquake in Groningen, but the vests are the most interesting part. Based on the wearer’s movement and position in the space, they emit sounds and rumbles that affect different parts of the body. I like how their website describes these tactile vests: each one “allows the wearer to explore the subtle rumbles of the earthquakes on the body.”

The truth about sound is that we love it (we love music), and there is no doubt that what we listen to affects the state of our body. But this idea is rarely explored in relation to art, or attached so firmly to what we feel. I would love to wear one of these vests. Feeling and hearing the soft rumble of an earthquake in my stomach as well as all around me sounds at once terrifying and calming.

I have to suppose that the algorithms used employ motion capture, so as to track the wearer’s progress through the earthquake, and possibly some complex math in the vest’s transducer speakers to know when and where to trigger a rumble.
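Since that is already speculation, here is an equally speculative sketch of how such a position-to-rumble mapping might look in p5.js terms: intensity falls off with the wearer’s distance from a virtual epicenter. The function name and falloff radius are invented for illustration.

// Speculative sketch, not iii's code: rumble strength from position
function rumbleIntensity(wearerX, wearerY, quakeX, quakeY, magnitude) {
    var d = dist(wearerX, wearerY, quakeX, quakeY); // meters from epicenter
    // full strength at the epicenter, fading to zero at 10 m out
    return magnitude * constrain(1 - d / 10, 0, 1);
}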


Emily Zhou – Looking Outwards – 11

Orbiter is an interactive sound environment designed by FIELD studio. It is an installation that invites visitors to lie down and observe a representation of stars from below. By pointing upward, visitors can insert new stars into orbit, each with unique visual and sound qualities.


Orbiter: Interactive Sound Environment / Documentation

The music is played on a scale of concentric circles: the bigger you let a star grow before you pull back your hand to insert it into orbit, the louder it plays. In terms of computation, the software is based on computer vision technology; it performs real-time analysis of a camera image of the player while generating 6-channel audio and video signals. I admire the interactive quality of both the sound and the visuals in this work, and I imagine it to be an immersive experience for the viewer.
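The grow-then-release mechanic suggests a simple mapping, which might look something like this in p5.js (a guess at the idea, not FIELD’s software; the names and ranges are mine):

// Hypothetical sketch: a star's loudness and orbit follow its size,
// which grows the longer the visitor holds a hand up.
function makeStar(holdTimeSec) {
    var size = constrain(holdTimeSec * 10, 5, 100); // grows while held
    var amp = map(size, 5, 100, 0.05, 1.0);         // bigger means louder
    var ring = floor(map(size, 5, 100, 0, 5.99));   // concentric circle index 0-5
    return {size: size, amp: amp, ring: ring};
}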

Alice Fang – Looking Outwards – 11

A demonstration of Weather Thingy

Weather Thingy is a climate sound controller that affects how a musician performs in real time, based on current weather conditions. Created by Adrien Kaeser using Arduino, weather sensors, and C++, Weather Thingy allows “listeners to feel the impact of the climate on the composition.” The device consists of two parts, a weather station and a controller, and the sound it produces varies based on wind speed, rain and precipitation levels, wind direction, and UV level. These four weather variables in turn affect the pan, chorus, LFO (low-frequency oscillation), and delay. The controller converts the climate data into MIDI data that can be interpreted by the instruments.

I think this project is interesting because it encapsulates some of the forces in nature to effect how music is performed. Instead of having purely electronic synthesizers, there’s this weird, beautiful combination of utilizing technology to have something beyond our control to create music. I also think the real-time capabilities of this is super cool; imagine if this project was scaled up and a whole orchestra was affected.