alchan-Project 11-Composition

turtles

var turtle1;
var turtle2;
var turtle3;

var forwardAmt;
var turnAmt;

var turtles = [];
var colorBank = ["lightskyblue", "darksalmon", "gold", "coral", "skyblue", "cadetblue", "yellowgreen", "tomato"];
var colors = [];

var turtlePosition;

function setup() {
  createCanvas(480, 480);
  background(220);

  blendMode(OVERLAY);

  frameRate(10);

}

function draw() {
  for(var i = 0; i < turtles.length; i++) {
    forwardAmt = random(1, 50);
    turnAmt = random(-100, 100);
    turtles[i].setColor(colors[i]);
    turtles[i].setWeight(10);
    turtles[i].penDown();
    turtles[i].forward(forwardAmt);
    turtles[i].right(turnAmt);
    turtles[i].penUp();
  }
}

function mousePressed() {
  turtles.push(makeTurtle(mouseX, mouseY));
  colors.push(random(colorBank));
  print(colorBank);
  print(colors);
}

function keyPressed() {
  if (key === "r" || key === "R") { // accept either case so Shift isn't required
    turtles = [];
    blendMode(NORMAL);
    background(220);
    blendMode(OVERLAY);
  }
}

// turtle graphics
function turtleLeft(d){this.angle-=d;}function turtleRight(d){this.angle+=d;}
function turtleForward(p){var rad=radians(this.angle);var newx=this.x+cos(rad)*p;
var newy=this.y+sin(rad)*p;this.goto(newx,newy);}function turtleBack(p){
this.forward(-p);}function turtlePenDown(){this.penIsDown=true;}
function turtlePenUp(){this.penIsDown = false;}function turtleGoTo(x,y){
if(this.penIsDown){stroke(this.color);strokeWeight(this.weight);
line(this.x,this.y,x,y);}this.x = x;this.y = y;}function turtleDistTo(x,y){
return sqrt(sq(this.x-x)+sq(this.y-y));}function turtleAngleTo(x,y){
var absAngle=degrees(atan2(y-this.y,x-this.x));
var angle=((absAngle-this.angle)+360)%360.0;return angle;}
function turtleTurnToward(x,y,d){var angle = this.angleTo(x,y);if(angle< 180){
this.angle+=d;}else{this.angle-=d;}}function turtleSetColor(c){this.color=c;}
function turtleSetWeight(w){this.weight=w;}function turtleFace(angle){
this.angle = angle;}function makeTurtle(tx,ty){var turtle={x:tx,y:ty,
angle:0.0,penIsDown:true,color:color(128),weight:1,left:turtleLeft,
right:turtleRight,forward:turtleForward, back:turtleBack,penDown:turtlePenDown,
penUp:turtlePenUp,goto:turtleGoTo, angleto:turtleAngleTo,
turnToward:turtleTurnToward,distanceTo:turtleDistTo, angleTo:turtleAngleTo,
setColor:turtleSetColor, setWeight:turtleSetWeight,face:turtleFace};
return turtle;}

I was inspired by the look of stylized subway maps (dots connected by segments in particular) and wanted to create something that partially mimicked that look. I decided to randomize the angles and segment lengths and to incorporate user interaction: click to add more turtles/paths, and press R to reset the canvas.

dayoungl Project-11

sketch

//Sharon Lee
//dayoungl@andrew.cmu.edu
//Section E
//Project 11 - freestyle turtle

// makeTurtle(x, y) -- make a turtle at x, y, facing right, pen down
// left(d) -- turn left by d degrees
// right(d) -- turn right by d degrees
// forward(p) -- move forward by p pixels
// back(p) -- move back by p pixels
// penDown() -- pen down
// penUp() -- pen up
// goto(x, y) -- go straight to this location
// setColor(color) -- set the drawing color
// setWeight(w) -- set line width to w
// face(d) -- turn to this absolute direction in degrees
// angleTo(x, y) -- what is the angle from my heading to location x, y?
// turnToward(x, y, d) -- turn by d degrees toward location x, y
// distanceTo(x, y) -- how far is it to location x, y?
var t1;

function setup(){
  createCanvas(400,400);
  background(220);
  noLoop();

  t1 = makeTurtle(width/2, height/2); //start drawing from the centre
  t1.setColor(0);
  t1.setWeight(1.5);
  t1.penDown();
  for (var i = 0; i < width; i++){
    //randomize stroke colour & direction
    var x = floor(random (1,4));
    if (x == 1){
      t1.setColor("yellow");
      t1.forward(12);
      t1.right(90);
    }
    if(x == 2){
      t1.setColor("red");
      t1.forward(12);
      t1.left(10); // adding a bit of randomness to 90 degree angles
    }

    if(x == 3){
      t1.setColor("blue");
      t1.back(20);
      t1.left(90);
    }
  }
}

//turtle API
function turtleLeft(d) {
    this.angle -= d;
}
 
 
function turtleRight(d) {
    this.angle += d;
}
 
 
function turtleForward(p) {
    var rad = radians(this.angle);
    var newx = this.x + cos(rad) * p;
    var newy = this.y + sin(rad) * p;
    this.goto(newx, newy);
}
 
 
function turtleBack(p) {
    this.forward(-p);
}
 
 
function turtlePenDown() {
    this.penIsDown = true;
}
 
 
function turtlePenUp() {
    this.penIsDown = false;
}
 
 
function turtleGoTo(x, y) {
    if (this.penIsDown) {
      stroke(this.color);
      strokeWeight(this.weight);
      line(this.x, this.y, x, y);
    }
    this.x = x;
    this.y = y;
}
 
 
function turtleDistTo(x, y) {
    return sqrt(sq(this.x - x) + sq(this.y - y));
}
 
 
function turtleAngleTo(x, y) {
    var absAngle = degrees(atan2(y - this.y, x - this.x));
    var angle = ((absAngle - this.angle) + 360) % 360.0;
    return angle;
}
 
 
function turtleTurnToward(x, y, d) {
    var angle = this.angleTo(x, y);
    if (angle < 180) {
        this.angle += d;
    } else {
        this.angle -= d;
    }
}
 
 
function turtleSetColor(c) {
    this.color = c;
}
 
 
function turtleSetWeight(w) {
    this.weight = w;
}
 
 
function turtleFace(angle) {
    this.angle = angle;
}
 
 
function makeTurtle(tx, ty) {
    var turtle = {x: tx, y: ty,
                  angle: 0.0, 
                  penIsDown: true,
                  color: color(128),
                  weight: 1,
                  left: turtleLeft, right: turtleRight,
                  forward: turtleForward, back: turtleBack,
                  penDown: turtlePenDown, penUp: turtlePenUp,
                  goto: turtleGoTo, angleto: turtleAngleTo,
                  turnToward: turtleTurnToward,
                  distanceTo: turtleDistTo, angleTo: turtleAngleTo,
                  setColor: turtleSetColor, setWeight: turtleSetWeight,
                  face: turtleFace};
    return turtle;
}

For this project, I wanted to create something simple yet interesting. Instantly, I was reminded of Legos, because the way they work is very simple – you simply stack the blocks on top of each other. However, there are millions of ways to apply this simple mechanism to create so many different things. Another source of inspiration for the project came from one of my favourite painters, Mondrian. The three primary colours combined with 90-degree angles reflect my inspiration from Mondrian's works.

Sheenu-Looking Outwards-11

This is a segment from Animusic, a series of musical and computational 3D animations. This particular one is named “Pipe Dream” and features numerous balls shooting out of pipes and hitting guitar strings, bells, xylophones, drums, and cymbals. Each segment in the series follows a certain artistic theme, a certain genre of music, and a certain type of orchestra. Electronic music would have a sci-fi theme and its orchestra would mainly consist of synthesizers and electronic drums. Classical music would have an orchestra consisting of violins, brass, and woodwinds.

As a child, I was always fascinated by the variety, creativity, and autonomy that is displayed in the Animusic series. At the time, the idea of robots playing music was a fascinating subject to me and it still is today. However, what I didn’t realize was that the animation itself was already, in a way, a robot playing music.

The Animusic animations are not animated by hand, but rather are animated and controlled by the computer in response to the music. The software used to make the whole animation come to life is a custom-made engine named “MIDImotion”. Because the songs are in MIDI format, the program responds to the data sent from the song file and translates it into animation for each instrument. This is how the animation can show so many things happening at once; animating all of this by hand would be extremely difficult.
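As a rough illustration of that idea (this is only a guess at the general approach, not Animusic's actual MIDImotion engine), a p5.js sketch could step through a list of note events parsed from a MIDI file and fire an instrument animation whenever the playback clock passes each event:

// Illustration only: MIDI-style events driving an animation (not MIDImotion).
// Assume noteEvents was parsed from a MIDI file ahead of time.
var noteEvents = [
  {time: 0.5, pitch: 60},
  {time: 1.0, pitch: 64},
  {time: 1.5, pitch: 67}
];
var nextEvent = 0;
var flash = 0; // stand-in for an instrument being struck

function setup() {
  createCanvas(400, 200);
}

function draw() {
  background(20);
  var t = millis() / 1000; // playback clock in seconds
  // fire every event whose time has passed
  while (nextEvent < noteEvents.length && noteEvents[nextEvent].time <= t) {
    flash = 255;
    nextEvent++;
  }
  fill(255, 255, 0, flash);
  ellipse(width / 2, height / 2, 80, 80);
  flash = max(0, flash - 15); // let the "strike" fade out
}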

I recommend looking up and viewing all the other Animusic segments on YouTube. There are many other fascinating segments out there that are just as good as the one I’ve shown above.

katieche-looking outwards 11

Carsten Nicolai

Berlin-based German artist and musician Carsten Nicolai works in a transitional field between music, art, and science. He seeks to override individualized sensory perception by creating works that appeal to multiple senses at a time (i.e. using sound and light frequencies to appeal to ears and eyes simultaneously). In terms of computation, he often uses mathematical patterns such as randomness, grids, error, and consistency.

In his 2016 installation, Reflektor Distortion, viewers can both hear the sounds being played and see the light bars seemingly move in the water bowl. It works by sending sound waves through a speaker that shifts the water in the bowl accordingly, creating the illusion that the series of light bars reflected in the water are also moving. The result is a mesmerizing movement of lights that coincides with whatever sound is being played. I think the light bars are a very effective and beautiful way to display sound waves, in contrast to the stereotypical up-and-down sine-wave visualizations we're all used to seeing.
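A rough p5.js sketch of the illusion as I understand it (an assumption about the mechanism, not the artist's actual setup): the real bars stay still, while their reflection below a water line is displaced by a sine ripple standing in for the speaker-driven water:

// Rough illustration of the reflection illusion (not Nicolai's actual piece).
function setup() {
  createCanvas(400, 300);
  noStroke();
}

function draw() {
  background(0);
  var waterLine = height / 2;
  for (var x = 20; x < width; x += 40) {
    // the physical light bars do not move
    fill(255);
    rect(x, 40, 8, waterLine - 60);
    // their reflection is shifted by a "sound wave" ripple
    var ripple = 10 * sin(millis() / 200 + x * 0.05);
    fill(180);
    rect(x + ripple, waterLine + 20, 8, waterLine - 60);
  }
}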

jwchou-LookingOutwards-10

EarSketch

A promotional image for EarSketch.

EarSketch is an online web application that teaches students how to code by making music. Users can code in either Python or JavaScript. It was created on the premise that to encourage people to learn how to code, and to increase coding's appeal, you should use a universally understood and loved language: music. I really admire the interface, because it's modern and fairly simple. If it looked unfinished or overly technical, I imagine that would turn people off from using the app. The app also features a HUGE range of samples, containing everything from dubstep to eight-bit to west coast hip hop.

EarSketch is designed/created by a team that is primarily based out of Georgia Tech.

While the samples themselves might not be computational, users can change the music by setting their own tempo in the code, which is computational. There are also a lot of functions that users can use to analyze tracks for their different properties.
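To give a sense of what that looks like, here is a tiny EarSketch-style script in JavaScript; the sample constant is taken from EarSketch's introductory examples and may not match the current sound library exactly:

// A minimal EarSketch-style script (JavaScript). The sample name is an
// approximation from EarSketch's intro examples, not a guaranteed identifier.
init();
setTempo(120);                              // the tempo is chosen in code
fitMedia(DUBSTEP_BASS_WOBBLE_002, 1, 1, 9); // place a sample on track 1, measures 1-9
finish();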

Because the software is used to create music, the creators themselves did not insert any of their own personal artistic sensibilities into the project. Rather, they designed the software in order to allow others to insert their creative sensibilities.

A video that introduces EarSketch:

alchan-Looking Outwards 11

example of the Continuator (2000) by François Pachet in use, with Albert van Veenendaal on piano

The Continuator (2000) is a computational music generation system by François Pachet. It works in conjunction with a musician to improvise real-time musical sequences, creating melodies that may be indistinguishable from those played by the human musician (in the video shown, it’s being used as part of a “Musical Turing Test” to see if humans can tell the computer-generated music apart from the human-generated music).

I’m really interested in the way the Continuator works in conjunction with a musician, as it uses the musician’s playing as a reference point to come up with new but stylistically similar tunes. I like the idea of using algorithms in partnership with human creativity, as that seems to open up more possibilities than just trying to create an algorithm that will recreate human abilities perfectly.
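The Continuator's model is far more sophisticated, but the core idea of continuing in the style of what was just played can be sketched with a simple Markov chain over the notes the musician provided (a toy illustration only, not Pachet's algorithm; it uses p5.js's random() to pick an element from an array):

// Toy Markov-chain continuation (illustration of the general idea,
// not the Continuator's actual model).
// Build a table of which notes tend to follow which in the player's input.
function buildTable(notes) {
  var table = {};
  for (var i = 0; i < notes.length - 1; i++) {
    var cur = notes[i];
    if (table[cur] === undefined) {
      table[cur] = [];
    }
    table[cur].push(notes[i + 1]);
  }
  return table;
}

// Generate a continuation by repeatedly sampling a likely next note.
function continueMelody(notes, length) {
  var table = buildTable(notes);
  var note = notes[notes.length - 1];
  var result = [];
  for (var i = 0; i < length; i++) {
    var options = table[note];
    if (options === undefined) {
      options = notes; // dead end: fall back to any input note
    }
    note = random(options); // p5.js random() picks an element from an array
    result.push(note);
  }
  return result;
}

// Example: continue a short phrase the "musician" just played.
// print(continueMelody([60, 62, 64, 62, 60, 64, 67], 8));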

agusman-LookingOutwards-“Sound Objects” by Zimoun

Zimoun Sound Objects Compilation Video

Studio Zimoun’s Website

This series of “sound objects”, created by installation artist Zimoun, comprises architectural soundscapes constructed from simple, functional components. These components have ranged from ping pong balls, chains, cardboard boxes, and springs to slats of wood, usually “activated” or displaced by an array of simple servo motors. While some of his more elaborate collaborative pieces incorporate plotters and hot plates, the majority of these sound objects call attention not to their complex construction but to the sonically resonant qualities of the commonplace materials used, especially in mass quantities. The architectural systems constructed from these individual sound objects articulate the tension between the orderly and the chaotic (or the chaotic within the orderly). Rather than producing a true tone, these “sound objects” emit more of an acoustic hum that feels industrial, yet elegant.

I am extremely interested in the fabrication and facilitated performance of large-scale patterns – how intimate sonic and visual experiences can unfold from an environment, rather than from an encapsulated piece of media. I also love how, through analog materials and motion, the “computational” aspects of this piece are downplayed in a very elegant way. To me, this really emphasizes what computational art is actually about: not depictions of technology, but ideas. That being said, large-scale installations with functionality like these “sound objects” could not be accomplished easily without computation. The vastness of these patterns plays to the computer’s ability to process large quantities of information and execute many outputs simultaneously.

adev_Project_10

adev_Project_10

//Aisha Dev
//adev@andrew.cmu.edu
//Section E
// Project 10


var frames = [];
var sm1 = - 0.00006;
var sm2 = - 0.000065;
var sm3 = - 0.00005;
var sm4 = - 0.0001;
var sm5 = - 0.0003;
var detail1 = 0.005;
var detail2 = 0.003;
var detail3 = 0.003;
var detail4 = 0.001;
var detail5 = 0.0015;

var trees = [];
var bwTrees = 0;

// var playlist = ["Across The Unverse", "Aint No Sunshine", "All Along The Watchtower", "American Pie", "Anywhere Is", "Bennie and The Jets", "Blackbird", "Blowin in The Wind",
// "Blue Jean Baby", "Bridge Over Troubled Water", "Budapest", "A Case of You", "Comfortably Numb", "Catch the Wind", "Cowgirl in the Sand", "Crazy Love", "Come Together", "Dancing in the Dark",
// "Dont Think Twice Its Alright", "Hallelujah", "Hear of Gold", "Heavenly Day", "Hey Jude", "Ho Hey", "Home", "I See Fire", "Flight Attendant", "Just Like A Woman", "Liar", "Knights in White Satin",
// "Lady Lady Lay", "Layla", "Leave Your Lover", "Lodi", "The Long and Whinding Road", "Long as I Can See The Light", "Losing You", "Sospesa", "Me and Bobby Mcgee", "More Than a Woman",
// "Mr Tambourine Man", "No Woman No Cry", "Oh Sister", "Old Man", "Only Love", "Over The Creek", "Piece of My Heart", "Proud Mary", "Roxanne", "Something", "Song To Woody", "Songbird", "Sound of Silence", 
// "Start Over", "Stop This Train", "Take Me to Curch", "The Thrill is Gone", "The Times They Are a Changin", "Fast Cars", "Underneath", "While My Guitar Gently Weeps", "Wholl Stop The Train", "Yesterday",
// "Wish You Were Here", "You Are So Beautiful", "Our House", "Fortunate Son", "Warning Sign", "Like A Rolling Stone", "Jokerman"];
// var index = 0; 



function setup() {
    createCanvas(480, 200);

    for (var i = 0; i < 20; i++){
        var rx = random(width);
        trees[i] = makeTree(rx);
    }

    frameRate(10);
}
 
function draw() {
    //functions
    makeBackground();
    makeMountains();
    //songs();
    drawTree();
    addTree();
}



function makeBackground(){
    background(30, 50, 100);
}


function makeMountains(){
    noStroke();
    
    //highest mountain range
    fill(174, 153, 139);
    beginShape(); 
    for (var mOne = 0; mOne < width; mOne++) {
        var speedOne = (mOne * detail1) + (millis() * sm1);
        var peaksOne = map(noise(speedOne), 0,1, 0, height);
        vertex(mOne,peaksOne-70); 
    }

    vertex(width,height);
    vertex(0,height);
    endShape();
    
    //second highest mountain range
    fill(128, 121, 113);
    beginShape();
    for (var mTwo = 0; mTwo < width; mTwo++) {
        var speedTwo = (mTwo * detail2) + (millis() * sm2);
        var peaksTwo = map(noise(speedTwo), 0,1, 0, height);
        vertex(mTwo,peaksTwo-30); 
    }
    vertex(width,height);
    vertex(0,height);
    endShape();
        
    //middle mountain range
    fill(93, 84, 86);
    beginShape(); 
    for (var mThree = 0; mThree < width; mThree++) {
        var speedThree = (mThree * detail3) + (millis() * sm3);
        var peaksThree = map(noise(speedThree), 0,1, 0, height);
        vertex(mThree,peaksThree+10); 
    }
    vertex(width,height);
    vertex(0,height);
    endShape();


     //second - lowest mountain range
    fill(67, 60, 67);
    beginShape(); 
    for (var mFour = 0; mFour < width; mFour++) {
        var speedFour = (mFour * detail3) + (millis() * sm4);
        var peaksFour = map(noise(speedFour), 0,1, 0, height);
        vertex(mFour,peaksFour+30); 
    }
    vertex(width,height);
    vertex(0,height);
    endShape();


      //lowest mountain range
    fill(48, 45, 48);
    beginShape(); 
    for (var mFive = 0; mFive < width; mFive++) {
        var speedFive = (mFive * detail3) + (millis() * sm5);
        var peaksFive = map(noise(speedFive), 0,1, 0, height);
        vertex(mFive,peaksFive+60); 
    }
    vertex(width,height);
    vertex(0,height);
    endShape(); 

}

function drawTree() {
    for (var i = 0; i < trees.length; i++){
        trees[i].move();
        trees[i].display();
    }
}

function makeTree(x) {
    var tree = {
        birth: x,
        size: random(10, 30),
        speed: 2.0,
        move: TreeMove,
        display: TreeDisplay,
        height: random(30, 60),
        color: [120, 150, 100, 90]
    }
    
    return tree;
}

function TreeMove() {
    this.birth += this.speed;
}

function TreeDisplay() {
    var treeHeight = 50; 
    fill(this.color); 
    noStroke();
    push();
    translate(this.birth, height - this.height);
    ellipse(0, 0, this.size, this.size);
    ellipse(7, 10, this.size, this.size+2);
    ellipse(-9, 2, this.size, this.size);
    stroke(200, 180, 140);
    strokeWeight(2);
    line(0, 0, 0, this.height);
    line(0, this.size/5, this.size/6, this.size/20);
    if (this.size > 30) {
        line(0, this.size/3, -this.size/6, this.size/6);
    }
    pop();
}

function addTree() {
    var newTreeLikelihood = 0.5;
    if (random(0, 1) < newTreeLikelihood) {
        bwTrees = bwTrees + 1; // count frames between new trees
        if (bwTrees == 4) {
            // trees drift rightward with the mountains, so new ones enter from the left edge
            trees.push(makeTree(0));
            bwTrees = 0; //reset
        }
    }
}


// function songs(){
// 	fill(181, 165, 138);
// 	textSize(10);
// 	textFont("Times New Roman");
// 	text(playlist[index],10, 460);

// }



This assignment was a bit challenging for me. I had been missing road trips with my family, so I decided to re-create one in the Himalayas, complete with the high mountain ranges and pine trees. I wanted to add running text of the playlist we tend to listen to, so I’m still working on that.
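One possible way to finish the commented-out songs() idea (this is just a sketch, assuming the playlist array and index variable above are uncommented) is to draw the current title and advance the index every so many frames, wrapping around with the modulo operator:

// Possible completion of the commented-out songs() function (a sketch only;
// assumes the playlist array and index variable are uncommented).
function songs() {
    fill(181, 165, 138);
    textSize(10);
    textFont("Times New Roman");
    text(playlist[index], 10, 180);  // keep the text inside the 480x200 canvas
    if (frameCount % 60 === 0) {     // move to the next song every 60 frames
        index = (index + 1) % playlist.length;
    }
}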

adev_LookingOutwards_10

Memory of Form and Matter

Chris Sugrue

This piece was really fascinating to me because it is a physical sculpture that doesn’t actually change its form. Even though it is technically static, it feels completely dynamic. It is created through a series of parametric, algorithmically generated forms and is put together in a way that reads differently from every angle.

I think this hybrid between digital, physical, and data-driven art is really quite interesting. This is what Chris Sugrue does: she is an artist and programmer who creates these novel interactions, these experimental interfaces. She studied Art and Technology at Parsons in New York and has since done multiple residencies, from Barcelona to New York. She was instrumental in creating EyeTracker for ALS patients, which went on to win multiple design and innovation awards.

She currently teaches at Parsons Paris.

mecha-lookingoutwards-11

Taurus in the style of Vivaldi by David Cope

For this week, I decided to look into the work of David Cope. As a music professor at the University of California, Santa Cruz, Cope writes algorithms and programs that take in music and output new compositions in the style of the original input.

The example above uses his Experiments in Musical Intelligence in order to recreate the style of Antonio Vivaldi. His program relies on processes such as deconstruction, signature analysis, and finding compatibility between works in order to create new compositions. In this example, the accompanying music video was also algorithmically created.
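A crude sketch of that recombination idea (an illustration only, not Cope's actual system): cut source phrases into small units and splice them wherever the joints are "compatible", here simplified to matching pitches, again using p5.js's random() to pick from an array:

// Toy recombination sketch (illustration only, not Cope's system).
// Each phrase is an array of MIDI pitches drawn from some source corpus.
var phrases = [
  [60, 62, 64, 65],
  [64, 65, 67, 69],
  [67, 65, 64, 62],
  [62, 60, 59, 60]
];

// Build a new line by chaining phrases whose first note matches
// the last note assembled so far ("compatibility" at the joint).
function recombine(numPhrases) {
  var line = random(phrases).slice();
  for (var i = 1; i < numPhrases; i++) {
    var lastNote = line[line.length - 1];
    var candidates = phrases.filter(function(p) {
      return p[0] === lastNote;
    });
    if (candidates.length === 0) {
      candidates = phrases; // no compatible phrase found: allow any
    }
    line = line.concat(random(candidates).slice(1));
  }
  return line;
}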

I think that the concept of using new technologies to recreate the styles of old composers is incredibly interesting, and it works surprisingly well. However, I think one flaw of this sort of technique is that the algorithm can only analyze how each note is treated; it does not take into account the emotions a composer feels as they create.