elizabew – project – 11 – composition

sketch

//Elizabeth Wang
//Section E
//elizabew@andrew.cmu.edu
//Project 11

var myTurtle = []; //array of "star" turtles, created on mouse press
var movingTurtle = []; //array of "fog" turtles, created while dragging
var d = 100; //right-turn angle (degrees), incremented so paths keep curving
var dl = 50; //left-turn angle (degrees), incremented so paths keep curving

function setup() {
  createCanvas(480, 480);
  background(32,50,103);
  frameRate(10);
}

function draw() {
  for(var i = 0; i < movingTurtle.length; i++) {
    //moving "bats"
    movingTurtle[i].setColor(color(25,61,mouseX));
    movingTurtle[i].setWeight(0.5);
    movingTurtle[i].penDown();
    movingTurtle[i].forward(10);
    movingTurtle[i].right(d);
    movingTurtle[i].forward(2);
    movingTurtle[i].left(dl);
    movingTurtle[i].forward(15);
    d = d + 5; //makes the turtle move around the canvas continuously
    dl = dl + 2;
  }

  for(var i = 0; i < myTurtle.length; i++) {
    myTurtle[i].setColor(color(255,248,193));
    myTurtle[i].setWeight(2);
    myTurtle[i].penDown();
    myTurtle[i].forward(20); //flower/star shape
    myTurtle[i].right(90);
    myTurtle[i].forward(40);
    myTurtle[i].right(45);
    if (i % 10 == 0) { //every 10th turtle: a larger, brighter star
      myTurtle[i].setColor(color(255,224,9));
      myTurtle[i].forward(50);
      myTurtle[i].left(180);
    }
  }

}

function mousePressed() {
  myTurtle.push(makeTurtle(mouseX, mouseY));//turtle appears when mouse is pressed
}

function mouseDragged(){
  movingTurtle.push(makeTurtle(mouseX, mouseY)); //turtle follows mouse dragging
}

function turtleLeft(d) {
  this.angle -= d;
}


function turtleRight(d) {
  this.angle += d;
}


function turtleForward(p) {
  var rad = radians(this.angle);
  var newx = this.x + cos(rad) * p;
  var newy = this.y + sin(rad) * p;
  this.goto(newx, newy);
}


function turtleBack(p) {
  this.forward(-p);
}


function turtlePenDown() {
  this.penIsDown = true;
}


function turtlePenUp() {
  this.penIsDown = false;
}


function turtleGoTo(x, y) {
  if (this.penIsDown) {
    stroke(this.color);
    strokeWeight(this.weight);
    line(this.x, this.y, x, y);
  }
  this.x = x;
  this.y = y;
}


function turtleDistTo(x, y) {
  return sqrt(sq(this.x - x) + sq(this.y - y));
}


function turtleAngleTo(x, y) {
  var absAngle = degrees(atan2(y - this.y, x - this.x));
  var angle = ((absAngle - this.angle) + 360) % 360.0;
  return angle;
}


function turtleTurnToward(x, y, d) {
  var angle = this.angleTo(x, y);
  if (angle < 180) {
      this.angle += d;
  } else {
      this.angle -= d;
  }
}


function turtleSetColor(c) {
  this.color = c;
}


function turtleSetWeight(w) {
  this.weight = w;
}


function turtleFace(angle) {
  this.angle = angle;
}


function makeTurtle(tx, ty) {
  var turtle = {x: tx, y: ty,
                angle: 0.0,
                penIsDown: true,
                color: color(128),
                weight: 1,
                left: turtleLeft, right: turtleRight,
                forward: turtleForward, back: turtleBack,
                penDown: turtlePenDown, penUp: turtlePenUp,
                goto: turtleGoTo, angleto: turtleAngleTo,
                turnToward: turtleTurnToward,
                distanceTo: turtleDistTo, angleTo: turtleAngleTo,
                setColor: turtleSetColor, setWeight: turtleSetWeight,
                face: turtleFace};
  return turtle;
}

Image using only mousePressed
Image using both mousePressed and mouseDragged

For this project, I wanted to create turtles that look like stars in the night sky, along with a texture that imitates fog. I made the fog change color as well, to mirror the way fog shifts in tone depending on where the light source is.

At first I drew different star patterns on paper and then transferred my two favorites into the program. I made smaller stars appear more often than larger ones, imitating a “North Star” surrounded by smaller stars. I’m fairly happy with how it turned out, but it might have been fun to play with adding more visually different stars into the mix (a sketch of that idea follows below).
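As a hypothetical sketch of that extension (not part of the submitted project), the star-drawing loop in draw() could vary its final turn angle by turtle index, using the same turtle API defined above:

//Hypothetical variation, not in the original: pick a different turn
//angle per turtle so three visually distinct star shapes emerge.
for (var i = 0; i < myTurtle.length; i++) {
  var turn = [45, 72, 144][i % 3]; //three turn angles, three shapes
  myTurtle[i].penDown();
  myTurtle[i].forward(20);
  myTurtle[i].right(90);
  myTurtle[i].forward(40);
  myTurtle[i].right(turn); //replaces the fixed right(45) turn
}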

amui1-LookingOutwards-11

For this week’s Looking Outwards, I researched the Japanese artist Notuv.

Caption: Above, he is pictured playing at one of his live sets.

I specifically chose to do my Looking Outwards on his piece, Fucertc, released in 2013. It can be found in the video below:

I admire Notuv and this piece because he combines sound control with a unique visual design. He controls the sound with a program called MaxMSP and the visuals with openFrameworks. I particularly admire this piece, Fucertc, because of its minimalistic approach: only a two-step “vibe” and percussive sounds. I also like how, as each measure arrives, a new sound is introduced over the same consistent background beat. The full writeup for Fucertc can be found here.
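That layering structure, a constant pulse with a new voice entering each measure, is easy to prototype. Below is a minimal hypothetical p5.js/p5.sound sketch of the idea; it is not Notuv’s actual MaxMSP patch.

//Hypothetical sketch: every 16 beats, one more oscillator layer joins.
//(Browsers may require a click on the page before audio will start.)
var layers = []; //p5.Oscillator objects, added one per measure
var freqs = [110, 220, 330, 440]; //frequencies for successive layers
var beat = 0;

function setup() {
  createCanvas(200, 200);
  frameRate(4); //treat each frame as one "beat"
}

function draw() {
  background(layers.length * 60); //brighter as layers accumulate
  if (beat % 16 == 0 && layers.length < freqs.length) {
    var osc = new p5.Oscillator('sine'); //new voice at measure start
    osc.freq(freqs[layers.length]);
    osc.amp(0.1);
    osc.start();
    layers.push(osc);
  }
  beat++;
}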

rfarn-lookingOutward-11

For this week’s Looking Outwards post, I decided to take a look at Bleep Space, a project created by Andy Wallace and Dan Friel. It’s a toy that lets users sequence various geometric animations along with sounds, creating visual and auditory patterns.

people interacting with table top Bleep Space

The purpose of the interactive toy is not so much to create perfect tunes as to experiment with various sounds and their accompanying graphics. A tabletop installation, with many buttons surrounding a screen, was also created and is currently touring different locations in New York. As users press buttons, different images and motion graphics appear on the screen. Originally, the program was written in openFrameworks. The tabletop version, however, strips away some of the more complex features and makes the experience more game-like, with arcade touches such as a timer that clears the screen.

http://www.creativeapplications.net/sound/bleep-space-ios-sequencer-toy-and-tabletop-arcade/
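As a rough guess at the toy’s core loop (hypothetical code; the real project is written in openFrameworks, not p5.js), a step sequencer like this boils down to a playhead scanning a row of toggled steps:

//Hypothetical p5.js sketch of a Bleep-style step sequencer loop.
var steps = new Array(16).fill(false); //which of 16 steps are active
var cur = 0; //current playhead position

function setup() {
  createCanvas(480, 120);
  frameRate(8); //8 steps per second
}

function draw() {
  background(0);
  for (var i = 0; i < steps.length; i++) {
    fill(steps[i] ? 255 : 60); //active steps drawn bright
    if (i == cur && steps[i]) fill(255, 0, 0); //firing step flashes red
    rect(i * 30 + 2, 45, 26, 26); //a sound would also trigger here
  }
  cur = (cur + 1) % steps.length;
}

function mousePressed() {
  var i = floor(mouseX / 30); //toggle the step under the mouse
  if (i >= 0 && i < steps.length) steps[i] = !steps[i];
}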

Jdbrown – Looking Outwards 11 – Sound Art

I’m a really big fan of sound art that deals with wearable technology and the body’s role in music-making. Using surface transducers (small devices that vibrate according to their input signal, effectively turning whatever surface they touch into a speaker), the Human Harp is an example of an instrument that incorporates gesture as well as space into its performative practice.

It’s a fairly simple set-up, but it’s one of the projects that has most inspired me.

Josh


selinal-Looking-Outwards-11

Surround Sounds by Christian Marclay

Surround Sounds is an art installation by Christian Marclay which entraps viewers in a room with projections that play “music” through animated visual onomatopoeias. The room is actually silent, but by letting the animations take over the space, sound can be envisioned, or felt in an alternative way. This piece shows an interesting connection between sound art and music, because here a work categorized as sound art makes no sound at all. I admire this aspect of the piece. I am not sure if many algorithms were used directly in this piece, but Marclay worked with a team of animators in Adobe After Effects.

svitoora – Looking Outward 11

Break Free by Taryn Southern is a music track and video art piece created using artificial intelligence. I admire the merging of human and machine in this project. Although the music track is generated via artificial intelligence, the vocal lyrics are still handwritten by the artist herself. The raw footage is most likely shot by hand, but edited via artificial intelligence; the video is further manipulated by an algorithm similar to Google’s Deep Dream. Since the song is set to a pop tune, it raises the question of what making music means in an age of automation. Can one automate the art of music making? If so, what does it mean for humans who once thought that art and music were the highest echelons of human creativity? Could good music and art be deconstructed into just a pattern of stimuli that engages the human mind at an aesthetic level? These are the questions of the age of automation.

hannahk2-LookingOutwards-11

For this week’s Looking Outwards, I chose Maxime Causeret’s computer-generated video and music work, “Order from Chaos”. The work is a blend of computationally generated biological forms and simulations with computer-generated music. Causeret aimed to represent the idea of emergence in both the audio and the visuals, and even generated the rhythm of the track in an emergent manner: he took audio samples of falling rain, mapped the transients of the drips, and computationally pulled the mapped points toward the nearest drum-grid positions. The result was that a steady rhythm emerged, over which he added layer upon layer of different instruments. He used computer animation to simulate complex biological processes such as endosymbiosis and flocking behavior. The creator’s artistic sensibilities are clearly manifested in the final work, given his other pieces, his fascination with biological processes at the microscopic scale, and his use of bright, thin lines. I admire this project for its consistency with the theme of emergence across every aspect of the video, visuals and sound alike. The forms displayed are simply mesmerizing and morph into each other beautifully, and the music is enchanting. The whole video seems otherworldly to me, and I really admire that.
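The rhythm trick described above, snapping detected rain-drip transients onto a drum grid, can be sketched in a few lines (hypothetical code, not Causeret’s actual pipeline):

//Hypothetical sketch: snap each transient time (in seconds) to the
//nearest position on a drum grid, so a steady rhythm emerges.
function quantize(transients, bpm, subdivisions) {
  var step = 60.0 / bpm / subdivisions; //grid spacing in seconds
  return transients.map(function(t) {
    return Math.round(t / step) * step; //nearest grid position
  });
}

//Irregular rain-drip onsets land on a 120 BPM sixteenth-note grid:
var drips = [0.05, 0.61, 0.94, 1.52];
console.log(quantize(drips, 120, 4)); //[0, 0.625, 1, 1.5]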


Max Cooper – Order from Chaos – Official Video by Maxime Causeret from Max Cooper on Vimeo.

Looking Outwards 11 – Yugyeong Lee

SYN-Phon is a sound performance based on graphical notation by Candaş Şişman that reflects the role of art in communication and comprehension. The performance is based on intimate findings collected in Budapest by Candaş himself. By communicating music through visual graphics, the project hopes to reach its audience through a sensual, expressive language. The project is inspiring in that it reflects the creator’s artistic sensibilities through his representation of a certain type of music, allowing the audience to explore and question how the graphical notation ties in with the sound. While watching the video, viewers can also think through how they would represent the atmosphere of the sound, and come to understand the vibe of the music through an algorithmic visual representation.

http://www.csismn.com/SYN-Phon

LookingOutwards11-jooheek

The Classyfier – AI detects situation and appropriates music

Website: http://www.creativeapplications.net/processing/the-classyfier-ai-detects-situation-and-appropriates-music/

This project is called “The Classyfier”: a table that detects what beverages people are drinking around it and chooses music that fits that beverage. A microphone on the table picks up the characteristic sounds of the beverage, and the system draws from a pre-loaded list of songs categorized into hot beverages, wine, and beer. It then chooses a song that fits your beverage, and you can switch between songs within that category by knocking on the table. The objective of this project is to create a smart object that combines machine learning and natural sounds to create an ambience for different situations.
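The control flow described above might look something like this (a hypothetical sketch; the real project uses machine learning on audio features, and all names here are made up):

//Hypothetical sketch of The Classyfier's song-selection logic.
var playlists = {
  hot: ['song-a', 'song-b'],
  wine: ['song-c', 'song-d'],
  beer: ['song-e', 'song-f']
};
var category = null; //current beverage category, if detected
var track = 0; //index of the playing song in that category

function onSoundDetected(label) { //label comes from an audio classifier
  if (label == 'knock' && category != null) {
    track = (track + 1) % playlists[category].length; //knock: next song
  } else if (playlists[label] != undefined) {
    category = label; //e.g. a clinking glass maps to 'wine'
    track = 0; //start the category's playlist from the top
  }
}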

I thought this project was interesting because it uses computation to create a musical environment dependent on an object, in this case a beverage. The idea of a computer analyzing what you are drinking and choosing music to fit that drink is very innovative and clever. Now you won’t have to go through all of your music to find the right song for your mood; you can just use “The Classyfier”.

NatalieKS-LookingOutwards-11

This is a video of a performance using The Reactable in Turin, Italy, 2009.

The Reactable is an electronic instrument created in 2003 by a research team at Pompeu Fabra University in Barcelona, Spain. The system has an interactive interface on which players can place and move objects. These objects, when placed on the interface and connected to other objects, play different sounds. The instrument uses the sound waves and synthesizers connected to the objects to display the sounds on the interface. By moving and reconnecting different objects, players can create unique compositions of various sounds. While I’m not sure specifically what algorithms the researchers used, I understand they utilized samples and soundbites and programmed the physical objects to interact with and produce those sounds.
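A guess at the underlying idea (hypothetical code, not the actual Reactable system): each tangible object is a node in an audio graph, and nodes get patched together when they sit near each other on the table.

//Hypothetical sketch: objects become nodes; nearby nodes are connected.
function makeNode(kind, x, y) {
  return {kind: kind, x: x, y: y, inputs: []}; //kind: 'osc', 'filter', ...
}

function connectNearby(nodes, maxDist) {
  for (var i = 0; i < nodes.length; i++) {
    nodes[i].inputs = []; //re-patch from scratch as objects move
    for (var j = 0; j < nodes.length; j++) {
      var d = Math.hypot(nodes[i].x - nodes[j].x, nodes[i].y - nodes[j].y);
      if (i != j && d < maxDist) {
        nodes[i].inputs.push(nodes[j]); //sound flows from node j into i
      }
    }
  }
}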

This product is especially cool because it is a new kind of instrument. You can use it to create songs or sound effects, and its multi-purpose nature makes it really accessible and fun. I love how it is first and foremost designed to entertain, and how user-focused it is. Not only does it feature an interesting set of sounds and controls, it uses those sounds to create a visual composition on the interface.

Fun Fact: Icelandic musician Björk incorporated this instrument into her world tour performances in 2008-2009.