Bettina-Project11-SectionC

sketch

// Bettina Chou
// yuchienc@andrew.cmu.edu
// section c
// project 11 -- freestyle turtles

///////////////TURTLE API///////////////////////////////////////////////////////////////

// Rotate this turtle counter-clockwise by d degrees.
function turtleLeft(d) {
    this.angle = this.angle - d;
}
 
 
// Rotate this turtle clockwise by d degrees.
function turtleRight(d) {
    this.angle = this.angle + d;
}
 
 
// Move the turtle p pixels along its current heading;
// delegates to goto(), which draws if the pen is down.
function turtleForward(p) {
    var heading = radians(this.angle);
    this.goto(this.x + cos(heading) * p, this.y + sin(heading) * p);
}
 
 
// Step backwards p pixels (a forward move with negated distance).
function turtleBack(p) {
    this.forward(-p);
}
 
 
// Lower the pen: subsequent moves draw lines.
function turtlePenDown() {
    this.penIsDown = true;
}
 
 
// Raise the pen: subsequent moves leave no mark.
function turtlePenUp() {
    this.penIsDown = false;
}
 
 
// Move the turtle to (x, y), drawing a line from the old
// position when the pen is down.
function turtleGoTo(x, y) {
    var startX = this.x;
    var startY = this.y;
    this.x = x;
    this.y = y;
    if (!this.penIsDown) {
        return;
    }
    stroke(this.color);
    strokeWeight(this.weight);
    line(startX, startY, x, y);
}
 
 
// Euclidean distance from the turtle's position to (x, y).
function turtleDistTo(x, y) {
    var dx = this.x - x;
    var dy = this.y - y;
    return sqrt(sq(dx) + sq(dy));
}
 
 
// Angle in [0, 360) from the turtle's current heading to the point (x, y).
function turtleAngleTo(x, y) {
    var toPoint = degrees(atan2(y - this.y, x - this.x));
    return ((toPoint - this.angle) + 360) % 360.0;
}
 
 
// Turn d degrees toward (x, y), picking the shorter rotation direction.
function turtleTurnToward(x, y, d) {
    var delta = this.angleTo(x, y) < 180 ? d : -d;
    this.angle += delta;
}
 
 
// Set the pen (stroke) color used when the pen is down.
function turtleSetColor(c) {
    this.color = c;
}
 
 
// Set the pen stroke weight in pixels.
function turtleSetWeight(w) {
    this.weight = w;
}
 
 
// Set the turtle's absolute heading in degrees.
function turtleFace(angle) {
    this.angle = angle;
}
 
 
// Build a new turtle at (tx, ty): facing right (angle 0), pen down,
// gray pen color, stroke weight 1, with all turtle methods attached.
function makeTurtle(tx, ty) {
    var turtle = {
        x: tx,
        y: ty,
        angle: 0.0,
        penIsDown: true,
        color: color(128),
        weight: 1,
        left: turtleLeft,
        right: turtleRight,
        forward: turtleForward,
        back: turtleBack,
        penDown: turtlePenDown,
        penUp: turtlePenUp,
        goto: turtleGoTo,
        angleto: turtleAngleTo,
        turnToward: turtleTurnToward,
        distanceTo: turtleDistTo,
        angleTo: turtleAngleTo,
        setColor: turtleSetColor,
        setWeight: turtleSetWeight,
        face: turtleFace
    };
    return turtle;
}

///////////////BEGINNING OF CODE///////////////////////////////////////////////////////////////

// Fetch the reference image before setup() runs; its pixel
// brightness drives the turtles' pen state in draw().
function preload() {
    img = loadImage("https://i.imgur.com/UB3R6VS.png");
}

// Create the canvas and prepare the traced image's pixel data
// (the image itself is never displayed).
function setup() {
  createCanvas(300, 480);
  img.loadPixels(); // load pixel data without drawing the image
  background("#ffccff");
}

var px = 0; // x coordinate of the image pixel currently being scanned
var py = 0; // y coordinate of the image pixel currently being scanned (advances 10 rows at a time)
var threshold = 90; // brightness cutoff: pixels brighter than this count as negative space

// One scan step per frame: sample the image at (px, py) and advance
// two offset turtles one pixel, drawing only over dark pixels.
function draw() {
  var sample = img.get(px, py); // retrieves the RGBA value at the scan position
  var sampleBrightness = brightness(sample);
  var t1 = makeTurtle(px, py);
  var t2 = makeTurtle(px + 20, py); // second turtle offset 20 pixels to the right
  strokeCap(PROJECT);
  t1.setWeight(5);
  t1.setColor("#ccffff");
  t2.setWeight(1);
  t2.setColor("#33cc33");
  if (sampleBrightness > threshold) {
    // bright pixel: negative space, leave no mark
    t1.penUp();
    t2.penUp();
  } else {
    // dark pixel: positive space, draw the step
    t1.penDown();
    t2.penDown();
  }
  t1.forward(1);
  t2.forward(1);
  px += 1;
  if (px >= width) {
    px = 0;   // wrap back to the left edge...
    py += 10; // ...and start a new scan row 10 pixels down
  }
}

I was inspired by the following piece of work and considered how computation could create such image treatments as opposed to manually setting the lines and offsets.

I would have had no idea where to start if not for the deliverables prompt suggesting that we could have turtles draw things in relation to an image. I decided to build upon the pixel brightness techniques we learned in previous weeks to make the turtle penDown() when the image is black and penUp() when the image is white. Thus, in a black and white image it is easy to recreate it using the lines method.

Above is the quick image I put together in illustrator to have my turtle trace

Above is a screenshot of the finished image from this particular code. Line weight, colors, amounts, and offsets could easily be manipulated to create a variety of imaging. There could even be multiple images referenced to create a more complex drawing.

afukuda-Project11-Composition

sketch

/* 
 * Name | Ai Fukuda 
 * Course Section | C 
 * Email | afukuda@andrew.cmu.edu
 * Project | 11
 */ 

var myTurtle; // the single turtle that draws the rotating-square spiral (created in setup)

// Create the canvas and a single turtle centered on it, with the
// pen configured and a slow frame rate so the growth is visible.
function setup() {
  createCanvas(400, 400);
  background(193, 228, 221);

  // Place the turtle at the canvas center and configure its pen.
  myTurtle = makeTurtle(width / 2, height / 2);
  myTurtle.setColor(color(140, 164, 212));
  myTurtle.setWeight(1);
  myTurtle.penDown();

  frameRate(2); // redraw slowly so each ring of squares can be seen
}


// Draw 50 squares per frame, rotating 15 degrees and growing 5%
// after each one, producing a spiraling aggregate around the center.
function draw() {
  var sideLength = 20;             // side length of the innermost square

  // Bug fix: "i" was an implicit global (for (i=0; ...)), which leaks
  // state and throws in strict mode; declare it locally instead.
  for (var i = 0; i < 50; i++) {
    myTurtle.forward(sideLength);
    myTurtle.right(90);
    myTurtle.forward(sideLength);
    myTurtle.right(90);
    myTurtle.forward(sideLength);
    myTurtle.right(90);
    myTurtle.forward(sideLength);
    myTurtle.right(90);

    myTurtle.penUp();              // lift pen while rotating between squares
    myTurtle.right(15);            // rotate the next square by 15 degrees
    myTurtle.penDown();            // resume drawing

    sideLength *= 1.05;            // grow each successive square by 5%
  }
}


// Turtle graphics implementation for p5.js:
// Turn the turtle d degrees counter-clockwise.
function turtleLeft(d) {
    this.angle = this.angle - d;
}

// Turn the turtle d degrees clockwise.
function turtleRight(d) {
    this.angle = this.angle + d;
}

// Advance p pixels along the current heading via goto(),
// which draws a segment when the pen is down.
function turtleForward(p) {
    var rad = radians(this.angle);
    this.goto(this.x + p * cos(rad), this.y + p * sin(rad));
}

// Move p pixels backwards (forward with a negated distance).
function turtleBack(p) {
    this.forward(-p);
}

// Lower the pen so subsequent moves draw.
function turtlePenDown() {
    this.penIsDown = true;
}

// Raise the pen so subsequent moves leave no mark.
function turtlePenUp() {
    this.penIsDown = false;
}

// Jump to (x, y); when the pen is down, draw a line from the
// previous position using the turtle's color and weight.
function turtleGoTo(x, y) {
    var fromX = this.x;
    var fromY = this.y;
    this.x = x;
    this.y = y;
    if (this.penIsDown) {
        stroke(this.color);
        strokeWeight(this.weight);
        line(fromX, fromY, x, y);
    }
}

// Euclidean distance between the turtle and the point (x, y).
function turtleDistTo(x, y) {
    return sqrt(sq(x - this.x) + sq(y - this.y));
}

// Angle in [0, 360) between the turtle's heading and the point (x, y).
function turtleAngleTo(x, y) {
    var bearing = degrees(atan2(y - this.y, x - this.x));
    return ((bearing - this.angle) + 360) % 360.0;
}

// Rotate d degrees toward (x, y), in whichever direction is shorter.
function turtleTurnToward(x, y, d) {
    if (this.angleTo(x, y) < 180) {
        this.angle += d;
    } else {
        this.angle -= d;
    }
}

// Set the stroke color used while the pen is down.
function turtleSetColor(c) {
    this.color = c;
}

// Set the stroke weight in pixels.
function turtleSetWeight(w) {
    this.weight = w;
}


// Point the turtle at an absolute heading, in degrees.
function turtleFace(angle) {
    this.angle = angle;
}

// Construct a turtle at (tx, ty): heading 0 (right), pen down,
// gray color, weight 1, with all turtle methods bound.
function makeTurtle(tx, ty) {
  var turtle = {
    x: tx,
    y: ty,
    angle: 0.0,
    penIsDown: true,
    color: color(128),
    weight: 1,
    left: turtleLeft,
    right: turtleRight,
    forward: turtleForward,
    back: turtleBack,
    penDown: turtlePenDown,
    penUp: turtlePenUp,
    goto: turtleGoTo,
    angleto: turtleAngleTo,
    turnToward: turtleTurnToward,
    distanceTo: turtleDistTo,
    angleTo: turtleAngleTo,
    setColor: turtleSetColor,
    setWeight: turtleSetWeight,
    face: turtleFace
  };
  return turtle;
}

For this project I used this week’s lab as an underlying base, as I wanted to develop it further and make it dynamic and more intricate, since I saw a potential for it to become a compelling piece of work. Using ‘Turtle Example 2’ as a guide, each loop creates an array of rotated squares, which is overall rotated to gradually fill the canvas. While working on this project, I was playing around with the value of the angle of rotation, and I was intrigued by how a slight change in the angle of rotation causes a significant change in the overall effect the aggregate conveys. In the current configuration the angle of rotation is set to 15, which conveys a spiraling, sea-shell-like geometry, while an angle of rotation of 30 conveys a more radial aggregation (see below for visuals).

 

 

 

 

[screenshot of final project]

 

 

 

 

[screenshot of project with angle of rotation of 30]

 

 

 

 

[screenshot of lab assignment + initial sketch of project]

 

Project 11- Turtles!

Customwall

var myTurtle; // single-turtle variable from the template (appears unused in this sketch — TODO confirm)
var distToM; // distance from a turtle to the current mouse position, recomputed each frame
var MyTurtles = []; // grid of turtles filled in setup (12 columns x 6 rows = 72)

// Create the canvas and fill MyTurtles with a 6-row x 12-column
// grid of evenly spaced turtles.
function setup() {
    createCanvas(480, 480);
    background(0);
    frameRate(10);

    for (var row = 0; row < 6; row++) {
        for (var col = 0; col < 12; col++) {
            // evenly space the grid, offset from the top-left corner
            MyTurtles.push(makeTurtle(20 + col * (width / 12), 40 + row * (height / 6)));
        }
    }
}

// Each frame, every turtle steers by an amount mapped from its
// distance to the mouse, then steps forward at constant speed.
function draw() {
    for (var i = 0; i < MyTurtles.length; i++) {
        var turtle = MyTurtles[i];
        distToM = turtle.distanceTo(mouseX, mouseY);
        var turn = map(distToM, 0, 600, 0, 360); // remap distance to a turn angle
        turtle.penDown();
        turtle.setWeight(2);
        if (turn < 180) {
            turtle.right(turn); // closer turtles veer right
        } else {
            turtle.left(turn);  // farther turtles veer left
        }
        turtle.forward(5); // constant speed of 5 pixels per frame
    }
}
// Recolor one randomly chosen turtle on each mouse press.
function mousePressed() {
    var pick = int(random(0, 72)); // random index into the 72-turtle grid
    MyTurtles[pick].setColor(color(random(0, 255), random(0, 255), random(0, 255)));
}


// Turtle graphics implementation for p5.js (reformatted from the
// original whitespace-collapsed paste; behavior unchanged).

// Turn d degrees counter-clockwise.
function turtleLeft(d) {
    this.angle -= d;
}

// Turn d degrees clockwise.
function turtleRight(d) {
    this.angle += d;
}

// Move p pixels along the current heading, drawing via goto().
function turtleForward(p) {
    var rad = radians(this.angle);
    var newx = this.x + cos(rad) * p;
    var newy = this.y + sin(rad) * p;
    this.goto(newx, newy);
}

// Move p pixels backwards.
function turtleBack(p) {
    this.forward(-p);
}

// Lower the pen (subsequent moves draw).
function turtlePenDown() {
    this.penIsDown = true;
}

// Raise the pen (subsequent moves leave no mark).
function turtlePenUp() {
    this.penIsDown = false;
}

// Jump to (x, y), drawing a line from the old position if the pen is down.
function turtleGoTo(x, y) {
    if (this.penIsDown) {
        stroke(this.color);
        strokeWeight(this.weight);
        line(this.x, this.y, x, y);
    }
    this.x = x;
    this.y = y;
}

// Euclidean distance from the turtle to (x, y).
function turtleDistTo(x, y) {
    return sqrt(sq(this.x - x) + sq(this.y - y));
}

// Angle in [0, 360) from the turtle's heading to the point (x, y).
function turtleAngleTo(x, y) {
    var absAngle = degrees(atan2(y - this.y, x - this.x));
    var angle = ((absAngle - this.angle) + 360) % 360.0;
    return angle;
}

// Turn d degrees toward (x, y), choosing the shorter direction.
function turtleTurnToward(x, y, d) {
    var angle = this.angleTo(x, y);
    if (angle < 180) {
        this.angle += d;
    } else {
        this.angle -= d;
    }
}

// Set the pen color.
function turtleSetColor(c) {
    this.color = c;
}

// Set the pen stroke weight.
function turtleSetWeight(w) {
    this.weight = w;
}

// Set the absolute heading in degrees.
function turtleFace(angle) {
    this.angle = angle;
}

// Create a turtle at (tx, ty): facing right, pen down, gray, weight 1.
function makeTurtle(tx, ty) {
    var turtle = {
        x: tx, y: ty,
        angle: 0.0, penIsDown: true,
        color: color(128), weight: 1,
        left: turtleLeft, right: turtleRight,
        forward: turtleForward, back: turtleBack,
        penDown: turtlePenDown, penUp: turtlePenUp,
        goto: turtleGoTo, angleto: turtleAngleTo,
        turnToward: turtleTurnToward,
        distanceTo: turtleDistTo, angleTo: turtleAngleTo,
        setColor: turtleSetColor, setWeight: turtleSetWeight,
        face: turtleFace
    };
    return turtle;
}

Iterations: 

For this project, I thought of the wallpaper project that we did in the past. I wanted to see if I could come up with randomly generated turtles, spaced apart, whose movement would create a certain pattern — a custom wallpaper based on the motion of the turtles.

I made the turtles’ colors change randomly whenever the user clicks.

For the movement, I wanted the turtles to keep a constant speed but change direction based on the mouse position. I calculated the distance from each turtle to the mouse location and let the turtles turn based on that distance, which also determines whether they turn right or left.

I came up with two different iterations while playing with the code.

Enjoy.

jiaxinw-Looking Outwards 11- Computer Music

A.I. Duet by Yotam Mann

Someone is trying AI Duet with the keyboard

Yotam Mann created this experiment for letting people play a duet with a computer. When the user presses some keys on the keyboard, the computer will respond to your melody. I like how this experiment showed a potential of letting human beings interact with computers to create artistic works. One thing surprised Yotam Mann a lot was that some people didn’t wait for the response but tried to play music at the same time with the computer, which was really like a real-time duet with another person.

In this project, Yotam Mann used machine learning to let the computer “learn” how to compose. He used neural networks and gave the computer tons of examples of melody. The computer analyzed the notes and timings and gradually built a map for the relationships between them.  So that when the melody was given to the computer, it can give a response to people based on the map.

Here is the video of A.I. Duet

If you want to know more, please go to : https://experiments.withgoogle.com/ai/ai-duet

Looking outwards 11

I chose Google’s Magenta project as my example of computer music. I am very interested in machines’ capability to learn in realms that are considered to be uniquely human. The Magenta research project aims to push the limits of what AI can do in the arts. In order to achieve such intelligence for the AI, they have tried many different machine-learning techniques, such as recurrent neural networks, convolutional neural networks, variational methods, adversarial training methods, and reinforcement learning.

They used NSynth algorithm, which uses neural networks to synthesize new sounds, on notes generated by different instruments.

This is such an inspiring and provoking project that allows more thought to be brought up on limits of AI and possibilities.

kyungak-lookingoutwards-11

(Simon de Diesbach, Jonas Lacôte, Laura Perrenoud,  “The Computer Orchestra”, 2013)

The creators of “The Computer Orchestra” initially met  at University of Art and Design of Lausanne in Switzerland during their Bachelor’s degree in media and interaction design. This project started as a small project, but later grew into an amazing piece that was exhibited in various parts of the world.

“The Computer Orchestra” is an interactive installation that allows viewers to conduct their own orchestra. Users can upload or download music samples and integrate them to their own taste using the simple interface within the computer. The sounds also change according to the space. A Kinect connected to the central computer detects the motion and position of the user’s hands and responds accordingly.

I personally admire the interactivity of this artwork. Being able to produce an orchestra that usually needs a handful of people with computers sounds amazing. The original thought and the combination of interactiveness made the artwork accessible and fun. I feel like this artwork was also educational, which makes it extra worthy to exhibit.

karinac-LookingOutwards-11

‘Mothership’ by Mason Bates

 

This is absolutely one of my favorite pieces of computer music.  Mason Bates, the composer of ‘Mothership’, is a famous artist who integrates technology with music.  This particular work uses synthesized sounds of multiple aircraft to create an alien sound of a mothership.

Mason Bates has to record and transfer those recordings into sound files that could be embedded into his laptop. Though the mechanical sounds were not computer-generated, he still had to edit a lot of the sounds to make it the way he wanted it to sound.

I was inspired by this piece and Mason Bates because it perfectly balances out the computer-generated sounds and the melodies and harmonies of the orchestra. Many times, computer generated music would be too heavy on either the tech side or the orchestral side, often clashing with one another. I think this piece is a great use of both.

katieche-looking outwards 11

Carsten Nicolai

Berlin based German artist and musician, Carsten Nicolai, works in a transitional field between music, art, and science. He seeks to override the individualized sensory perceptions by creating works that appeal to multiple senses at a time (i.e. using sound and light frequencies to appeal to ears and eyes simultaneously). In terms of computation, he often uses mathematical patterns like random, grids, error, consistency, etc.

In his 2016 installation, Reflektor Distortion, viewers can both hear the sounds being played and see the light bars seemingly move in the water bowl. It works by sending sound waves through a speaker that shifts the water in the bowl accordingly, creating the illusion that the series of light bars reflected in the water are also moving. The result is a mesmerizing movement of lights that coincides with whatever sound is being played. I think the light bars are a very effective and beautiful way to display sound waves, contrary to the stereotypical up-down cosine/sine-looking sound waves that we are all used to seeing.

creyes1-LookingOutwards-11


Brian Foo’s “Two Trains,” where the song varies depending on median household income for each neighborhood the 2 Train stops in

Created in 2016, Data-Driven DJ is a project by Brian Foo to create musical works through computation and interpretation of real-world data in new and experimental ways. The project is a collection of ten tracks, each sampling data from a broad swath of topics from social to cultural to environmental. Using a blend of programs such as ChucK (a programming language for real-time sound synthesis and music creation), Audacity, Python, Hydrogen (drum machine application), and Processing, each track that Foo creates is compelling and unique, and makes powerful statements when data is not only visualized in his videos, but synthesized into song. What I really like about Foo’s work is that while they may not be catchy, fleshed out songs, each track is fascinating and sends a powerful communication in regards to its subject. Even though the songs are essentially controlled by algorithm, Foo’s artistic touch is still incredibly evident, and the entire body of work shines for that reason.


Brian Foo’s “Rhapsody in Grey,” which uses brain wave data during a seizure to inform the song’s composition

Foo made his process for creating his tracks available as open-source on Github, and you can find more of him on Twitter, Facebook, Soundcloud, and Vimeo.

jwchou-LookingOutwards-10

EarSketch

A promotional image about EarSketch.

EarSketch is an online web application that teaches students how to code by making music. Users can code in either Python or Javascript. It was created because people thought in order to encourage people to learn how to code and to increase its appeal, they should use a universally understood and loved language: music. I really admire the interface, because it’s modern and fairly simple. If it looked unfinished or overly technical, I imagine that would turn people off from using the app. The app also features a HUGE range of samples, containing everything from dubstep to eight-bit to west coast hip hop.

EarSketch is designed/created by a team that is primarily based out of Georgia Tech.

While the sample themselves might not be computational, users can change the music by setting their own tempo in the code, which is computational. There are also a lot of functions that users can use to analyze tracks for their different properties.

Because the software is used to create music, the creators themselves did not insert any of their own personal artistic sensibilities into the project. Rather, they designed the software in order to allow others to insert their creative sensibilities.

A video that introduces EarSketch: