15-104 • Introduction to Computing for Creative Practice • Professor Roger B. Dannenberg • Fall 2018
https://courses.ideate.cmu.edu/15-104/f2018
Posts by Hannah Cai (hycai@andrew.cmu.edu)

Hannah Cai—Final Project (Sat, 08 Dec 2018)

(The audio seems to work only in WordPress on Chrome.)

Instructions:
Hover over points to hear their pitches.
Click points to activate them.
Click points again to deactivate them.
Clicked points that are close enough to each other will link together. (Try not to link too many, or the program will lag.)
Explore and enjoy the soundscape!

/* Hannah Cai
Section C
hycai@andrew.cmu.edu
Final Project
*/

//particle position arrays
var particleNumber = 200; //number of particles
var psize = 1.5; //particle size
var px = []; //particle x position
var py = []; //particle y position
var pz = []; //particle z position
var distanceToPoint; //dist from (mouseX, mouseY) to (px, py)
var amplitude = 3.14 * 3; //amplitude of bobbing animation
var waveSpeed; //speed of bobbing animation
var theta = 0; //plugin for sin()


//particle sound arrays
var threshold = 100; //minimum distance between mouse and particle to trigger glow/sound
var notes = [130.81, 146.83, 164.81, 174.61, 196, 220, 246.94, //frequencies (Hz) of the C major scale, from C3
            261.63, 293.66, 329.63, 349.23, 392.00, 440, 493.88, 
            523.25, 587.33, 659.25, 698.46, 783.99, 880, 987.77, 
            1046.5, 1174.66, 1318.51, 1396.91, 1567.98, 1760, 2093]; //to C7
var ppitch = []; //pitch values for each particle
var pOsc = []; //oscillator for each particle
var pvolume = 0; //volume of each particle
var pOscOn = []; //array of booleans for if the oscillators are on

//misc other particle arrays
var pClicked = []; //array of booleans for if the particle was clicked
var glowSize; //size of particle glow

//arrays for cursor
var xarray = [0, 10, 20, 30, 40, 50];
var yarray = [0, 10, 20, 30, 40, 50];
var s;

//variables for camera position
var camX;
var camY;
var camZ;

//arrays for lines between particles
var connect = [];
var connectionThreshold = 500;

function setup() {
    createCanvas(windowWidth, windowHeight, WEBGL); //fit canvas to window size

    //set up variables; store in arrays
    for (var i = 0; i < particleNumber; i++) {
        px.push(random(-width * 0.8, width * 0.8));
        py.push(random(-height, height));
        pz.push(random(-width, height / 2));
        ppitch.push(notes[floor(random(0, notes.length))]);
        pOscOn.push(false);
        pClicked.push(false);
        makeOsc(i);
    }
}

function makeOsc(index) {
    var myOsc = new p5.SinOsc();
    myOsc.freq(ppitch[index]);
    pOsc.push(myOsc); //store oscillators in pOsc array
}

function playOsc(index) {
    var maxVolume = 0.01;
    pvolume = constrain(pvolume, 0, maxVolume);
    //turn clicked particles permanently on
    if (pClicked[index] === true) { 
        pvolume = maxVolume;
    } else { 
    //unclicked particles get louder as the mouse gets closer
        pvolume = map(distanceToPoint, threshold, 0, 0, maxVolume);
    }
    //make particles with lower pitches louder, so all ranges are heard clearly
    var factor =  map(ppitch[index], ppitch[0], ppitch[ppitch.length - 1], 5, 1);
    pvolume *= factor;
    pOsc[index].amp(pvolume);
}

function stopOsc(index) {
    pOsc[index].stop();
}

function draw() {
    background(0);
    noStroke(); //get rid of default black stroke

    //map camera position to mouse position to simulate orbit control
    camX = map(mouseX, 0, width, -width / 2, width / 2);
    camY = map(mouseY, 0, height, -height / 2, height / 2);
    camZ = (height/2.0) / tan(PI*30.0 / 180.0);
    camera(camX, camY, camZ, 0, 0, 0, 0, 1, 0);

    //set up particles
    for (var i = 0; i < particleNumber; i++) {
        drawLines(i); //draw lines between clicked particles
        //create bobbing movement

        waveSpeed = map(pz[i], -width, height, 20000, 70000); //create parallax effect
        theta += (TWO_PI / waveSpeed);
        if (theta > amplitude) {
            theta = -theta;
        }
        py[i] += sin(theta);

        push();
        translate(px[i], py[i], pz[i]);
        drawGlow(i); //draw glow of each particle
        //draw each particle
        fill(255); 
        smooth();
        sphere(psize);
        pop();

        //play a particle's oscillator if the mouse's 
        //distance is less than the threshold
        if (distanceToPoint <= threshold) {
            if (pOscOn[i] == false) {
                pOsc[i].start();
                pOscOn[i] = true;
            }
            playOsc(i);
        }

        //stop a particle's oscillator if the mouse's 
        //distance is greater than the threshold
        if (distanceToPoint > threshold && pClicked[i] == false) {
            stopOsc(i);
            pOscOn[i] = false;
        }
    }

    //cursor
    noCursor(); //turn off the cursor icon, display below instead
    //this is basically the code from the snake lab we did
    for (var i = 0; i < xarray.length; i++) {
        fill(255, 255, 200);
        s = 8 - (xarray.length - i);
        ellipse(xarray[i], yarray[i], s, s);
    }
    xarray.push(mouseX - width / 2);
    yarray.push(mouseY - height / 2);
    if (xarray.length > 8) {
        xarray.shift();
        yarray.shift();
    }
}

function drawGlow(index) {
    push();
    noStroke();
    //rotate the (flat) ellipses to face the camera to simulate 3d glow
    rotateX(radians(map(camY, -height / 2, height / 2, 40, -40)));
    rotateY(radians(map(camX, -width / 2, width / 2, -45, 45)));

    //calculate distance from mouse to each point
    distanceToPoint = dist(mouseX - width / 2, mouseY - height / 2, px[index], py[index]);
    
    //clicked particles have a pulsing glow;
    //unclicked particles glow when the mouse hovers close to them
    if (pClicked[index] === true) {
        glowSize = map(sin(theta), TWO_PI, 0, psize, 100);
    } else {
        glowSize = map(distanceToPoint, 100, psize, psize, 100);
    }
    //draw the actual glow (a radial alpha gradient)
    for (var r = psize; r < glowSize; r += 1.5) {
        fill(255, 255, 200, map(r, psize, glowSize, 2, 0));
        ellipse(0, 0, r);
    }
    pop();
}

function drawLines(index) {
    push();
    //push the indices of clicked particles in the "connect" array;
    //turn off/remove particles from the array if clicked again
    if (pClicked[index] == true && !connect.includes(index)) {
        connect.push(index);
    } else if (pClicked[index] == false && connect.includes(index)) {
        connect.splice(connect.indexOf(index), 1); //remove this particle's index by value
    }
    }

    //connect groups of particles that are clicked if the distance between is less than the threshold
    stroke(255);
    strokeWeight(1);
    noFill();
    for (var i = 0; i < connect.length; i++) {
        for (var j = i + 1; j < connect.length; j++) {
            if (dist(px[connect[i]], py[connect[i]], pz[connect[i]], 
                px[connect[j]], py[connect[j]], pz[connect[j]]) < connectionThreshold) {
                beginShape(LINES);
                vertex(px[connect[i]], py[connect[i]], pz[connect[i]]);
                vertex(px[connect[j]], py[connect[j]], pz[connect[j]]);
                endShape();
            }
        }
    }
    noStroke();
    pop();
}

//if window is resized, refit the canvas to the window
function windowResized() {
  resizeCanvas(windowWidth, windowHeight);
}

function mouseClicked() {
    for (var i = 0; i < particleNumber; i++) {
        distanceToPoint = dist(mouseX - width / 2, mouseY - height / 2, px[i], py[i]);
        //toggle pClicked on and off if mouse clicks within 10 pixels of a particle
        if (distanceToPoint < 10 && pClicked[i] == false) {
            pClicked[i] = true;
        } else if (distanceToPoint < 10 && pClicked[i] == true) {
            pClicked[i] = false;
        }
    }
}

Here’s a zip file for the fullscreen version.
final project fin

There are still a few bugs I'm aware of that I don't know how to fix:
1. Sometimes the links flicker; adding another grouped point sometimes fixes it.
2. Sometimes the volume is louder than it should be when the program first starts or is refreshed. I constrained the volume to try to avoid this, but it didn't seem to help (one possible mitigation is sketched below).
3. Sometimes all the oscillators start off turned on when the program first starts or is refreshed (if you move your mouse close to a point, the sine wave starts and stops abruptly instead of fading in and out).

Generally, refreshing the page fixes all of these bugs, so please refresh the page if you notice any of the above!
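Bugs 2 and 3 (and the Chrome-only audio) may come down to browser autoplay rules and to where the volume gets clamped. The sketch below is one possible mitigation rather than a tested fix: it assumes p5.sound's userStartAudio() and the amp(value, rampTime) form, and safeVolume is an illustrative helper name, not something from the project above.

// Possible mitigation sketch (untested), assuming p5.sound.

// Browsers keep the AudioContext suspended until a user gesture, which can
// leave oscillators in an odd state right after the page loads; p5.sound's
// userStartAudio() resumes the context on that first gesture.
function mousePressed() {
    userStartAudio();
}

// Clamp the volume *after* it has been mapped (and after the low-pitch boost),
// so the constrain actually limits what reaches the oscillator, and ramp the
// amplitude change over 50 ms instead of jumping to it, to avoid spikes.
function safeVolume(rawVolume, maxVolume) {
    return constrain(rawVolume, 0, maxVolume);
}

// usage inside playOsc(index), replacing pOsc[index].amp(pvolume):
// pOsc[index].amp(safeVolume(pvolume, maxVolume), 0.05);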

I enjoyed this project a lot. Even though I spent a lot of time struggling and debugging, I feel like I learned a lot about both WEBGL and using sound/oscillators. I'm pretty satisfied with the final visual effects as well, although unfortunately the program starts to lag if too many linked points are formed. Also, my aim with this project was to familiarize myself more with objects, but I got completely stuck trying to structure everything as objects, so I made everything with a ton of arrays instead. I definitely want to revisit this project in the future and reorganize it properly with objects. In general, I want to keep adding to this project, because it's still pretty clunky and buggy right now. I was planning to add a start screen, instructions, the ability to record audio, and different modes (e.g., a "wander" mode where the cursor moves around on its own), but I didn't have time to try implementing most of those before the deadline. In the future, though, I definitely want to make this into something that could stand alone as an interactive website (and add it to my portfolio).
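As a rough illustration of the "wander" mode idea (purely a sketch; the names and values here are made up, not part of the project), Perlin noise could stand in for the mouse position:

// Illustrative sketch of a "wander" mode: a noise-driven point that could
// replace mouseX/mouseY when the mode is turned on.
var wandering = false;
var wanderT = 0; // time offset into the noise field

function wanderPosition() {
    wanderT += 0.005;
    // two independent noise channels for x and y, scaled to the canvas
    var wx = map(noise(wanderT), 0, 1, 0, width);
    var wy = map(noise(wanderT + 1000), 0, 1, 0, height);
    return {x: wx, y: wy};
}

// in draw(), something like:
// var p = wandering ? wanderPosition() : {x: mouseX, y: mouseY};
// ...then use p.x and p.y wherever mouseX and mouseY are used now.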

In general, I loved this class and I learned a lot! Thank you to Professor Dannenberg and all the TAs!

Hannah Cai—Project 12—Proposal (Sat, 17 Nov 2018)

For my project, I’m planning to do some kind of interactive, generative audio visualization. My most concrete idea as of now is to create a field of particles in WEBGL, with each particle having a sine wave of a random frequency embedded in it. Users would be able to “look around” the viewing field with orbit control, hover over a particle to hear its pitch, and click and drag between particles to form “links.” Linked particles would emit their pitches, allowing the user to build “constellations” of harmonies. I want the particles to feel dynamic and alive, so I’ll probably implement some sort of noise into their movement, as well as create reactive interactions, such as glowing when a mouse is over a particle, or when a particle is linked. This idea will probably require the use of objects, which are one of my weak points. Hopefully, completing this project will strengthen my familiarity with objects. I’m also almost completely unfamiliar with WEBGL, but I’ve been interested in it for a while, so this will be a good opportunity to explore it.
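As a rough sketch of the object-based structure this proposal describes (assuming p5.js with p5.sound; the names and fields are illustrative, not a final design), each particle could carry its own position and oscillator:

// Sketch of a particle object with an embedded sine oscillator (p5.js + p5.sound).
function makeParticle(freq) {
    var osc = new p5.SinOsc();
    osc.freq(freq);
    return {
        x: random(-width, width),
        y: random(-height, height),
        z: random(-width, width),
        freq: freq,
        linked: false, // whether this particle is part of a "constellation"
        osc: osc,
        play: function(volume) {
            this.osc.start();
            this.osc.amp(volume, 0.05); // ramp the amplitude to avoid clicks
        },
        hush: function() {
            this.osc.amp(0, 0.05);
        }
    };
}

// e.g. in setup(): for (var i = 0; i < 200; i++) { particles.push(makeParticle(440)); }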

If I have time, I’d want to take this idea further and try to gamify it in some way, as well as add more recognizable visuals to it (for example, have the particles be a night sky above a hill).

Hannah Cai—Looking Outwards—12 (Sat, 17 Nov 2018)

I was inspired by these projects:

[two embedded videos]
For my project, I was originally thinking of doing something like the first video, which would basically just mean integrating sound into my generative landscape. However, I wanted to do something with generative audio, not just a simple audiovisual like the glowing dot, which I personally don’t think is very exciting. I then thought of doing something with particles. Ideally, my end product’s visuals would be something like the second video, with particles that feel dynamic and “alive.” However, instead of being static recorded segments, I want my project to be interactive, reacting to mouse position or something like that. I also want the user to be able to interact with the view/perspective, so I’m thinking about using WEBGL and orbit control.
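For reference, p5's WEBGL mode has a built-in orbitControl() for this kind of camera interaction; a minimal sketch (with the particle drawing left out) might look like this:

// Minimal WEBGL + orbitControl() sketch of the setup described above.
function setup() {
    createCanvas(480, 480, WEBGL);
}

function draw() {
    background(0);
    orbitControl(); // drag to rotate, scroll to zoom the camera around the origin
    // ...draw particles here, reacting to the mouse position...
}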

Hannah Cai—Project 11—Composition (Sat, 10 Nov 2018)

Click to generate a new tree!

/* Hannah Cai
Section C
hycai@andrew.cmu.edu
Project-11-Composition
*/

//turtle code
function turtleLeft(d) {
    this.angle -= d;
}
function turtleRight(d) {
    this.angle += d;
}
function turtleForward(p) {
    var rad = radians(this.angle);
    var newx = this.x + cos(rad) * p;
    var newy = this.y + sin(rad) * p;
    this.goto(newx, newy);
}
function turtleBack(p) {
    this.forward(-p);
}
function turtlePenDown() {
    this.penIsDown = true;
}
function turtlePenUp() {
    this.penIsDown = false;
}
function turtleGoTo(x, y) {
    if (this.penIsDown) {
      stroke(this.color);
      strokeWeight(this.weight);
      line(this.x, this.y, x, y);
    }
    this.x = x;
    this.y = y;
}
function turtleDistTo(x, y) {
    return sqrt(sq(this.x - x) + sq(this.y - y));
}
function turtleAngleTo(x, y) {
    var absAngle = degrees(atan2(y - this.y, x - this.x));
    var angle = ((absAngle - this.angle) + 360) % 360.0;
    return angle;
}
function turtleTurnToward(x, y, d) {
    var angle = this.angleTo(x, y);
    if (angle < 180) {
        this.angle += d;
    } else {
        this.angle -= d;
    }
}
function turtleSetColor(c) {
    this.color = c;
}
function turtleSetWeight(w) {
    this.weight = w;
}
function turtleFace(angle) {
    this.angle = angle;
}
function makeTurtle(tx, ty) {
    var turtle = {x: tx, y: ty,
                  angle: 0.0, 
                  penIsDown: true,
                  color: color(0),
                  weight: w,
                  left: turtleLeft, right: turtleRight,
                  forward: turtleForward, back: turtleBack,
                  penDown: turtlePenDown, penUp: turtlePenUp,
                  goto: turtleGoTo, angleto: turtleAngleTo,
                  turnToward: turtleTurnToward,
                  distanceTo: turtleDistTo, angleTo: turtleAngleTo,
                  setColor: turtleSetColor, setWeight: turtleSetWeight,
                  face: turtleFace};
    return turtle;
}

/////my code

var angle;
var x;
var y;
var w = 5;
var minAngle = 1;
var maxAngle = 30;
var minRatio = .6;
var maxRatio = .9;

function setup() {
    createCanvas(480, 480);
    background(250);
    strokeJoin(MITER);
    strokeCap(PROJECT);
    frameRate(1);
}

function tree(length, turtle, w, r, l) {
  if (length > 10) { //create recursive branches
    var ratio = random(minRatio, maxRatio);
    turtle.forward(length);
    turtle.setWeight(w * ratio);
    turtle.right(r);
    tree(length * ratio, turtle, w * ratio, random(minAngle, maxAngle), random(minAngle, maxAngle));
    turtle.left(r + l);
    tree(length * ratio, turtle, w * ratio, random(minAngle, maxAngle), random(minAngle, maxAngle));
    turtle.right(l);
    turtle.setWeight(w); //restore this level's weight before backing up
    turtle.back(length);
  } else { //draw flowers!
    turtle.setColor("Pink");
    turtle.setWeight(10);
    turtle.forward(10);
    turtle.setColor(0);
    turtle.setWeight(w);
    turtle.back(10);
  }
  noLoop();
}

function draw() {
    var turtle = makeTurtle(width / 2, height);
    turtle.penDown();
    turtle.right(270); //point the turtle upward
    //trunk and branches (the first segment drawn by tree() forms the trunk)
    tree(100, turtle, w, random(minAngle, maxAngle), random(minAngle, maxAngle));
}

//refresh the canvas when mouse is clicked
function mouseClicked() {
  setup();
  draw();
}

For this project, I was inspired by recursive trees, and I wanted to see if I could make one with turtle. Although it took a long time to figure out, I’m really proud of the end result! In the future, I’d want to try and animate the tree or make it interactive, although it might turn out to be too computationally expensive. Overall, I feel like I learned a lot about turtles and recursive functions, and I had a lot of fun!
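As a sketch of the animation idea (values are illustrative, and it assumes the frameRate(1) and noLoop() calls are removed so draw() runs every frame), the branch angles could drift with noise:

// Illustrative sketch: sway the tree by tying the top-level branch angles to
// noise(), so redrawing every frame animates it without regrowing the tree.
function draw() {
    background(250);
    var sway = map(noise(frameCount * 0.01), 0, 1, -5, 5); // degrees of drift
    randomSeed(1); // keep the same branching pattern each frame; only the sway changes
    var turtle = makeTurtle(width / 2, height);
    turtle.right(270); // point the turtle upward
    tree(100, turtle, w, 15 + sway, 15 - sway);
}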

Hannah Cai—Looking Outwards—11 (Fri, 09 Nov 2018)

For this week’s Looking Outwards, I chose this sample of music, which was generated by David Cope’s “Experiments in Musical Intelligence” computer program. It’s described as a “Bach-style chorale” in the title, which, based on the comments, confused and angered a lot of the audience, who said it sounded nothing like Bach. One commenter clarified the distinction: “A ‘Bach Chorale’ refers to a style, not a composer – pretty much any SATB vocal piece utilizing the rules of counterpoint. That’s why a computer can do it — once you lay in one of the voices, you really can fill in the rest ‘by the numbers’. But don’t confuse that with actually composing or creating. And no offense but any real lover of Bach can tell that this sounds nothing like him — it has none of his intrigue or quirkiness (some of which has to do with his deliberate breaking of said rules). It sounds incredibly bland and boring to me. Which is exactly what I’d expect from computer-generated music.”

I found the reactions to this experiment more interesting than the audio itself, which I enjoyed, but didn’t find really special. I guess that personally, I feel like music needs some kind of human creativity or spontaneity/randomness to truly be “art.” Because this piece only “fills in” harmonies based on preset rules, it is less interesting/valuable to me, and apparently to quite a few other people as well. I still find the experiment impressive though, and I’d love to experiment with some kind of generative music in the future, if I can manage to learn how to.

Hannah Cai—Project 10—Landscape (Sun, 04 Nov 2018)

/* Hannah Cai
Section C
hycai@andrew.cmu.edu
Project-10-Landscape
*/

var trees = []; //array for bigger trees (in the back)
var bushes = []; //array for smaller trees 
                 //I just called them bushes to make things easier
var speed = 0.00001; //leftward shift rate of mountains

function setup() {
  createCanvas(480, 480);

  // create initial trees; make 5 big and 5 small trees
  for (var t = 0; t < 5; t++){
    var tx = random(width);
    trees[t] = makeTree(tx);
    var bx = random(width);
    bushes[t] = makeBush(bx);
  }
  frameRate(10); //set tree and bush frameRate to 10
}

//make tree at x
function makeTree(tx) {
  var tree = {x: tx,
              draw: treeDraw};
  return tree;
}

//make bush at x
function makeBush(bx) {
  var bush = {x: bx,
              draw: bushDraw};
  return bush;
}

//draw tree
function treeDraw() {
  noStroke();
  //leaves
  fill(14, 90, 117);
  triangle(this.x - 20, 398, this.x + 20, 398, this.x, 320);
  //trunk
  stroke(13, 77, 94);
  line(this.x, 330, this.x, 400);
  //movement
  this.x -= 1;

  //make new trees
  var newTree = 0.0025; //probability for new tree "birth"
  if (random(0, 1) < newTree) {
      trees.push(makeTree(width + 20)); //push new tree into trees array
  }
}

//draw bush
function bushDraw() {
  noStroke();
  //leaves
  fill(28, 65, 72);
  triangle(this.x - 15, 403, this.x + 15, 403, this.x, 330);
  //trunk
  stroke(13, 77, 94);
  line(this.x, 340, this.x, 405);
  //movement
  this.x -= 1.2;

  //make new bushes
  var newBush = 0.0025; //probability for new bush "birth"
  if (random(0, 1) < newBush) {
      bushes.push(makeBush(width + 15)); //push new bush into bushes array
  }
}

function draw() {
  background(228, 239, 242);
  noStroke();

  //orange gradient layer
  for (var y = 100; y < 400; y++) { //for this specific y interval,
    var a = map(y, 100, 400, 0, 255); //map y interval to alpha
    stroke(240, 178, 158, a); 
    line(0, y, width, y); //draw lines with mapped alphas
  }

  //sun
  fill(240, 178, 158);
  ellipse(240, 200, 25);

  //mountain layer 1
  beginShape(); 
  stroke(149, 189, 207);
  var variance1 = 0.001;
  for (var i = 0; i < width; i++) {
    var t = (i * variance1) + (millis() * speed);
    var y = map(noise(t), 0, 1, 100, height);
    line(i, y, i, height); 
    }
  endShape();

  //fog layer 1
  for (var y = 200; y < 400; y++) {
    var b = map(y, 200, 400, 0, 255);
    stroke(187, 208, 214, b);
    line(0, y, width, y);
  }

  //mountain layer 2
  beginShape(); 
  stroke(85, 170, 200);
  var variance2 = 0.0015;
  for (var j = 0; j < width; j++) {
    var t = (j * variance2) + (millis() * speed);
    var y = map(noise(t), 0, 1, 150, height);
    line(j, y, j, height); 
    }
  endShape();

  //fog layer 2
  for (var y = 200; y < 480; y++) {
    var b = map(y, 200, 480, 0, 255);
    stroke(187, 208, 214, b);
    line(0, y, width, y);
  }

  //draw trees using the treeDraw function
  for (var u = 0; u < trees.length; u++) {
    trees[u].draw();
  }

  //fog layer 3
  for (var y = 350; y < 480; y++) {
    var b = map(y, 350, 480, 0, 255);
    stroke(187, 208, 214, b);
    line(0, y, width, y);
  }

  //ground layers
  noStroke();
  fill(117, 144, 139);
  rect(-1, 400, width + 1, 10);
  fill(63, 84, 77);
  rect(-1, 405, width + 1, 80);

  //draw bushes using the bushDraw function
  for (var v = 0; v < bushes.length; v++) {
    bushes[v].draw();
  }

  //removes trees when they go off the left edge of the screen;
  //stores the trees still on screen in a new array
  var treesToKeep = [];
  for (var i = 0; i < trees.length; i++){
    if (trees[i].x + 20 > 0) {
      treesToKeep.push(trees[i]);
    }
  }
  trees = treesToKeep;

  //removes bushes when they go off the left edge of the screen;
  //stores the bushes still on screen in a new array
  var bushesToKeep = [];
  for (var v = 0; v < bushes.length; v++){
    if (bushes[v].x + 20 > 0) {
      bushesToKeep.push(bushes[v]);
    }
  }
  bushes = bushesToKeep;

}

I enjoyed this project, even though I initially struggled a lot with what kind of landscape I wanted to do. I wanted to make a landscape that looked believable, and since we all basically had only one terrain-generation template, it was hard to pick a landscape I felt I could create and be satisfied with. I ended up looking through landscape photos and using one for inspiration and reference. It was really helpful to be able to pull colors from the photo, as well as realistic landscape qualities.

I’m pretty proud of how my landscape turned out, especially the fog (which I learned how to code alpha gradients for). I really want to try using this kind of concept to generate backgrounds that could be used for games or open-world environments. I might explore some of that in the future (maybe for my final project?).
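Since the same alpha-gradient idea appears three times in draw(), one way to fold it into a reusable helper (just a sketch, using the same fog color as the code above) would be:

// Sketch of a reusable alpha-gradient "fog" helper; each of the three fog
// layers in draw() could then become a single call like fogLayer(200, 400).
function fogLayer(yTop, yBottom) {
    for (var y = yTop; y < yBottom; y++) {
        var a = map(y, yTop, yBottom, 0, 255); // fade from transparent to opaque
        stroke(187, 208, 214, a);
        line(0, y, width, y);
    }
}

// usage: fogLayer(200, 400); fogLayer(200, 480); fogLayer(350, 480);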

Hannah Cai—Looking Outwards—10 (Fri, 02 Nov 2018)


Stranger Visions

I chose the project “Stranger Visions” by Heather Dewey-Hagborg. I was drawn to, and impressed by, the fact that she created her own software to generate digital “portraits” of strangers based on their DNA (which she collected by picking up random pieces of gum, hair, etc. from streets). She started this controversial project in 2012 as a means to call attention to “the developing technology of forensic DNA phenotyping, the potential for a culture of biological surveillance, and the impulse towards genetic determinism.” These predictions came true two years later, when police and crime investigators began analyzing DNA in this way to help identify the culprits of crimes.

Heather received a PhD in Electronic Arts from Rensselaer Polytechnic Institute, and her bio states that she is “interested in art as research and critical practice,” which I find very interesting as a design student. Art is normally seen as very different from design: as purely aesthetic and usually meaningless in terms of real-world application. Heather turns that notion on its head. I would call her a researcher and data visualizer rather than an artist, but I still find her practical approach to art interesting and admirable.

Hannah Cai—Looking Outwards—09 (Fri, 26 Oct 2018)


link to the article

This post is a response to rjpark’s Looking Outwards 04 on the artist Andrius Šarapovas’ room-sized interactive musical sculpture. The installation uses an algorithm to convert 4G network data into sound, using 16 notes: C, D, F, and G, spread across four octaves.

rjpark commented mostly on the inherent simplicity yet external complexity of the project—how the 77 individual units combined and placed throughout the room create such a complex soundscape. While I agree on the impressiveness of the generated soundscape, I think the installation is actually inherently pretty complex, due to all the filters and “instruments” those 16 notes are played through (a metal bar, sound activator, sound damper, resonator, and mechatronics). I’m actually really curious how the network data is converted into sounds, and what choices were made in formulating the algorithm. Do the four notes mean anything in relation to the data, or were they just chosen to create an appealing chord? What do the different textures of sound stand for? Overall, I like the end product a lot, but as with many other projects that map data to a different output (sculpture, sound, visuals, etc.), I wish there were more correlation between the input and output; otherwise the end products feel beautiful, but arbitrary.
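To make the question concrete, a hypothetical mapping from a stream of network-activity values to the installation's 16 notes (4 pitch classes over 4 octaves) might look like the sketch below. This is purely illustrative; the article doesn't describe the actual algorithm.

// Purely hypothetical sketch: quantize a data value into one of 16 notes
// (C, D, F, G over four octaves). Not the installation's real algorithm.
var pitchClasses = [130.81, 146.83, 174.61, 196.00]; // C3, D3, F3, G3 in Hz

function dataToFrequency(value, minValue, maxValue) {
    var bin = floor(map(value, minValue, maxValue, 0, 16)); // 16 bins
    bin = constrain(bin, 0, 15);
    var octave = floor(bin / 4); // 0..3
    var note = bin % 4;          // which of C, D, F, G
    return pitchClasses[note] * pow(2, octave); // each octave doubles the frequency
}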

Hannah Cai—Project 09—Portrait (Fri, 26 Oct 2018)

(Move the mouse around to paint; click and drag to erase.)

/* Hannah Cai
Section C
hycai@andrew.cmu.edu
Project-09-Portrait
*/

var img;

//brush start position is top left corner of the canvas
var x = 0;
var y = 0;

//load image
function preload() {
    img = loadImage("https://scontent.fagc2-1.fna.fbcdn.net/v/t1.0-9/44824245_1935019013257397_3442166024294629376_n.jpg?_nc_cat=106&_nc_ht=scontent.fagc2-1.fna&oh=20689f4ef54a6fc95922b240a2579026&oe=5C552C4F");
    
    //sorry for long link, it's from facebook,
    //I tried url shorteners but they messed up the program
}

function setup() {
    createCanvas(480, 480);
    background("RosyBrown");

    //load pixels from image
    img.loadPixels();
}

function draw() {
    
    //get the color of the image at the current position (x, y)
    var col = img.get(x, y);

    //click and drag to erase
    if (mouseIsPressed) {
        noStroke();
        fill("RosyBrown");
        ellipse(mouseX, mouseY, random(5, 20));

    } else { //if mouse isn't pressed,

        //draw an ellipse with random size at (x, y) with the color from col
        noStroke();
        fill(col);
        ellipse(x, y, random(5, 20));

        //ellipse follows mouse around; move mouse around to paint
        var dirX = mouseX - x;
        var dirY = mouseY - y;
        x += dirX / 5;
        y += dirY / 5; 
    }
}



I tried a lot of different variations for this project. The one I settled on is pretty simple in terms of code, but I liked its aesthetic the most. This program invites the user to “paint” the portrait themselves (with the option to erase content by clicking and dragging), adding an element of interactive discovery and intimacy.

Here’s what a “painting” could look like:

And here are some of the variations I did before choosing the final one:



Hannah Cai—Looking Outwards—08 (Sat, 20 Oct 2018)

I was drawn to Brian House’s work while viewing his website, which is filled with unconventional projects such as Tanglr, a Chrome extension that links your own web browsing with that of a random stranger, and Conversnitch, a small “lightbulb” that discreetly records conversations and posts bits of them to Twitter. Compared to my assigned projects in design, the unexpectedness and unconventionality of Brian’s work is really refreshing. I’d like to try doing exploratory work like that.

Brian has a background in computer science and sound, which results in a lot of experiments that generate sound from data. He describes himself as “an artist who investigates more-than-human temporalities.” His presentation was drier than I expected given his eclectic projects, and a bit meandering. He didn’t really describe his work in an exciting or passionate way; it was pretty objective, which I would expect more from projects assigned by others than from self-driven work. The content is interesting; it’s just formatted more like an essay than like someone sharing their own art/work. I would present my own work differently.

Some of my favorite Brian House projects are Fight Logic, Conversnitch, Animas, and Everything that happens will happen today.
