hschung-Final-Project

sketch

//Heidi Chung
//hschung
//Section A
//Final-Project

var clouds = [];
var bobaY = 0; // height of boba
var bobaDy = 0; // the vertical velocity of boba
var offset = 0;
var themeSong; //variable for song to be played
var amplitude; //p5.Amplitude object that measures the song's volume
var blush = 200; //variable to make boba change color and "blush"

function preload() {
  themeSong = loadSound("https://courses.ideate.cmu.edu/15-104/f2017/wp-content/uploads/2017/12/bobasong.mp3");
  //preloading the song Boba Beach by Grynperet
}

function newcloud(px, py, pw) { //object for clouds
    return {x: px, y: py, w: pw};
}

function setup() {
    createCanvas(450, 400);
    clouds.push(newcloud(600, 200, 200));//adding a new cloud
    themeSong.play(); //play the song
    amplitude = new p5.Amplitude(); //capturing the volume of the song
}

// compute the location of the right end of a cloud
function cloudRight(p) {
    return p.x + p.w;
}

// return the last cloud object
function cloudLast() {
    return clouds[clouds.length - 1];
}

function draw() {
    background(253, 225, 200);
    var bobaX = width / 2;
    drawGreeting(); //calling the greeting and its "shadow" to be drawn
    drawBoba(); //calling the boba to be drawn
    push();
    noStroke(); //multiple layers to achieve pleasant transparency
    fill(255, 99); //white sea foam
    rect(0, height - 70, width, 70);
    fill(177, 156, 217, 90); //lilac sea foam 2
    rect(0, height - 80, width, 80);
    fill(220, 200, 200, 90); //sea foam 3
    rect(0, height - 90, width, 90);
    fill(220, 200, 200); //solid background of sea
    rect(0, height - 60, width, 60);
    fill(177, 156, 217, 90); //the taro sea
    rect(0, height - 60, width, 60);
    pop();
    stroke(255);
    strokeWeight(3);
    var level = amplitude.getLevel();
    var cloudSize = map(level, 0, 1, 30, 50);
    //clouds' height wiggles in response to song volume
    for (var i = 0; i < clouds.length; i++) {
        var p = clouds[i];
        noStroke();
        fill(255); //white clouds
        ellipse(p.x - offset, p.y, p.w, cloudSize);
    }

    // if first cloud is offscreen to left, remove it
    if (clouds.length > 0 && cloudRight(clouds[0]) < offset) {
        clouds.shift();
    }
    // if last cloud is totally within canvas, make a new one
    if (cloudRight(cloudLast()) - offset < width) {
        var p = newcloud(cloudRight(cloudLast()), // start location
                            random(50, 320), // height of new cloud
                            140); // all clouds have width 140 for now
        clouds.push(p); //add to array of clouds
    }
    noStroke();
    //move the "boba"
    //which cloud is current? linear search (!) through clouds
    var pindex = 0;
    while (cloudRight(clouds[pindex]) - offset + 20 < bobaX) {
        pindex += 1;
    }
    //now pindex is index of the cloud in the middle of canvas
    //find the cloud height
    var py = clouds[pindex].y;
    //if boba is above a cloud, fall toward it, but don't go past it
    if (bobaY <= py) {
        bobaY = min(py, bobaY + bobaDy); // change Y by Dy
    } else { // if we are below the cloud, fall to ground
      //to avoid this, once we are below a cloud, force Dy non-negative
      if (bobaDy < 0) {
          bobaDy = 0;
      }
      bobaY = min(height, bobaY + bobaDy);
    }
    //if the boba falls from a cloud, it will "jump" to the next cloud
    if (bobaY >= height) {
        bobaY = 0;
    }
    //move the "landscape"
    offset += 3;
    //accelerate boba with gravity
    bobaDy = bobaDy + 1;
}

function drawGreeting() {
  var level = amplitude.getLevel();
  var greetingSize = map(level, 0, 1, 25, 30); //control limits of greeting size
  for (var i = 0; i < width; i++) {
    fill(210, 120, 120);
    textSize(greetingSize);
    text("welcome  to  boba  beach", width/5 + i, height/4 + i); //streaked greetings
  }
  fill(255, 196, 197); //light pink greeting
  strokeWeight(3);
  textSize(greetingSize);
  text("welcome  to  boba  beach", width/5, height/4);
}

function drawBoba() {
  var bobaX = width / 2;
  var level = amplitude.getLevel();
  var bobaSize = map(level, 0, 1, 35, 85); //control limits of boba size
  var eyeSize = map(level, 0, 1, 5, 10); //control limits of eye size
  //traits fluctuate in size depending on the song's volume.
  var mouthSize = map(level, 0, 1, 7, 15); //control limits of mouth size

  noStroke();
  fill(blush, 130, 140); //boba "blushes": its R value toggles in keyPressed() when you make it jump
  ellipse(bobaX, bobaY - 20, bobaSize, bobaSize); //jumping boba
  fill(0);
  ellipse(bobaX - 7, bobaY - 20, eyeSize, eyeSize); //left eye
  ellipse(bobaX + 11, bobaY - 25, eyeSize, eyeSize); //right eye
  fill(120, 0, 32); //happy mouth
  arc(bobaX + 4, bobaY - 20, mouthSize + 3, mouthSize + 3, 0, PI + 50, CHORD);
}

function keyPressed() {
  bobaDy = -15; //velocity set to up when key is pressed
  if (blush === 200) { //changing the color of the boba
    blush = 255; //if the boba's R value is 200, and a key is pressed,
  } else {       //change the R value to 255, and vice-versa.
    blush = 200;
  }
}

For my final project, I wanted to create a cute, simple, and clean animation with an interactive touchpoint, perhaps by pressing keys or clicking the mouse. My inspiration was to create something that brings the user a sense of delight. I had wanted to animate the soundtrack “Boba Beach” in After Effects over the summer, but never finished, and that led me to want to make something cute for the final project. I was inspired by the delight and familiarity of platform games, and thought that format would mesh well with my idea of cute visuals you can interact with if you so choose. Unlike in a classic platform game, I chose not to make the boba “die” when it falls off the clouds.

The jumping boba wiggles in size with the volume of the song, as do its eyes and mouth. The clouds passing by also wiggle. When you press any key, the boba jumps and changes color slightly, as if “blushing.” The greeting in the background also wiggles with the volume. I am pleased with the cute aesthetics and interaction I was able to make.
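The volume-driven “wiggle” is the core trick: a p5.Amplitude object reports the current output volume as a level between 0.0 and 1.0, and map() rescales that level into a pixel range. Here is a minimal sketch of just that pattern, assuming a sound file named bobasong.mp3 is available (p5.sound is required, and modern browsers may need a user gesture before audio will start):

var song;
var amplitude;

function preload() {
  song = loadSound("bobasong.mp3"); //placeholder file name
}

function setup() {
  createCanvas(200, 200);
  song.play();
  amplitude = new p5.Amplitude(); //tracks output volume, 0.0 to 1.0
}

function draw() {
  background(253, 225, 200);
  var level = amplitude.getLevel(); //current volume of the song
  var size = map(level, 0, 1, 35, 85); //rescale volume into a pixel range
  ellipse(width / 2, height / 2, size, size); //the circle "wiggles" with the song
}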

doodles for composition ideation
I made these assets in Illustrator and had tried animating them with After Effects. This also served as color scheme inspiration.
screenshot: a little boba floating on a cloud above a sea of taro tea

hschung-Final-Project-Proposal

For my final project, I’d like to create a cute, simple, and clean animation with an interactive touchpoint, perhaps by pressing certain keys or clicking in certain areas. I’m not exactly sure what I want the keyPressed action to trigger, but I would like a cute and simple interaction to happen, one that brings the user a sense of delight. Perhaps you press a key and a character on the screen blushes or jumps. I wanted to animate using the soundtrack “Boba Beach” and attempted to over the summer, but didn’t finish. I had a pink beach and little bobas with faces drawn on them. I thought it’d be interesting if I could animate them in a simple manner using code! Part of the reason I was so hesitant to animate this idea was that After Effects intimidated me, so I’m excited to see what I can make with my new knowledge of p5.js.

little boba pearls with faces chilling on a beach. maybe they’ll jump when you press keys, or change expression.

hschung-LookingOutwards-12

One project I really admire is the puzzle game Monument Valley, developed by Ustwo Games. It was based on concept drawings by company artist and designer Ken Wong. Monument Valley’s visuals were inspired by Japanese prints, minimalist sculpture, and other indie games. Looking at their clean, simplistic visuals makes me happy. I also really love their color schemes, and I like that the game gives people a sense of delight. I also appreciate the game’s meaningful visualization of the relationship between mother and child, and that it illustrates a metaphor for the “ever-evolving relationship between child and parent: one that shifts from reliance, to mutual respect, to a reversal of caregiving.” Monument Valley came out in 2014, and Monument Valley 2 came out in 2017.

In contrast to this game of puzzles, I also really like another phone game called Neko Atsume. It was developed by Hit-Point Co. and came out in 2014. The game lets you play with cats who visit your virtual backyard, take pictures of them, and “collect” all the cats. I like this game because its simple interactions, such as feeding the cats or taking “snapshots” of them, let me feel delight and become attached to them. And although there aren’t any puzzles to solve like in Monument Valley, I still find myself drawn to the game and enjoying the simple, cute interactions. These two projects also have very different aesthetics, and I find myself enjoying both.

A screenshot of one of the puzzles from Monument Valley. I really like the color schemes and geometric aesthetic.

Monument Valley 2 Article

A screenshot of Neko Atsume. The cats interact with objects you set up in your yard.

What Neko Atsume is

hschung-LookingOutwards-11

My LookingOutwards-04 was more about uniquely produced music, so I am taking the opportunity to explore sound art.

As I was searching for computational sound art, I came across the website for EarSketch, a program used to teach students computer science through coding music. Students learn to code in Python or JavaScript, and use loops, compose beats, and add effects to make music.
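An EarSketch script reads like a short program: you set a tempo, place sound clips on numbered tracks by measure, and optionally add effects. Below is a minimal sketch of the idea in EarSketch’s JavaScript mode; this is hedged, since the clip name is a placeholder (real scripts pick clips from EarSketch’s sound browser) and the exact API may differ between versions:

init(); //start a new EarSketch project
setTempo(120); //beats per minute
//fitMedia(clip, track, startMeasure, endMeasure) loops a clip on a track
fitMedia(HIPHOP_DUSTYGROOVE_001, 1, 1, 9); //placeholder clip name
setEffect(1, VOLUME, GAIN, -6); //turn track 1 down by 6 dB
finish(); //render the project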

EarSketch hosts a national competition every year to encourage students to code music creatively. I liked a winning submission from the 2017 competition, titled Mid Day Parade Competition Song, created by student Robert Marcez. I thoroughly enjoy his piece because it actually sounds good, like a real song: it’s full of complexities, crescendos, and even a beat drop of sorts that make it feel complete. More than that, I’m impressed that a high school student made a song from scratch via the medium of code, which is something I hadn’t thought much about before. He was able to manipulate different components of the song, write his own functions to adjust them easily, and creatively exercise his knowledge of code. It reminds me there are many ways to blend the fields of science and art.

Robert’s coded song: press the play button to listen! It’s quite good.
https://earsketch.gatech.edu/earsketch2/#?sharing=2K1nSohUQd3YaLpD4Zvohw

A list of the winners from 2017. Robert’s song is posted first, and there’s a blurb from him about what he was thinking as he created his song.
https://earsketch.gatech.edu/landing/#

hschung-LookingOutwards-10

I looked at a project by Kimchi and Chips, a Seoul-based art studio founded by Mimi Son and Elliot Woods. Their projects play with material and immaterial modes of existence, and combine the disciplines of code, form, material, concept, and mechanism.

Mimi Son was born in Seoul and currently lives and works there. She has taken on the roles of designer, curator, professor, storyteller, and artistic director in various countries and institutions. She holds master’s degrees in Digital Media Art and Design from Middlesex University and in Interaction Design from CIID. She is currently an adjunct professor at Ewha Womans University in Seoul, and works at Kimchi and Chips at the same time.

I found their project Litescape intriguing because it attempts to make a 3D representation of something we usually cannot experience in visual depth: sound. Using a 3D projection system based on the original Wiremap project by Albert Hwang, Litescape allows motion graphics and visual information to take physical, visible form, occupying the same measurable real-world space as its audience. I think it does a good job of immersing its audience in a unique environment of sound, light, color, and depth. It’s really interesting to me that they tried to quantify, or rather give physical attributes to, something as naturally abstract as sound. Sound is something we constantly experience, and I think this installation accentuates just how much vividness and depth sounds are capable of by illustrating them in a different, colorful, visual way.

http://www.kimchiandchips.com/#litescape

Litescape 3D from Elliot Woods on Vimeo.

hschung-Project-10

When I thought about the term “generative landscape,” I was immediately taken back to a trip I took with my family to Las Vegas, and the vast, beautiful landscapes we’d seen as we drove through the desert. The mountains were large and far away, and the clouds were passing through them. I thought I might do something like that for this project. I also wanted trees in the landscape, and sparkling stars, which transformed into snowflakes. As I played with the trees, I ended up having them “shiver” in the cold and also jump as if they were dancing. I got very playful, as I thought it would be fun to have a more fantasy-like winter landscape. I think it’s funny that I depicted trees dancing and alive in the season when they are least lively, and that the dancing makes them seem as if they are enjoying the snow like humans do.

sketch

//Heidi Chung
//Section A
//hschung@andrew.cmu.edu
//Project 10
var trees = [];

function setup() {
  createCanvas(400, 400);
  //create an initial collection of trees
  for (var i = 0; i < 6; i++) {
    var randomTreeX = random(width);
    trees[i] = makeTree(randomTreeX);
  }
  frameRate(10);
}

function draw() {
  background(52, 71, 106); //deep blue night sky

  noStroke();
  mountains();
  displayHorizon();

  updateAndDisplayTrees();
  removeTreesThatHaveSlippedOutOfView();
  addNewTreesWithSomeRandomProbability();
  //makeSparkles() is called from treeDisplay() instead of here:
  //there it runs once per tree per frame, so many more snowflakes
  //appear than a single call per frame here would produce.
}

function mountains() {
  fill(120, 205, 205); //aqua mountain
  ellipse(240, 280, 500, 370);

  fill(0, 255, 255, 90);//leftmost mountain
  ellipse(-50, 380, 500, 500);

  fill(150, 180, 230); //lavender blue mountain
  ellipse(400, 380, 450, 250);
}

function updateAndDisplayTrees() {
  //update the tree's positions and display them
  for (var i = 0; i < trees.length; i++) {
    trees[i].move();
    trees[i].display();
  }
}

function removeTreesThatHaveSlippedOutOfView() {
  var treesToKeep = []; //copying the trees i want to keep into a new array
  for (var i = 0; i < trees.length; i++) {
      if (trees[i].x + trees[i].breadth > 0) {
          treesToKeep.push(trees[i]);
    }
  }
  trees = treesToKeep; //keeping track of remaining trees
}

function addNewTreesWithSomeRandomProbability() {
  var newTreeLikelihood = 0.005;
  if (random(0,1) < newTreeLikelihood) {
    trees.push(makeTree(width));
  }
}

function treeMove() {
  this.x += this.speed;
}

function treeDisplay() {
  var treeHeight = 30*round(random(2, 8)); //re-randomized every frame, which makes the trees "shiver"
  var treeTopWidth = random(55, 80);

  fill(255, 90);
  stroke(0);
  push();
  translate(this.x, height - 40);
  noStroke();
  ellipse(20, -treeHeight, treeTopWidth, treeHeight); //treetops
  stroke(151, 152, 157);
  strokeWeight(7);
  line(20, -treeHeight, 20, treeHeight + 20); //tree trunks
  pop();

  makeSparkles(); //called once per tree per frame, so snowflakes appear frequently
}

function makeSparkles() {
  var sparkleX = random(5, width); //sparkles
  var sparkleY = random(5, height-40);
  var sparkleWidth = random(5, 20);

  noStroke();
  fill(255, 90); //transparent snowflakes
  ellipse(sparkleX, sparkleY, sparkleWidth, sparkleWidth);
  fill(255); //opaque snowflakes with different randomness
  ellipse(random(5, width), random(5, height-40), sparkleWidth-3, sparkleWidth-3);
}

function makeTree(birthLocationX) {
  var shiveringTree = {x: birthLocationX,
            breadth: 50,
            speed: -1.0,
            move: treeMove,
            display: treeDisplay};
  return shiveringTree;
}

function displayHorizon() {
  noStroke();
  fill(255); //white snowy ground
  rect(0, height - 80, width, 80);
}

mountain and clouds sketch
As I was trying to decide if I should change from a pinkish color scheme to a bluish one, I used this image as inspiration.
A screenshot of the winter scene.

hschung-LookingOutwards-09

I looked at my friend Tiffany’s post about Marpi’s meditative installation art, “The Wave.” The display uses open-source shaders by Jaume Sanchez, powered by Three.js. It’s a serene, sizeable touchscreen display of waves made up of soft, curved shapes and strokes, and people can interact with the display and guide the movement of the water particles. I wasn’t sure the shapes really behave like water particles, but it’s interesting that Marpi calls them that: though their shape and behavior don’t necessarily follow real water, I can bring myself to believe that if we had magical powers to control water with gestures, it would move in this manner. I agree with Tiffany that it’s a very pleasant and soothing visual to look at. The slow movement and soft curves are comforting and satisfying to watch as they change direction and shape. The video below, showing people interacting with “The Wave,” was posted a year ago.

The Wave from Marpi on Vimeo.

Link to my friend’s Looking Outwards post:

thlai-Looking-Outwards-05

hschung-Project-09

sketch

//Heidi Chung
//Section A
//hschung@andrew.cmu.edu
//Project-09

var baseImage;

function preload() {
    var myImageURL = "https://i.imgur.com/FLTyc1P.jpg?1"; // a picture of myself
    baseImage = loadImage(myImageURL); //loading the image
}

function setup() {
    createCanvas(480, 480);
    background(255);
    baseImage.loadPixels(); //loading the image's pixels
    frameRate(20);
    noStroke();
}

function draw() {
    image(baseImage, 0, height, baseImage.width/2, baseImage.height/2); //drawn offscreen; only the sampled circles appear
    var px = random(width); //random assembly of circles being drawn in regards to x
    var py = random(height);//random assembly of circles being drawn in regards to y
    var ix = constrain(floor(px), 0, width-1);
    var iy = constrain(floor(py), 0, height-1);
    var theColorAtLocationXY = baseImage.get(ix, iy); //extracting the colors of the image

    noStroke();
    fill(theColorAtLocationXY); //colors of circles match the image's colors at the appropriate positions
    var circleWidth = random(5, 30); //variable set so the circles have the same random width and height
    ellipse(px, py, circleWidth, circleWidth); //circles instead of ellipses

    var theColorAtTheMouse = baseImage.get(mouseX, mouseY);
    fill(theColorAtTheMouse); //drawing with the mouse using the image's colors at the pixels

    var whatsUpTextX = pmouseX-25; //the position of the text relative to the mouse
    var whatsUpTextY = pmouseY-20;
    textSize(15); //the text says "what's up" in japanese
    text("どうしたの?", whatsUpTextX, whatsUpTextY); //what's up is dragged along by the mouse
}

function mousePressed() {
  var theColorAtTheMouse = baseImage.get(mouseX, mouseY); //extracting the colors of the image again
  strokeWeight(3); //white stroke around the text to see it better
  stroke(255); //the text says "are you okay?" in korean
  textSize(30); //when you click, a big "are you okay?" is drawn
  text("괜찮아?", pmouseX - 45, pmouseY);
}

I really enjoy post-impressionism, especially Van Gogh’s art. I love that he painted scenery and people with purposeful strokes and artistic, imprecise interpretation, especially in a time when people did not appreciate his art because it wasn’t “accurate” or true to real life. I love the smudges and smears he made in his works, so I thought it’d be fun to make a self-portrait that feels impressionistic. When you drag the mouse across the canvas, it “paints” words that say “what’s wrong” or “what’s up” in Japanese. When you click the mouse, it “paints” a word that says “are you okay” in Korean. I can read and speak both Japanese and Korean, and I thought it’d be interesting to have both languages in my project, to say that even if you can’t understand something, you can still visually appreciate it, like you can with Van Gogh’s works.

After many circles have been drawn
You can drag the mouse to “draw” on the canvas with the words that ask “what’s up?” in Japanese
When you click, the word that appears says “are you okay?” in Korean

hschung-LookingOutwards-08

I listened to the lecture given by Kate Hollenbach, who works at Oblong. She is a media programmer and artist based in Los Angeles. She works with interactive media and systems involving gesture and space. I appreciate that she is both involved in art and programming, because that’s something I have not explored before taking this course.

She discussed how their group works with gestural and spatial interactions in products and installations. I find it so interesting and inspiring that we are at a point where we can manipulate displays and interact with them, simply through gesture. I think it adds an interesting dimension to how people interact with the visuals they’re looking at- whether it’s video or pictures. It makes you involve your body’s behavior more thoroughly, I think, than the action of swiping through screens with your fingers does. It also creatively reimagines how designers can change the relationship between the user, the product, and their environment, and how the user can be enjoyably immersed in their experience.

Hollenbach presented Oblong’s projects by showing people actively experiencing their products. They also addressed what value their projects could add to the users’ experiences. I appreciate that she talked about how people harbor fear about their projects, and that part of their objective is to get people to trust the interfaces and systems they’re interacting with.
I liked the pointer they produced because it has subtle differences from an average pointer, yet has a lot of personality as a result of their efforts to make it feel organic and responsive. The little movements of the pointer and its responsive nature make it feel fun and endearing. It’s not something that moves the world, but it is delightfully designed. It’s in the video below.

INSTINT 2014 – Kate Hollenbach from Eyeo Festival // INSTINT on Vimeo.

http://www.katehollenbach.com/

hschung-LookingOutwards-07

I looked at the work of Periscopic, who describe themselves as a socially-conscious data visualization firm that helps people promote awareness and transparency of information.
On Periscopic’s website, Wes Bernegger explains the process behind the making of a “Feather” visualization. They used the Microsoft Emotion API, which takes images of faces as input and returns a set of emotion scores for each image. It’s interesting that we’re able to rely on machines to examine human faces and estimate which emotions seem present from facial expressions, which can be so diverse and can need much interpretation to understand. With that technology, Periscopic extracted emotive data, across 8 emotions, from past presidential inaugural addresses. They then plugged the data into Processing to create a visual representation of what they found, which took the form of a feather.
Interestingly enough, Donald Trump’s feather was much more negative and droopy than the feathers of the other presidents’ addresses.
I appreciate that this group took on emotive data, and that they chose an appropriately emotive shape to express it. It makes their findings easy to interpret at a glance.
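As a rough illustration of the kind of call involved (a hedged sketch, not Periscopic’s actual pipeline): the Emotion API, since retired into Microsoft’s Face API, accepted an image and returned per-face scores for eight emotions. The endpoint region, key, and image URL below are placeholders:

var endpoint = "https://westus.api.cognitive.microsoft.com/emotion/v1.0/recognize"; //placeholder region/endpoint

fetch(endpoint, {
  method: "POST",
  headers: {
    "Ocp-Apim-Subscription-Key": "YOUR_KEY_HERE", //placeholder key
    "Content-Type": "application/json"
  },
  body: JSON.stringify({url: "https://example.com/face.jpg"}) //placeholder image URL
})
  .then(function(response) { return response.json(); })
  .then(function(faces) {
    //each detected face carries scores for anger, contempt, disgust, fear,
    //happiness, neutral, sadness, and surprise: the 8 emotions mentioned above
    console.log(faces[0].scores);
  });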

http://www.periscopic.com/news/emotions-inauguration