Ian Kaneko Project 11: Landscape

ikaneko Generative Landscape

var star = []; // stores the star objects
var ufo = []; // stores the UFO objects
var moonY = 400; // where the moon surface is


function setup() {
    createCanvas(480, 480);

    // Pushes background stars on to array
    for (var i = 0; i < 20; i++) {
        star.push(makeStar());
    }

}

function draw() {
    
    noStroke();
    background(0);

    // Populates the background with stars of random location, size, and color
    for (var i = 0; i < star.length; i++) {
        var s = star[i];
        s.draw();
    }

    // Draws sun in the top right
    fill(255, 220, 70);
    circle(410, 50, 60);

    // Draws UFOs offscreen and has them travel left until they vanish
    for (var u = 0; u < ufo.length; u++) {
        var p = ufo[u];
        p.draw();
        p.move();
    }

    removeUFO(); // Removes UFOs once they have flown offscreen
    addUFO(); // Occasionally adds a new UFO on the right edge


    // Draws the moon surface at the bottom of the canvas
    fill(220);
    rect(0, moonY, width, height - moonY);
    fill(180);
    ellipse(30, 460, 180, 40);
    ellipse(300, 420, 220, 25);
    

 }

// Returns the star object that gets pushed into the array
function makeStar() {
    return {x: random(0, 410), y: random(moonY) - 20, size: random(10),
        draw: drawStar, color: color(250, 200, random(255)) };
}

// Draws stars of random color, location, and size
function drawStar() {
    fill(this.color);
    circle(this.x, this.y, this.size);
}

// Returns the UFO object that gets pushed into the array
function makeUFO() {
    return {x: width + 100, y: random(moonY), width: random(60, 130),
        draw: drawUFO, color: color(random(255), random(255), random(255)),
        speed: random(-2, -0.5), move: moveUFO};
}

// Draws UFOs of random color, width, location, and speed
function drawUFO() {
    fill(this.color);
    ellipse(this.x, this.y - 10, this.width / 2, 30);
    fill(240);
    ellipse(this.x, this.y, this.width, 20);
}

// This makes sure that the screen isn't flooded with 60 UFOs a second
function addUFO() {
    // Small chance of adding a UFO
    var newUFOChance = 0.007;
    if (random(0, 1) < newUFOChance) {
        ufo.push(makeUFO());
    }
}

// Takes UFOs off the array when they go offscreen
function removeUFO() {
    var ufoToKeep = [];
    for (var i = 0; i < ufo.length; i++) {
        if (ufo[i].x > 0 - ufo[i].width) {
            ufoToKeep.push(ufo[i]);
        }
    }
    ufo = ufoToKeep;
}

// Moves each UFO from right to left
function moveUFO() {
    this.x = this.x + this.speed;
}

Initial sketch of my idea for a landscape

For this project I didn’t really create the illusion that the viewer is moving; instead I went with the project’s other name, “stuff passing by”. I wanted it to feel like you are standing on the moon watching UFOs fly across space. UFOs of random color, size, and speed fly across the screen from right to left, and the stars in the background are different each time you refresh the page. The hardest part of this project for me was keeping track of all the functions I had to write to fulfill the requirements. Overall I’m pretty proud of how true I was able to stay to my original idea. While the sketch above is pretty simple, I think it translated very well to an animated project.

Monica Chang – Project 11 – Generative Landscape

sketch

//Monica Chang
//mjchang@andrew.cmu.edu
//Section D
//Project 11 - Generative Landscapes

//LANDSCAPE DESCRIPTION:
// SURPRISE! THERE IS HIDDEN LAVA BEHIND THE FIRST TERRAIN!

var tethered = [];
var terrainSpeed = 0.0008;// speed of orange terrain and middle terrain
var terrainSpeedThree = 0.0007; // speed of very back mountain
var terrainDetail = 0.008;
var terrainDetailTwo = 0.001;
var terrainDetailThree = 0.02; //smoothness of the terrains

function setup() {
    createCanvas(480, 480);
    frameRate(20);
    //initial lava
    for (var i = 0; i < 30; i++) {
        var tx = random(width);
        var ty = random(300, height);
        tethered[i] = makeTethered(tx, ty);
    }
}

function draw() {
    //lavender background
    background(236, 225, 250);

    //arranging the landscape elements(three terrains, lava spots)
    renderTerrainTwo(); // middle, low-opacity mountain
    renderTerrainThree(); // third mountain in the very back
 
    updateAndDisplayTethered(); //hidden lava behind the front terrain

    renderTerrainOne(); // first terrain in the very front

}



function displayTethered() {
    //drawing the "tethered" lava
    noStroke(); //no outline
    fill(255, 11, 5); //red tethered coat color
    push();
    translate(this.x0, this.y0); //locate lava body at x0, y0
    ellipse(5, 5, 10, 5); //tethered lava body
    pop();
}

function makeTethered(birthLocationX, birthLocationY) {
    var theTethered = {x0: birthLocationX, 
                y0: birthLocationY, 
                tx: random(0, width), 
                ty: random(300, height),
                speed: -3.0,
                move: moveTethered,
                display: displayTethered}
    return theTethered;
}

function moveTethered() {
    this.x0 += this.speed; //speed of lava moving
    if (this.x0 <= -10) { //new lava appears at the right as they disappear to the left
        this.x0 += width + 10;
    }
}

function updateAndDisplayTethered() {
    for (var i = 0; i < tethered.length; i++) {
        tethered[i].move();
        tethered[i].display();
    }
}


function renderTerrainThree(){
    // drawing the terrain in the back
    noStroke();
    fill(51, 16, 84); 
    beginShape(); 
    for (var i = 0; i < width; i++) {
        var t = (i * terrainDetailThree) + (millis() * terrainSpeedThree);
        //terrain's y coordinate
        var y = map(noise(t), 0, 1.5, height / 8, height);
        //keep drawing terrain
        vertex(i, y);
    }
    //terrain constraints
    vertex(width, height);
    vertex(0, height);
    endShape();
}

function renderTerrainTwo() {
    // drawing terrain number two(in the middle)
    noStroke();
    fill(71, 11, 6, 200); //low-opacity color of maroon
    beginShape();
    for(var a = 0; a < width; a++){
        var b = (a * terrainDetail) + (millis() * terrainSpeed);
        var c = map(noise(b), 0, 1, 0, height / 4);
        vertex(a, c);
    }
    vertex(width, height);
    vertex(0, height);
    endShape(CLOSE);

}


function renderTerrainOne() {
    //drawing the terrain in the very front
    noStroke();
    fill(235, 64, 52);
    beginShape();
    for(var x = 0; x < width; x++){
        var t = (x * terrainDetailTwo) + (millis() * terrainSpeed);
        var y = map(noise(t), 0, 1, 0.55, height + 100);
        vertex(x, y);
    }
    vertex(width, height);
    vertex(0, height);
    endShape(CLOSE);
  
}

I was originally inspired by the horror film ‘Us’, which was released this year, and wanted to illustrate the “Tethered”. However, they ended up looking more like lava because of their color, so I ended up creating a landscape with holes of lava. This project was really fun and helped me understand objects better.

my sketch!

Sarah Choi- Looking Outwards – 11

Camille Utterback is an internationally acclaimed artist and a pioneer in the field of digital interactive art. She explains her work as “an attempt to bridge the conceptual and the corporeal.” Represented by Haines Gallery in San Francisco, she is currently an Assistant Professor of Art Practice at Stanford University. She earned a BA in Art at Williams College and a Master’s degree in Interactive Telecommunications at NYU’s Tisch School of the Arts. In May of 2018, she created Precarious for the National Portrait Gallery exhibition Black Out: Silhouettes Then and Now, an interactive installation that traced human silhouettes with an algorithmic apparatus on a backlit screen.

Using contemporary digital tools, the installation continuously traced the audience’s figures. Through custom-coded interactive drawing systems, she built on an algorithmically generated visual language she had been developing for many years. The points of each silhouette exert a force on the others, creating an ongoing momentum that keeps redrawing outlines and merging bodies into one shared space. Through this piece, Utterback tries to redefine personal boundaries, letting her audience explore what happens when those barriers are broken and a more open and welcoming atmosphere emerges.
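
To make that mechanism concrete, here is a toy p5.js sketch of points nudging one another so that an outline keeps redrawing itself; the forces, counts, and starting shape are invented for illustration and are not Utterback’s actual tracking or drawing system.

// Illustrative only: a ring of points that repel each other slightly,
// leaving accumulating traces as the outline deforms.
var pts = [];

function setup() {
    createCanvas(480, 480);
    // start the points on a rough circular "silhouette"
    for (var i = 0; i < 60; i++) {
        var a = random(TWO_PI);
        pts.push({x: width / 2 + 100 * cos(a), y: height / 2 + 100 * sin(a)});
    }
    background(255);
}

function draw() {
    for (var i = 0; i < pts.length; i++) {
        // every point is pushed weakly away from every other point
        for (var j = 0; j < pts.length; j++) {
            if (i === j) continue;
            var dx = pts[i].x - pts[j].x;
            var dy = pts[i].y - pts[j].y;
            var d = max(sqrt(dx * dx + dy * dy), 1);
            pts[i].x += 0.02 * dx / d;
            pts[i].y += 0.02 * dy / d;
        }
        // redraw the point each frame so the moving outline leaves a trace
        stroke(0, 20);
        point(pts[i].x, pts[i].y);
    }
}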

She believes our bodies portray abstract symbolic systems, and that functions like communication and language echo our physical selves. By forming relationships between our bodies and the interfaces and representational systems of our machines, Utterback uses interactive and computational media to depict different aspects of ourselves in her works of art.

CJ Walsh – Looking Outwards 11 – Women Practitioners

Video explaining the creative process for the myThread Pavilion

The person I have chosen to focus on this week is Jenny Sabin, an experimental architect with her own studio based in Ithaca, NY. While I found many of her projects to be super interesting, I wanted to focus on the myThread Pavilion, which was designed for Nike. I was drawn to this project by the video above, which goes into detail about the creative process and the research that led to the final form. Through a series of creative workshops, her team wanted to make connections between physical activity and architecture. Using data from a workshop focused on exercise and movement, Sabin created personalized algorithms that translated the data into methods of weaving and pattern-making. These patterns and methods were then fabricated at a large scale for the pavilion structure.

Sabin shown in front of myThread Pavilion

I think it is a really compelling project because of all the elements that went into creating it. It combines branding, creative workshopping, computation, data physicalization, and environmental/architectural design. I am really inspired by both the creative process and the physical structure itself.

Link: http://www.jennysabin.com/mythread-pavilion

Jenny Sabin

Jenny Sabin is considered to be at the forefront of innovation in architecture in the 21st century. Her practice often focuses on the intersection of architecture and science, pulling information from biology and mathematics to inform the structural possibilities of material. In addition to working at her firm, she is also a professor of architecture at Cornell.

Her educational path is pretty interesting. She completed her undergrad at the University of Washington, earning degrees in ceramics and interdisciplinary visual art, and then went on to get her master’s of architecture from the University of Pennsylvania. I think it’s really interesting that she didn’t begin studying architecture as an undergrad. She discovered this path after already starting down a different one and has become very successful in a line of work she is really passionate about.

Overall, I find her practice and work to be super exciting and it is cool to see a blend of so many different mediums, styles and elements. Her work really reflects the pathways she has taken in order to create really amazing spaces.

Mari Kubota- Looking Outwards- 11

Lumen (2017) day time

Jenny E. Sabin is an architectural designer whose work is at the forefront of a new direction for 21st century architectural practice. She is principal of Jenny Sabin Studio, an experimental architectural design studio based in Ithaca, and Director of the Sabin Design Lab at Cornell AAP, a design research lab specializing in computational design, data visualization, and digital fabrication.

Lumen (2017) night time

Lumen (2017) by Jenny Sabin is a digitally fabricated architectural work that won the 2017 Museum of Modern Art PS1 Young Architects Program. The structure is a socially and environmentally responsive lightweight knitted fabric that adapts to the density of bodies, heat, and sunlight. Lumen is composed of tubular structures, and its canopy of cellular components employs recycled textiles and photo-luminescent and solar-active yarns that absorb, collect, and deliver light. The structure also incorporates an automatic misting system that activates depending on a visitor’s proximity.

Video of building process

Jacky Tian’s LookingOutwards-10

Computer Music: The Emerging Art of Algorithmic Music

Ville-Matias Heikkila, a Finnish artist and computer programmer, has been experimenting with algorithmic music generated by small computer programs. He proposes that we should not only listen to the music but also be able to visualize it, to enhance the impact it can have on us.

Heikkila says that sometimes code and algorithms can generate surprisingly interesting music by repeating only two or three arithmetic operations. Because of this, he is really interested in creating audio and visual artworks from very simple, almost disorganized programs.
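
To give a concrete sense of what “two or three arithmetic operations” can produce, here is a minimal JavaScript sketch in that one-liner spirit; the formula is a generic placeholder chosen for illustration, not one of Heikkila’s published works.

// Each 8 kHz sample is computed directly from its index t with a handful of
// arithmetic/bitwise operations, then wrapped to 0-255 like an 8-bit DAC.
function sampleAt(t) {
    return (t * (t >> 5 | t >> 8)) & 255;
}

// Fill a one-second buffer; a Web Audio AudioBuffer could then play it back.
var samples = new Uint8Array(8000);
for (var t = 0; t < samples.length; t++) {
    samples[t] = sampleAt(t);
}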

Jacky Tian’s Project 10

sketch

// Project 10
//Yinjie Tian
//yinjiet@andrew.cmu.edu
//Section D

var unit = 50;  // horizontal spacing between the vertical lines
var angle = 0;  // rotation angle of the center square
var myOsc;      // oscillator created in soundSetup()
var s1, s2, s3; // sound clips for the three mouse-press zones

function preload() {
    s1 = loadSound("https://courses.ideate.cmu.edu/15-104/f2019/wp-content/uploads/2019/11/jtsound1.wav");
    s2 = loadSound("https://courses.ideate.cmu.edu/15-104/f2019/wp-content/uploads/2019/11/jtsound2.wav");
    s3 = loadSound("https://courses.ideate.cmu.edu/15-104/f2019/wp-content/uploads/2019/11/jtsound3.wav");
    
}


function setup() {
    // you can change the next 2 lines:
    createCanvas(640, 480);
    createDiv("p5.dom.js library is loaded.");
    //======== call the following to use sound =========
    useSound();
}


function soundSetup() { // setup for audio generation
    // you can replace any of this with your own audio code:
    myOsc = new p5.TriOsc();
    myOsc.freq(880.0);
    myOsc.setType('sine');
    myOsc.amp(0.1);
    myOsc.start();
}



// you can replace any of this with your own code:
function draw() {
    background(0);
    fill(mouseX, mouseY, 150);
    rect(0, 0, width, 160);
    fill(150, mouseY, mouseX);
    rect(0, 160, width, 160);
    fill(mouseY, 150, mouseX);
    rect(0, 320, width, 160);
    var len = 480 - mouseX;  // end y of each line: shrinks as the mouse moves right
    var sta = 640 - mouseY;  // base for each line's starting y position
    var freq = mouseX * 10;  // oscillator frequency follows mouseX

    // thick vertical lines: one per column, color and starting height scale with the column index
    for (var i = 1; i <= 13; i++) {
        strokeWeight(4);
        stroke(170, mouseX * i * 0.1, 50);
        line(unit * i, sta * i * 0.1, unit * i, len);
    }

    // thinner slanted lines: offset 25px to the right at the top, meeting the thick lines at the bottom
    for (var j = 1; j <= 13; j++) {
        strokeWeight(2);
        stroke(70, mouseX * j * 0.1, mouseY * (j + 1) * 0.05);
        line(unit * j + 25, sta * j * 0.1, unit * j, len);
    }

    fill(120, 80, mouseX * 0.5); // control rect color explicitly
    stroke(0);
    push();
    translate(mouseX, mouseY);
    rotate(radians(angle));
    rectMode(CENTER); // center rect around 0,0
    rect(0, 0, 50, 50);
    pop();
    angle = angle + mouseX * 0.05;

    myOsc.freq(freq); // oscillator pitch follows mouseX (freq computed above)
    
}

function mousePressed() {

    if (mouseY > 0 & mouseY < 160){
        s1.play();
    } else {
        s1.pause();
    }

    if (mouseY > 160 & mouseY < 320){
        s2.play();
    } else {
        s2.pause();
    }

    if (mouseY > 320 & mouseY < 480){
        s3.play();
    } else {
        s3.pause();
    }


}

For this project, I used my Project 3 as the base image. I created three mouse-press zones in different colors; pressing the mouse in each zone plays a different sound, and a sound pauses when the mouse is pressed in a different zone.

Sammie Kim— Looking Outwards—11

Angela Washko is a digital artist who creates experimental games and entertainment that often revolve around feminist themes. One project that really stood out to me is called The Game, which won the Impact Award at Indiecade. As a feminist video game, this project presents an “exploration of consent and the politics, tactics and practices of the male pick-up artist and seduction community.” The format resembles a dating simulator, where players experience several seduction techniques derived from instructional books and seduction coaches (pickup artists). Six prominent pickup gurus attempt to seduce the player and compete for the player’s attention at a bar, giving players an opportunity to explore the complex social behavior and psychology behind dating, as well as to experience being a femme-presenting individual navigating this difficult and risky path. I found that the game provokes reflection step by step, as it allows us to virtually explore and manipulate while being simultaneously complicit in frequently dehumanizing behavior. The game is unique in that it is composed entirely of these unfolding scenarios, providing a digital narrative that satirizes the convoluted system of power and desire in the world of contemporary sex and dating.

Exhibition of The Game at The Museum of Moving Image
People playing through The Game in the museum
A dialogue scene captured in The Game

Link to the artist’s website:  https://angelawashko.com/section/437138-The-Game-The-Game.html

Cathy Dong-Project-10-Interactive Sonic Sketch

sketch

/*  Cathy Dong
    yinhuid
    section D
    project-10-Interactive Sonic Sketch 
*/

var myOsc; //piano sound
var mySnd; //wave sound
var keyNum1 = 7; //lower key num
var keyNum2 = 5; //upper key num
var keyY = 0; //key y start from 0
var size = 20; //ball size


function preload() {
    // load wave sound
    mySnd = loadSound("https://courses.ideate.cmu.edu/15-104/f2019/wp-content/uploads/2019/11/sea.wav");
    mySnd.setVolume(0.1);
}


function setup() {
    createCanvas(480, 360);
    //call to use sound
    useSound();
}

// piano sound
function soundSetup() { // setup for audio generation
    myOsc = new p5.Oscillator();
    // piano sound setting
    myOsc.amp(5);
    myOsc.freq(0);
    myOsc.start();
}


function draw() {
    background(0);
    //draw piano
    pianoDraw();
    //draw yellow dot
    pressDraw();
    // play piano when pressed on
    if (mouseIsPressed) {
        mySnd.play();
        // upper keys
        if (mouseY < height / 3 * 2) {
            var keyWidth = width / keyNum1 / 2;
            var keyGap = width / keyNum1 / 4 * 3;
            // upper 1
            if (mouseX > keyGap & mouseX < keyGap + keyWidth) {
                myOsc.freq(277.18);
            }
            //upper 2
            else if (mouseX > keyGap * 2 + keyWidth / 2 & mouseX < keyGap * 2 + keyWidth * 1.5) {
                myOsc.freq(311.13);
            }
            //upper 3
            else if (mouseX > keyGap * 3 + keyWidth * 3 & mouseX < keyGap * 3 + keyWidth * 4) {
                myOsc.freq(369.99);
            }
            //upper 4
            else if (mouseX > keyGap * 4 + keyWidth * 3.5 & mouseX < keyGap * 4 + keyWidth * 4.5) {
                myOsc.freq(415.3);
            }
            //upper 5
            else if (mouseX > keyGap * 4 + keyWidth * 5.5 & mouseX < keyGap * 4 + keyWidth * 6.5) {
                myOsc.freq(466.16);
            }
        }
        // lower keys
        else if (mouseY > height / 3 * 2) {
            var keyWidth = width / keyNum1;
            // lower 1
            if (mouseX > 0 & mouseX < keyWidth) {
                myOsc.freq(261.63);
            }
            // lower 2
            else if (mouseX > keyWidth & mouseX < keyWidth * 2) {
                myOsc.freq(293.66);
            }
            //lower 3
            else if (mouseX > keyWidth * 2 & mouseX < keyWidth * 3) {
                myOsc.freq(329.63);
            }
            //lower 4
            else if (mouseX > keyWidth * 3 & mouseX < keyWidth * 4) {
                myOsc.freq(349.23);
            }
            //lower 5
            else if (mouseX > keyWidth * 4 & mouseX < keyWidth * 5) {
                myOsc.freq(392.00);
            }
            //lower 6
            else if (mouseX > keyWidth * 5 & mouseX < keyWidth * 6) {
                myOsc.freq(440.00);
            }
            //lower 7
            else if (mouseX > keyWidth * 6 & mouseX < width) {
                myOsc.freq(493.88);
            }
        }
    }

}

//draw mouse location as a yellow dot
function pressDraw() {
    noStroke();
    fill('yellow');
    ellipse(mouseX, mouseY, size, size);
}

//draw piano keyboards
function pianoDraw(){
    //lower keys
    stroke(0);
    strokeWeight(1);
    fill(255);
    for (var i = 0; i < keyNum1; i++) {
        var keyWidth = width / keyNum1;
        var keyX = i * keyWidth;
        var keyHeight = height;
        rect(keyX, keyY, keyWidth, keyHeight);
    }

    //upper keys
    fill(0);
    for (var j = 0; j < keyNum2; j++) {
        var keyWidth = width / keyNum1 / 2;
        var keyGap = width / keyNum1 / 4 * 3;
        var keyX = keyGap * (j + 1) + keyWidth * j / 2;
        var keyHeight = height / 3 * 2;
        //left two
        if (j < 2) {
            rect(keyX, keyY, keyWidth, keyHeight);
        }
        //right three
        else {
            var newX = keyX + keyWidth * 2;
            rect(newX, keyY, keyWidth, keyHeight);
        }
        
    }
}

This project creates a keyboard with different pitches, like a piano. A beach soundscape with waves and laughter is added in the background.

 

CJ Walsh – Looking Outwards 10 – Computer Music

Dezeen video about Imogen Heap’s Mi Mu gloves

For this week’s Looking Outwards I decided to focus on Imogen Heap, a British singer-songwriter and audio engineer. I really admire her work as an artist because she has taken her musical vision beyond just creating music and has experimented with new ways to create it. One of the projects she is best known for is the Mi Mu gloves, which use mapping technology to turn the movement of the hands into musical compositions. In the video above, Heap explains many of the different uses for the gloves and the movements that create changes in pitch, filtering, and many other elements. I find the product really interesting, and it adds a really cool dynamic to the way an artist can perform on stage. Heap has talked a lot about how she hated needing so much equipment on stage to create the kind of music she wanted to perform. The gloves free her from being locked to one spot and let her create a musical experience that envelops both sound and movement.

While I don’t know too much about the details of the programming, the gloves use a network of bendable sensors that track the movement of the hand and fingers. This, combined with an invisible map of the user’s space, allows the software to recognize shifts in motion and attribute them to different musical elements and sound qualities.
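
As a purely hypothetical sketch of this kind of sensor-to-sound mapping (not the actual Mi Mu software), the p5.js snippet below pretends that mouseX is a finger-bend reading and mouseY is hand height, and maps them onto an oscillator’s pitch and loudness.

var gloveOsc; // one "voice" controlled by the pretend glove data

function setup() {
    createCanvas(480, 120);
    gloveOsc = new p5.Oscillator();
    gloveOsc.setType('sine');
    gloveOsc.amp(0);
    gloveOsc.start();
}

function draw() {
    background(30);
    // stand-ins for sensor readings: mouseX ~ finger bend, mouseY ~ hand height
    var bend = map(mouseX, 0, width, 0, 1);
    var lift = map(mouseY, 0, height, 1, 0);
    gloveOsc.freq(map(bend, 0, 1, 220, 880)); // more bend, higher pitch
    gloveOsc.amp(lift * 0.3, 0.05);           // raising the hand, louder
}

function mousePressed() {
    userStartAudio(); // browsers require a user gesture before audio can start
}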

Video explaining the process of making the gloves

Overall, I just find this to be a really intriguing project. I think Heap’s vision for the future of music and performance is really interesting because I don’t think it’s something we see from a lot of musical artists. The gloves enable the user to create a wide variety of new musical experiences, and it will be really interesting to see how work like this develops in the future.

Website: https://mimugloves.com