Final Project — Yoshi Torralva

sketch

//Yoshi Torralva
//yrt@andrew.cmu.edu
//Final Project 
//Section E

//storefront mockup
var mockup;
//input of camera variable
var cameraData;
//points for the hypotrochoid
var pixelPoints = 1000;
//location of hypotrochoid x position array
var placeCurvesX = [];
//location of hypotrochoid y position array
var placeCurvesY = [];
// array to store list of colors at 16 points
var getColors = [];
// array to store brightness of pixels at 16 points
var getBrightness = [];
// variable to rotate each of the 16 hypotrochoids individually
var hypotrochoidsRotate = -1;
// scaled canvas for the interaction in the mockup
var SquareW = 154;

//loading the storefront mockup image
//vector image created by me with Adobe Illustrator 
function preload() {
    mockup = loadImage("https://i.imgur.com/pTMuI2T.png");
}
//connecting cameraData variable to createCapture
function setup() {
    createCanvas(600, 480);
    cameraData = createCapture(VIDEO);
    cameraData.hide();
    noStroke();
}

function draw() {
    background(172, 34, 104);
    //loading the camera's pixels
    cameraData.loadPixels();
    //reflecting and scaling the camera data
    //when visitors move to the right/left, it properly mirrors their movements
    push();
    translate(150, 0);
    scale(-1, 1);
    //sizing cameraData to the screen
    image(cameraData, 0, 0, 154, 154);
    pop();
    //calling the hypotrochoids function to draw 16 hypotrochoids
    hypotrochoids(); 
    //scaling the image from imgur
    //placing it above the hypotrochoids so they don't go over the screen
    push();
    scale(0.24);
    image(mockup, 0, 0);
    pop();
}
//creating a function to draw a singular hypotrochoid
//setting four parameters: x position, y position, the curve's color at one of the 16 points,
//and the brightness at that point
function drawCurve(x, y, curveColor, curveBrightness) {
    push();
    //translating with x and y for call parameters
    translate(x, y);
    //using the brightness in the map() of h and a to adjust size based on camera
    var h = map(curveBrightness - 100, 0, height, 0, curveBrightness + 100);
    var a = map(curveBrightness - 50, 0 , width, 0, curveBrightness + 50);
    //adding 50 to curveBrightness for more detail
    curveBrightness = curveBrightness + 50;
    var b = a / curveBrightness;
    //a small strokeWeight for clarity between the curves
    strokeWeight(0.05);
    noFill();
    //using the curveColor parameter 
    //adding red and blue to display the hypotrochoids as pink/red
    stroke(curveColor + 200, 0, curveColor + 30);
    //for loop that steps through the parametric angle t to draw the hypotrochoid
    beginShape();
    // adding 5 extra points to close the hypotrochoid
    for (var i = 0; i < pixelPoints + 5; i++) {
        var t = map(i, 0, pixelPoints, 0, TWO_PI);
        //hypotrochoid parametric equations
        x = (a - b) * cos(t) + h * cos(((a - b) / b) * t);
        y = (a - b) * sin(t) - h * sin(((a - b) / b) * t);
        //slight rotation implemented, accumulating a tiny amount each vertex
        rotate(hypotrochoidsRotate);
        hypotrochoidsRotate = hypotrochoidsRotate + 0.0001;
        vertex(x, y);
    }
    endShape();
    pop();
}
// using nested for loops to replicate the hypotrochoid into a 4 x 4 grid of 16
function hypotrochoids() {
//clearing last frame's samples so the arrays only hold the current 16 points
placeCurvesX = [];
placeCurvesY = [];
getColors = [];
getBrightness = [];
//translating hypotrochoids function to fit into the square screen
push();
translate(302, 185);
    //4 columns across the width of the screen
    for(var i = 0; i < 4; i++) {
        //4 rows down the height of the screen
        for(var t = 0; t < 4; t++) {
            //locationofLoopi and locationofLoopt space out the hypotrochoids
            var locationofLoopi = i * SquareW/4 + 10;
            var locationofLoopt = t * SquareW/4 + 10;
            //getting the color at (locationofLoopi, locationofLoopt)
            var colorLoop = get(locationofLoopi, locationofLoopt);
            //getting the brightness of the colors
            var sizeLoop = brightness(colorLoop);
            rectMode(CENTER);
            //implementing spacing into 16 hypotrochoids
            drawCurve(locationofLoopi, locationofLoopt, sizeLoop + 20, sizeLoop + 20);
            //pushing this point's location, color, and brightness into the arrays
            placeCurvesX.push(locationofLoopi);
            placeCurvesY.push(locationofLoopt);
            getColors.push(colorLoop);
            getBrightness.push(sizeLoop);
        }
    }
//loop over the 16 stored points and redraw curves from the saved arrays
    for (var a = 0; a < placeCurvesX.length; a++) {
        //red() pulls a numeric channel from the stored color so drawCurve can offset it
        drawCurve(placeCurvesX[a], placeCurvesY[a], red(getColors[a]), getBrightness[a]);
    }
pop();
}

For my final project, I wanted to explore how computational practices can be applied to a physical space. I decided to focus on creating a reactive storefront that uses a camera to show the movement of visitors in a visually intriguing way. To inspire a creative direction for the storefront, I created an athletic clothing brand called ové. Ové derives from the French l'oeuvre, meaning work; Ové is also made of the letters shared by Move and Woven. To make the hypotrochoid curves reactive to the camera's data, I sample pixel brightness and color at 16 specific points. The result is an interactive storefront that reacts to foot traffic and would ultimately entice people to enter the store.
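Since the whole interaction rests on those 16 sample points, here is a minimal stand-alone sketch of just the sampling idea, assuming the same p5.js setup (a webcam capture drawn into a 154-pixel square and a 4 x 4 grid); the names cam and squareSize are illustrative and not part of the project code.

//minimal sketch of the 4 x 4 sampling idea, assuming p5.js and a webcam
var cam;
var squareSize = 154; //assumed size of the camera square

function setup() {
    createCanvas(600, 480);
    cam = createCapture(VIDEO);
    cam.hide();
}

function draw() {
    background(0);
    image(cam, 0, 0, squareSize, squareSize);
    //sample brightness and color at 16 evenly spaced points
    for (var i = 0; i < 4; i++) {
        for (var j = 0; j < 4; j++) {
            var x = i * squareSize / 4 + 10;
            var y = j * squareSize / 4 + 10;
            var c = get(x, y);     //color of the drawn camera image at this point
            var b = brightness(c); //0-100 brightness value
            //use the sample to drive any visual parameter, e.g. a circle's size
            noFill();
            stroke(c);
            ellipse(x, y, b * 0.3, b * 0.3);
        }
    }
}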

Yoshi Torralva-Looking Outwards-12

ZPump Interactive Campaign
Ford Explorer advertisement

As my final project proposal focuses on interactive environments, I selected the Animal Agency's Reebok ZPump campaign, which motivates people to run as fast as they can to win a free pair of shoes. What I admire about this project is that it turns a traditional billboard into an interactive element through technology. A sensor measures speed as people run across the advertisement to see how fast they can go; if they reach a specific rate, the billboard unlocks a shoe that can be taken. The second work is an advertisement for the Ford Explorer: the user scans a QR code and places the phone on the magazine, creating the illusion of a moving car on the page. Like the Reebok campaign, it involves the sensing of human interaction that I want to implement in my final project proposal.

Yoshi Torralva – Project 12 – Proposal

For my final project, I’m interested in exploring the opportunities in interactive advertising. Over the past decade, advertising has changed drastically, from paper ads to social media posts and, more recently, interactive environments. As we approach winter, I wanted to focus on winter clothing and how I can make the textiles interactive for people passing a storefront. To achieve this, I plan on using the input from the camera to adjust one or more curves on the screen. I plan on comparing pixels from the camera to determine the specific actions that the curves take. The actual output from the camera will not be shown; it will be layered over by a blank canvas and whatever type of curve I choose. To show the advertisement in action, I plan on placing the ad in a storefront mockup to support the posted WordPress file. Overall, I’m quite excited to learn how to use camera data to engage an audience.
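One possible way to "compare pixels from the camera" is simple frame differencing; the sketch below is only an assumption about how that comparison might work in p5.js (the variable name prevBright and the 4 x 4 grid are illustrative), not the final implementation. Points where the brightness changes a lot between frames suggest motion, which could then steer a curve.

var cam;
var prevBright = []; //assumed storage for last frame's brightness samples

function setup() {
    createCanvas(480, 480);
    cam = createCapture(VIDEO);
    cam.hide();
}

function draw() {
    background(240);
    image(cam, 0, 0, width, height);
    var index = 0;
    for (var i = 0; i < 4; i++) {
        for (var j = 0; j < 4; j++) {
            var x = i * width / 4 + width / 8;
            var y = j * height / 4 + height / 8;
            var b = brightness(get(x, y));
            //compare this frame's brightness with the previous frame's
            var change = abs(b - (prevBright[index] || b));
            prevBright[index] = b;
            index++;
            //more change at a point means more motion there,
            //which could then drive how a curve behaves
            noFill();
            stroke(0);
            ellipse(x, y, change * 4, change * 4);
        }
    }
}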

Yoshi Torralva – Looking Outwards – 11

Front view of Spider Dress 2.0
Video of Spider Dress 2.0 taking shape

Anouk Wipprecht is a Dutch fashion designer who is at the forefront of exploring the intersection between human-centered technology and couture. Through her practice as a fashion designer, she creates designs that react to both the wearer and the wearer's external environment. In this Looking Outwards post, I will focus on the Spider Dress. The Spider Dress is made of 3D-printed parts, motors, sensors, and an Intel Edison. Through these components, the dress reacts to people who approach the wearer: depending on the approaching person's speed and the wearer's data, the spider's legs jolt out quickly or slowly. I admire how her designs push fashion toward reactive pieces of clothing that take on their own personality and heighten the wearer's as well.

Yoshi Torralva-Generative Landscape

sketch

//Yoshi Torralva
//yrt@andrew.cmu.edu
//Section-E
//Project-11-Generative-Landscape 
var runningTree = [];

function setup() {
    createCanvas(480, 480);
    //placing trees on the first canvas frame
    for (var i = 0; i < 10; i++) {
        var rx = random(width);
        runningTree[i] = runningTreeObject(rx);
    }
    frameRate(8);
}
function draw() {
    background(18, 36, 64);
    //moon drawn as a circle, overlapped by a background-colored circle to make a crescent
    fill(255, 240, 186);
    ellipse(100, 200, 100, 100);
    fill(18, 36, 64);
    ellipse(120, 190, 80, 80);
    fill(74, 74, 7);
    //back horizon line
    rect(0, 400, width, 200);
    //adding functions to move trees across canvas
    updateRunTree();
    removeTree();
    addingTrees(); 
    //front horizon line
    fill(51, 54, 1);
    rect(0, 420, width, 200);
}
//updating tree movement
function updateRunTree(){
    for (var i = 0; i < runningTree.length; i++){
        runningTree[i].move();
        runningTree[i].display();
    }
}
//deleting trees after leaving canvas
function removeTree(){
    var keepTreeLoop = [];
    for (var i = 0; i < runningTree.length; i++){
        if (runningTree[i].x + runningTree[i].widthTree > 0) {
            keepTreeLoop.push(runningTree[i]);
        }
    }
    runningTree = keepTreeLoop;
}
//adding new trees
function addingTrees() {
    var randomTreeAdding = 0.1; 
    if (random(0,1) < randomTreeAdding) {
        runningTree.push(runningTreeObject(width));
    }
}
//moving the tree every time it is redrawn
function movingTree() {
    this.x += this.speed;
}
// draw one tree: trunk, greenery, and trailing dust
function Tree() {
    var minHeightOfTree = 60;
    var treeHeight = this.heightOfTree * minHeightOfTree; 
    noStroke(); 
    push();
    //moving bases to the bottom horizon line 
    translate(this.x, 420);
    //tree stumps
    fill(this.wood); 
    rect(0, -treeHeight, this.widthTree, treeHeight);
    //greenery of the tree
    //variations of green called from the tree object
    fill(this.colors);
    ellipse(random(10,20), -treeHeight + random(10,15), treeHeight, treeHeight);
    //for loop made to show motion of trees in the grass
    //10 opaque variations of the dust from running trees
    for (var i = 0; i < 10; i++) {
        fill(20,0,0,30);
        noStroke();
        //random location not made in object as it redraws
        ellipse(random(10, 50), random(10,50), this.scaleOfGreens, this.scaleOfGreens);
    }
    pop();
}
function runningTreeObject(startX) {
    var object = {x: startX,
        widthTree: 20,
        speed: -5.0,
        //multiply to randomize height of tree
        heightOfTree: round(random(1, 20)),
        //size of tree bush
        scaleOfGreens: round(random(100,300)),
        move: movingTree,
        display: Tree,
        //varied green color
        colors: randomColor(),
        //varied wood color
        wood: randomWoodColor()
        }
    return object;
}
//color of leaves
function randomColor() {
    return [Math.floor(random(0)), Math.floor(random(30,100)), Math.floor(random(10,20))]
}
//varied color of wood
function randomWoodColor() {
    return [Math.floor(random(20,50)), Math.floor(random(0,20)), Math.floor(random(0))]
}

Initial sketch of Generative Landscape

With this project, I wanted to generate a landscape that gives dynamic motion to the elements of the landscape itself. I decided to give movement to the trees, as if they were running across the ground, and added varied opaque ellipses to show trailing dirt clouds. I made the background nighttime, with a moon, to depict the trees running in the night.

Yoshi Torralva-Looking Outwards-10

Album cover of Classical Music Composed by Computer: Experiments in Music Intelligence

On the topic of computer-generated music, I found this 1997 album by David Cope titled Classical Music Composed by Computer: Experiments in Musical Intelligence. A professor at the University of California, Santa Cruz, Cope started as a trained musician but found a keen interest in the world of computing as it rose in popularity. He realized how his musical approach paralleled that of programming, and at that point in his life he saw the opportunity to explore where music meets computing. Eventually, Cope created a program called Experiments in Musical Intelligence, which generates music based on data collected from various scores, including Cope's own music. For this album, the program generated the sheet music that musicians then played and recorded. What I admire about this work is how it finds a balance between human-made sound and generative computing: in its final stage, each piece is performed on real instruments, but the original composition is derived from a computer program.

Yoshi Torralva-Project-10-Sonic-Sketch

sketch

//Yoshi Torralva
//yrt@andrew.cmu.edu
//Section E
//Project-10-Sonic-Sketch
var gOpen;
var eOpen;
var dOpen;
var aOpen;

function preload() {
    // call loadImage() and loadSound() for all media files here
    gOpen = loadSound("https://courses.ideate.cmu.edu/15-104/f2019/wp-content/uploads/2019/11/gstring.wav");
    eOpen = loadSound("https://courses.ideate.cmu.edu/15-104/f2019/wp-content/uploads/2019/11/estring.wav");
    dOpen = loadSound("https://courses.ideate.cmu.edu/15-104/f2019/wp-content/uploads/2019/11/dstring.wav");
    aOpen = loadSound("https://courses.ideate.cmu.edu/15-104/f2019/wp-content/uploads/2019/11/astring.wav");
}
function setup() {
    createCanvas(480, 480);
    useSound();
}
function soundSetup() {
    //adjusting the volume
    //lowering a and e as they are higher notes
    gOpen.setVolume(1);
    eOpen.setVolume(0.1);
    dOpen.setVolume(1);
    aOpen.setVolume(0.2);

}


function draw() {
    // you can replace any of this with your own code:
    background(245, 229, 215);
    noStroke();
    //violin body
    fill(150, 74, 12);
    ellipse(width/2, 400, 400, 400);
    fill(245, 229, 215);
    ellipse(50, 525, 120, 200);
    fill(245, 229, 215);
    ellipse(430, 525, 120, 200);
    //violin finger board
    fill(0);
    rectMode(CENTER);
    rect(240, 0, 100, 800);
    //strings
    fill(61, 51, 42);
    rectMode(CORNER);
    rect(210, 0, 4, 480);
    rect(230, 0, 4, 480);
    rect(250, 0, 4, 480);
    rect(270, 0, 4, 480);
    //violin bow
    //visual indicator to show over the strings 
    rectMode(CENTER);
    fill(51, 28, 9);
    rect(mouseX, mouseY, 480, 20);
    //creating constraints for the start and stop of each string
    //slight overlap between some ranges for better transitions
    //only start a string if it is not already playing, and stop it when the bow leaves
    if (mouseX > 200 && mouseX < 225) {
        if (!gOpen.isPlaying()) {
            gOpen.play();
        }
    } else {
        gOpen.stop();
    }
    if (mouseX > 220 && mouseX < 240) {
        if (!dOpen.isPlaying()) {
            dOpen.play();
        }
    } else {
        dOpen.stop();
    }
    if (mouseX > 240 && mouseX < 265) {
        if (!aOpen.isPlaying()) {
            aOpen.play();
        }
    } else {
        aOpen.stop();
    }
    if (mouseX > 265 && mouseX < 280) {
        if (!eOpen.isPlaying()) {
            eOpen.play();
        }
    } else {
        eOpen.stop();
    }

}

With this project using sound, I wanted to create an instrument, so I chose a violin. The project represents the violin in a simple, close-up manner. I trigger the sound of the open G, D, A, and E strings based on the mouseX position; when mouseX moves outside a string's range, that sound stops. Slight overlaps between the ranges create better-sounding transitions between open strings.
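As a design note, the four if/else blocks above could also be written against a small table of ranges. The sketch below is just that alternative, assuming gOpen, dOpen, aOpen, and eOpen are the loaded p5.SoundFile objects from the sketch; buildStrings and playStrings are illustrative names (buildStrings would be called once in setup, playStrings every frame in draw).

//table-driven version of the string logic (a sketch, not the project code)
var strings; //assumed to be filled in setup() after the sounds are loaded

function buildStrings() {
    strings = [
        { sound: gOpen, lo: 200, hi: 225 },
        { sound: dOpen, lo: 220, hi: 240 },
        { sound: aOpen, lo: 240, hi: 265 },
        { sound: eOpen, lo: 265, hi: 280 }
    ];
}

function playStrings() {
    for (var i = 0; i < strings.length; i++) {
        var s = strings[i];
        //start a string when the bow is in its range, stop it otherwise
        if (mouseX > s.lo && mouseX < s.hi) {
            if (!s.sound.isPlaying()) {
                s.sound.play();
            }
        } else {
            s.sound.stop();
        }
    }
}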

Yoshi Torralva—Looking Outwards—09

eCloud installation in the San José Airport
Schematics of the eClouds for the San José Airport
Tiles used that can go from transparent to opaque.

For this Looking Outwards post, I found Joseph Zhang's post on the eCloud data visualization quite interesting. eCloud is a physical representation of weather conditions at different locations. Created in 2010 as a permanent installation in the San José Airport, it hangs along the side of a walkway, while a nearby screen shows which site's computer-generated weather the tiles are currently displaying. I want to add to Joseph's comments on the eCloud by considering how this physical visualization of weather data improves the overall space. As a whole, the opaque tiles create a feeling of direction, leading people through the terminal. Additionally, I admire how unobtrusive it is: people can glance at the screen to understand the meaning of the tiles' formation, yet still move quickly through the space. At first it looks as if no technology is attached to the tiles, but with live input data they shift slightly from transparent to opaque.

Yoshi Torralva—Project 9—Portrait

sketch

//Yoshi Torralva
//yrt@andrew.cmu.edu
//Section E
//Project—09—Portrait
var underlyingImage;

function preload() {
    var myImageURL = "https://i.imgur.com/htGpgok.jpg";
    underlyingImage = loadImage(myImageURL);
}

function setup() {
    createCanvas(480, 480);
    background(0);
    underlyingImage.loadPixels();
    //high frame rate so the portrait fills in quickly
    frameRate(360);
}

function draw() {
    //variables to randomize location
    var px = random(width);
    var py = random(height);
    var ix = constrain(floor(px), 0, width-1);
    var iy = constrain(floor(py), 0, height-1);
    var theColorAtLocationXY = underlyingImage.get(ix, iy);

    //large border that indicates which pixel is drawn
    // rectangles redrawn at different angles that creates circular border
    push();
    rectMode(CENTER);
    translate(240, 240);
    noFill();
    stroke(theColorAtLocationXY);
    strokeWeight(1);
    rotate(random(0, 10));
    rect(10, 10, 460, 460);
    pop();

    //light beams formed by random length
    noStroke();
    fill(theColorAtLocationXY);
    //shorter beams for greater quality 
    rect(px, py, random(1), random(4, 50));
    //longer beams for greater expression 
    rect(px, py, random(1), random(50, 100));

    //placed on top layer to create a more visible image
    //end points of the beams
    push();
    rectMode(CENTER);
    rect(px, py, 3, 3);
    pop();
}

start of the portrait
in the middle of the generation of the portrait
nearing the end of the generative portrait
Self-portrait

With this project, I wanted to use a simple image, which allowed me to focus more on the formulation of forms. I decided to use this self-portrait photo from the Mattress Factory, as it relies only on black and blue. I was inspired by light beams and achieved the effect with rectangles of random length, each topped with a small square. As an additional element, I added a rotating square that forms a circular border, colored by the sampled pixel colors.

Yoshi Torralva-Looking Outwards-08

Jennifer Pahlka’s talk about the importance of designing better experiences in government.
Posters created to encourage people in the city of Philadelphia to participate in polling using a texting feature.

Jennifer Pahlka is the executive director of Code for America, an organization that works to tackle government-related experience problems through a design and technological lens. She has a lifelong goal of making government operations human-centered. From 2013 to 2014, she served as the U.S. Deputy Chief Technology Officer, where she was able to effect change on a national level. In her Eyeo talk, she discusses her work at Code for America: the organization selects applicants from cities to develop points of intervention that improve government functions through design and technology. What I admire about Jennifer's work is how her observations move her to start new initiatives. For instance, she mentions listening to someone's comment about making our interactions with government easy and clear, and that remark motivated her to develop systems that meet that standard. Jennifer also puts essential consideration into the cost and efficiency of producing specific interventions, and she emphasizes that her fellows should think carefully about the process of development. In presenting her work, she explores her thought process transparently and shows how it answers the overall design brief. One project I admire involved members of the city of Philadelphia polling in their opinions to be used in town halls; ultimately, it makes it more accessible for someone's voice to be heard.