alchan-Looking Outwards 05

I’ve really enjoyed the small 3d-modeled pieces created by Agatha Yu as part of a year-long, “make something everyday” project (the particular piece pictured is “Crystal Shore,” from February 21, 2017). They’re mostly modeled and animated in Blender. The pieces are usually low-poly or made of smooth, simple forms, which work well with the soft color palettes the creator normally uses. I think these aspects, as well as the subject matter, combine to create a specific, calming mood that really drew me into each of the pieces. I also appreciate how the style of the work is not necessarily aiming for photorealism or some other theme normally associated with computer-generated graphics, but is rather branching out in a style of its own.

rmanagad-lookingoutwards-05-sectionE

Creator: Jing Zhang

Title of Work: Ford Fiesta

Year of Creation: 2016

Link to Project Work: http://www.mazakii.com/Ford-Fiesta

Link to Artist Bio: http://www.mazakii.com/About-Contact


Ford Fiesta is a commissioned illustration created by London-based Chinese designer, Jing Zhang. To develop this work, Zhang used 3D rendering programs in Adobe Illustrator, modifying two-dimensional vector images (such as rounded rectangles) and putting them into isometric perspective. To animate the work, Zhang used keyframes in Adobe After Effects. Through loop-based algorithms, Adobe Illustrator creates vector-based elements that can be modified perspective-wise through the x, y, and z-axes.

As a designer focused on communications, Jing Zhang’s entire body of work serves as an influence in my work. I’m fascinated in what factors contribute to the believability and messages of two-dimensional images, as well as how concepts can be delivered and nuanced. The Ford Fiesta illustration is an example of this — Zhang emphasizes the car by shrinking the proportions of the architecture and using contrasting colors, which leads to our engagement with the images present in the animation.

Ford Fiesta — Jing Zhang

rmanagad-Project04-StringArt-SectionE

sketch

//Robert Managad
//Section E
//rmanagad@andrew.cmu.edu
//Project-04

// Anchor coordinates (relative to the rotated canvas center) for the two
// rings of dots drawn by drawCircle1().
var ellipX1 = 0; // x of the outer-ring ellipses
var ellipX2 = -82; // x of the inner-ring ellipses
var ellipY1 = -88; // y of the outer-ring ellipses
var ellipY2 = 0; // y of the inner-ring ellipses


// p5.js entry point: one-time creation of the 300x400 canvas.
function setup() {
    createCanvas(300, 400);
}

function draw() {
	background(255);
	noStroke();
	// 24 rotated dot-and-line "spokes" radiating from the canvas center.
	for (var spoke = 0; spoke < 24; spoke++) {
		drawCircle1(spoke);
	}

	// Parabolic "organism" in the center: six anchor positions along each
	// axis, interpolated between the outer constraints and skipping the
	// midpoint (fractions .125-.875 with .5 omitted).
	var fracs = [0.125, 0.25, 0.375, 0.625, 0.75, 0.875];
	var xs = [];
	var ys = [];
	for (var k = 0; k < fracs.length; k++) {
		xs.push(lerp(100, 200, fracs[k])); // outer x constraints: 100..200
		ys.push(lerp(150, 245, fracs[k])); // outer y constraints: 150..245
	}

	// Mouse position (clamped to the anchor ranges) reshapes the organism.
	var yconver = constrain(mouseY, 150, 245);
	var xconver = constrain(mouseX, 100, 200);

	stroke(0);
	strokeWeight(2);
	// Each x anchor connects to a mirrored y anchor: the first fan uses
	// ys[2,1,0,0,1,2], the second fan uses ys[3,4,5,5,4,3].
	var upper = [2, 1, 0, 0, 1, 2];
	var lower = [3, 4, 5, 5, 4, 3];
	for (var a = 0; a < xs.length; a++) {
		line(xs[a], yconver, xconver, ys[upper[a]]);
	}
	for (var b = 0; b < xs.length; b++) {
		line(xs[b], yconver, xconver, ys[lower[b]]);
	}
}
		
// Draws one "spoke" of the ring: an outer dot, an inner dot, and the line
// joining them, all rotated about the canvas center by the spoke index.
// dotcount: spoke index 0..23 (24 spokes * 15 degrees = full circle).
//
// FIX: the original repeated the identical push/translate/rotate/rotate
// sequence three times (once per shape). One transform block applied to all
// three shapes produces the same output with the duplication removed; it
// also adds the missing semicolons after fill(0).
function drawCircle1(dotcount) {
	push();
	translate(width/2, height/2); // move origin to canvas center
	rotate(radians(dotcount * 15)); // spoke rotation about the center
	rotate(mouseY); // NOTE(review): mouseY used as raw radians — spins fast; confirm intended
	fill(0);
	ellipse(ellipX1, ellipY1, 8, 8); // outer dot
	ellipse(ellipX2, ellipY2, 4.75, 4.75); // inner dot
	stroke(0);
	strokeWeight(2);
	line(ellipX1, ellipY1, ellipX2, ellipY2); // line follows the two dots
	pop();
}

I was inspired by the form and movement of birds during the initial ideation phase of my project, as well as animal nests. In thinking about these two concepts, I developed an interactive mouse-based string work that changes the “form” of the organism inside of the parabolic nest. As always, I used Illustrator to determine my composition (and widths, heights, coordinates) prior to starting the code.

 

mecha-project04-string-art

sketch

//global variables that hold x and y position as well as the amount that is added to both values
//(different x and y variables per function)

// Anchors and per-frame step sizes for the first fan (lines(), top-left).
var x1 = -100;
var y1 = 0;
var x1Step = 2;
var y1Step = 4;

// Anchors and per-frame step sizes for the mirrored fan (lines2(), bottom-right).
var x2 = 400;
var y2 = 0;
var x2Step = 2;
var y2Step = 2;

// Grayscale bases: gradient/gradient2 shade the fans; gradient3 is the
// background value, decremented once per frame so the canvas darkens.
var gradient = 0;
var gradient2 = 256;
var gradient3 = 256;

// Anchors and step sizes for the two slower corner fans (lines34()).
var x11 = 0;
var x22 = 400;
var y11 = 300;
var y22 = 0;
var x11Step = 0.5;
var y11Step = 1;
var x22Step = 0.5;
var y22Step = 1;

// p5.js entry point: one-time creation of the 400x300 canvas.
function setup() {
    createCanvas(400,300);
}

function draw() {
    // Repaint with the current background shade, then darken it one step
    // per frame so the canvas slowly fades toward black.
    background(gradient3);
    gradient3 -= 1;
    // Layer the three animated line fields on top.
    lines();
    lines2();
    lines34();
}

//lines converging in center
// Fan of thin lines converging toward the top-left; the fan's x origin
// tracks the mouse, clamped so it stays at or left of the canvas edge.
function lines(){
    var originX = constrain(mouseX, -100, 0);
    strokeWeight(0.8);
    for (var step = 0; step < 1000; step += 10) {
        stroke(gradient + step); // lighten as the fan spreads
        line(originX + step, 0, 0, y1 - step);
    }
    // Per-frame drift of the fan anchors.
    x1 += x1Step; // NOTE(review): x1 is advanced but never read — possibly vestigial
    y1 += y1Step;
}

//lines converging in center
// Mirrored fan converging toward the bottom-right; the mouse (clamped to
// the same off-canvas range) sets the y target along the right edge.
function lines2(){
    var targetY = constrain(mouseX, -100, 0);
    strokeWeight(0.8);
    for (var step = 0; step < 1000; step += 10) {
        stroke(gradient2 - step); // darken as the fan spreads
        line(x2 - step, 300, 400, targetY + step);
    }
    // Per-frame drift of the fan anchors.
    x2 -= x2Step;
    y2 += y2Step; // NOTE(review): y2 is advanced but never read — possibly vestigial
}

//slower corner lines
// Two sparser corner fans (top-left and bottom-right), stepped wider so
// fewer lines are drawn per frame.
function lines34(){
    for (var step = 0; step < 1000; step += 30) {
        var shade = gradient + step * 1.2;
        stroke(shade);
        line(x11 + step, 0, 0, y11 - step);
        stroke(shade);
        line(x22 - step, 300, 400, y22 + step);
    }
    // Drift the anchors each frame so the fans sweep out curves over time.
    x11 += x11Step;
    y11 -= y11Step;
    x22 -= x22Step;
    y22 += y22Step;
}

For this project, I decided that I wanted to play with adding motion into my string art. I started with figuring out the values in order to make the first loop of lines in the top left corner and used my experimentation with that in order to mirror the lines into the bottom right corner.

While I thought that the two looked interesting together, I decided that rather than create two more loops for the other corners, I wanted to create two new series of lines that would start from the same corner but act in a very different way. While playing around with that concept, I came up with the code below.

sketch

//lines1 variables
//lines1 variables
// First fan pair: anchors and per-frame step sizes (top-left / bottom-right).
var x1=0;
var x1Step=0.5;
var y1=300;
var y1Step=1;
var x2=400;
var x2Step=1;
var y2=0;
var y2Step=1;
// Base grayscale value for the gradient strokes.
var gradient=0;

// Second fan pair: anchors and per-frame step sizes.
var y3=100;
var y3Step=0.5;
var x3=1000;
var x3Step=1;
var y4=400;
var y4Step=1;
var x4=400;
var x4Step=1;


// p5.js entry point: one-time creation of the 400x300 canvas.
function setup() {
    createCanvas(400,300);
}

// p5.js frame loop: clear to a light pink, then draw all four line fans.
function draw() {
    background(255,200,200);
    lines1();
}

// Draws all four animated line fans and advances their anchors one step.
//
// FIX: the guard used bitwise `&` instead of logical `&&`. It happened to
// work on booleans (1 & 1 === 1 is truthy), but `&&` is the correct
// operator; indentation is also normalized.
function lines1(){
    // First pair of fans, drawn only while the y1 anchor is strictly
    // inside the canvas height (300).
    if (y1 > 0 && y1 < 300) {
        for (var i = 0; i < 1000; i += 20) {
            strokeWeight(0.8);
            //gradient black to white lines
            stroke(gradient + i * 1.2);
            line(x1 + i, 0, 0, y1 - i);
            //teal lines
            stroke(gradient + i * 1.2);
            line(x2 - i, 300, 400, y2 + i);
        }
    }
    // Second pair: sparser black and white fans from the opposite corners.
    for (var j = 0; j < 1000; j += 60) {
        strokeWeight(0.8);
        stroke(0);
        line(0, y3 + j, x3 - j, 0);
        stroke(256);
        line(0, y4 - j, x4 + j, 0);
    }
    // Per-frame drift of every anchor.
    x1 += x1Step;
    y1 -= y1Step;
    x2 -= x2Step;
    y2 += y2Step;
    x3 += x3Step;
    y3 -= y3Step;
    x4 -= x4Step;
    y4 += y4Step;
}

Although I found that I was able to satisfy the requirements of the project, I decided that I was not satisfied with the inconsistent ways that the lines would move, so I restarted and played around with changing different variables. Through more experimentation, I was able to come up with my final code, which appeared more sophisticated in terms of the spacing of the grid it created as well as the monochrome colors.

agusman-Project04-StringArt

// p5.js entry point: full-window canvas with a dark gray initial background.
function setup() {
  createCanvas(windowWidth, windowHeight);
  background(60);

}

// p5.js frame loop: repaints the canvas, then draws a yellow-to-blue
// gradient fan of lines anchored at the mouse position.
//
// FIX: the original nested two loops that both declared `var i`; with var
// hoisting they shared one counter, so the inner loop exhausted it and the
// outer loop body ran exactly once. The net behavior was a single sweep of
// i over [0, windowWidth), which is written directly here. Also: `from`
// and `to` were implicit globals (now declared), and the local named
// `lerp` shadowed p5's lerp() (renamed to `amt`).
function draw() {
  // Solid rect acts as the per-frame clear.
  fill(50);
  rect(0, 0, windowWidth, windowHeight);

  var from = color(200, 200, 32); // gradient start (yellow)
  var to = color(72, 61, 200);    // gradient end (blue)
  for (var i = 0; i < windowWidth; i++) {
    var amt = map(i, 0, windowWidth, 0, 1); // 0..1 across the canvas width
    stroke(lerpColor(from, to, amt));
    line(0 + mouseX, i + mouseY,
         i + cos(radians((i * 5) % 360)) + 50,
         windowHeight / 2 + sin(radians((i * 5) % 360)) * 5);
  }
}
// Keep the canvas matched to the browser window on resize.
function windowResized() {
  resizeCanvas(windowWidth, windowHeight);
}

dayoungl – LookingOutwards 04

http://www.norment.net/work/sonic-performance-ind/rapture/

Camille Norment is an Oslo-based multimedia artist who works with sound, installation, sculpture, drawing, performance and video. For her piece, “Rapture”, she utilizes 12 voice chorus, glass armonica tones, and audio devices. According to the artist, ‘Rapture’ is a three-part project consisting of 1) a solo exhibition in the Nordic Pavilion, 2) a series of sonic performances unfolding from physical and referential elements within the installation, and 3), a three-part publication that explores the greater context of the investigation through a variety of essays relating sound to the body and society. Mainly, the artist explores the tension of constantly changing time and uncertainty of times. ‘Rapture’ uses vibrations (sound waves) for a phenomenological and socio-political reflection upon the body and mind’s relationship to trauma, ecstasy and the state of becoming. The body of the pavilion itself is subject to this experience, allowing visitors to witness, and enter within a body suspended in a state of excitation. Through this piece, visitors walk into a space of contrasting values: body of art in order and in chaos; art in harmony and in dissonance.

adev_Looking Outwards 04_Sound Art

Intersection, 1993

Don Ritter

This art installation has a lot of interesting things going on in it. It is essentially a group of eight speakers, spread out to create four “lanes” in a dark room with no light. The sound is of cars rushing past and when a visitor walks between these speakers, or in one of these lanes, these cars react and come to a screeching halt. If a visitor stays stationary for longer than eight seconds, more cars pile up and then zip off once the visitor leaves.

I think this is extremely thought provoking. It uses sound to create this feeling of rush and fear from car collision experience, one we imagine to be more about our physiological, visual experiences. It uses sound to affectively get that feeling across. It also provides this dark room where this interaction takes place, something that feeds into the fear while also making the experience much more visceral. I think the process makes you and other people so much more aware of you, in spite of the darkness. The screeching of a car is such a convergence of lives and people. In the moment of a car halt, we recognise each other, a very human feature of our inner nature comes out; it's this strange act of survival in one's everyday life, an average red flag.

Sheenu-Looking Outwards-04


Seaquence is a game on the app store that allows you to spawn creatures that produce music by themselves. The creature’s shape determines how a sound is made. Adding notes or making musical sequences and even changing the waveform of sounds alters how the creature looks. A variety of different creatures can form an ensemble of music with just a tap of a button.

The game was developed by a company called Okaynokay and evolved based on a flash game years ago. According to the creators, the game was made and designed using a custom programmed physics engine which allows the creatures to move on their own based on their notes and give the player a game-like experience.

The game has a dedicated community consisting of both regular individuals and professional musicians. So far, over 300,000 songs have been made on Seaquence.

I really admire the fact that people can now make music with an easy-to-use and fun game. Making music is a long and hard process, but with this game many average and ordinary people can now create their own compositions and get inspired by other creators out there. Making a process that is long and complicated easier and more accessible to the public truly makes this game special in my opinion.

agusman-LookingOutwards-04

The Classifier on Creative Applications

The Classifier

Creators: Benedict Hubener, Stephanie Lee, Kelvyn Marte, Andreas Refsgaard and Gene Kogan

The Classifier is an AI paired with a table interface that selects music based on the beverages people are consuming around it at that time. The system is comprised of a microphone that takes in the ambient noise of drinking the beverage and an AI that compares the characteristic sounds to pre-trained models. The current limitation of the models is that they are constrained to three categories: hot beverages, wine or beer. The approach they take in trying to characterize the beverages by their distinctive noises is really unique, yet I have to wonder what other sensors or methods they could’ve used to collect more… telling… kinds of information, such as heat, color, carbonation- which could possibly lead to expanding the beverage categories from the current three.

I’ve also always been fascinated by generating contextual music. In my time at CMU, I’ve come across several student projects who’ve sought to use visual and other sensory input as the lens through which music is created. A friend of mine tried to recreate the sound track of a classic episode of “Tom & Jerry” just purely through a series of frames. Seems like a fascinating and incredibly enigmatic field of study that I would love to pursue.

dayoungl Project 04

sketch

//Sharon Lee
//dayoungl@andrew.cmu.edu
//Section E
//Project 4
// Spacing between successive string endpoints along y and x.
var ychange = 30;
var xchange = 10; 
// Fixed anchor coordinates for the string fans.
var x1 = 1;
var x2 = 150;
var x3 = 100;
var y1 = 200;
var y2 = 1;
var y3 = 55;
// Current string color; randomized at startup and on every mouse click.
var r;
var g;
var b;
// p5.js entry point: pick the first random colour and create the canvas.
function setup(){
	// randomized colour
	r = random(255);
	g = random(255);
	b = random(255);
	createCanvas(300,400);	
}

// p5.js frame loop: 100 iterations of strings per frame — thin black fans
// anchored at (x2, mouseY) and (x2, y1), plus colored strings that use the
// click-randomized r/g/b values and follow the mouse.
function draw() {
	background (255);
	for (var n = 0; n < 100; n++) {
		var spread = ychange * n; // y spacing for this iteration
		strokeWeight(0.5);
		stroke(0);
		line(x2, mouseY, 300, spread);
		line(x2, mouseY, 0, spread);
		stroke(r, g, b);
		strokeWeight(1);
		line(mouseX, spread, xchange * n, y2);
		line(x2, y1, 300, spread);
		line(x2, y1, 0, spread);
	}
}

//when mouse is clicked, string changes colour
// When the mouse is clicked, re-roll the string colour. draw() applies it
// via stroke(r, g, b) on every frame, so no stroke call is needed here.
//
// FIX: the original called stroke(r, g, b) with the *old* colour values
// immediately before reassigning them — a no-op, removed.
function mousePressed (){
	r = random(255);
	g = random(255);
	b = random(255);
}

I wanted to make my string art as minimal as possible because I like minimalistic art. I like the fact that this looks like a graphic poster.