//Brandon Darreff
//Section A
//bdarreff@andrew.cmu.edu
//Project-05
var bl; //background line x positions
var cv; //bezier curve y positions
var lcir; //large circle diameter
var scir; //small circle diameter
var cy; //circle y position
var cx; //circle x position
function setup() {
createCanvas(500, 700);
}
function draw() {
background(25);
for(bl = -100; bl < width; bl += 50) { //background white slanted lines
stroke(255, 40);
strokeWeight(1);
line(bl, 0, bl + 100, height);
}
for (var cv = -50; cv < height + 50 ; cv += 45) { //background bezier curves
noFill();
stroke(200, 50);
strokeWeight(random(0.25, 2));
bezier(0, cv - 50, 150, cv - 160, 300, cv + 190, width, cv);
stroke(231, 181, 144, 70);
strokeWeight(random(0.25, 5));
bezier(0, cv, 150, cv - 150, 300, cv + 200, width, cv - 50);
}
for (cy = 95; cy < height; cy += 170){ //large foreground circles
for (var z = 100; z < width; z += 150) {
for (lcir = 15; lcir <= 125; lcir *= 1.1) {
strokeWeight(1);
fill(255, 5);
ellipse(z, cy, lcir, lcir);
}
}
}
for (cy = 140; cy < height; cy += 170){ // circles bottom right quadrant
for (cx = 130; cx < width; cx += 150) {
for (scir = 15; scir <= 50; scir *= 1.1) {
ellipse(cx, cy, scir, scir);
}
}
}
for (cy = 50; cy < height; cy += 170){ // circles upper left quadrant
for (cx = 70; cx < width; cx += 150) {
for (scir = 15; scir <= 50; scir *= 1.1) {
ellipse(cx, cy, scir, scir);
}
}
}
noLoop();
}
My initial idea for this project was to explore arraying flowers across the canvas, but as I kept working I became more interested in using geometric elements. The resultant wallpaper is a more contemporary version of my starting sketch.
One of many explorations of lighting effects through projection
With his work “Unfold 01,” visual artist Can Buyukberber utilizes a combination of lighting effects and 3-D computer graphics to abstract the process of morphogenesis. I admire the way Buyukberber is able to present a scientific process related to the development of an organism in a beautiful way, using powerful imagery that starts to read like a time-lapse. With this project in particular, he began with a 3-D model of a child’s face in Maxon Cinema 4D Studio and used cloning tools within the software’s animation modules to array the base curves along a circular path. He then lofted surfaces between the curves and used the Atom Array tool to create spheres around the vertices of the curves. Lastly, he adjusted illumination levels and shadows while rendering to experiment with the effects of light on the 3-D model, resulting in a variety of images of the same object. In this project, Buyukberber successfully communicates his interest in patterns and intelligence within nature by abstracting his perception of biological processes.
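That workflow is specific to Cinema 4D, but the core idea of cloning geometry along a circular path and marking curve vertices with small spheres can be sketched in p5.js, the medium used elsewhere on this page. The sketch below is only a rough conceptual analogue with made-up clone counts, radii, and curve math; it does not reflect Buyukberber's actual files or settings.
// Rough p5.js analogue (not Buyukberber's Cinema 4D process):
// clone a base curve around a circular path and mark its vertices with spheres.
function setup() {
  createCanvas(400, 400, WEBGL);
  noLoop();
}
function draw() {
  background(0);
  var clones = 24;        // number of copies arrayed around the circular path
  var pathRadius = 120;   // radius of that path
  for (var i = 0; i < clones; i++) {
    push();
    rotateY(TWO_PI * i / clones); // step each clone around the circle
    translate(pathRadius, 0, 0);
    // base curve for this clone
    stroke(255, 120);
    noFill();
    beginShape();
    for (var t = -60; t <= 60; t += 20) {
      vertex(30 * sin(t * 0.05), t, 0);
    }
    endShape();
    // small spheres ("atoms") at the curve's vertices
    noStroke();
    fill(255, 180);
    for (var s = -60; s <= 60; s += 20) {
      push();
      translate(30 * sin(s * 0.05), s, 0);
      sphere(2);
      pop();
    }
    pop();
  }
}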
Unfold 01 from Can Buyukberber on Vimeo.
//Brandon Darreff
//Section A
//bdarreff@andrew.cmu.edu
//Project-04
var diam; //circle diameters
var h; //ribbon flared ends control
var x; //horizontal ribbon driver
var y; //vertical ribbon driver
var sd = 40; //starting diameter of circles
function setup() {
createCanvas(480, 640);
}
function draw() {
background(23, 51, 68);
noFill();
for(diam = sd; diam <= 260; diam *= 1.1) { //background circle left
stroke(75);
strokeWeight(1);
ellipseMode(CENTER); //modify ellipse drawing origin
ellipse(width / 2.4, height / 2.667, diam, diam);
}
for(diam = sd; diam <= 250; diam *= 1.1) { //background circle right
stroke(75);
ellipse(width / 1.37, height / 1.44, diam, diam);
}
h = 50; //assign flared end control value
for (y = 200; y <= 400; y += 10) {
stroke(200, 91, 111); //vertical ribbon color backdrop
strokeWeight(.5);
bezier(y + 80, 0, h + 5, 575, 450, 280, y + 75, height);
stroke(220);
strokeWeight(0.75); //vertical ribbon white overlay
bezier(y + 100, 0, h, 550, 500, 270, y + 10, height);
h -= 15; //flared end offset for each pass
}
for (x = 50; x <= 250; x += 15) {
stroke(200, 91, 111); //horizontal ribbon color backdrop
strokeWeight(0.35);
bezier(0, x - 30, 125, 305, 950, 300, 0, x + 370);
stroke(200); //horizontal ribbon white overlay
strokeWeight(0.25);
bezier(0, x - 50, 125, 285, 950, 300, 0, x + 350);
h -= 12; //leftover flared end offset (h is not used in this loop)
}
for(diam = sd; diam <= 230; diam *= 1.05) { //white circle left
stroke(255);
strokeWeight(1);
ellipse(width / 2.133, height / 2.783, diam, diam);
}
for(var diam = sd; diam <= 150; diam *= 1.05) { //white circle right
stroke(215);
ellipse(width / 1.574, height / 1.471, diam, diam);
}
}
With this project I explored using arrayed curves to mimic ribbons across the defined canvas. By focusing on the intersection points of the two ribbons using ellipses, the intent was to create depth using tone and varying line weights.
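The depth effect described above can be isolated in a few lines: redraw the same bezier while stepping up both the stroke brightness and the stroke weight, so later passes read as nearer. The sketch below is a stripped-down illustration with made-up coordinates, not the ribbon values used in the project above.
// Minimal illustration of depth from tone and line weight: the same curve is
// redrawn with a brighter, heavier stroke as i grows, so later passes advance.
function setup() {
  createCanvas(480, 640);
  background(23, 51, 68);
  noFill();
  for (var i = 0; i < 10; i++) {
    stroke(80 + i * 17);          // darker strokes recede, brighter ones advance
    strokeWeight(0.25 + i * 0.2); // heavier lines read as nearer
    bezier(0, 200 + i * 10, 150, 400, 330, 250, width, 420 + i * 10);
  }
  noLoop();
}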
With his 2016 project “re rain”, designer Kouichi Okamoto experimented with non-visual elements to invert the way we perceive the sound of rain. Fifteen speakers were placed at the bases of open umbrellas to project the sound of rain hitting the top of an umbrella onto the underside of the open umbrellas. In other words, the installation takes sound typically generated from a convex surface and applies it to the concave side of the same surface, allowing the generated sound waves to take on the role of the rain. Just as an umbrella deflects rainwater, it redirects the sound waves throughout the room and vibrates from the interaction as it would if exposed to real raindrops. I admire the way Okamoto experiments with morphing such a common, distinctive sound, giving it a new identity, as well as the way he gives a sort of physical presence to the sound waves. In terms of the possible algorithms behind this installation, Okamoto had to experiment with balancing the magnetic force of the speaker, the weight of the umbrella, and the pitch of the recorded sound to achieve the desired effect. By testing a range of options using these three variables, Okamoto was able to produce a simple yet elegant representation of sound art.
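Okamoto has not published the details of that balancing act, but the trade-off between the three variables can be caricatured in p5.js: treat the umbrella as a damped membrane, drive it with a sine "rain" tone, and expose driving force (standing in for speaker strength), mass (umbrella weight), and pitch as the knobs. Everything below is an invented toy model, not the installation's software.
// Conceptual sketch only (not Okamoto's system): a damped oscillator stands in
// for the umbrella surface, driven by a sine "rain" tone.
var force = 0.8;    // driving amplitude (speaker magnetic force analogue)
var mass = 2.0;     // umbrella weight analogue (heavier = less vibration)
var pitch = 3.0;    // frequency of the driving tone
var damping = 0.2;  // how quickly the vibration dies out
var pos = 0;        // displacement of the umbrella membrane
var vel = 0;
function setup() {
  createCanvas(400, 200);
}
function draw() {
  background(25);
  // drive the membrane with a sine tone, scaled down by its mass
  var drive = force * sin(frameCount * 0.05 * pitch) / mass;
  vel += drive - damping * vel - 0.02 * pos;
  pos += vel;
  // draw the vibrating "umbrella" dome
  stroke(255);
  noFill();
  arc(width / 2, 150 + pos * 3, 300, 200, PI, TWO_PI);
}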
http://www.kyouei-ltd.co.jp/re-rain.html
//Brandon Darreff
//Section A
//bdarreff@andrew.cmu.edu
//Project-03
function setup() {
createCanvas(600, 400, WEBGL);
}
function draw() {
var orbitMe = frameCount * 0.001; //Mercury orbit speed
var orbitV = frameCount * 0.0001; //Venus orbit speed
var orbitE = frameCount * 0.01; //Earth orbit speed
var orbitM = frameCount * 0.01; //Mars orbit speed
var orbitJ = frameCount * 0.07; //Jupiter orbit speed
var orbitS = frameCount * 0.01; //Saturn orbit speed
var orbitU = frameCount * 0.05; //Uranus orbit speed
var orbitN = frameCount * 0.05; //Neptune orbit speed
var res = 100; //resolution of spheres
background(0);
//sunlight tracked to mouse position
var sunlightY = (mouseY / height - 0.5) * (-2);
var sunlightX = (mouseX) / width - 0.5;
pointLight(250, 250, 250, sunlightX, sunlightY, 0);
//Mercury
translate(-width, -350);
push();
//orbit
rotateZ (orbitMe);
rotateX (orbitMe);
rotateY (orbitMe);
//surface color
ambientMaterial(100);
sphere(20, res);
pop();
//Venus
translate(height / 4, height / 4);
push();
//orbit
rotateZ (orbitV);
rotateX (orbitV);
rotateY (orbitV);
//surface color
ambientMaterial(174, 103, 58);
sphere(30, res);
pop();
//Earth
translate(height / 4, height / 4);
push();
//orbit
rotateZ (orbitE);
rotateX (orbitE);
rotateY (orbitE);
//surface color
ambientMaterial(32, 67, 100);
sphere (35, res);
pop();
//object orbiting earth
push();
ambientMaterial(255, 30);
rotateY(orbitE);
rotateX(orbitE);
ellipse(-87, 90, 200, 100, 100, 0);
pop();
//Mars
translate(height / 4, 70);
push();
//orbit
rotateZ (orbitM);
rotateX (orbitM);
rotateY (orbitM);
//surface color
ambientMaterial(131, 51, 44);
sphere(25, res);
pop();
//object orbiting mars
push();
ambientMaterial(255, 30);
rotateZ(orbitM);
ellipse(-87, 90, 100, 80, 100, 0);
pop();
//Jupiter
translate(200, 120);
push();
//orbit
rotateZ (orbitJ);
rotateX (orbitJ);
rotateY (orbitJ);
//surface color
ambientMaterial(199, 142, 87);
sphere(150, res);
pop();
//object orbiting jupiter
push();
ambientMaterial(255, 30);
rotateX(orbitJ);
ellipse(-87, 90, 200, 100, 100, 0);
pop();
//Saturn
translate(260, 120);
push();
//orbit
rotateZ (orbitS);
rotateX (orbitS);
rotateY (orbitS);
//surface color
ambientMaterial(210, 169, 121);
sphere(120, res);
pop();
//object orbiting saturn
push();
ambientMaterial(255, 30);
rotateX(orbitS);
rotateY(orbitS);
ellipse(-87, 90, 500, 100, 100, 0);
pop();
//Uranus
translate(210, 75);
push();
//orbit
rotateZ (orbitU);
rotateX (orbitU);
rotateY (orbitU);
//surface color
ambientMaterial(104, 179, 205);
sphere(70, res);
pop();
//Neptune
translate(180, 75);
push();
//orbit
rotateZ (orbitN);
rotateX (orbitN);
rotateY (orbitN);
//surface color
ambientMaterial(97, 136, 202);
sphere(70, res);
pop();
//object orbiting neptune
push();
ambientMaterial(255, 25);
rotateX(orbitN);
rotateY(orbitN);
rotateZ(orbitN);
ellipse(-87, 90, 300, 100, 100, 0);
pop();
}
With this project I focused on creating a dynamic drawing which mimics changing sunlight on spheres meant to represent planets. Each planet rotates around its own axis at a rate derived from its respective actual rotation speed, and additional objects are set into motion around the axes of various planets. The sunlight projected onto the spheres is tracked by the position of the mouse to add another dynamic variable to the overall image.
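In the sketch above the orbit* multipliers are hand-tuned constants; one way to actually derive them from published sidereal rotation periods is sketched below. The period values are approximate and the scale factor is arbitrary, so this is an illustration of the derivation, not the code used in the project.
// Deriving the orbit* multipliers from approximate sidereal rotation periods
// (in Earth days) instead of hand-tuned constants. The scale factor only sets
// the overall animation speed.
var rotationDays = {
  mercury: 58.6,
  venus: 243.0,  // retrograde in reality; the sign is ignored here
  earth: 1.0,
  mars: 1.03,
  jupiter: 0.41,
  saturn: 0.45,
  uranus: 0.72,
  neptune: 0.67
};
function spinRate(planet) {
  var scale = 0.01;                    // radians per frame for a 1-day rotation
  return scale / rotationDays[planet]; // shorter day = faster spin
}
// usage inside draw(), e.g. for Jupiter:
// var orbitJ = frameCount * spinRate('jupiter');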
I appreciate the amount of complexity and depth Jean Pierre Hebert was able to achieve in his piece “olive branch,” completed in 1990 utilizing a single drawing instrument and line weight. Hebert, an Algorist and pioneer of digital art, programs drawing devices such as custom-built machines and pen plotters using computer code he has developed himself to produce incredibly detailed drawings such as this one. By combining programming languages and simple mathematics, he successfully bridges digital and analog mediums in a way that transforms how we think about art. Overall, it is easy to hypothesize how his method of creating artwork has greatly influenced digital art practices in use today.
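Hebert's own code is not public, so the sketch below is only a small study in the same spirit: one stroke weight and a little trigonometry, repeated many times, so density emerges from very simple math. The wave formula and spacing are my own invention.
// Not Hebert's algorithm: a plotter-style study using a single stroke weight
// and simple trigonometry to build density from many thin polylines.
function setup() {
  createCanvas(500, 500);
  background(255);
  stroke(0);
  strokeWeight(0.5); // single pen width, like a plotter
  noFill();
  for (var y = 20; y < height - 20; y += 4) {
    beginShape();
    for (var x = 20; x < width - 20; x += 2) {
      // wave amplitude swells toward the center, packing the lines together
      var amp = 6 * sin(PI * x / width) * sin(PI * y / height);
      vertex(x, y + amp * sin(x * 0.15));
    }
    endShape();
  }
  noLoop();
}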
//Brandon Darreff
//Section A
//bdarreff@andrew.cmu.edu
//Project-02-Variable-Face
//left figure hair color
var r1 = 199;
var g1 = 161;
var b1 = 87;
//right figure hair color
var r2 = 152;
var g2 = 136;
var b2 = 215;
//left eye size
var eyeWidth1 = 12;
var eyeHeight1 = 10;
//right eye size
var eyeWidth2 = 10;
var eyeHeight2 = 12;
//left mouth size
var mouthWidth1 = 20;
var mouthHeight1 = 10;
//right mouth size
var mouthWidth2 = 15;
var mouthHeight2 = 8;
//setup canvas size
function setup() {
createCanvas(640, 480);
}
//fill background with color and design
function draw() {
background(112, 169, 169);
stroke(200);
strokeWeight(2);
noFill();
ellipse(200, 250, 300, 300);
ellipse(350, 175, 200, 200);
ellipse(550, 350, 100, 100);
noStroke();
//head & neck left
fill(231, 200, 163);
beginShape();
vertex(125, 350);
curveVertex(115, 400);
curveVertex(135, 310);
curveVertex(135, 290);
curveVertex(120, 260);
curveVertex(115, 230);
curveVertex(125, 200);
curveVertex(160, 170);
curveVertex(190, 170);
curveVertex(230, 190);
curveVertex(238, 210);
curveVertex(230, 230);
vertex(240, 270);
vertex(230, 280);
curveVertex(230, 310);
curveVertex(200, 310);
curveVertex(190, 330);
curveVertex(180, 400);
vertex(125, 350);
endShape();
//head & neck right
beginShape();
vertex(530, 350);
curveVertex(540, 400);
curveVertex(520, 310);
curveVertex(520, 290);
curveVertex(535, 260);
curveVertex(540, 230);
curveVertex(530, 200);
curveVertex(495, 170);
curveVertex(465, 170);
curveVertex(425, 190);
curveVertex(417, 210);
curveVertex(425, 230);
vertex(415, 270);
vertex(425, 280);
curveVertex(425, 310);
curveVertex(455, 310);
curveVertex(465, 330);
curveVertex(475, 400);
vertex(530, 350);
endShape();
// shirt left
fill(255);
beginShape();
vertex(80, 480);
curveVertex(80, 480);
curveVertex(78, 420);
curveVertex(100, 360);
curveVertex(120, 350);
curveVertex(150, 360);
curveVertex(190, 350);
curveVertex(200, 370);
curveVertex(220, 400);
curveVertex(210, 480);
vertex(220, 480);
vertex(80, 480);
endShape();
// shirt right
fill(0);
beginShape();
vertex(560, 480);
curveVertex(560, 480);
curveVertex(562, 420);
curveVertex(540, 360);
curveVertex(520, 350);
curveVertex(490, 360);
curveVertex(450, 350);
curveVertex(440, 370);
curveVertex(420, 400);
curveVertex(430, 480);
vertex(420, 480);
vertex(560, 480);
endShape();
//hair left
fill(r1, g1, b1);
stroke(0);
strokeWeight(1);
beginShape();
vertex(115, 260);
curveVertex(115, 260);
curveVertex(110, 220);
curveVertex(115, 170);
curveVertex(130, 160);
curveVertex(125, 150);
curveVertex(160, 140);
curveVertex(220, 155);
curveVertex(270, 150);
curveVertex(230, 180);
curveVertex(265, 175);
curveVertex(220, 200);
curveVertex(160, 180);
curveVertex(165, 220);
curveVertex(150, 260);
curveVertex(115, 260);
vertex(115, 260);
endShape();
//hair right
fill(r2, g2, b2);
stroke(0);
strokeWeight(1);
beginShape();
vertex(540, 260);
curveVertex(540, 260);
curveVertex(555, 220);
curveVertex(550, 170);
curveVertex(540, 160);
curveVertex(505, 150);
curveVertex(460, 150);
curveVertex(415, 180);
curveVertex(445, 200);
curveVertex(485, 210);
curveVertex(500, 220);
curveVertex(485, 260);
curveVertex(540, 260);
vertex(540, 260);
endShape();
//left eye
fill(255);
stroke(0);
strokeWeight(0.5);
//variable shape
ellipse(220, 235, eyeWidth1, eyeHeight1);
//pupil
noStroke();
fill(0);
ellipse(224, 235, 6, 6);
//highlight
fill(255);
ellipse(226, 235, 2, 2);
//right eye
fill(255);
stroke(0);
strokeWeight(0.5);
//variable shape
ellipse(433, 235, eyeWidth2, eyeHeight2);
//pupil
noStroke();
fill(0);
ellipse(431, 235, 6, 6);
//highlight
fill(255);
ellipse(429, 235, 2, 2);
//left mouth
fill(172, 97, 130);
ellipse(218, 285, mouthWidth1, mouthHeight1);
//right mouth
fill(211, 129, 137);
ellipse(435, 290, mouthWidth2, mouthHeight2);
// left sleeve
stroke(230);
strokeWeight(1);
fill(248);
ellipse(150, 480, 80, 200);
//right sleeve
stroke(50);
fill(25);
ellipse(500, 480, 80, 200);
}
function mousePressed() {
//left figure hair color change
r1 = random(75, 200);
g1 = random(75, 200);
b1 = random(75, 200);
//right figure hair color change
r2 = random(130, 250);
g2 = random(130, 250);
b2 = random(130, 250);
//left eye size
eyeWidth1 = random(10, 18);
eyeHeight1 = random(4, 14);
//right eye size
eyeWidth2 = random(8, 18);
eyeHeight2 = random(4, 14);
//left mouth size
mouthWidth1 = random(10, 20);
mouthHeight1 = random(0, 15);
//right mouth size
mouthWidth2 = random(8, 20);
mouthHeight2 = random(0, 10);
}
With the variable face assignment, I utilized two portraits and adjusted the facial features of each figure individually to make it appear as though they are having a conversation.
// Brandon Darreff
// Section A (Tuesdays 9:00)
// bdarreff@andrew.cmu.edu
// Project-01
function setup() {
createCanvas(400, 550);
background(186, 212, 126);
}
function draw() {
// background triangles
fill(211, 156, 93);
stroke(255);
triangle(0, 550, 0, 200, 175, 550);
triangle(0, 0, 0, 100, 400, 0);
triangle(400, 75, 400, 550, 300, 550);
// left eyebrow
fill(36, 107, 104);
strokeWeight(0.5);
stroke(255);
beginShape();
vertex(140, 200);
quadraticVertex(220, 220, 200, 225);
endShape(CLOSE);
// right eyebrow
beginShape();
vertex(291, 268);
quadraticVertex(220, 215, 236, 238);
endShape(CLOSE);
// left eye
fill(255);
stroke(100);
strokeWeight(0.5);
beginShape();
vertex(150, 220);
quadraticVertex(170, 210, 190, 250);
quadraticVertex(170, 250, 150, 235);
endShape(CLOSE);
// right eye
beginShape();
vertex(235, 255);
quadraticVertex(255, 245, 275, 285);
quadraticVertex(255, 285, 235, 270);
endShape(CLOSE);
// left iris
fill(36, 107, 104);
stroke(0);
strokeWeight(1);
ellipse(167, 233, 17, 17);
// right iris
ellipse(252, 268, 17, 17);
// left pupil
fill(0);
noStroke();
ellipse(167, 233, 10, 10);
// right pupil
ellipse(252, 268, 10, 10);
// left eye highlight
fill(255);
noStroke();
ellipse(165, 230, 4, 2);
//right eye highlight
ellipse(249, 265, 4, 2);
// nose
fill(210, 232, 145);
stroke(125);
arc(190, 319.5, 20, 20, 30.5, HALF_PI, OPEN);
noStroke();
beginShape();
vertex(220, 237);
quadraticVertex(163, 322, 190, 330);
endShape(CLOSE);
//nose bridge curve control points
var p1 = {x: 220, y: 237};
var p2 = {x: 190, y: 330};
var p3 = {x: 400, y: 320};
noFill();
stroke(125);
curve(p1.x, p1.y, p1.x, p1.y, p2.x, p2.y, p3.x, p3.y);
// hair
fill(36,107,104);
stroke(255);
strokeWeight(1);
beginShape();
vertex(115, 250);
quadraticVertex(110, 180, 150, 110);
quadraticVertex(140, 130, 170, 150);
quadraticVertex(160, 120, 180, 90);
quadraticVertex(190, 140, 225, 150);
quadraticVertex(210, 120, 220, 85);
quadraticVertex(280, 160, 325, 180);
quadraticVertex(340, 230, 290, 310);
quadraticVertex(300, 295, 300, 250);
quadraticVertex(280, 260, 240, 180);
quadraticVertex(220, 135, 130, 205);
vertex(115, 250);
endShape(CLOSE);
// mouth
fill(255);
stroke(100);
strokeWeight(1.5);
beginShape();
vertex(160, 345);
quadraticVertex(190, 350, 200, 375);
quadraticVertex(190, 385, 160, 360);
endShape(CLOSE);
strokeWeight(1);
line(160, 354, 200, 375);
// left ear
fill(210, 232, 145);
stroke(255);
strokeWeight(1);
beginShape();
vertex(115, 250);
quadraticVertex(95, 255, 114, 290);
vertex(115, 250);
endShape(CLOSE);
// right ear
beginShape();
vertex(290, 310);
quadraticVertex(310, 325, 265, 350);
vertex(290, 310);
endShape(CLOSE);
// neck
noFill();
stroke(210, 232, 145);
beginShape();
vertex(150, 390);
quadraticVertex(175, 410, 205, 400);
quadraticVertex(190, 425, 195, 450);
quadraticVertex(155, 475, 120, 425);
quadraticVertex(145, 410, 139, 370);
vertex(139, 370);
endShape();
// chin
noFill();
stroke(210, 232, 145);
strokeWeight(1);
beginShape();
vertex(115, 250);
quadraticVertex(105, 315, 150, 390);
quadraticVertex(175, 410, 205, 400);
quadraticVertex(250, 370, 265, 350);
endShape();
// shirt
fill(255);
stroke(255);
beginShape();
vertex(120, 425);
quadraticVertex(155, 475, 195, 450);
quadraticVertex(250, 450, 290, 480);
quadraticVertex(298, 515, 290, 550);
vertex(175, 550);
vertex(110, 420);
endShape(CLOSE);
stroke(200);
line(230, 550, 241, 505);
line(237, 520, 234, 510);
noLoop();
}
With this project I used a combination of primitive 2-D functions and irregular shapes to create the stylized portrait above. It was helpful to quickly sketch out the intended end product on graph paper in order to map certain features. I think the look of concern on the resultant figure’s face successfully captures my crippling anxiety, and I am looking forward to learning how to develop the code I wrote in much cleaner, more efficient ways.
I chose the project inter(e)scapes GLOW by the company SHO in collaboration with ULR Studio, which was presented to me by one of my professors, Gregory T. Spaw, a collaborator on the installation. I admire how the project team focused on mimicking a natural element like prairie grass through digital fabrication to begin integrating the technological and natural worlds. The collaborative project team comprised between ten and fifteen people from both studios, who worked together in pairings throughout various phases, including pre-planning, design, fabrication, and finally installation.
To achieve the desired response from the implemented system, custom interactive software and an imaging system were developed to react to surrounding kinetic energy and forces such as wind, human touch, and shifting weight on the ground around the installation. I feel the integration of responses to both human and natural forces increases this project’s potential, because there is activity in the system even when individuals are not present.
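The installation's actual software is not described beyond this, but the basic idea of field elements reacting to a local disturbance can be prototyped in a few lines of p5.js; in the hypothetical sketch below the mouse stands in for wind or touch, and every value is made up.
// Minimal sketch of the responsive idea only (not SHO/ULR's software):
// "grass blades" lean away from the mouse, which stands in for wind or touch.
function setup() {
  createCanvas(600, 300);
}
function draw() {
  background(10, 20, 15);
  stroke(120, 220, 140);
  strokeWeight(2);
  for (var x = 10; x < width; x += 10) {
    // disturbance falls off with distance from the cursor
    var d = dist(x, height, mouseX, mouseY);
    var lean = 40 * (x < mouseX ? -1 : 1) / max(1, d * 0.05);
    line(x, height, x + lean, height - 80);
  }
}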
I think this project begins to hint at further, large scale integration between technology and the natural environment to develop interesting spatial experiences for users.
Project Link: http://www.sh-o.us/intr-e-scapes-GLOW-2015
Intr(ə)Scapes @ Georgetown GLOW 2015 from SHO Architecture on Vimeo.