Play this link in a separate tab before starting!
Press keys a, p, m, or y for different displays.
// Isabella Hong
// Section A
// ijhong@andrew.cmu.edu
// Final Project - AudioVisualMixer
//
// Interactive audio-visual mixer (p5.js). A rotating ring of ellipses is the
// default display; holding the keys p, m, y, or a layers extra animations
// timed to the linked song.
//variables for color in RGB mode, shared by the display functions below
var colorR = 255;
var colorG = 255;
var colorB = 255;
//variable used in function Beat
//NOTE(review): Beat declares a parameter named s that shadows this global,
//so this variable is never actually read or written by Beat itself.
var s = 0;
//variable used for noise effect (Perlin-noise x offset, advanced each frame
//by Interject1)
var xoff = 0;
//p5.js entry point: runs once before the first call to draw()
function setup() {
createCanvas(500, 500); //canvas size, 500x500 pixels
}
// Main loop: black backdrop, the rotating ring of ellipses, and then the
// key-triggered displays layered on top.
function draw() {
    background(0); // black background each frame

    // Rotation step (degrees), ring radii, and ellipse diameters for "Ring".
    var rotation = 45;
    var offset = [50, 100, 150, 200, 250, 300];
    var d = [40, 25, 10, 5, 3, 2];

    // Draw the continuous ring of ellipses around the canvas center.
    push();
    translate(width / 2, height / 2);
    for (var i = 0; i < 25; i++) {
        // One ellipse per radius, innermost to outermost.
        for (var j = 0; j < offset.length; j++) {
            Ring(offset[j], 0, d[j]);
        }
        // Fixed 45-degree step plus a slow time-based drift.
        rotate(radians(rotation));
        rotate(millis() / 9500);
    }
    pop();

    // Key-triggered displays.
    Beat(100, 100);
    Beat(400, 400);
    Interject1();
    Interject2();
    Interject3();
}
// Rotating ring (default display): draws one randomly-tinted ellipse at
// (x, y) with diameter d. Updates the shared colorR/G/B globals as a side
// effect, biased toward a warm palette (high red, capped blue).
function Ring(x, y, d) {
    colorMode(RGB);
    noStroke();
    colorR = random(200, 255);
    colorG = random(100, 255);
    colorB = random(100, 200);
    fill(colorR, colorG, colorB);
    ellipse(x, y, d, d);
}
// Pulsing display: two concentric "beat" circles at (x, y), shown while the
// 'p' key is held. The third parameter is kept for signature compatibility
// but is unused by callers (draw() passes only x and y); the ellipse size is
// rerandomized locally each frame.
// Fix: removed the dead `s = random(100, 200)` that ran before the key
// check -- it consumed a random value every frame and was always either
// unused or overwritten before use.
function Beat(x, y, s) {
    if (keyIsPressed && key == 'p') {
        noStroke();
        // Inner solid ellipse: random size creates the pulsing effect.
        s = random(10, 110);
        fill(206, 255, 255);
        ellipse(x, y, s, s);
        // Outer ring: larger random diameter, magenta stroke only.
        s = random(100, 200);
        strokeWeight(5);
        stroke(205, 0, 205);
        noFill();
        ellipse(x, y, s, s);
    }
}
// Oscillation display: noise-driven lines and an ellipse over a flashing
// pastel backdrop. Shown while the 'm' key is held.
function Interject1() {
    // Advance the Perlin-noise offset every frame, even while the display
    // is hidden, so the motion does not freeze between activations.
    xoff += 0.015;
    var n = noise(xoff) * width;

    // Only draw while 'm' is held down.
    if (!keyIsPressed || key != 'm') {
        return;
    }

    // Flashing pastel backdrop (oversized rect covers the whole canvas).
    noStroke();
    colorR = random(155, 255);
    colorG = random(155, 255);
    colorB = random(155, 255);
    fill(colorR, colorG, colorB);
    rect(0, 0, 1000, 1000);

    // Objects whose position/size follow the noise value.
    strokeWeight(10);
    stroke(255);
    noFill();
    line(0, n, 100, n);
    line(500, n, 400, n);
    ellipse(250, 250, n, n);
}
//rotating bar
function Interject2() {
//press key y to display
if ((key == 'y')) {
noStroke();
fill(0);
rect(0, 0, 10000, 10000);
rotate(millis()/275);
//create the rectangles to rotate
for (var i = 0; i < 1000; i += 50) {
rectMode(CENTER);
noFill();
strokeWeight(10);
stroke(255, 255, 0);
rect(i, i, 100, 100)
stroke(255, 0, 255);
rect(i, i, 200, 200);
stroke(0, 255, 255);
rect(i, i, 300, 300);
strokeWeight(10);
stroke(255, 255, 0);
rect(i, i, 100, 100)
stroke(255, 0, 255);
rect(i, i, 200, 200);
stroke(0, 255, 255);
rect(i, i, 300, 300);
}
}
}
// Shaky-strings display: fans of white/magenta/cyan lines over a flashing
// backdrop. Shown while the 'a' key is held.
// Fixes:
//  - Guard on keyIsPressed as well as key, for consistency with the other
//    displays, so the effect stops when the key is released.
//  - colorB range was written reversed as random(200, 150); normalized to
//    random(150, 200) (same value range, conventional argument order).
//  - Added noStroke() before the backdrop rect so a stale stroke from a
//    previous frame cannot outline it (matches Interject1/Interject2).
function Interject3() {
    if (keyIsPressed && key == 'a') {
        // Flashing backdrop.
        noStroke();
        colorR = random(100, 200);
        colorG = random(200, 255);
        colorB = random(150, 200);
        fill(colorR, colorG, colorB);
        rect(0, 0, 1000, 1000);
        // Fan of thin white strings.
        for (var a = 0; a < width; a += 10) {
            stroke(255);
            strokeWeight(1);
            line(a, 400, width, a);
        }
        // Magenta strings.
        for (var b = 0; b < height; b += 10) {
            stroke(255, 0, 255);
            strokeWeight(1);
            line(b, 400, height - 100, b);
        }
        // Thicker white strings.
        for (var c = 0; c < width; c += 10) {
            stroke(255);
            strokeWeight(2);
            line(c, 300, width - 640, c);
        }
        // Cyan strings at denser (5px) spacing.
        for (var d = 0; d < height; d += 5) {
            stroke(0, 255, 255);
            strokeWeight(1);
            line(d, 100, height - 640, d);
        }
    }
}
For my final project, I created an interactive audio visual mixer using the song Shelter by Porter Robinson and Madeon. There are four different display settings that the user can choose from. Pressing the corresponding keys will load animations that correspond to parts of the song. There are three functions called “Interject” that display image oriented animations while the function “Beat” displays pulsing ellipses that coincide with the beats of the song.
The main issue I had was embedding the audio file and having it load properly, so I opted to link the file instead of having it play automatically.
Enjoy!
]]>For my final project, I want to create an audiovisual mixer like Daito Manabe’s “Arigato Skating.” In his production, Manabe used cameras to capture the movement of the junior skaters on the ice and used projectors to project visuals on to the ice in line with the movements. In the end, it looked like twinkling lights and lines were emerging from the skaters’ blades as they performed on the ice.
My idea is to have shapes and patterns emerge from the user’s mouse. The different objects that will be drawn as the mouse moves will depend on the song’s (which I have yet to choose) current rhythm. This way, the visual is in accordance to the audio, hence an audiovisual mixer.
These are some preliminary sketches and ideas that I have for the audio and visuals. They will become solidified once I choose the exact portion of one of the songs I have in mind.
]]>
For my last Looking Outwards post, I will be comparing and discussing the works of interaction designers, Daito Manabe and Caitlin Morris.
Daito Manabe is an artist, programmer, and DJ based in Tokyo, Japan. As a designer, he focuses on the relationship between the body and programming, opting to represent the connections in simple, clean works. He enjoys finding the balance between simple and intricate in his productions and this shows in his work, “Arigato Skating”, a motion graphic made for the opening of the NHK Trophy (a stop on the figure skating grand prix circuit). By using projectors and cameras, Manabe created the illusion that the junior skaters were creating lines of light and flowers with their blades. The full production is beautiful.
http://www.daito.ws/en/work/2012-nhk-trophy-arigato-skating.html
Caitlin Morris is an artist and technologist that explores various representations of physical space often through sound and perception. She is constantly crossing the line between digital and physical space, testing where the limit is on both. In November of 2010, Morris did a sound installation on the Brooklyn Bridge that demonstrated this flirtation with digital and physical interaction. She installed contact microphones across the railings of the Brooklyn Bridge. When pedestrians plugged their headphones into the little boxes, they could hear the vibrations of the bridge, indirectly interacting with the bridge’s interaction with water, wind and travelers. It was very cool.
Although both Manabe and Morris are interaction designers, their works delve into and trigger reactions from different senses. Manabe focuses on the visual and tactile interaction between his art and his audience. Meanwhile, Morris focuses on how her audience can interact with sound. Overall, both artists provoke reaction through their productions, something that I think is crucial when presenting personal work.
]]>For the composition project this week, I decided to attempt to create an abstract rendering of a rose. To start with, I looked at the properties of Turtle Graphics that we had learned and implemented last week and began my project.
Here is what it looked like throughout the process of creating my “rose petals.”
I found that when I was incrementing by numbers larger than one, I was getting the desired density in the center of my graphic, essentially the center of the flower. When I switched my incrementation to i++, I got the exact look I was going for – a heavily filled in center that spiraled outwards to create a beautiful abstract rose.
Here’s the final result:
// Isabella Hong
// Section A
// ijhong@andrew.cmu.edu
// Project 11
//
// Abstract turtle-graphics "rose": setup() draws the whole figure once and
// calls noLoop(); the turtle API at the bottom of the file does the drawing.
//angle to turn and reposition by in degrees (applied between petals)
var turnAngle = 110;
//the degree by which the turtle turns by after each side of a petal
var adegree = 120;
// Draws the whole rose once; the scene is static, so noLoop() halts the
// animation loop afterward.
function setup() {
    createCanvas(400, 400);
    // Pale yellow backdrop.
    background(250, 255, 200);
    strokeJoin(MITER);
    strokeCap(PROJECT);
    var turtle = makeTurtle(width / 2, 265);
    // Pink pen for the rose.
    turtle.setColor(color(255, 174, 188));
    // Repeatedly draw a four-sided "petal", nudging the turtle's position
    // and heading after each one so the petals spiral outward from a
    // densely-filled center.
    for (var i = 0; i < 100; i++) {
        for (var length = 0; length < 200; length += 100) {
            turtle.setWeight(2);
            // Petal: four sides at the current length, turning adegree
            // after each side.
            for (var side = 0; side < 4; side++) {
                turtle.forward(length);
                turtle.right(adegree);
            }
            // Reposition without drawing before the next petal; the offset
            // grows with i so later petals land further from the center.
            turtle.penUp();
            turtle.forward(i * 2);
            turtle.right(turnAngle);
            turtle.penDown();
        }
    }
    //save on computation
    noLoop();
}
//draw() is intentionally empty: the rose is drawn once in setup(), and
//noLoop() there stops the animation loop
function draw() {
}
//turtle graphics API
function turtleLeft(d){this.angle-=d;}function turtleRight(d){this.angle+=d;}
function turtleForward(p){var rad=radians(this.angle);var newx=this.x+cos(rad)*p;
var newy=this.y+sin(rad)*p;this.goto(newx,newy);}function turtleBack(p){
this.forward(-p);}function turtlePenDown(){this.penIsDown=true;}
function turtlePenUp(){this.penIsDown = false;}function turtleGoTo(x,y){
if(this.penIsDown){stroke(this.color);strokeWeight(this.weight);
line(this.x,this.y,x,y);}this.x = x;this.y = y;}function turtleDistTo(x,y){
return sqrt(sq(this.x-x)+sq(this.y-y));}function turtleAngleTo(x,y){
var absAngle=degrees(atan2(y-this.y,x-this.x));
var angle=((absAngle-this.angle)+360)%360.0;return angle;}
function turtleTurnToward(x,y,d){var angle = this.angleTo(x,y);if(angle< 180){
this.angle+=d;}else{this.angle-=d;}}function turtleSetColor(c){this.color=c;}
function turtleSetWeight(w){this.weight=w;}function turtleFace(angle){
this.angle = angle;}function makeTurtle(tx,ty){var turtle={x:tx,y:ty,
angle:0.0,penIsDown:true,color:color(128),weight:1,left:turtleLeft,
right:turtleRight,forward:turtleForward, back:turtleBack,penDown:turtlePenDown,
penUp:turtlePenUp,goto:turtleGoTo, angleto:turtleAngleTo,
turnToward:turtleTurnToward,distanceTo:turtleDistTo, angleTo:turtleAngleTo,
setColor:turtleSetColor, setWeight:turtleSetWeight,face:turtleFace};
return turtle;}
]]>For this week’s post on sound art, I chose to focus on Sound Machines, an instrument that was created for performing electronic music. Sound Machines was unveiled in 2011 for Volkswagen at the reveal party for the then new Beetle.
The device itself is composed of three different standard looking record players that are synced to an additional sequencer. The three discs each track a different sound. The visual patterns on the discs were produced manually by “reverse engineering” the music that had been customized for the event itself. The tracks were then controlled by the softwares Arduino and Processing.
I admire that The Product, the Berlin based creators of the device, took into consideration the music itself when designing the device. It adds a personal touch and connection. In addition, the SoundMachines was only the beginning – The Product wants to take it one step further and implement the device at music festivals in the future.
]]>
For my project this week, I decided to have my generative landscape be a sky with various sized stars. The stars are continuously generated and disappear at the end of the canvas in various sizes.
// Isabella Hong
// Section A
// ijhong@andrew.cmu.edu
// Project 10
//
// Generative landscape: stars drift leftward across a night sky seen
// through a window; new stars spawn at the right edge and are removed once
// fully past the left edge.
//array of star objects currently in the scene
var stars = [];
// Create the canvas and seed the sky with ten stars at random x positions.
function setup() {
    createCanvas(600, 400);
    for (var i = 0; i < 10; i++) {
        stars[i] = makeStars(random(width));
    }
    frameRate(30);
}
//one animation frame: clear the sky, update/draw the stars, prune stars
//that left the canvas, maybe spawn a new one, then draw the window
//foreground and the star-count caption on top
function draw() {
background(0);
updateandgeneratestars();
takeawaystars();
addnewstars();
basicForeground();
showStarNumber();
}
// Advance every star and draw it at its new position.
function updateandgeneratestars() {
    stars.forEach(function(star) {
        star.move();
        star.display();
    });
}
// Keep only the stars whose right edge (x + breadth) is still on screen;
// stars fully past the left edge are dropped.
function takeawaystars() {
    stars = stars.filter(function(star) {
        return star.x + star.breadth > 0;
    });
}
// With a small per-frame probability, spawn a new star at the right edge.
function addnewstars() {
    var newStarProbability = 0.05;
    if (random(0, 1) >= newStarProbability) {
        return;
    }
    stars.push(makeStars(width));
}
// Per-frame movement method shared by every star: speed is negative, so
// the star drifts left.
function starsmove() {
    this.x = this.x + this.speed;
}
// draw the four shapes (two rects, two ellipses) that make up one star
// cluster
// NOTE(review): all four translate() calls accumulate inside the single
// push()/pop() pair below, so each later shape is offset by an extra
// multiple of this.x and appears to drift faster than the previous one.
// This may be an intentional parallax effect rather than a bug -- confirm
// before changing.
// NOTE(review): this.nstars (set in makeStars) is never read here.
function showstars() {
var starHeight = 20;
fill(245);
noStroke();
push();
translate(this.x, height - 375);
rect(0, starHeight, 10, 10);
translate(this.x, height - 325);
ellipse(50, starHeight, 20, 20);
translate(this.x, height - 290);
rect(300, starHeight, 15, 15);
translate(this.x, height - 285);
ellipse(100, starHeight, 10, 10);
pop();
}
// Factory: build one star object born at the given x coordinate, moving
// left at 1 px/frame.
function makeStars(birthLocationX) {
    return {
        x: birthLocationX,
        breadth: 50,
        speed: -1.0,
        nstars: round(random(2, 8)), // 2-8; not read by showstars currently
        move: starsmove,
        display: showstars
    };
}
//draw the static foreground: window frame, a silhouetted viewer, and an
//empty speech bubble (its text is added separately by showStarNumber)
function basicForeground() {
//creating the window frame (brown strokes, no fill)
noFill();
strokeWeight(15);
stroke(101, 67, 33);
rectMode(CENTER);
rect(width / 2, height / 2, 585, 385);
line(width / 2, 0, width / 2, 400);
line(0, height / 2, 600, height / 2);
//little person looking out (navy body, skin-tone head)
strokeWeight(1);
stroke(0, 0, 128);
fill(0, 0, 128);
ellipse(width / 4, 380, 85, 100);
noStroke();
fill(255, 223, 180);
ellipse(width / 4, 300, 100, 100);
//speech bubble; note rectMode is still CENTER from above, so the rect is
//centered at (345, 300)
stroke(0);
fill(255);
triangle(250, 280, 250, 305, 200, 305);
rect(345, 300, 250, 50);
}
// Caption inside the speech bubble reporting how many stars are on screen.
function showStarNumber() {
    noStroke();
    fill(216, 182, 0);
    textSize(16);
    textFont("Georgia");
    text("I see " + stars.length + " stars tonight, wow!", 250, 305);
}
]]>For this week’s post, I decided to focus on designer, educator, and creator Rachel Wingfield. Wingfield attended the Royal College of Art and studied responsive environments that are inspired by living systems. Her concentration in school is still evident in the work she has produced throughout her professional career.
In 2003, Wingfield founded Loop.ph, a crafting space and laboratory that focuses on incorporating living materials and technology into public environments. As the founder of Loop.ph and as a designer, researcher and educator, Wingfield encourages designers and the public to intervene on an urban scale, to turn living materials into visual experiences and environments.
A project that incorporated these fundamentals was Brainwaves: Faster than Sound. The project was done in collaboration with Loop.ph, the contributing artists being Wingfield, Mathias Gmachl, Professor Vincent Walsh, Mira Calix, Anna Meredith, and The Aurora Orchestra, Joana Seguro – Lumin. The installation was constructed of dead trees that were arranged into a highly intricate grid of electro-luminous strings that worked in tandem with the positions of the musicians in the space. While the musicians performed, MRI images and slices of Malthius’ skull would be projected, demonstrating the intricacies of neural functions in action.
I particularly admired this project for its unique output – the final installation incorporated biology, music, and technology to create an immersive and very futuristic experience. I think that what Wingfield is doing as an artist is extending the bridge between science, technology, and the arts even further.
]]>// Isabella Hong
// Section A
// ijhong@andrew.cmu.edu
// Project 09
//
// Pointillist portrait: random square "dabs" colored from a source photo
// gradually reveal the image, one rectangle per draw() call.
//the source portrait, loaded in preload()
var img;
// Load the source portrait before setup() runs.
function preload() {
    // Portrait photo hosted on Imgur.
    var portraitURL = "https://i.imgur.com/hdA7L6z.jpg";
    img = loadImage(portraitURL);
}
function setup() {
//create canvas and load image on to it, resized to match the canvas
createCanvas(525, 375);
img.resize(525, 375);
background(255);
//rate at which pixels appear
//NOTE(review): browsers cap the actual frame rate well below 500, so this
//effectively requests "as fast as possible" -- confirm intent
frameRate(500);
img.loadPixels();
}
// One frame: paint a single 10x10 "dab" at a random canvas position, using
// the color of the source image at that same position.
// Fix: the original clamped the sample position to width - 20 / height - 20
// but drew the dab at the unclamped position, so (a) the dab's color could
// come from a pixel up to 20 px away, and (b) the rightmost/bottom 20 px of
// the image were never sampled. Now the sample is clamped to valid pixel
// indices (width - 1 / height - 1) and the dab is drawn where it was
// sampled.
function draw() {
    //randomize the x and y coordinates
    var CpixelW = random(width);
    var CpixelH = random(height);
    //clamp to valid pixel indices so img.get() never samples off-canvas
    var CpositionX = constrain(floor(CpixelW), 0, width - 1);
    var CpositionY = constrain(floor(CpixelH), 0, height - 1);
    //get the color from the image at the sampled position
    var colorxy = img.get(CpositionX, CpositionY);
    //draw the rectangular "dab" at the same spot, in the sampled color
    noStroke();
    fill(colorxy);
    rectMode(CENTER);
    rect(CpositionX, CpositionY, 10, 10);
}
For my portrait, I chose a photo of myself that my friend took while we were hiking at Ohiopyle. I liked that the photo had several different colors and depths, which I thought would add to the pixelated version. This is my project at various stages.
Stage 1
Stage 2
Stage 3
I am doing my Looking Outwards post on Hannah K’s post from Week 4. She did a post on Six Drawings, a joint collaborative effort between artist Maotik, percussionist David Espinosa, and musical director David Adamcyk. The project was presented in an audiovisual and instrumental performance at Société des Arts Technologiques in Montreal during the IX Symposium in the spring of 2014. The displays on the rubber orb were controlled through the program TouchDesign while the connected computer ran a Max multimedia software.
I also agree that the work is a completely immersive experience. When the first sounds are visually displayed on the giant orb, the room becomes silent and everyone is enraptured and fascinated by the eerie, almost tribal, audiovisuals. One simply cannot peel their eyes away.
]]>
https://vimeo.com/channels/eyeo2015
At Eyeo 2015, Samuel Sinyangwe and Deray McKesson spoke on the topic of racial justice and activism in addition to how they’ve utilized social media to spread awareness of what goes on in the news each day. For this post, I chose to focus on Singyangwe.
Samuel Sinyagwe in an American policy analyst and racial justice activist that currently resides in San Francisco, California. He is the co-founder of WetheProtestors, an organization that provide people with a platform to end police violence in their local communities. Sinyagwe studied racial intersections within the United States in the realms of politics, economics, and class. He describes himself as an activist that works with affected communities.
I admire Sinyagwe for his calm yet active approach to tackling the issue of racial injustice. He isn’t angrily aggressive, he’s actively progressive and constantly working to give people agency. He embraces the community and helps them initiate change.
Sinyangwe is an articulate and charismatic speaker who understands the power of mobilizing a group for change. He shows the audience that there is evidence – real and tangible statistics of racial injustice – clearing away the misconception that racial injustice is concentrated in only a few places. It's not – it's everywhere, and that's why Sinyangwe does what he does.
]]>