I instantly thought of Tron bikes when I read through the assignment, so that’s what I ended up doing. I modified a version of my generative hillside from Assignment 7 (I think?) and created two types of random objects, some abstract distant vertical structures that are represented by lines, and some dystopian irregular buildings. The height of the buildings and the placement of the window sections are randomized, as well as the heights of the structures in the background. There is also a moving grid to create more sense of motion in the work.
I spent a lot of the time fiddling with the different movement rates of objects and the tonal values to get the right depth and parallax effect.
// Shared sketch state (mutated by setup()/draw() and the helpers below).
// Unified the mixed var/let declarations to let for consistency.
let buildings = [];    // foreground buildings scrolling left
let lines = [];        // distant vertical "spike" structures
let bottomLines = [];  // x of each grid line's endpoint at the bottom edge
let topLines = [];     // x of each grid line's endpoint at the horizon
let hillHeight = [];   // hill silhouette heights, one sample per 5 px
let noiseParam = 0;    // current read position in the 1-D noise field
let noiseStep = 0.05;  // noise increment per sample
// One-time scene construction: seed the buildings, lay out the
// perspective grid endpoints, and pre-sample the hill silhouette.
function setup() {
    createCanvas(480, 240);
    // Seed the scene with ten buildings at random x positions.
    for (let i = 0; i < 10; i++) {
        buildings[i] = makeBuilding(random(width));
    }
    // 49 perspective grid lines: evenly spaced at the horizon,
    // fanned across a much wider span along the bottom edge.
    for (let i = 0; i < 49; i++) {
        topLines[i] = 10 * i;
        bottomLines[i] = -1200 + (60 * i);
    }
    frameRate(10);
    // Pre-sample one hill height every 5 px across the canvas.
    for (let x = 0; x <= width; x += 5) {
        const sample = noise(noiseParam);
        hillHeight.push(map(sample, 0, 1, height / 8, height / 2, true));
        noiseParam += noiseStep;
    }
}
// Main loop: paint back-to-front (hill, grid, bike), then run the
// update/cull/spawn cycle for both particle systems, then the horizon.
function draw() {
background(0);
// backdrop layers
drawHill();
drawGrid();
drawBike();
// background spike lines: update, remove off-screen, maybe spawn
updateAndDisplayLines();
removeLinesThatHaveSlippedOutOfView();
addNewLineWithSomeRandomProbability();
// foreground buildings: same cycle
updateAndDisplayBuildings();
removeBuildingsThatHaveSlippedOutOfView();
addNewBuildingsWithSomeRandomProbability();
displayHorizon();
}
//updates and draws the grid
// Draws the floor plane, 49 scrolling perspective lines that converge
// toward the horizon, and exponentially spaced horizontal lines.
function drawGrid(){
    strokeWeight(1);
    stroke(255);
    fill(18,49,62);
    rect(0, height - 100, width, height / 2);
    //vertical lines
    for (let i = 0; i < 49; i++){
        line(topLines[i], height - 100, bottomLines[i], height);
        // top endpoints scroll slower than bottom ones for perspective
        topLines[i] -= 5;
        bottomLines[i] -= 30;
        // BUG FIX: the original compared with == 0 / == -1200 AFTER the
        // decrement, so the endpoints that STARTED at those values
        // (i == 0) stepped past the reset point on the first frame and
        // drifted off-screen forever. Use <= so every line recycles.
        if (topLines[i] <= 0){
            topLines[i] = 480;
        }
        if (bottomLines[i] <= -1200){
            bottomLines[i] = 1680;
        }
    }
    //horizontal lines
    for (let i = 0; i < 10; i++){
        line(0, height - 100 + pow(2, i), width, height - 100 + pow(2, i));
    }
}
//draws background hill
// Scrolls the noise-generated skyline one sample left per frame: drop
// the leftmost height, append a fresh noise sample on the right, then
// draw the silhouette as a curve, adding a small block at every local
// peak of the ridge (smallest stored y compared to both neighbors).
function drawHill(){
    strokeWeight(0.25);
    // slide the window of heights left by one 5-px column
    hillHeight.shift();
    const n = noise(noiseParam);
    hillHeight.push(map(n, 0, 1, 0, height, true));
    noiseParam += noiseStep;
    fill(8,26,34);
    //start of the hill shape, with a buffer point off screen
    beginShape();
    curveVertex(0, height);
    curveVertex(0, height);
    curveVertex(-5, hillHeight[0]);
    //loop for drawing all vertices
    for (let j = 0; j < (width / 5) + 1; j++){
        // BUG FIX: the original used bitwise '&' between the first two
        // conditions; it happened to work on booleans but did not
        // short-circuit the hillHeight[j-1] access. Use '&&'.
        if (j !== 0 && hillHeight[j-1] > hillHeight[j] && hillHeight[j+1] > hillHeight[j]){
            rect(j * 5, hillHeight[j] - 20, 5, hillHeight[j] - 20);
        }
        curveVertex(j * 5, hillHeight[j]);
    }
    //end of hill shape with buffer
    curveVertex(width + 5, hillHeight[width / 5]);
    curveVertex(width, height);
    curveVertex(width, height);
    endShape();
}
//draws the Tron bike
// All coordinates are hand-tuned offsets relative to the
// translate(100, 180) origin below.
function drawBike(){
push();
translate(100,180);
noStroke();
//stripe
// bright horizontal bar trailing behind the bike (presumably the
// Tron light trail)
fill(255,190,107);
rect(-213,9,220,15);
//base back
// angular bike body drawn as a closed polygon
fill(182,134,44);
beginShape();
vertex(7,5);
vertex(7,5);
vertex(31,0);
vertex(60,10);
vertex(60,22);
vertex(19,22);
vertex(19,10);
vertex(7,5);
endShape();
//wheels
fill(198,128,4);
ellipse(6,18,18,18);
ellipse(52,18,18,18);
// dark hubs drawn over the wheels
fill(0);
ellipse(6,18,12,12);
ellipse(52,18,12,12);
pop();
}
//buildings
// Advance every building one step, then render it.
function updateAndDisplayBuildings(){
    for (const bldg of buildings){
        bldg.move();
        bldg.display();
    }
}
// Drop buildings whose right edge (x + breadth) has moved past the
// left edge of the canvas. Building a fresh array (rather than
// splicing in place) avoids mutating the array while iterating it.
function removeBuildingsThatHaveSlippedOutOfView(){
    buildings = buildings.filter(function (bldg) {
        return bldg.x + bldg.breadth > 0;
    });
}
// With a very tiny probability each frame, spawn one new building
// just past the right edge of the canvas.
function addNewBuildingsWithSomeRandomProbability() {
    const newBuildingLikelihood = 0.12;
    if (random(0, 1) >= newBuildingLikelihood) {
        return;
    }
    buildings.push(makeBuilding(width));
}
// method to update position of building every frame
// Slides the building left (speed is negative) by its per-frame speed.
function buildingMove() {
    this.x = this.x + this.speed;
}
// draw the building and some windows
// Renders one building as a rectangle rising from the horizon line
// (y = height - 100), with a column of dark window strips whose
// horizontal offset depends on this.side.
function buildingDisplay() {
    strokeWeight(0.5);
    const floorHeight = 20;
    const bodyHeight = this.nFloors * floorHeight;
    fill(14,43,55);
    stroke(255);
    push();
    translate(this.x, height - 100);
    rect(0, -bodyHeight, this.breadth, bodyHeight);
    fill(0);
    stroke(255);
    // window x offset: right half when side == 2, shifted left otherwise
    const windowX = this.side == 2 ? this.breadth / 2 : (this.breadth / 2) - this.breadth;
    for (let i = 0; i < this.nFloors - 1; i++) {
        rect(windowX, -15 - (i * floorHeight), this.breadth - 10, 10);
    }
    pop();
}
// Factory for one building object spawned at birthLocationX with a
// randomized window side and height.
function makeBuilding(birthLocationX) {
    return {
        x: birthLocationX,
        side: int(random(0, 3)),       // 0..2; picks which side the windows sit on
        breadth: 40,
        speed: -15.0,                  // px moved left per frame
        nFloors: round(random(2, 8)),  // floors of 20 px each
        move: buildingMove,
        display: buildingDisplay,
    };
}
//lines
// Advance every background spike line one step, then render it.
function updateAndDisplayLines(){
    for (const spike of lines){
        spike.move();
        spike.display();
    }
}
// Drop spike lines whose right edge has moved past the left edge of
// the canvas; rebuilding the array avoids mutating it mid-iteration.
function removeLinesThatHaveSlippedOutOfView(){
    lines = lines.filter((spike) => spike.x + spike.breadth > 0);
}
// Spawn a new spike line at the right edge. The likelihood of 1 means
// this fires on every frame (random(0,1) is always below 1).
function addNewLineWithSomeRandomProbability() {
    const newLineLikelihood = 1;
    if (random(0, 1) >= newLineLikelihood) {
        return;
    }
    lines.push(makeLine(width));
}
// method to update position of line every frame
// Slides the spike left by its (negative) per-frame speed.
function lineMove() {
    this.x = this.x + this.speed;
}
//draw the line spikes
// Renders one distant structure as a thick gray vertical line rising
// from the horizon (y = height - 100).
function lineDisplay() {
    const floorHeight = 8;
    strokeWeight(5);
    stroke(80);
    push();
    translate(this.x, height - 100);
    line(0, -(this.nFloors * floorHeight), 0, 0);
    pop();
}
// Factory for one background spike line spawned at birthLocationX.
function makeLine(birthLocationX) {
    return {
        x: birthLocationX,
        breadth: 40,                   // used by the off-screen cull check
        speed: -10.0,                  // slower than buildings, for parallax
        nFloors: round(random(1, 6)),  // spike height in 8 px units
        move: lineMove,
        display: lineDisplay,
    };
}
// White horizon line separating the sky from the grid floor.
function displayHorizon(){
    const horizonY = height - 100;
    stroke(255);
    line(0, horizonY, width, horizonY);
}
For this week’s Looking Outwards, I looked at the CAre B0t by Caroline Sinders. The project is an interface bot that is concerned with helping victims of Social Media Break Up. It provides counsel and advice for users undergoing social media harassment, but it doesn’t replace therapy. It highlights inequities and failures in harassment policies and procedures for victims by using an empathetic and artistic interface.
Caroline Sinders is a computational designer and artist, and her work focuses on abuse, interaction, society, A.I., and conversation. She operates a studio that uses machine learning to design for public good and solve problems through user research. She received a Masters in Interactive Telecommunications from New York University.
https://carolinesinders.com/care-b0t/
CAre B0t, Caroline Sinders 2019
My sonic story about the election goes as follows:
Biden and Trump meet at the White House. Trump debates Biden by saying ‘Wrong’ repeatedly while smiling. Biden then bonks Trump on the head with the great state of Pennsylvania, winning him the election. Trump’s defeat music plays as he frowns and Biden smiles. Trump leaves the White House and Biden dances to his victory music.
Sounds- Wrong, Bonk (hitting on head), Bass(Trump defeat sound), winSound(Biden’s victory song)
//trump and biden meet, trump debates biden by saying wrong repeatedly while smiling, then biden bonks trump with a democratic pennsylvania, winning the election. Trump then frowns and his panic music plays, he then leaves and biden does a dance to victory music.
//sounds
// Sound handles assigned in preload(); unified var to let.
let bonk;     // head-bonk effect
let winSound; // Biden's victory song
let bass;     // Trump's defeat sound
let wrong;    // Trump saying "wrong"
// Load the four sound clips before setup() runs.
function preload(){
bonk = loadSound("https://courses.ideate.cmu.edu/15-104/f2020/wp-content/uploads/2020/11/bonk-1.wav");
winSound = loadSound("https://courses.ideate.cmu.edu/15-104/f2020/wp-content/uploads/2020/11/Wide-1.wav");
bass = loadSound("https://courses.ideate.cmu.edu/15-104/f2020/wp-content/uploads/2020/11/Bass-1.wav");
wrong = loadSound("https://courses.ideate.cmu.edu/15-104/f2020/wp-content/uploads/2020/11/Wrong-1.wav");
}
// Canvas and drawing modes. frameRate(1) makes frameCount advance
// roughly once per second, which drives the story timeline in draw().
function setup() {
createCanvas(400, 400);
// NOTE(review): useSound() appears to be a course-provided sound
// helper (defined elsewhere); presumably it arranges for
// soundSetup() to be called.
useSound();
frameRate(1);
imageMode(CENTER);
rectMode(CENTER);
noStroke();
}
// Sound configuration hook; lowers the victory song's volume.
// NOTE(review): presumably invoked by the useSound() helper once the
// sound files are ready — confirm against the course template.
function soundSetup() {
winSound.setVolume(0.5);
}
// Animation state shared across frames; unified var to let.
let trumpPos = 0;   // Trump's x, slides in from the left
let bidenPos = 400; // Biden's x, slides in from the right
let armrot = 12;    // rotation angle of Biden's swinging arm
// Story timeline: at frameRate(1), frameCount acts as a once-per-second
// clock, so each if-block below is one scene or sound cue.
// FIX: three frame-range conditions used the bitwise '&' instead of
// '&&' (they only worked because both operands were booleans); also
// switched numeric comparisons to strict '==='.
function draw() {
    translate(200, 200);
    //biden and trump slide in
    if (frameCount < 8) {
        drawWH();
        trumpPos += 16;
        bidenPos -= 16;
        push();
        translate(trumpPos - 200, 0);
        drawTrump(true);
        pop();
        push();
        translate(bidenPos - 200, 0);
        drawBiden(false);
        pop();
    }
    //trump wrong sound (three frames in a row)
    if (frameCount === 7 || frameCount === 8 || frameCount === 9) {
        wrong.play();
    }
    //biden arm appears and swings
    if (frameCount >= 9 && frameCount < 11) {
        armrot -= 0.2;
        drawWH();
        push();
        translate(trumpPos - 200, 0);
        drawTrump(true);
        pop();
        push();
        translate(80, 80);
        rotate(armrot);
        drawPenn();
        pop();
        push();
        translate(bidenPos - 200, 0);
        drawBiden(false);
        pop();
    }
    //bonk sound effect
    if (frameCount === 12) {
        bonk.play();
    }
    //trump mad
    if (frameCount === 13) {
        drawWH();
        push();
        translate(trumpPos - 200, 0);
        drawTrump(false);
        pop();
        push();
        translate(80, 80);
        rotate(armrot);
        drawPenn();
        pop();
        push();
        translate(bidenPos - 200, 0);
        drawBiden(true);
        pop();
    }
    //bass sound
    if (frameCount === 14) {
        bass.play();
    }
    //trump exit, biden slide
    if (frameCount > 14 && frameCount < 19) {
        drawWH();
        trumpPos -= 55;
        bidenPos -= 27;
        push();
        translate(trumpPos - 200, 0);
        drawTrump(false);
        pop();
        push();
        translate(bidenPos - 200, 0);
        drawBiden(true);
        pop();
    }
    //win song plays
    if (frameCount === 20) {
        winSound.play();
    }
    //biden wiggle: small random rotation each frame
    if (frameCount > 20 && frameCount < 25) {
        drawWH();
        push();
        const rot = random(-0.3, 0.6);
        rotate(rot);
        translate(bidenPos - 200, 0);
        drawBiden(true);
        pop();
    }
    // fade to black and stop the sketch
    if (frameCount === 25) {
        background(0);
        noLoop();
    }
}
// Draws Trump: red suit, head with ears, blond hair, and a mouth arc
// that smiles when 'happy' is true and frowns otherwise.
function drawTrump(happy) {
    //red suit
    fill(170, 27, 27);
    rect(0, 200, 150, 400);
    //head and ears
    fill(255, 173, 101);
    ellipse(0, 0, 100, 90);
    ellipse(50, 0, 20, 40);
    ellipse(-50, 0, 20, 40);
    //hair
    fill(255, 220, 88);
    ellipse(0, -35, 100, 50);
    fill(0);
    // mouth: an arc from 0..PI is a smile, PI..0 a frown
    arc(0, 10, 40, 20, happy ? 0 : PI, happy ? PI : 0, CHORD);
}
// Draws Biden: blue suit, white hair (drawn first so the head overlaps
// it), head with ears, and a mouth arc that smiles when 'happy' is
// true and frowns otherwise.
function drawBiden(happy) {
    //blue suit
    fill(47, 62, 140);
    rect(0, 230, 100, 400);
    //hair
    fill(255);
    ellipse(0, -25, 70, 60);
    //head and ears
    fill(214, 169, 140);
    ellipse(0, 0, 80, 100);
    ellipse(40, 0, 10, 20);
    ellipse(-40, 0, 10, 20);
    fill(0);
    // mouth: an arc from 0..PI is a smile, PI..0 a frown
    arc(0, 10, 40, 20, happy ? 0 : PI, happy ? PI : 0, CHORD);
}
// Draws Biden's raised arm and hand holding the state of Pennsylvania
// (rendered as a blue block); rotated by the caller via armrot.
function drawPenn(){
//arm
fill(47,62,140);
rect(0,-45,30,90);
//hand
fill(214,169,140);
ellipse(0,-95,40,40);
//state
fill(47,62,140);
rect(0,-160,60,120);
}
// Background scene: sky-colored background, grass, and a simplified
// White House. Called at the start of each scene to clear the frame.
function drawWH(){
background(82,198,205);
//grass
fill(53,123,42);
rect(0,50,400,300);
//main house
fill(255);
rect(0,-120,100,40);
//tip
triangle(0,-150, -20,-140,20,-140);
//inside
fill(230);
rect(0,-118,40,32);
}
The project that I’m discussing is “Travis Bott” by the creative agency space150. The project uses artificial intelligence and machine learning to create a completely generated song that sounds as if it was created and performed by the American hip hop artist Travis Scott (hence the name “Travis Bott”). They had different teams that analyzed the music separately, breaking it down into beats, melodies, lyrics, and more. As the algorithm learned likely sequences and patterns in note and lyrical structure, they could then guide it in reconstructing a completely new song. Not only did they have to reconstruct the instrumental and lyrics, but Travis Scott’s vocal information was reconstructed as well. In some ways, Travis’ artistic sensibilities are the main focus here, as anything the algorithm produced was derived from his content and influence, but the packaging of the final song and video had some input from the studio behind it.
Travis Bott (2020)
https://www.space150.com/work/v45-travisbott
Feb 2020 by space150
I used lines that are randomly horizontal and vertical, and stroke width and lengths that progressively get smaller as the frames approach 10000, increasing the resolution of the portrait. At the end, the text ‘fin’ is displayed to conclude the drawing of the portrait.
// The portrait image whose pixels are sampled each frame in draw().
let picture;
// Load the source image before setup() runs.
function preload() {
picture = loadImage('https://i.imgur.com/3hKjmTL.jpg');
}
// One-time setup: the background is drawn black once and never
// cleared in draw(), so strokes accumulate into the portrait.
function setup() {
createCanvas(480, 480);
imageMode(CENTER);
textAlign(CENTER);
noStroke();
background(0);
// make the image's pixel data available for per-pixel sampling
picture.loadPixels();
frameRate(60);
}
// Each frame: pick one random source pixel and paint a short stroke
// of its color, randomly horizontal or vertical. Both stroke weight
// and stroke length shrink as frameCount approaches 10000, refining
// the portrait; past frame 10000 the canvas clears and 'fin' shows.
function draw() {
    //chooses random pixel coordinates
    const x = floor(random(picture.width));
    const y = floor(random(picture.height));
    //gets the pixel color
    const pixelColor = picture.get(x, y);
    //randomly pick a horizontal or vertical stroke
    const horizontal = random([true, false]);
    stroke(pixelColor);
    //stroke weight shrinks from 20 toward 0 over 10000 frames
    strokeWeight(map(10000 - frameCount, 10000, 0, 20, 0));
    //maximum extension of the stroke, also shrinking over time
    const reach = (10000 - frameCount) / 200;
    if (horizontal) {
        line(random(x - reach, x), y, random(x, x + reach), y);
    } else {
        line(x, random(y - reach, y), x, random(y, y + reach));
    }
    //ends the program at 10000 frames with a 'fin' message
    if (frameCount > 10000) {
        fill(255);
        background(0);
        textSize(100);
        textFont('Georgia');
        textStyle(ITALIC);
        text('fin', 240, 240);
        noLoop();
    }
}
]]>I looked at Hayoon’s Looking Outwards 07: information Visualization post on Jonathan Harris’ work “We feel fine”. I think the work is very well presented and visually stunning, but I question the artistic decision for how the algorithm attributes emotions to the text strings. I think more nuanced aspects of speech inflection (like sarcasm) and other semantics are lost when converted to text, especially in different contexts. I think Hayoon does a good job of addressing the complexity of the topic and admiring the creators’ sensibilities. I think she could’ve gone more into speculating the algorithmic or functional aspects of the project, but she captures and describes the project and the artistic influences well.
“We feel fine” by Jonathan Harris, 2009, c/o Seth Kamvar
http://number27.org/wffbook
Ariel Waldman works on NASA’s Innovative Advanced Concepts program and explores the relationship between scientific exploration and creative expression. Although she has a concentration on space exploration specifically, she also works in other science fields to promote collaboration in science. I admire how her work not only shows a lifelong fascination of space exploration, but also a longer journey into how she can share that fascination with others. I think my favorite project of hers is spaceprobe.es, which displays the satellites, rovers, and probes currently in space to share the active projects of space exploration. To present effectively, she uses real world examples to lead viewers through more familiar stories to the lesser told ones.
Move your mouse and be patient to generate curves, and try wiggling your mouse in the same spot to build up contrast areas!
// WEBGL canvas with degree-based angles for the trig in the curve math.
function setup() {
createCanvas(480, 480, WEBGL);
angleMode(DEGREES);
noFill();
// NOTE(review): p5 resets transformations each frame, so this
// translate likely has no effect on draw(); in WEBGL mode the origin
// is already at the canvas center — confirm intent.
translate(240,240);
}
// Layers a black bullet-nose curve, then 360 translucent white passes
// (each with a rounded backdrop rectangle) that partially erase it,
// creating the 'gap'/glitch effect described in the write-up.
function draw() {
//draws black lines
drawBulletNoseCurve();
//draws white lines to create 'gaps', that spins
// NOTE(review): rotate(mouseY) sits inside push()/pop(), so all 360
// iterations apply the same rotation and appear to redraw identical
// content; confirm whether rotate(j) or a cumulative rotation was
// intended.
for(let j=0; j<360; j++){
push();
stroke(255);
fill(255,255,255,50);
rect(-240,-240,480,480,10);
noFill();
rotate(mouseY);
drawBulletNoseCurve();
pop();
}
}
// Draws two layers of the bullet-nose curve x = a*cos(t),
// y = b*cos(t)/sin(t), centered on the origin: one at unit scale and
// one magnified 10x and mirrored in x. The mouse sets the parameters
// (a from mouseY, b from mouseX). angleMode(DEGREES) is set in setup.
// CLEANUP: removed the unused outer 'var x; var y; var t;'
// declarations (t was also redeclared inside each loop).
function drawBulletNoseCurve(){
    const a = mouseY / 10;
    const b = mouseX / 50;
    //vertical orientation curves
    beginShape();
    for (let i = 0; i < 100; i++){
        const t = map(i, 0, 100, 0, 200);
        vertex(a * cos(t), b * (cos(t) / sin(t)));
    }
    endShape();
    //horizontal biased
    beginShape();
    for (let i = 0; i < 100; i++){
        const t = map(i, 0, 100, 0, 400);
        vertex(10 * -a * cos(t), 10 * b * (cos(t) / sin(t)));
    }
    endShape();
}
I used the Bullet Nose curve to create my project. It uses both black and white layers of lines on a white background, so the white layers create ‘gaps’ or glitches in the black pattern, but it uses the bullet nose curve rotated around the origin to create them. I decided to create a separate function for drawing the curve, as I would use it for different purposes with different strokes. The mouse position helps determine the width of the asymptote portions of the curves, as well as the severity of the curve (how flat the inner portion is). I also made the creative decision to have it build up or truly ‘draw’ instead of redrawing the background each time. This allows the user to see the history of the work and understand the curves and gaps better.
This project creates visualizations for the way that content is shared through Facebook. It creates branching organic visuals on a pastel background to show the different rates and tree-like structure that describes media dissemination using Facebook. The exponential share-based structure allows the graphic to start at the center and branch out to fill the space, so I think the algorithm may assign share trees randomly around the centerpoint to then extend in a somewhat linear fashion, but the curves of the branches may describe unique share patterns. The creator’s artistic sensibilities are manifested through the visual display of the linkages (the leave/flower graphics) and the algorithm that dictates the organization of the branches.
// WEBGL canvas; degree-based angles for the rotateX calls in draw().
function setup() {
createCanvas(480, 480, WEBGL);
angleMode(DEGREES);
}
// Abstract 3-D clock: five translucent cylinders fill up to show the
// current time at different scales (left to right: days/month,
// hours/day, minutes/hour, seconds/minute, milliseconds/second),
// standing on a tilted plane of concentric rings.
function draw() {
background(204,207,235);
directionalLight(250, 250, 250, 0, 0, -45);
// tilt the whole scene so the cylinders are seen at an angle
rotateX(135);
noStroke();
//midplane circles
push();
rotateX(90);
// solid base pad under each cylinder
ellipse(0,30,100);
ellipse(160,-40,100);
ellipse(80,0,100);
ellipse(-160,-40,100);
ellipse(-80,0,100);
stroke(255);
strokeWeight(1);
noFill();
// concentric rings radiating outward from each base
for(let i=0; i<2000;i+=20){
ellipse(0,30,120+i);
ellipse(160,-40,120+i);
ellipse(80,0,120+i);
ellipse(-160,-40,120+i);
ellipse(-80,0,120+i);
}
noStroke();
pop();
//millisecond cylinder
push();
translate(160,0,-40);
// faint full-height shell showing the cylinder's capacity
fill(255,20);
cylinder(40, 240);
// nested cylinders offset in phase for a layered fill effect;
// (millis()%1000)*0.24 maps 0-999 ms onto a 0-240 height
fill(40, 106, 167);
cylinder(40, (millis()%1000)*0.24);
cylinder(36, ((millis()+75)%1000)*0.24);
cylinder(32, ((millis()+150)%1000)*0.24);
cylinder(28, ((millis()+225)%1000)*0.24);
cylinder(24, ((millis()+300)%1000)*0.24);
pop();
//second cylinder
push();
translate(80,0);
fill(255,20);
cylinder(40, 240);
// (second()%60)*4 maps 0-59 s onto a 0-236 height
fill(40, 159, 167);
cylinder(40, (second()%60)*4);
cylinder(36, ((second()+4.5)%60)*4);
cylinder(32, ((second()+9)%60)*4);
cylinder(28, ((second()+13.5)%60)*4);
pop();
//minute cylinder
push();
translate(0,0,30);
fill(255,20);
cylinder(40, 240);
fill(121,94,199);
cylinder(40, (minute()%60)*4);
cylinder(36, ((minute()+4.5)%60)*4);
cylinder(32, ((minute()+9)%60)*4);
pop();
//hour cylinder
push();
translate(-80,0);
fill(255,20);
cylinder(40, 240);
fill(94,199,173);
cylinder(40, (hour()%24)*10);
cylinder(36, ((hour()+1.8)%24)*10);
pop();
//day cylinder
push();
translate(-160,0,-40);
fill(255,20);
cylinder(40, 240);
// day()/31 approximates the fraction of the month elapsed
fill(190,94,199);
cylinder(40, (day()/31)*240);
pop();
}
I used abstract filling 3D cylinders to represent the passing of time at different scales. From left to right the scales are- Days/Month, Hours/Day, Minutes/Hour, Seconds/Minute, Milliseconds/Second.
I drew out the overall structure and my initial spacing plan for the 3D geometry.
]]>