Vinyl & controller setups are large and expensive.

 

 

Two audio track crossfade without beat/tempo match.

 

 

interactive textile interface – acrylic and silver-nano inks on polyester substrate.

 

 

Bluno Nano and CapSense arduino library.

Bluno can easily connect Arduino sensors to Android & iOS. Although this project does not connect serially to Unity on Android, the example code connects to an app created in Android Studio, and the Bluno connects to Unity on iOS.

This program uses an initialization stage to calculate a baseline for each screen
printed touch sensor, then uses a multiplier to calculate a touch threshold. To
increase the correctness of the sensor data, I will implement a touch calibration
step into the setup sequence.

#include <CapacitiveSensor.h>  // NOTE(review): the include target was stripped in publishing; restored from the CapSense demo credited below, though the sketch itself uses the built-in touchRead()

// infinEight Driver
// Ty Van de Zande 2018

/*
 * CapacitiveSense Library Demo Sketch
 * Paul Badger 2008
 * Uses a high value resistor e.g. 10M between send pin and receive pin
 * Resistor effects sensitivity, experiment with values, 50K - 50M. Larger resistor values yield larger sensor values.
 * Receive pin is the sensor pin - try different amounts of foil/metal on this pin
 */


// Architecture
// TBD

// Touch-sensing input pins for the three screen-printed pads
// (read with touchRead() in updateSensors()).
static int IN1 = 18;
static int IN2 = 17;
static int IN3 = 16;

// LED pins. NOTE(review): ledGROUND is configured as an OUTPUT in
// setup() but never written — presumably it serves as the LEDs'
// ground-side pin; confirm against the wiring.
static int ledGROUND = 23;
static int ledONE    = 21;
static int ledTWO    = 20;
static int ledTHREE  = 19;


// Most recent raw reading per pad, refreshed by updateSensors().
int SENSE1;
int SENSE2;
int SENSE3;

// Per-pad touch thresholds, computed once by initializeSensors().
long THRESH1;
long THRESH2;
long THRESH3;

// Threshold scale factor: threshold = mult * baseline average.
float mult = 1.7;



// Configure the LED pins, open the serial port, and calibrate a
// baseline threshold for each touch pad before the main loop starts.
void setup()
{
   pinMode(ledGROUND, OUTPUT);
   pinMode(ledONE,    OUTPUT);
   pinMode(ledTWO,    OUTPUT);
   pinMode(ledTHREE,  OUTPUT);

   Serial.begin(9600);
   Serial.println("Prepping");
   initializeSensors();
}

// Main loop: refresh the sensor readings, default every LED to off,
// then let areWeTouched() re-light any LED whose pad reads as touched.
void loop()
{
  updateSensors();

  digitalWrite(ledONE,   LOW);
  digitalWrite(ledTWO,   LOW);
  digitalWrite(ledTHREE, LOW);

  areWeTouched();
  delay(10);  // short pacing delay between polls
}

// Compare each pad's latest reading against its calibrated threshold;
// on a touch, light the matching LED and print a pad number to serial.
// NOTE(review): the `== -2` comparison presumably matches an error or
// sentinel value from the touch read on this board — confirm on the
// target hardware.
// Fix: removed the stray `;` after each if-block (harmless empty
// statements, but unidiomatic).
void areWeTouched()
{
  if (SENSE1 > THRESH1 || SENSE1 == -2) {
    digitalWrite(ledONE, HIGH);
    Serial.println("3");
  }
  if (SENSE2 > THRESH2 || SENSE2 == -2) {
    digitalWrite(ledTWO, HIGH);
    Serial.println("2");
  }
  if (SENSE3 > THRESH3 || SENSE3 == -2) {
    digitalWrite(ledTHREE, HIGH);
    Serial.println("1");
  }
}



// Print three threshold values on one line, separated by " . ".
// Fix: parameters widened from int to long so the long THRESH globals
// (and their float-divided variants) are passed without silent
// narrowing; existing call sites are unaffected.
void printThresh(long one, long two, long three)
{
  Serial.print(one);
  Serial.print(" . ");
  Serial.print(two);
  Serial.print(" . ");
  Serial.print(three);
  Serial.println(" ");
}



// Poll the three capacitive pads and cache the raw readings in the
// SENSE globals for the rest of the sketch to consume.
// (An earlier commented-out array version was removed; note it used
// sizeof() on an int array, which yields bytes rather than element
// count.)
void updateSensors()
{
    SENSE1 = touchRead(IN1);
    SENSE2 = touchRead(IN2);
    SENSE3 = touchRead(IN3);
}

// Dump the most recent raw readings to serial as "a . b . c ".
void printSensors()
{
    const int readings[] = {SENSE1, SENSE2, SENSE3};
    for (int i = 0; i < 3; i++) {
        Serial.print(readings[i]);
        Serial.print(i < 2 ? " . " : " ");
    }
    Serial.println();
}


// Calibrate a touch threshold for each pad: discard some warm-up
// reads, average `cts` raw samples per pad, then scale the average by
// the global `mult`. Finishes with a staggered LED "ready" animation.
void initializeSensors()
{
  const int cts = 104;  // samples averaged per pad

  long sum1 = 0;
  long sum2 = 0;
  long sum3 = 0;

  // Throw away the first readings so the sensors settle.
  for (int i = 0; i < 20; i++) {
    updateSensors();
  }

  Serial.println("Collecting Summer Readings");
  for (int i = 0; i < cts; i++) {
    if (i % 4 == 0) {
      Serial.print("|");  // coarse progress bar
    }
    updateSensors();
    sum1 += SENSE1;
    sum2 += SENSE2;
    sum3 += SENSE3;
  }

  Serial.println(" ");
  Serial.println("Averaging thresholds");
  THRESH1 = mult * (sum1 / cts);
  THRESH2 = mult * (sum2 / cts);
  THRESH3 = mult * (sum3 / cts);
  printThresh(THRESH1, THRESH2, THRESH3);
  printThresh(THRESH1 / mult, THRESH2 / mult, THRESH3 / mult);
  Serial.println(" ");

  // Light the LEDs one by one to signal calibration is complete.
  digitalWrite(ledONE, HIGH);
  delay(80);
  digitalWrite(ledTWO, HIGH);
  delay(80);
  digitalWrite(ledTHREE, HIGH);
  delay(80);
  Serial.println("Ready!");
}


 

 

 

 

 

 

 

 

 

 

 

 

 

 

 

 

 

 

 

 

 

 

 

 

 

 

Thank you to people who helped, and others!!!!
Golan Levin
Claire Hentschker
Zachary Rapaport
Gray Crawford
Daiki Itoh
Lucas Ochoa
Lucy Yu
Jake Scherlis
Imin Yeh
Jesse Klein
Dan Lockton
FRFAF
URO-SURF

Assignment 8 – Arduino Timer

#include

#ifdef __AVR__
#include
#endif

static const int PIN = 3;
static const int NUMPIXELS = 2;
int incomingByte;
char incomingLetter;
Adafruit_NeoPixel pixels = Adafruit_NeoPixel(NUMPIXELS, PIN, NEO_GRB + NEO_KHZ800);
unsigned long interval = 10000;
unsigned long previousMillis = 0;

void setup() {
pixels.begin();
pixels.setPixelColor(0, pixels.Color(0, 0, 0));
pixels.setPixelColor(1, pixels.Color(0, 0, 0));
pixels.show();

}

void loop() {

unsigned long currentMillis = millis();

if (Serial.available() > 0) {
incomingByte = Serial.read();
incomingLetter = (char) incomingByte;
if (incomingLetter == ‘S’) {
while(currentMillis – previousMillis < interval) {
currentMillis = millis();
}

previousMillis = currentMillis;
pixels.setPixelColor(0, pixels.Color(255, 0, 0));
pixels.show();
}

else if (incomingLetter == ‘B’) {
while(currentMillis – previousMillis < interval){
pixels.setPixelColor(1, pixels.Color(0, 255, 0));
pixels.show();
currentMillis = millis();
}
previousMillis = currentMillis;
pixels.setPixelColor(1, pixels.Color(0, 0, 0));
pixels.show();
}

else {
pixels.setPixelColor(0, pixels.Color(0, 0, 0));
pixels.setPixelColor(1, pixels.Color(0, 0, 0));
pixels.show();
}
}
}

Assignment8 – Tabletop Haptic Interaction for AR

I challenged myself to work with an array of mini vibration motors and wireless local networking. The goal of this project is to create immersive AR interactivity by generating haptic feedback from virtual objects interacting with a physical desk. The user feels the vibration of the AR ball bouncing on a table through the haptic device. Every time it bounces, a signal is sent wirelessly to the main PC, which is relayed serially to the Arduino to activate the motors. The future step is to more precisely activate the array of motors by computing the level of vibration based on the locations of the AR objects and the device.

Exercise 8

For this assignment, I wanted to go back to object programming, because that confuses me more than most things, and I tend to be a generally confused person.

I also decided I was going to use Processing, because I’ve been watching a ton of coding train videos and I got tired of seeing suggestions come up on the side of my screen that look super cool but use Processing and had me thinking “that’s probably going to take me a while, I can’t learn how to use that right now.”

WHAT I MADE:

I used a blob detection code that allowed me to track a colour using my computer camera. I then decided I wanted to detect someone’s facial expression by using the shape of their lips. I could then use my millennial/gen X social media skills to create emojis that reflect typical facial expressions, and flash the related emoji at the user. Using my stellar illustrator skills and some photo bits and pieces off of the internet, I collaged a couple of emojis. I then used my a-ma-zing object programming skills to decode the code I was using and integrate an analysis of blob sizes and numbers. This took an insane amount of time, because there are just so many objects, but I have it working! It’s super jittery though, especially between the “shock” face and “happy” face, and you have to be at just the right distance from the camera.

(I also started thinking: hey, if I can detect colors, maybe I’ll be able to detect light colors my arduino gives off and have an elementary replacement to my malfunctioning serial control.)

——————————————————–

If anyone wants to test this code out, your skin and lip color will be too similar, and if you mess with the code’s colour threshold you will definitely make it worse, so here’s the easiest fix: Put on some bright lipstick. If you’re reluctant to do so you’re going to have to figure out a way to make your lips change colour. Once you’ve defied the laws of nature (or used someone’s makeup kit) hit the ‘i’ key and click on your lips, then hit the ‘i’ key again.

It’s preset to a bright red, so if you already have a bright red lipstick on, you’re all set.

———————————————————

I sincerely apologize for the weird faces.

code and image files:

use_your_lipstick

Class Notes, 22 March 2018

Nathan Shedroff’s definition of interaction tweaked by me.   (I’m not sure from where I cut-and-pasted this.)

  • Duration: Initiation, Immersion, Conclusion, and Continuation.  (I would include “memory” in this, that’s part of the duration of an event.)
  • Intensity: Reflex, Habit, Engagement
  • Breadth: Products, Services, Brands, Nomenclatures, Channels/Environment/Promotion, and Price
  • Interaction: Passive < > Active < > Interactive  (I would say “aggressive” goes after “interactive.”)
  • Triggers: Human Senses, Concepts, and Symbols  (Swastikas in post WWII western countries vs. maps in Japan.)
  • Significance: Meaning, Status, Emotion, Price, and Function (How does an interaction compare to a static physical entity?)