IL(Eric & Young)-Final Documentation

Conception and Design:

My design is mainly inspired by three concepts: Daoism, Narciss, and Flappy Bird. In my early documentation, I mentioned a project called Narciss, an AI whose only purpose is to investigate itself, which inspired me to elaborate on my previous definition of interaction: verbal and physical communication. As Narciss investigates itself in front of the mirror, it inspires audiences to think more and dig into philosophical significance in their daily lives. Narciss inspires me to look into a mental interaction between my project and users and that’s why I combined Daoism with my interaction project. To enter the second stage, users have to hold their fingers on the white circle (Figure 1). By doing so, users have to use both their hands to play. Such interaction reflects the emphasis on balance and the idea that truths are hidden behind illusions in Daoism.

Figure 1

 

As for Flappy Bird part, I changed the character into an old master drawn by illustrator (Figure 2).

Figure 2

The simple yet addictive game reflects the idea of going through obstacles before achieving the Dao in Daoism. Philosophy is not something out there—it can also be combined with silly games.

Fabrication and Production:

At the very beginning, I wanted to hide one force sensor within a book. Once the book was opened by the user, the game would begin. However, during my experiment, I found the sensor was not sensitive enough to detect the change of book pages. So I inserted both sensors beneath a picture of Taiji and changed the game rule. Before the user testing session, I didn’t have a “winning stage” for the game. Users could play it as long as they didn’t fail. Yet, the game was so addictive that a lot of people just stuck with my game, playing again and again. Based on my observation, I found most of the users could play to three or four; thus, I used >5 as the condition of winning (Figure 3).

Figure 3

Conclusion:

The goal of my project is to develop interaction into a mental and philosophical sense. By combining Daoism with an interactive game, I think I succeeded. By doing this project, I also realized the importance of statistics and adapting methods based on technical insufficiency in interactive designs. I hope my project has delivered a Humanities major’s message that philosophy is not something out here–it can also be amusing and combined with stupid little games.

Demo:

 

// For sending multiple values from Arduino to Processing


// Open the USB serial link at 9600 baud so Processing can read from us.
void setup() {
  Serial.begin(9600);
}

// Read the two force sensors and send them as one comma-separated line
// per iteration, e.g. "512,13\n". Processing's updateSerial() splits the
// line on the comma, so the field order here must match NUM_OF_VALUES there.
void loop() {
  int sensor1 = analogRead(A0);
  int sensor2 = analogRead(A1);
//  int sensor3 = analogRead(A2);

  // keep this format
  Serial.print(sensor1);
  Serial.print(",");  // put comma between sensor values
  Serial.print(sensor2);
  Serial.println();
//  Serial.print(sensor3);
//  Serial.println(); // add linefeed after sending the last sensor value

  // too fast communication might cause some latency in Processing
  // this delay resolves the issue.
  delay(100);
}


// For receiving multiple values from Arduino to Processing

/*
 * Based on the readStringUntil() example by Tom Igoe
 * https://processing.org/reference/libraries/serial/Serial_readStringUntil_.html
 */

import processing.serial.*;

String myString = null;  // scratch buffer for the most recent serial line
Serial myPort;           // serial connection to the Arduino


int NUM_OF_VALUES = 2;   /** YOU MUST CHANGE THIS ACCORDING TO YOUR PROJECT **/
int[] sensorValues;      /** this array stores values from Arduino **/

// --- Stage 1 (rotating Taiji) state ---

float angle;    // accumulated rotation input for the Taiji symbol
float jitter;   // per-frame random wobble added to the angle
float r=500;    // diameter of the Taiji symbol
float speed=5;  // unused while the pulsing code in draw() stays commented out
PImage img;     // the old-master character sprite ("LaoZi2.png")

// --- Stage 2 (Flappy-Bird-style game) state ---

float ballY = 0;                   // vertical position of the character
float gravity = .2;                // constant downward acceleration per frame
float velocity = 0;                // current vertical speed (positive = down)
float topValue = random(0, 230);   // height of the upper pipe
float bottomValue = topValue+250;  // top edge of the lower pipe (gap is 250 px)
float obstacleX = 500;             // x position of the pipe pair
boolean alive = true;              // false once the player has crashed
int score = 0;                     // pipes passed this run



// Create the canvas, load the character sprite, reset all game state,
// and open the serial link to the Arduino.
void setup() {
  size(500, 500);
  ellipseMode(CENTER);
  background(200);
  //line(width/3, height/5, width/2, height/5);

  img = loadImage("LaoZi2.png");

  // Draw one initial frame of the game scene.
  // NOTE(review): refresh()/obstacle() here run before any serial data has
  // arrived; they only draw and scroll, so this seems harmless, but it is
  // likely unnecessary — confirm before removing.
  refresh();
  obstacle();
  // Reset every game variable so that calling setup() again restarts the
  // game (the commented-out restart handlers below rely on this).
  ballY = 0;
  gravity = .2;
  velocity = 0;
  topValue = random(50, 300);
  bottomValue = topValue+250;
  obstacleX = 500;
  alive = true;
  score = 0;

  setupSerial();
}

// Advance and draw one frame of the game: apply gravity, move the
// character, draw the ground and the pipe pair, and scroll the pipes left.
void refresh() {
  // Ground collision: the sprite is treated as 50 px tall and the
  // kill line sits at y = 450.
  if (ballY+50> 450) {
    alive = false;
  }
  if (alive==true) {
    velocity = velocity + gravity;  // gravity accelerates the fall
    ballY = ballY+velocity;


    image(img, 150, ballY);  // character stays at fixed x = 150

    // NOTE(review): this ground rect is drawn with whatever fill was last
    // active; the fill(0) below only affects the two pipe rects.
    rect(0, 480, 500, 20);
    fill(0);
    rect(obstacleX, 0, 75, topValue);       // upper pipe
    rect(obstacleX, bottomValue, 75, 500);  // lower pipe
    obstacleX = obstacleX-2;                // scroll left 2 px per frame
  }
}

// Flap: when the second force sensor reads above the noise threshold (10)
// and the master is currently falling (velocity > 2), give him an upward
// kick. Requiring a downward velocity stops the player from chaining
// jumps while still rising.
// (Cleanup: the original compared the boolean to `true` and nested two
// ifs; a single && condition has identical behavior.)
void jump() {
  if (sensorValues[1] > 10 && velocity > 2) {
    velocity = -6;
  }
}

// Recycle the pipe pair once it has scrolled completely off the left edge:
// choose a fresh random gap position and park it back at the right side.
void obstacle() {
  boolean fullyOffScreen = obstacleX + 75 < 0;
  if (!fullyOffScreen) {
    return;
  }
  topValue = random(50, 300);
  bottomValue = topValue + 250;  // keep the 250 px gap
  obstacleX = 500;
}

// End the run when the master overlaps the pipe column horizontally
// (obstacleX between 50 and 175) while poking outside the vertical gap.
void collisionCheck() {
  boolean inPipeColumn = obstacleX > 50 && obstacleX < 175;
  boolean outsideGap = ballY < topValue || ballY + 50 > bottomValue;
  if (inPipeColumn && outsideGap) {
    alive = false;
  }
}

// Game-over overlay: failure message plus a RETRY button.
// NOTE(review): the button is drawn but nothing wires it up — every
// restart handler in this sketch is commented out, so restarting
// currently requires re-running the sketch.
void restartScreen() {
  textSize(29);
  fill(0);
  text("YOU FAILED TO MASTER THE DAO.", 10, 200);
  fill(100);
  rect(130, 250, 200, 100);  // button background
  fill(0);
  textSize(50); 
  text("RETRY", 150, 320);
  //RETRY();
}

//void mouseClicked(){
//  if (alive==false){
//    if((mouseY>250)&(mouseY<350)&(mouseX>130)&(mouseX<330)){
//      setup();
//    }
//  }
//}
//void RETRY() {
//  if (alive==false) {
//    if (sensorValues[1]>10) {
//      setup();
//    }
//  }
//}
//void mouseClicked(){
//  if (alive==false){
//    if((mouseY>250)&(mouseY<350)&(mouseX>130)&(mouseX<330)){
//      setup();
//    }
//  }
//}

// Scoring plus the win screen. Called every frame while the game scene
// is active.
void scoreCheck() {
  // obstacleX starts at 500 and decreases by exactly 2 each frame, so it
  // lands on 60 exactly once per pass of the pipes.
  // NOTE(review): this exact float comparison only works because the
  // scroll step (2) evenly divides 500 - 60; changing the speed would
  // silently break scoring. A range check would be more robust.
  if (obstacleX == 60) {
    score++;
  }
  fill(255, 0, 0);
  textSize(40);
  text(score, 450, 50);
  // Win condition: user testing showed most players reach 3-4 points,
  // so >5 marks mastery. Paint the Daodejing quote over the scene.
  if (score>5) {
    background(200);
    textSize(29);
    fill(0);
    text("The Master Said:", 100, 150);
    text("Dao Produces One", 100, 200);
    text("One Produces Two", 100, 250);
    text("Two Produces Three", 100, 300);
    textSize(27);
    text("Three Produces the Myriad Creatures.", 10, 350);
    //if (sensorValues[1]>10) {
    //  setup();
    //}
  }
}





// Main loop. Stage 1 shows the rotating Taiji symbol; while the first
// force sensor is pressed (reading > 10) the sketch switches to the
// Flappy-Bird-style game scene (stage 2).
void draw() {
  updateSerial();
  printArray(sensorValues);  // debug: dump the latest sensor readings

  background(200);

  // --- Stage 1: Taiji spinning with a slight random wobble ---
  pushMatrix();
  if (second() % 2 == 0) {
    jitter = random(-0.1, 0.1);  // re-randomize the wobble on even seconds
  }
  angle = angle + jitter;
  float c = cos(angle);
  translate(width/2, height/2);
  rotate(c);

  MyTaiji(0, 0, r);

  popMatrix();
  //r = r + speed;

  //if (r>500) {
  //  speed = -5;
  //}
  //if (r<200) {
  //  speed = 5;
  //}

  // --- Stage 2: game runs only while the sensor is held down ---
  if (sensorValues[0]>10) {

    background(200);  // hide the Taiji while the game is on screen

    if (alive) {
      refresh();
      jump();
      obstacle();
      collisionCheck();
    } else {
      restartScreen();
      //if (sensorValues[1]>10) {
      //  setup();
      //}
      
    }
    scoreCheck();
  }
}



// Open the serial port to the Arduino and allocate the reading buffer.
void setupSerial() {
  printArray(Serial.list());
  // NOTE(review): the port index 7 is machine-specific. Check the printed
  // list and use the index of "/dev/cu.usbmodem----" or
  // "/dev/tty.usbmodem----" on the machine running this sketch.
  myPort = new Serial(this, Serial.list()[7], 9600);

  myPort.clear();
  // Throw out the first reading,
  // in case we started reading in the middle of a string from the sender.
  myString = myPort.readStringUntil( 10 );  // 10 = '\n' (linefeed) in ASCII
  myString = null;

  sensorValues = new int[NUM_OF_VALUES];
}



// Drain every complete line waiting on the serial port, keeping the most
// recent well-formed reading in sensorValues. A line is well-formed when
// splitting on commas yields exactly NUM_OF_VALUES fields.
void updateSerial() {
  while (myPort.available() > 0) {
    myString = myPort.readStringUntil(10);  // 10 = '\n' (linefeed) in ASCII
    if (myString == null) {
      continue;  // no complete line buffered yet
    }
    String[] fields = split(trim(myString), ",");
    if (fields.length != NUM_OF_VALUES) {
      continue;  // partial or corrupted line; skip it
    }
    for (int i = 0; i < NUM_OF_VALUES; i++) {
      sensorValues[i] = int(fields[i]);
    }
  }
}

// Draw a Taiji (yin-yang) symbol centered at (x, y) with diameter r.
// Built from two half-disc arcs, two medium discs, and two small dots.
void MyTaiji(float x, float y, float r ){
  
  // NOTE(review): strokeWeight(10) has no visible effect because
  // noStroke() is active for every shape below.
  fill(255);
  noStroke();
  strokeWeight(10);
  arc(x, y, r, r, HALF_PI, PI+HALF_PI);//left half, white

  fill(0);
  arc(x, y, r, r, -HALF_PI, HALF_PI);//right half, black

  fill(0);
  ellipse(x, y-r/4, r/2, r/2);//upper black disc

  fill(255);
  ellipse(x, y+r/4, r/2, r/2);//lower white disc

  fill(255);
  ellipse(x, y-r/4, r/10, r/10);//small white dot inside the black disc

  fill(0);
  ellipse(x, y+r/4, r/10, r/10);//small black dot inside the white disc
  
  
}

IL(Eric & Young)-Recitation 10: Making a Media Controller

For this week’s recitation, I imported a picture of Bagua (八卦) that I downloaded online, as the background and use two potentiometers to control the position of Yingyang(阴阳) on it.

This is the picture of the circuit:

The video of the outcome:

The screenshot of the outcome:

Reflection:

After reading Computer Vision for Artists and Designers, I realized the importance of coding, and the convenience that those installations for novices have provided us with. As it is written in the text, “a computer, without additional programming, is unable to answer even the most elementary questions about whether a video stream contains a person or object, or whether an outdoor video scene shows daytime or nighttime, etcetera.” I really hadn’t thought about how the easy dragging, tapping, etc. movements we make every day are very hard for professional computer programmers to realize through coding.

 

 

// IMA NYU Shanghai
// Interaction Lab
// For sending multiple values from Arduino to Processing


// Open the USB serial link at 9600 baud.
void setup() {
  Serial.begin(9600);
}

// Send the two potentiometer readings as one comma-separated line,
// e.g. "512,13\n", which the Processing sketch's updateSerial() parses.
void loop() {
  int sensor1 = analogRead(A0);
  int sensor2 = analogRead(A1);
//  int sensor3 = analogRead(A2);

  // keep this format
  Serial.print(sensor1);
  Serial.print(",");  // put comma between sensor values
  Serial.print(sensor2);
  Serial.println();
//  Serial.print(sensor3);
//  Serial.println(); // add linefeed after sending the last sensor value

  // too fast communication might cause some latency in Processing
  // this delay resolves the issue.
  delay(100);
}

// IMA NYU Shanghai
// Interaction Lab
// For receiving multiple values from Arduino to Processing

/*
 * Based on the readStringUntil() example by Tom Igoe
 * https://processing.org/reference/libraries/serial/Serial_readStringUntil_.html
 */

import processing.serial.*;

String myString = null;  // scratch buffer for the latest serial line
Serial myPort;           // serial connection to the Arduino


int NUM_OF_VALUES = 2;   /** YOU MUST CHANGE THIS ACCORDING TO YOUR PROJECT **/
int[] sensorValues;      /** this array stores values from Arduino **/

PImage img;  // Bagua background picture ("BaGua.jpg")

float angle;   // unused in this sketch
float jitter;  // unused in this sketch

// Create the canvas (sized to match the Bagua image), open the serial
// port, and load the background picture.
void setup() {
  size(500, 407);
  background(0);
  setupSerial();
  
  img = loadImage("BaGua.jpg");
  

}


// Each frame: draw the Bagua background, read the two potentiometers,
// map them to canvas coordinates, and draw the Taiji symbol there.
void draw() {
  background(255);
  image(img,0,0);
  updateSerial();
  printArray(sensorValues);  // debug: dump the latest readings
 

 
  // Map the 10-bit analog readings (0..1023) onto the canvas.
  float x = map(sensorValues[0], 0, 1023, 0, width);
  float y = map(sensorValues[1], 0, 1023, 0, height);
 
 
  // NOTE(review): MyTaiji is not defined in this sketch's excerpt and is
  // called here with two arguments (the final-project version takes
  // three) — presumably a two-argument variant lives alongside this file;
  // confirm.
  MyTaiji(x, y);
  

  


}



// Open the serial port to the Arduino and allocate the reading buffer.
void setupSerial() {
  printArray(Serial.list());
  // NOTE(review): the port index 7 is machine-specific. Check the printed
  // list and use the index of "/dev/cu.usbmodem----" or
  // "/dev/tty.usbmodem----" on the machine running this sketch.
  myPort = new Serial(this, Serial.list()[ 7 ], 9600);

  myPort.clear();
  // Throw out the first reading,
  // in case we started reading in the middle of a string from the sender.
  myString = myPort.readStringUntil( 10 );  // 10 = '\n' (linefeed) in ASCII
  myString = null;

  sensorValues = new int[NUM_OF_VALUES];
}



// Drain every complete line waiting on the serial port, keeping the most
// recent well-formed reading in sensorValues. A line is well-formed when
// splitting on commas yields exactly NUM_OF_VALUES fields.
void updateSerial() {
  while (myPort.available() > 0) {
    myString = myPort.readStringUntil(10);  // 10 = '\n' (linefeed) in ASCII
    if (myString == null) {
      continue;  // no complete line buffered yet
    }
    String[] fields = split(trim(myString), ",");
    if (fields.length != NUM_OF_VALUES) {
      continue;  // partial or corrupted line; skip it
    }
    for (int i = 0; i < NUM_OF_VALUES; i++) {
      sensorValues[i] = int(fields[i]);
    }
  }
}

IL(Eric & Young)-Recitation 9: Serial Communication, Le(Gwyneth) Yin

Exercise 1:

To make the sketch, I downloaded the code from the uploaded file and changed the variable number to two. I built the circuit with two potentiometers and connected them to Arduino board.

And first we can see the printed number changing on Processing.

Then, I added a “ellipse” under void draw and made the filled color of it random from 0 to 255(from black to white).

Bingo!

Exercise 2:

Based on the downloaded code and tune example, I first built the circuit.

Connected it to Arduino and Processing, changed NUM_OF_VALUES to two and set values[i] = mouseX. I also set background as (mouseX/2).

And Bingo!

Special Thanks to IMA fellows and Dora, without whom I cannot finish these two exercises.

-----------------------ARDUINO----------------------------
// Open the USB serial link at 9600 baud.
void setup() {
  // put your setup code here, to run once:
Serial.begin(9600);
}

// Read both potentiometers and send them as one comma-separated line
// ("<p1>,<p2>\n") every 100 ms for the Processing sketch to parse.
void loop() {
  // put your main code here, to run repeatedly:
  int p1 = analogRead(A0);
  int p2 = analogRead(A1);

    // keep this format
  Serial.print(p1);
  Serial.print(",");  // put comma between sensor values
  Serial.print(p2);
  Serial.println(); // add linefeed after sending the last sensor value

  delay(100);
}
-----------------------PROCESSING----------------------------
import processing.serial.*;

String myString = null;  // scratch buffer for the latest serial line
Serial myPort;           // serial connection to the Arduino


int NUM_OF_VALUES = 2;   /** YOU MUST CHANGE THIS ACCORDING TO YOUR PROJECT **/
int[] sensorValues;      /** this array stores values from Arduino **/


// Create the canvas and open the serial port.
// NOTE(review): frameRate(10) slows the sketch — presumably to make the
// randomly-colored ellipses easier to watch; confirm the intent.
void setup() {
  size(500, 500);
  background(255);
  setupSerial();
  frameRate(10);
}


// Each frame: read the two potentiometer values and draw an ellipse at
// (sensorValues[0], sensorValues[1]) with a random gray fill.
// NOTE(review): background() is never called inside draw(), so previous
// ellipses stay visible and accumulate as a trail — looks intentional.
void draw() {
  updateSerial();
  printArray(sensorValues);  // debug: dump the latest readings

  fill(random(0,255));  // random gray, from black (0) to white (255)
  ellipse(sensorValues[0], sensorValues[1], 200, 300);
  
}



// Open the serial port to the Arduino and allocate the reading buffer.
void setupSerial() {
  printArray(Serial.list());
  // NOTE(review): the port index 31 is machine-specific. Check the printed
  // list and use the index of "/dev/cu.usbmodem----" or
  // "/dev/tty.usbmodem----" on the machine running this sketch.
  myPort = new Serial(this, Serial.list()[ 31 ], 9600);

  myPort.clear();
  // Throw out the first reading,
  // in case we started reading in the middle of a string from the sender.
  myString = myPort.readStringUntil( 10 );  // 10 = '\n' (linefeed) in ASCII
  myString = null;

  sensorValues = new int[NUM_OF_VALUES];
}



// Drain every complete line waiting on the serial port, keeping the most
// recent well-formed reading in sensorValues. A line is well-formed when
// splitting on commas yields exactly NUM_OF_VALUES fields.
void updateSerial() {
  while (myPort.available() > 0) {
    myString = myPort.readStringUntil(10);  // 10 = '\n' (linefeed) in ASCII
    if (myString == null) {
      continue;  // no complete line buffered yet
    }
    String[] fields = split(trim(myString), ",");
    if (fields.length != NUM_OF_VALUES) {
      continue;  // partial or corrupted line; skip it
    }
    for (int i = 0; i < NUM_OF_VALUES; i++) {
      sensorValues[i] = int(fields[i]);
    }
  }
}
------------------------------------------------------------EXERCISE TWO----------------------------------------------------------------
-----------------------ARDUINO----------------------------
// IMA NYU Shanghai
// Interaction Lab


/**
  This example is to send multiple values from Processing to Arduino.
  You can find the Processing example file in the same folder which works with this Arduino file.
  Please note that the echo case (when char c is 'e' in the getSerialData function below)
  checks if Arduino is receiving the correct bytes from the Processing sketch
  by sending the values array back to the Processing sketch.
 **/

#define NUM_OF_VALUES 2    /** YOU MUST CHANGE THIS ACCORDING TO YOUR PROJECT **/


/** DO NOT REMOVE THESE **/
// Parser state used by getSerialData():
int tempValue = 0;   // digits of the number currently being accumulated
int valueIndex = 0;  // which element of values[] the next number belongs to

/* This is the array of values storing the data from Processing. */
int values[NUM_OF_VALUES];


// Open the serial link to Processing at 9600 baud.
void setup() {
  Serial.begin(9600);
}

// Continuously parse incoming values from Processing and play a tone on
// the buzzer attached to pin 8: values[0] is the frequency and values[1]
// the duration, as sent by the Processing sketch (mouseX / mouseY).
void loop() {
  getSerialData();

  // add your code here
  // use elements in the values array
  // values[0]
  // values[1]

  tone(8, values[0], values[1]);
}


//recieve serial data from Processing
void getSerialData() {
  if (Serial.available()) {
    char c = Serial.read();
    //switch - case checks the value of the variable in the switch function
    //in this case, the char c, then runs one of the cases that fit the value of the variable
    //for more information, visit the reference page: https://www.arduino.cc/en/Reference/SwitchCase
    switch (c) {
      //if the char c from Processing is a number between 0 and 9
      case '0'...'9':
        //save the value of char c to tempValue
        //but simultaneously rearrange the existing values saved in tempValue
        //for the digits received through char c to remain coherent
        //if this does not make sense and would like to know more, send an email to me!
        tempValue = tempValue * 10 + c - '0';
        break;
      //if the char c from Processing is a comma
      //indicating that the following values of char c is for the next element in the values array
      case ',':
        values[valueIndex] = tempValue;
        //reset tempValue value
        tempValue = 0;
        //increment valuesIndex by 1
        valueIndex++;
        break;
      //if the char c from Processing is character 'n'
      //which signals that it is the end of data
      case 'n':
        //save the tempValue
        //this will b the last element in the values array
        values[valueIndex] = tempValue;
        //reset tempValue and valueIndex values
        //to clear out the values array for the next round of readings from Processing
        tempValue = 0;
        valueIndex = 0;
        break;
      //if the char c from Processing is character 'e'
      //it is signalling for the Arduino to send Processing the elements saved in the values array
      //this case is triggered and processed by the echoSerialData function in the Processing sketch
      case 'e': // to echo
        for (int i = 0; i < NUM_OF_VALUES; i++) {
          Serial.print(values[i]);
          if (i < NUM_OF_VALUES - 1) {
            Serial.print(',');
          }
          else {
            Serial.println();
          }
        }
        break;
    }
  }
}
-----------------------PROCESSING----------------------------
// IMA NYU Shanghai
// Interaction Lab


/**
 * This example is to send multiple values from Processing to Arduino.
 * You can find the arduino example file in the same folder which works with this Processing file.
 * Please note that the echoSerialData function asks Arduino to send the data saved in the values array
 * to check if it is receiving the correct bytes.
 **/


import processing.serial.*;

int NUM_OF_VALUES = 2;  /** YOU MUST CHANGE THIS ACCORDING TO YOUR PROJECT **/


Serial myPort;    // serial connection to the Arduino
String myString;  // scratch buffer for the latest serial line

// This is the array of values you might want to send to Arduino.
int values[] = new int[NUM_OF_VALUES];

// Create the canvas and open the serial port to the Arduino.
void setup() {
  size(500, 500);
  background(0);

  printArray(Serial.list());
  // NOTE(review): the port index 31 is machine-specific. Check the printed
  // list and use the index of "/dev/cu.usbmodem----" or
  // "/dev/tty.usbmodem----" on the machine running this sketch.
  myPort = new Serial(this, Serial.list()[ 31 ], 9600);

  myPort.clear();
  // Throw out the first reading,
  // in case we started reading in the middle of a string from the sender.
  myString = myPort.readStringUntil( 10 );  // 10 = '\n' (linefeed) in ASCII
  myString = null;
}


// Each frame: shade the background by the mouse position, pack the mouse
// coordinates into values[], send them to Arduino, and periodically ask
// Arduino to echo them back for verification.
void draw() {
  background(mouseX/2);  // gray level follows the horizontal mouse position

  // changes the values
  //for (int i=0; i<values.length; i++) {
  //  values[i] = mouseX;  /** Feel free to change this!! **/
  //}
  values[0] = mouseX;  // becomes the tone frequency on the Arduino side
  values[1] = mouseY;  // becomes the tone duration on the Arduino side

  // sends the values to Arduino.
  sendSerialData();

  // This causes the communication to become slow and unstable.
  // You might want to comment this out when everything is ready.
  // The parameter 200 is the frequency of echoing. 
  // The higher this number, the slower the program will be
  // but the higher this number, the more stable it will be.
  echoSerialData(200);
}

// Serialize the values array as "v0,v1,...,vN-1" followed by 'n' and push
// it to the Arduino. The trailing 'n' is the end-of-record marker that
// the Arduino sketch's getSerialData() switch looks for.
// (Cleanup: uses StringBuilder instead of String += in the loop, avoiding
// a fresh String allocation per element; the emitted bytes are identical.)
void sendSerialData() {
  StringBuilder data = new StringBuilder();
  for (int i = 0; i < values.length; i++) {
    data.append(values[i]);
    if (i < values.length - 1) {
      data.append(',');  // splitter character between elements
    } else {
      data.append('n');  // end-of-data marker (matches the Arduino parser)
    }
  }
  // write to Arduino
  myPort.write(data.toString());
}


// Every `frequency` frames, send 'e' to ask the Arduino to echo back the
// values it has stored, then print whatever bytes have arrived so far so
// the round trip can be inspected in the console.
void echoSerialData(int frequency) {
  if (frameCount % frequency == 0) {
    myPort.write('e');
  }

  String echoed = "";
  while (myPort.available() > 0) {
    echoed += char(myPort.read());  // append each received character
  }
  print(echoed);
}

Week 11: Interview Excerpt – Le(Gwyneth) Yin

I conducted this interview with a middle-aged lady in an old shop near Suzhou River. She’s working there, making a sort of hand-made metal net (I’m not sure what that is). I’m not a Northern-Jiangsu Dialect speaker, but the interesting thing happened in this interview is that while I asked her questions in Mandarin, she responded in the Dialect. Surprisingly, I could understand about 80% of what she was talking about. So the conversation went on successfully.

In this interview excerpt, at the beginning, the lady refuses me because she thinks she’s not an expert about Suzhou River. However, I keep asking her questions related to daily life and personal experience, she becomes willing to answer. She thinks all the bridges are similar to each other and they are all good bridges. However, she really appreciates Shanxilu Bridge, which is right next to her shop, because this bridge made her traveling through the river much easier. She also recalls how disgusting the river was in the past, and really appreciates how the government has made this river cleaner. She doesn’t think living next to the river is a good idea, since the wind often makes her feel cold. But she also said she would be loath to leave the shop, since she’s been there for a long time. At the end of the excerpt, she asks not to put this recording on the TV, because she thinks her voice is unpleasant.

All the ambient sound in this interview is naturally there without editing, the music is from the TV. The jolting sound is produced by the tools the lady used to make the metal nets.

IL(Eric & Young)-Final Project Proposal Essay, Le(Gwyneth) Yin

Project Title: Leave Me Alone

Project Statement of Purpose:

Inspired by Eye Contact by Keven O’Connor, I plan to make something similar but more physically interactive. Hopefully, I’ll introduce two players into my game and let them compete, so the interactions within this project are not only the one between the user and the equipment but also the one between different users. In this regard, my target audience would be anyone who is interested in having a fun “competition” with his/her fellows, and my intended impact would be a growing friendship between the two users and the users’ raised awareness of the fun interaction between human and human, and between human and computer.

Project Plan:

Echoing my project title, I plan to do this project on my own. To make mine more physically interactive than O’Connor’s, I plan to add two Joystick Modules into my project and let each of them control a ball to get rid of other bouncing balls. And to introduce a competition between the users, if one of the controlled balls contacts more bouncing balls than the other within a certain period of time, the former will be recognized as the “loser”. In the coming weeks, I will first figure out the drawing in Processing, and then the animation in Processing. After that, I’ll finish the code on Arduino and combine it with Processing. Finally, I’ll finish the circuits, connect it to my computer, and ask people for user testing.

Context and Significance:

In my previous projects I defined interaction based on what is written in The Art of Interactive Design: it is like a conversation: ‘a cyclic process in which two actors alternately listen, think, and speak'” (3). I first defined interaction as no more than a conversation between two entities with their judging and thinking involved. However, after taking Interaction Lab for more than half a semester, my definition evolved in the sense that there should also be physical interactions encouraging “body language communication” in the interaction, and this is why I plan to add Joystick Modules, different from O’Connor’s original idea. Nevertheless, during the last recitation, Ying shared the idea of cooperation in interaction. Taking off from his idea, I elaborated on my previous definition and thought a competition would add one more layer of interaction in this project (the interaction between two users), and that’s why I plan to use two Joystick Modules instead of one.

 

IL(Eric & Young) – Recitation 8: Final Project Process, Le(Gwyneth) Yin

In my previous group project, I defined interaction based on what is written in The Art of Interactive Design: it is like a conversation: “a cyclic process in which two actors alternately listen, think, and speak” (3). And in IL(Eric & Young) – Research Project, I write that “‘interaction’ involves two participants and these two participants should be able to react to each others’ actions after a process of thinking and judging.” In conclusion, I define interaction as a conversation between two (or more than 2) entities, whose logic thinking and judging are involved in the whole process. Having that in mind, I prototyped a locker that can bring things the user wants for the research project (the locker is able to output certain things after thinking and judging what the user wants) and an interactive Haunted House (the Haunted House has different outputs determined by users’ inputs through the process of computing) for the midterm.

Based on the reading and the research project, you can see the “conversation” involved in the interaction is nothing more than its literal meaning–by talking, or verbal inputs/outputs. However, while making the midterm project, my partner and I were inspired by projects like PomPom Mirror encouraging users to play with it by involving their whole bodies. So, we added some “metaphorical” meanings behind the word “conversation” and defined users’ moving hands, instead of talking, as inputs in this “conversation”/interaction.

The project that aligns with my definition is PomPom Mirror. As I’ve mentioned, this project encourages users to interact with it by using their whole bodies–since the “Mirror” would show different actions that the users’ bodies make. Within this process, as the users make certain poses, those poses become the input (like actor 1 speaking to actor two in a conversation). And as the project computing those inputs, its thinking and judging in this “conversation”/interaction are involved. Finally, as the “Mirror” shows different poses of the users, those shown poses become the output (as actor 2 responding to actor 1 in a conversation).

The project that differs from my definition is Narciss – AI whose only purpose is to investigate itself. In this project, there is no direct “conversation”/interaction between the user and the AI–neither verbally or physically. Also, there’s only one actor, that is the AI itself involved in this project so I don’t think there is a “conversation”/interaction going on there.

As for a new definition of interaction, I’d say I’m actually inspired by Narciss – AI whose only purpose is to investigate itself, which is mentioned as different from my old definition of interaction above. Though there’s no direct verbal or physical communication going on between Narciss and the users, I would say this project is creating a mental communication between machines and users. As Narciss studying and investigating itself, its actions has inspired human beings (for example, me) to think more and investigate more in our daily life–instead of following the daily routine. Nevertheless, though Narciss itself is the only actor involved in this case, it’s still a “conversation”/interaction since it is logically reflecting and thinking about itself. In conclusion, my new definition of interaction is, a conversation involving one or more entities, who are logically thinking, reflecting, and inspiring others/itself.

 

 

 

Week 9: Fieldnotes – Le (Gwyneth) Yin

Date: November 4, 2018

Time: 2:00pm

Location: Waibaidu Bridge (外白渡桥)

It was a warm sunny afternoon. After getting off the metro at East Nanjing Road station, I went straight to Waibaidu Bridge, ignoring all the “seductive” snacks I passed by on my way to the bridge. Once I arrived there, I stood at the middle of the bridge, got my notebook out of my bag, and started taking notes.

 

The first voice that came into my ear was, unsurprisingly, the traffic drone. Most of the sound was made by cars, however, every thirty seconds or so, there would be a bus passing by, making a sound that was much thicker than the cars. Since there are traffic lights at both two ends of the bridge, after around three minutes, the traffic drone faded away and the passengers’ sound became clear.

Since the pathway on the bridge is made of wood, passengers’ footsteps made great sounds on it. It was interesting to observe the passengers’ shoes—I was really excited when I saw a woman wearing high heels walking by, because her shoes would make a unique sound that is different from everyone else’s flats or sneakers. At the same time, I was disappointed when I saw a group of people walking passed by without making any footsteps sound due to their quality sneakers. While observing passengers’ shoes, I heard someone saying “向左一点,再向左一点, 对, 很好, 看着你老公, 头抬起来一点, 再抬高一点(Turn to the left a little bit, turn a bit more, yes, good, look at your husband, look up a little bit, look up a little bit more)…” I turned round to see what’s going on and found out it was a photographer “directing” a newly married couple for their “wedding photos”. Before hearing the photographer finally got satisfied with the couple’s poses, the traffic drone started again, covering the photographer’s voice.

 

This time, I wasn’t overwhelmed by the traffic drone anymore. Since I’d started paying attention to the passengers’ shoes, I realized there were more passengers when there was traffic drone—when it’s green light for the cars, it was also the green light for the passengers who were on the other side of the street. Instead of being filled up with traffic drone in my ears, I especially paid attention to capture those passengers’ conversations—still, I failed, because most of them were speaking dialects that I don’t understand. However, I was lit up when two young women speaking Mandarin passed by: “我们酒店订在哪的啊, 那边附近都有什么好玩的(Where is the hotel that we booked, is there anything interesting around it) ?” It seems they are traveling in Shanghai. Without acknowledging it, the traffic drone was gone, but I couldn’t hear that photographer’s sound anymore. I looked back, only to find they were gone.

The brief escape from the traffic drone brought the passengers’ footsteps back to me. This time, I noticed something different. A man with a suitcase passed by. The sound of his suitcase (caused by its wheels), somehow resonated with his footsteps—it sounded as if the suitcase was producing the “melody” and his footsteps were the beats. I also noticed an elder man walking with a wheelchair. When he was passing by, the wheelchair was also like the “melody” and his footsteps were like the beats—but this rhythm was obviously different from the man with his suitcase. As I’ve mentioned above, there were fewer people when the traffic light was red. And among those people who were still on the bridge when the light was red, the majority was taking photos. Standing in front of me are three women taking selfies separately. If I didn’t pay attention to their facial expression, I would think they were taking pictures of me—a weird person standing at the middle of the bridge while writing something on her notebook. However, their smiling face gave them away. It is also interesting to point out that most selfie people don’t make any voice whereas people taking photos for their companies had a lot of conversations with his/her companies. Standing on my right was a couple in their thirties. The woman was taking photos for the man: “你刚刚那张就很好, 嗯, 这样也很好, 不错(The photo I just took for you is really good, yes, this one is good too, great)…” Comparing this woman’s tone to the photographer’s (the one taking wedding photos for a couple), I just really wanted to laugh. When I was thinking about this amusing contrast, a shrill whistle from my right frightened me. I looked to my right: some passengers were trying to go through a red light so a security guard used the whistle as a signal to stop them from doing that. 
But the traffic light turned green right after the whistle, and those passengers crossed the street—getting onto this bridge along with the traffic drone—which left me wondering: was it really necessary for the security guard to care about those two seconds before the lights turned green?

 

The traffic drone was similar to the two previous ones and those passengers were also speaking dialects/languages that I can’t understand. I stood at the middle of the bridge, seeing people come and leave—just like the water in Suzhou River, flowing beneath Waibaidu Bridge. Suzhou River! I tried my best to listen to the river’s sound, but what came into my ears, still, was the sound of traffic, sound of footsteps, sound of chatting…Perhaps all the mentioned sounds have already become a part of Suzhou River. “Whooooooooo—“ A bus passed by with a strangely high speed, the extremely thick drone made by it scared me. I was leaning against the railing and I moved my back away immediately, perhaps it was a signal for me to head for my next spot.

 

IL(Eric & Young) – Recitation 7: Processing Animation, Le(Gwyneth) Yin

Recitation: Processing Basics

Date: October 26th 2018

Documented by: Le(Gwyneth) Yin

Instructor: Eric Parren & Younghyun Chung

Aim: Use Processing to animate the sketch that I made during last recitation.

In my last documentation, I complained about the difficulty of using Processing to draw quads. However, after taking last week’s two lectures, I realized I was so stupid. I need to apologize to Processing, because I didn’t know its rotate() and mousePressed() functions could make drawing quads so convenient. So, to make up for the “sorrow” I had while drawing those quads, I used the mousePressed() function to draw a quad (which is rotated from a rectangle) each time I click the mouse.

Nevertheless, the colors of these rectangles are restricted by color (random(255),0,0), and are transparent. By doing this, the color of these rectangles can fit into the keynote color of the canvas. Also, the size of the rectangles are restricted by l = random (10, width/2). By doing this, there would be rectangles of different size on the canvas and the patterns become more cyber and artistic.

What’s more, after filling the canvas with rectangles of different sizes, you can simply tap the “c” on the keyboard to clean all of them and start a new canvas.

Homework(code attached after the recitation code):

Step 1:

To start with, I made the canvas size (600,600), and draw a circle with strokeWeight (20) and a 200 long radius at the center of the canvas.

Step 2:

Then, I made the circle periodically expand and contract by giving conditions to the change of its radius: if the radius is shorter than 200, it will be lengthened; if it is longer than 400, it will be shortened.

Step 3:

To change the outline color smoothly, I made the red part of its color variable and inserted colorMode function. So the changing of the red part is restricted within (0,255) and once it’s smaller than zero, it will increase, once it’s bigger than 255, it will decrease.

Step 4:

Finally, I added the keyPressed function into this animation. Once you click “W”, the circle will move upward, once you click “S”, the circle will move downward, and “A” to the left, “D” to the right. The movement of this circle is restricted within the canvas as I wrote if y/x is >400/<200, y/x would =400/=200.

 

 

 

 

 

 

 

//Recitation work starts from here~

void setup() {
size(520,770);
background(#0C3364);
fill(#6C1414);
stroke(#6C1414);
rect(10,10,250,250);

fill(#5A1010);
stroke(#5A1010);
rect(260,10,250,250);

fill(#6F0909);
stroke(#6F0909);
rect(10,260,250,250);

fill(#520202);
stroke(#520202);
rect(260,260,250,250);

fill(#760202);
stroke(#760202);
rect(10,510,250,250);

fill(#400808);
stroke(#400808);
rect(260,510,250,250);
}

void draw() {
  // Intentionally empty: the canvas is painted once in setup(), then stamped
  // incrementally by mousePressed() and reset by keyPressed(). draw() still
  // has to exist so Processing keeps looping and dispatching those events.
}

// Stamp one rotated, semi-transparent red square centered on the mouse
// position each time the mouse is clicked.
void mousePressed() {
  color c = color(random(255),0,0);  // random red shade keeps the canvas keynote
  strokeWeight(2);
  // Isolate the transform: without pushMatrix/popMatrix, translate/rotate
  // mutate the current matrix, so multiple events handled in the same frame
  // could compound their transforms.
  pushMatrix();
  translate(mouseX,mouseY);
  rotate(PI/4.0);                    // 45-degree turn makes the rect read as a quad/diamond
  fill(c,30);                        // alpha 30: overlapping stamps stay transparent
  float l = random (10, width/2);    // random side length for visual variety
  stroke(#FACFB6);
  rect(0,0, l,l);
  popMatrix();
}

void keyPressed() {
  
  
  if (key=='C' || key == 'c'){
    background(#0C3364);
    fill(#6C1414);
stroke(#6C1414);
rect(10,10,250,250);

fill(#5A1010);
stroke(#5A1010);
rect(260,10,250,250);

fill(#6F0909);
stroke(#6F0909);
rect(10,260,250,250);

fill(#520202);
stroke(#520202);
rect(260,260,250,250);

fill(#760202);
stroke(#760202);
rect(10,510,250,250);

fill(#400808);
stroke(#400808);
rect(260,510,250,250);
  }
  }


//recitationHomework

float r = 200;     // current width/height passed to ellipse() (the write-up calls it the "radius")
float speed = 5;   // per-frame change applied to r; sign flips at the 200/400 bounds in draw()
float c=0;         // hue channel of the outline color (colorMode(HSB) in draw())
float speed2 = 1;  // per-frame change applied to c; sign flips at the 0/255 bounds
float x=300;       // circle center x; moved by A/D in keyPressed(), clamped to [200,400]
float y=300;       // circle center y; moved by W/S in keyPressed(), clamped to [200,400]

void setup() {
  size (600, 600);  // 600x600 canvas; draw() clamps the circle center to its middle region
  frameRate(40);    // 40 fps sets the pace of the pulse and color animation
}

void draw() {
  // Repaint from scratch every frame.
  background(255);
  noFill();
  strokeWeight(20);

  // Sweep the outline hue back and forth across the HSB range.
  colorMode(HSB);
  c += speed2;
  stroke(c, 255, 255);
  if (c > 255) {
    speed2 = -1;
  }
  if (c < 0) {
    speed2 = 1;
  }

  // Draw the circle, then advance its size; the size bounces between
  // 200 and 400 so the circle periodically expands and contracts.
  ellipse(x, y, r, r );
  r += speed;
  if (r > 400) {
    speed = -5;
  }
  if (r < 200) {
    speed = 5;
  }

  // Keep the WASD-controlled center inside the middle of the canvas.
  x = constrain(x, 200, 400);
  y = constrain(y, 200, 400);
}

void keyPressed() {
  // WASD moves the circle center in 5-pixel steps; draw() clamps the range.
  switch (Character.toLowerCase(key)) {
    case 'w': y = y - 5; break;
    case 's': y = y + 5; break;
    case 'a': x = x - 5; break;
    case 'd': x = x + 5; break;
  }
}


IL(Eric & Young) – Recitation 6: Processing Basics, Le(Gwyneth) Yin

Recitation: Processing Basics

Date: October 26th 2018

Documented by: Le(Gwyneth) Yin

Instructor: Eric Parren & Younghyun Chung

Aim: Draw an image in Processing based on any image that inspires me.

I chose this image because I like the way the artist divides the canvas into small rectangles which are all filled with symmetric patterns. Though all those patterns in different rectangles are different from each other, they somehow echo with each other and reinforced the beauty of repetition and symmetric in the canvas. I also like the colors the artist chose. Though the color of each rectangle is slightly different, the transition from one to the other is so gentle that makes the whole canvas harmonious.

To be honest, I also chose it because, as a beginner in Processing, I thought it would be easy to create symmetric patterns with it.

I basically wanted to draw something sharing the same idea of symmetric with the canvas. However, I simplified those complicated patterns into lines and quads. By simplifying those patterns, I wanted to create something more cyber and less exotic. To achieve my goal, I first divided my canvas into six rectangles and filled them with lines. While adding lines into the canvas, I didn’t restrict each of them staying in the same rectangle. Instead, I let some of them cross the boarder to make the separated rectangles look more like a whole together.

 

 

Comparing my work to the original canvas, as I’ve mentioned above, I simplified those complicated patterns. The biggest inspiration the original canvas gave me is the beauty of symmetric and I used a lot of that concept in my own work. I also used different but similar colors in each rectangles. By doing this, I was hoping the keynote of the color in my work is wine red which makes the whole canvas harmonious and reminds audiences of the autumn.

By using Processing, I think it sure provides me with convenience of dividing the canvas, drawing straight lines, and choosing colors precisely. However, when I was adding quads into my work, I was driven crazy by the calculation a lot of times–it would be much easier to draw the quads by pen than calculating the position of each vertex.

 

 

 

// Recitation 6: static-mode Processing sketch. Six dark-red rectangles are
// overlaid with light lines and symmetric quads, inspired by a divided-canvas
// artwork (see the write-up above).
size(520,770);
background(#0C3364);

// Base grid: six 250x250 rectangles in two columns, each a slightly
// different shade of dark red.
fill(#6C1414);
stroke(#6C1414);
rect(10,10,250,250);

fill(#5A1010);
stroke(#5A1010);
rect(260,10,250,250);

fill(#6F0909);
stroke(#6F0909);
rect(10,260,250,250);

fill(#520202);
stroke(#520202);
rect(260,260,250,250);

fill(#760202);
stroke(#760202);
rect(10,510,250,250);

fill(#400808);
stroke(#400808);
rect(260,510,250,250);

// Horizontal lines; some deliberately cross the rectangle borders so the
// six cells read as one composition.
stroke(#FACFB6);
strokeWeight(3);
line(10,93,260,93);
line(10,176,260,176);
line(135,135,510,135);
line(10,385,385,385);
line(135,635,510,635);
line(260,426,510,426);
line(260,343,510,343);
line(10,593,260,593);
line(10,676,260,676);

// Quads (diamond shapes) in the upper right corner.
strokeWeight(2);
quad(322.5,20,342.5,55,322.5,75,302.5,55);
quad(322.5,75,332.5,95,322.5,130,312.5,95);

stroke(#F0A894);
quad(385,20,395,55,385,75,375,55);
quad(385,75,405,95,385,130,365,95);

stroke(#FACFB6);
quad(447.5,20,467.5,55,447.5,75,427.5,55);
quad(447.5,75,457.5,95,447.5,130,437.5,95);

// Quads in the lower right corner.
quad(322.5,645,332.5,680,322.5,700,312.5,680);
quad(322.5,700,342.5,720,322.5,755,302.5,720);

stroke(#F0A894);
quad(385,645,405,680,385,700,365,680);
quad(385,700,395,720,385,755,375,720);

stroke(#FACFB6);
quad(447.5,645,457.5,680,447.5,700,437.5,680);
quad(447.5,700,467.5,720,447.5,755,427.5,720);

// Middle-left diagonals forming diamond outlines across the center cells.
line(10,260,75,385);
line(10,510,75,385);

line(260,260,200,385);
line(260,510,200,385);


stroke(#F0A894);
line(135,260,200,385);
line(200,385,135,510);
line(135,510,75,385);
line(75,385,135,260);

// Two small quads centered inside the middle-left diamond.
stroke(#FACFB6);
quad(135,312.5,155,342.5,135,372.5,115,342.5);
quad(135,397.5,155,427.5,135,457.5,115,427.5);

IL(Eric & Young) – Midterm Project, Le(Gwyneth) Yin

I. Project Name: Haunted House

When hearing “Haunted House”, what comes to people’s mind can be a room or house in the theme park which contains a lot of horror/scary elements. And those horror/scary elements are exactly what we would like to put into our project–however, our project is much smaller than those Haunted Houses in theme parks. We would say our project is a miniature of the Haunted House so we give it the name, “Haunted House”, which suggests the horror theme it demonstrates straightforward.

II. Project Statement of Purpose

  • Chief Purpose of our project:

The purpose of our project is basically an elaboration of the coming Halloween. To achieve this purpose, we aim at making something scary, funny, and interactive for our users—we want to make it distinct from the bloody but boring decorations you can see in the cafeteria (figure 1) and cafe every day. And to make our project “interactive”, we don’t want it to provide users only with “lowly-interactive” experiences like typing the keyboards or pushing the buttons–instead, we want our users to engage with our projects with their whole bodies and keep being curious about our project.

figure 1

  • What it addresses/resolves and why:

To make our project fit the Halloween theme, we used a ghost mask, two pumpkins (one 3D printed), and some other horror decorations. And to make our project interactive, we use two ultrasonic ranging sensors to control LEDs, speakers, and a stepper motor, which are all combined with our Halloween decorations.

In conclusion, the issue our project intends to address is how to combine entertaining Halloween theme with highly interactive technologies; to address the issue we try to use the ultrasonic ranging sensor to receive the analog input and look for the certain code. We are also inspired by a lot of readings to make our projects more user-friendly.

III. Literature/Art Review:

In The Art of Interactive Design, it is mentioned that interaction is “a cyclic process in which two actors alternately listen, think, and speak” (3). Inspired by this quotation, we want to build a project that can really “listen” to and “think” about users’ inputs–not just keep giving the same output all the time. So, we use two ultrasonic ranging sensors, together with coding, to make our project give different responses when users give different inputs (different distance to the sensors in this case).

In The Future of Design: when you come to a fork, take it, it is mentioned that “the design had to be informed by…an appreciation of the limits and capabilities of the ordinary people who were expected to master the devices.” From this quotation, we learned that, to make our project user-friendly, we should make it clear to users how they should play with it. Taking that into consideration, we stuck two pictures of hands next to our sensors (figure 2).

figure 2

We are also inspired by two wonderful artists–Daniel Rozin and Matthias Dörfelt. When professors showed the former’s PomPom Mirror in the lecture at the very beginning of this semester, I was somehow “enlightened” by it–the best interaction between humans and machines should never be restricted in cliched actions like typing the keyboard or pushing a button–instead, it should encourage users to move their whole bodies and be in motion! That’s the essential inspiration for us to include two ultrasonic ranging sensors at two sides of our project. As for the latter, his Face Trade – Art vending machine that trades mugshots for “free” portraits inspired us that arts shouldn’t be something unapproachable, instead, it should be something funny, close-to-life, and entertaining. However, the one inspired us most is the Student Government in NYU Shanghai. It is their “exquisite” decorations for the Cafe and Cafeteria that gave us the initial idea–maybe a interactive project relating to Halloween would be a funny, entertaining, and functional.

IV. Project Description:

As is mentioned above, after doing the readings and appreciating those former works by wonderful artists, we decided to use ultrasonic ranging sensors together with LEDs, Speakers, a stepper motor, and a 3D printed pumpkin to make our goal of making something entertaining and interactive become true (as is illustrated by figure 3). We especially chose the ultrasonic ranging sensor because we both played with this one in the sensor recitation and find the way it gives different outputs based on distances is genuinely interactive and interesting.

figure 3

After finishing the basic circuit, we stuck it into a box and combined it with other Halloween themed decorations (as is illustrated by figure 4). You can find the “hands” in figure 4 is different from those in figure 2. We changed this because in

figure 4

the user testing session, a lot of users suggested that it would be better if the “hands” are also Halloween themed. We also added the slogan “I dare you to come a little closer” because Eric said those two hands pictures used in the user testing session are too suggestive. We combined those two suggestions together and used the skull hands with the slogan. The idea of using “signs” is also related to the idea of user-friendly in The Future of Design: when you come to a fork, take itwhich has been mentioned in Section III.

V. Project Significance:

I think the significance of our project is that it can provide interactive Halloween themed entertainments to people of any age (perhaps not for elder people with a weak heart), especially to whom really loves Halloween (for example, me). I think our project is of especial value for those Halloween lovers in a sense that it can provide them with a different Halloween experience due to its interactivity and technology involving. We expect our project to be attractive to its users both aesthetically and technologically, and can invoke the users’ curiosity for it. To achieve this expectation, we used a 3D printed pumpkin as a “lampshade” for LEDs, included other funny decorations, and left all the cables visible on purpose to demonstrate a “cyber” theme.

VI. Project Design & Production:

  • Video before user testing session:

  • Video after user testing session:

As I’ve mentioned and illustrated in former sections, we used two ultrasonic ranging sensors to control LEDs, Speakers, and a stepper motor; we also used a 3D printed pumpkin as a “lampshade ” for LEDs. We especially use ultrasonic ranging sensors because we both used this sensor in a recitation and really think the way it detects distances is highly interactive and interesting. Also, we used LEDs because certain colors of lights (for example, red, which is what we used), can evoke users certain feelings (horror in our case); and we used speakers to play the horror tune composed by Beethoven to create the scary Halloween atmosphere. Nevertheless, we added a stepper motor behind the mask after the user testing session because, in the user testing session, a lot of users told us that they think the project would be scarier if the mask can shake or move. As is mentioned in Section V, we also changed the “hands sign” on our project and added a slogan to make it scarier and less suggestive. For the code, I basically adapted the code for ultrasonic ranging sensors and added something Annabel found online (the code for the Beethoven tune). I’m clearly acknowledged of the flaws in the code–since it’s for another project, something unnecessary is in it. However, I really tried to simplify it but every time I deleted some parts I thought unnecessary, the code would stop working. If I had more time, I would definitely try my best to fix those coding flaws, but for now, a code with flaws is better than a code that doesn’t work.

VII. Conclusions:

Again, the goal of our project is providing users with a new highly interactive and entertaining Halloween experience. I think we succeeded in the sense that our project indeed encouraged its users to use their two hands to play with it (in fact, not just hands, since ultrasonic ranging sensors can detect distances, even if you just walk through it without noticing it, it will still give you responses as you are already close enough). The problems we haven’t solved yet are 1. those flaws in the code (but it works anyway!); 2. the support in our hollow pumpkin. As is mentioned in former sections, we wanted to put LEDs inside the 3D printed pumpkin. However, since the pumpkin is hollow, it needs a support inside it, which made our LEDs unable to be inside. Though we didn’t fix this problem in a smart technological way, we fixed it by melting the support brutally. Overall, I think we’ve done a good job–I was really happy when I saw users really loved to play with our project in the user testing session!

 

//this constant won't change. It's the pin number of the sensor's output:
const int pingPin = 7;

int speakerPin = 9;  // speaker output pin
                     // NOTE(review): the stepper below is wired to pins 8-11,
                     // which includes pin 9 -- confirm the speaker/stepper wiring.

int length = 12; // the number of notes actually stored in notes[]
                 // (was 15, which read past the end of the 12-char string)
char notes[] = "gggx fffd   "; // a space represents a rest
int beats[] = { 1, 1, 1, 4,2, 1, 1, 1, 4,2, 1, 1, 1, 4, 1, 1, 1,4 }; // beats per note; only the first `length` entries are used
int tempo = 300; // milliseconds per beat

// Drive the speaker with a square wave whose half-period is `tone`
// microseconds, for roughly `duration` milliseconds.
void playTone(int tone, int duration) {
  long totalMicros = duration * 1000L;  // total play time in microseconds
  for (long elapsed = 0; elapsed < totalMicros; elapsed += tone * 2) {
    digitalWrite(speakerPin, HIGH);
    delayMicroseconds(tone);
    digitalWrite(speakerPin, LOW);
    delayMicroseconds(tone);
  }
}

// Look up `note` in the name table and play its tone for `duration` ms.
// A character with no table entry (e.g. '\0') is silently skipped.
void playNote(char note, int duration) {
  char names[] = { 'c', 'd', 'e', 'x', 'f', 'g', 'a', 'b', 'C' };
  int tones[] = { 1915, 1700, 1519, 1450, 1432, 1275, 1136, 1014, 956 };
  // Derive the count from the table itself. The loop previously stopped at
  // 8 while the table has 9 entries, so high 'C' (956) could never play.
  const int numNotes = sizeof(names) / sizeof(names[0]);

  for (int i = 0; i < numNotes; i++) {
    if (names[i] == note) {
      playTone(tones[i], duration);
      return;  // note names are unique; no need to keep scanning
    }
  }
}

#include <Stepper.h>

const int stepsPerRevolution = 200;  // change this to fit the number of steps per revolution
// for your motor

// initialize the stepper library on pins 8 through 11:
// NOTE(review): pin 9 is also speakerPin above -- the speaker and the stepper
// driver appear to share a pin; confirm the physical wiring allows this.
Stepper myStepper(stepsPerRevolution, 8, 9, 10, 11);



void setup() {
  // Serial carries the distance readouts and stepper direction logging.
  // (Serial.begin(9600) was previously called twice; once is enough.)
  Serial.begin(9600);

  pinMode(13, OUTPUT);          // LED lit at medium range (5-15 inches)
  pinMode(12, OUTPUT);          // LED lit at close range (under 5 inches)
  pinMode(speakerPin, OUTPUT);  // melody output

  // set the speed at 60 rpm:
  myStepper.setSpeed(60);
}

void loop() {
  // Duration of the echo pulse and the derived distances.
  long duration, inches, cm;

  // The PING))) is triggered by a HIGH pulse of 2 or more microseconds.
  // Give a short LOW pulse beforehand to ensure a clean HIGH pulse:
  pinMode(pingPin, OUTPUT);
  digitalWrite(pingPin, LOW);
  delayMicroseconds(2);
  digitalWrite(pingPin, HIGH);
  delayMicroseconds(5);
  digitalWrite(pingPin, LOW);

  // The same pin is used to read the signal from the PING))): a HIGH pulse
  // whose duration is the time (in microseconds) from the sending of the ping
  // to the reception of its echo off of an object.
  pinMode(pingPin, INPUT);
  duration = pulseIn(pingPin, HIGH);

  // convert the time into a distance and log it
  inches = microsecondsToInches(duration);
  cm = microsecondsToCentimeters(duration);

  Serial.print(inches);
  Serial.print("in, ");
  Serial.print(cm);
  Serial.print("cm");
  Serial.println();

  if( inches > 15) {
    // Nobody near: both LEDs off, nothing else happens.
    digitalWrite(13, LOW);
    digitalWrite(12, LOW);
  } else if ( inches > 5) {
    // Medium range: light LED 13 and shake the mask back and forth.
    digitalWrite(12, LOW);
    digitalWrite(13, HIGH);

    // step one revolution in one direction:
    Serial.println("clockwise");
    myStepper.step(stepsPerRevolution);
    delay(200);

    // step one revolution in the other direction:
    Serial.println("counterclockwise");
    myStepper.step(-stepsPerRevolution);
    delay(200);
  } else {
    // Close range: light LED 12 and play the melody.
    // (Was `else if (inches < 5)`, which left exactly 5 inches unhandled.)
    digitalWrite(12, HIGH);
    digitalWrite(13, LOW);

    for (int i = 0; i < length; i++) {
      if (notes[i] == ' ') {
        delay(beats[i] * tempo); // rest
      } else {
        playNote(notes[i], beats[i] * tempo);
      }
      // pause between notes
      delay(tempo / 2);
    }
  }

  delay(100);
}

// Convert an echo round-trip time (microseconds) to a one-way distance in
// inches. Per Parallax's datasheet for the PING))), sound travels at about
// 73.746 microseconds per inch (1130 feet per second); the measured time
// covers the distance out and back, so it is halved.
// See: http://www.parallax.com/dl/docs/prod/acc/28015-PING-v1.3.pdf
long microsecondsToInches(long microseconds) {
  long roundTripInches = microseconds / 74;
  return roundTripInches / 2;
}

// Convert an echo round-trip time (microseconds) to a one-way distance in
// centimeters. Sound travels at 340 m/s, i.e. about 29 microseconds per
// centimeter; the ping travels out and back, so the result is halved.
long microsecondsToCentimeters(long microseconds) {
  long roundTripCm = microseconds / 29;
  return roundTripCm / 2;
}