Final Documentation

By: Kelvin Liu
Partner: Fernando
Professor: Dan

For our final project, Fernando and I bounced around many ideas. At first, we wanted to extend the sound visualizer we created for our midterm into physical space. This would be done by creating an array of physical columns, each representing a different frequency band, and the heights of each column would react to changes in environmental sound. This was an ambitious goal, and while feasible, we decided to pivot after prototyping a few linear actuators due to time and resource constraints.

The next idea we had was to add a new layer of interaction to the existing visualizer via a Leap Motion. The device enables a great way to interact with computers using gestures instead of the typical keyboard/mouse configuration. Our initial plan was to replace the keyboard commands that controlled color and other visual aspects of the visualizer with Leap Motion gestures. In addition, we planned to use an arduino-based speaker to react to different gestures, thereby allowing the user to not only control how the visualizer looks, but also control what the visualizer visualizes.

We were able to finish this idea, and the result was as expected. However, we decided to pivot once again for a variety of reasons. Creating a project from a drum machine/sequencer was far more interesting for the both of us, and we ended up learning a lot more. We used Minim instead of Processing’s sound library, and leveraged the nice programming interfaces the library provides. The end result was a 32 step sequencer pre-loaded with 808 hi-hats, snare, and kick samples and the ability to record three custom samples. Additionally, we used an arduino-based controller to control the recording as well as the playback tempo. The controller is optional, as there are key commands that accomplish the same things.

the initial recording screen

the initial recording screen

a really quick beat

a really quick beat

The division of work was pretty straightforward. We both worked on the processing code; Fernando dealt with adapting Minim’s example drum machine to support more steps and instruments, and I worked on serial communication as well as the sample recording logic. On the Arduino side, Fernando built the physical circuit, while I wrote the code to send over the potentiometer and button values to processing.

// CustomDrumMachine2Arduino.ino -- a controller for a drum machine
// Sends three record-button states and a tempo potentiometer reading to
// the Processing sketch as one comma-separated serial line per loop.

// digital pins for the three sample-record buttons
#define R1_BUTTON 7
#define R2_BUTTON 9
#define R3_BUTTON 10
// analog pin for the tempo potentiometer
#define TEMPO_POT A0

// saved sensor values
bool r1, r2, r3;
int tempo;

void setup() {
  // setup serial communication
  Serial.begin(9600);
  // set buttons as input
  // NOTE(review): plain INPUT (no internal pull-up) -- assumes external
  // pull-down resistors in the circuit; confirm against the wiring
  pinMode(R1_BUTTON, INPUT);
  pinMode(R2_BUTTON, INPUT);
  pinMode(R3_BUTTON, INPUT);
  // set potentiometers as input
  pinMode(TEMPO_POT, INPUT);
}

// Polls the inputs and emits one CSV line ("r1,r2,r3,tempo\n") roughly
// 20 times per second.
void loop() {
  // Read current button states (HIGH while pressed) into the file-scope
  // variables. The original declared fresh locals here (bool r1 = ...),
  // which shadowed the globals above and left them permanently unused.
  r1 = digitalRead(R1_BUTTON);
  r2 = digitalRead(R2_BUTTON);
  r3 = digitalRead(R3_BUTTON);
  // read the tempo potentiometer (0-1023)
  tempo = analogRead(TEMPO_POT);
  // send over serial as comma-separated values, newline terminated
  Serial.print(r1);
  Serial.print(",");
  Serial.print(r2);
  Serial.print(",");
  Serial.print(r3);
  Serial.print(",");
  Serial.print(tempo);
  Serial.println();
  // throttle updates to ~20 Hz
  delay(50);
}

// CustomDrumMachine2.pde -- a drum machine that supports dynamic samples
// A 32-step sequencer with three stock 808 sounds plus three samples the
// user can record live; optionally driven by an Arduino controller.

import processing.opengl.*;
import processing.serial.*;
import ddf.minim.*;
import ddf.minim.ugens.*;

// colors
color red = color(255, 0, 0);    // inactive step cell
color green = color(0, 255, 0);  // active step cell
color blue = color(0, 0, 255);   // beat-position marker

// serial communication
boolean SERIAL = false;  // set true when the Arduino controller is attached
int NUM_OF_VALUES = 4;   // r1, r2, r3, tempo
int[] sensorValues = new int[NUM_OF_VALUES];
Serial myPort;
String myString;         // last raw line read from serial

// minim
Minim minim;
// audio io
AudioInput in;   // microphone input; source for the sample recorders
AudioOutput out; // sequencer playback output
// recorders
AudioRecorder rec1;
AudioRecorder rec2;
AudioRecorder rec3;
// samples
Sampler kick;
Sampler snare;
Sampler hat;
Sampler cust1;   // user-recorded samples (custom1..3.wav)
Sampler cust2;
Sampler cust3;

// logic
String status = "Not recording any samples.";
ArrayList<Rect> buttons = new ArrayList<Rect>();
// which screen to display
boolean firstScreen = true;  // true = recorder screen, false = sequencer
// values
// NOTE(review): recorded/recorded2/recorded3, counter, and a are never
// read in this sketch -- candidates for removal
boolean recorded = false;
boolean recorded2 = false;
boolean recorded3 = false;
int counter = 100;
int a = 25;
int b = 32;      // number of steps in the sequence
int bpm = 120;   // playback tempo (beats per minute)
int beat = 0;    // current step index, 0..b-1
// bool arrays
// one on/off row per instrument, indexed by step
boolean[] hatRow = new boolean[b];
boolean[] snareRow = new boolean[b];
boolean[] kickRow = new boolean[b];
boolean[] cust1Row = new boolean[b];
boolean[] cust2Row = new boolean[b];
boolean[] cust3Row = new boolean[b];

// classes
// Instrument fired once per sixteenth note. noteOn triggers every sampler
// whose row is active at the current beat; noteOff advances the beat and
// reschedules this same instance, so one Tick keeps the sequencer running
// forever.
class Tick implements Instrument {
  void noteOn(float dur) {
    if (hatRow[beat])
      hat.trigger();
    if (snareRow[beat])
      snare.trigger();
    if (kickRow[beat])
      kick.trigger();
    if (cust1Row[beat])
      cust1.trigger();
    if (cust2Row[beat])
      cust2.trigger();
    if (cust3Row[beat])
      cust3.trigger();
  }

  void noteOff() {
    // calculate next beat
    beat = (beat + 1)%b;
    // trigger next note
    // pick up any tempo change before scheduling the next sixteenth
    out.setTempo(bpm);
    out.playNote(0, 0.25f, this);
  }
}

// simple class for drawing the gui
// One clickable step cell bound to a slot in an instrument's step row.
class Rect {
  int x, y, w, h;
  boolean[] steps;
  int stepId;

  public Rect(int _x, int _y, boolean[] _steps, int _id) {
    x = _x;
    y = _y;
    w = 14;
    h = 30;
    steps = _steps;
    stepId = _id;
  }

  // paint the cell: green when its step is active, red otherwise
  public void draw() {
    if (steps[stepId])
      fill(green);
    else
      fill(red);
    rect(x, y, w, h);
  }

  // flip the bound step when the cursor is inside this cell
  public void mousePressed() {
    boolean insideX = mouseX >= x && mouseX <= x + w;
    boolean insideY = mouseY >= y && mouseY <= y + h;
    if (insideX && insideY)
      steps[stepId] = !steps[stepId];
  }
}

// sets up serial communication
void setupSerial() {
  // create Serial object
  // NOTE(review): hard-coded port index 2 -- adjust to match the entry
  // printed by Serial.list() on the machine running the sketch
  myPort = new Serial(this, Serial.list()[2], 9600);
  // throw away potentially garbage initial reading
  myPort.clear();
  myString = myPort.readStringUntil(10);  // 10 = '\n'
  myString = null;
}

// reads newline-terminated CSV lines from serial and places the parsed
// integers in sensorValues; drains the buffer so the newest complete
// line wins
void updateSerial() {
  while (myPort.available() > 0) {
    myString = myPort.readStringUntil(10);  // 10 = '\n'
    if (myString != null) {
      String[] serialInArray = split(trim(myString), ",");
      // ignore partial or malformed lines
      if (serialInArray.length == NUM_OF_VALUES)
        for (int i=0; i<serialInArray.length; i++)
          sensorValues[i] = int(serialInArray[i]);
    }
  }
}

// One-time initialization: window, optional serial, audio I/O, the three
// sample recorders, all six samplers, the 32x6 grid of step buttons, and
// the self-rescheduling sequencer tick (started muted because the sketch
// opens on the recorder screen).
void setup() {
  // basics
  size(900, 400);
  background(0);
  textFont(createFont("Arial", 20));
  // conditionally enable serial communication
  if (SERIAL)
    setupSerial();
  // create minim object
  minim = new Minim(this);
  in = minim.getLineIn();
  out = minim.getLineOut();
  // recorders for custom samples
  rec1 = minim.createRecorder(in, "data/custom1.wav");
  rec2 = minim.createRecorder(in, "data/custom2.wav");
  rec3 = minim.createRecorder(in, "data/custom3.wav");
  // load all samples
  // 4 voices each so fast tempos don't cut notes off
  // NOTE(review): custom1..3.wav must already exist in data/ at startup
  hat = new Sampler("808HH.wav", 4, minim);
  snare = new Sampler("808SD.wav", 4, minim);
  kick = new Sampler("808BD.wav", 4, minim);
  cust1 = new Sampler("custom1.wav", 4, minim);
  cust2 = new Sampler("custom2.wav", 4, minim);
  cust3 = new Sampler("custom3.wav", 4, minim);
  // patch samplers to output
  hat.patch(out);
  snare.patch(out);
  kick.patch(out);
  cust1.patch(out);
  cust2.patch(out);
  cust3.patch(out);
  // create buttons
  // one column of six instrument cells per step
  for (int i = 0; i < b; i++) {
    int x = 24*i + 10;
    buttons.add(new Rect(x, 100, hatRow, i));
    buttons.add(new Rect(x, 150, snareRow, i));
    buttons.add(new Rect(x, 200, kickRow, i));
    buttons.add(new Rect(x, 250, cust1Row, i));
    buttons.add(new Rect(x, 300, cust2Row, i));
    buttons.add(new Rect(x, 350, cust3Row, i));
  }
  // prep sequencer
  out.setTempo(bpm);
  out.playNote(0, 0.25f, new Tick());
  // muted until the user switches to the sequencer screen
  out.mute();
}

// Per-frame update: pull controller values, render whichever screen is
// active, and overlay the status line plus the screen-toggle hint.
void draw() {
  // update serial
  if (SERIAL)
    updateSerial();
  // draw ui
  if (firstScreen)
    drawRecorder();
  else
    drawSequencer();
  // display status text
  fill(255);
  // hint names the screen spacebar will switch TO (the one not shown)
  String screenName = (firstScreen) ? "sequencer" : "recorder";
  text(status, 5, 15);
  text("Press spacebar to toggle the " + screenName, 5, 35);
}

// Recorder screen: draws the live input waveforms and drives the three
// sample recorders off sensorValues[0..2] (set by the Arduino buttons,
// or by the keyboard shortcuts in keyReleased). A value of 1 starts
// recording; dropping back to 0 stops, saves the wav, and reloads the
// corresponding sampler so the new take is playable immediately.
void drawRecorder() {
  background(0);
  stroke(255);
  // draw the waveforms
  // left channel at y=50, right channel at y=150
  for (int i = 0; i < in.bufferSize() - 1; i++) {
    line(i, 50  + in.left.get(i)*50, i+1, 50  + in.left.get(i+1)*50);
    line(i, 150 + in.right.get(i)*50, i+1, 150 + in.right.get(i+1)*50);
  }

  // start recording
  if (!rec1.isRecording() && sensorValues[0] == 1) {
    rec1.beginRecord();
    status = "Recording sample 1.";
    println(status);
  } else if (!rec2.isRecording() && sensorValues[1] == 1) {
    rec2.beginRecord();
    status = "Recording sample 2.";
    println(status);
  } else if (!rec3.isRecording() && sensorValues[2] == 1) {
    rec3.beginRecord();
    status = "Recording sample 3.";
    println(status);
  }

  // stop recording
  // a recorder can't be reused after save(), so a fresh one is created
  // each time; the sampler is rebuilt and re-patched to pick up the file
  if (rec1.isRecording() && sensorValues[0] == 0) {
    rec1.endRecord();
    rec1.save();
    rec1 = minim.createRecorder(in, "data/custom1.wav");
    cust1 = new Sampler("custom1.wav", 4, minim);
    cust1.patch(out);
    status = "Recorded sample 1!";
    println(status);
  } else if (rec2.isRecording() && sensorValues[1] == 0) {
    rec2.endRecord();
    rec2.save();
    rec2 = minim.createRecorder(in, "data/custom2.wav");
    cust2 = new Sampler("custom2.wav", 4, minim);
    cust2.patch(out);
    status = "Recorded sample 2!";
    println(status);
  } else if (rec3.isRecording() && sensorValues[2] == 0) {
    rec3.endRecord();
    rec3.save();
    rec3 = minim.createRecorder(in, "data/custom3.wav");
    cust3 = new Sampler("custom3.wav", 4, minim);
    cust3.patch(out);
    status = "Recorded sample 3!";
    println(status);
  }
}

// Sequencer screen: draws the step grid, row labels, and the marker for
// the beat currently playing, and maps the controller potentiometer to
// the playback tempo.
void drawSequencer() {
  noStroke();
  background(0);
  // draw buttons
  for (int i = 0; i < buttons.size(); ++i)
    buttons.get(i).draw();
  // text
  fill(255);
  text("hi-hat", 778, 125);
  text("snare", 778, 175);
  text("kick", 778, 225);
  text("sample 1", 778, 275);
  text("sample 2", 778, 325);
  text("sample 3", 778, 375);
  // draw beat marker
  fill(blue);
  rect(24*beat + 10, 85, 14, 9);
  // update bpm
  // pot range 0-1023 mapped to the standard metronome range 40-208
  // (without the controller, bpm simply keeps its default of 120)
  if (SERIAL)
    bpm = int(map(sensorValues[3], 0, 1023, 40, 208));
  // set status text
  status = "Tempo: " + bpm;
}

// Forwards clicks to the step cells, but only while the sequencer
// screen is showing.
void mousePressed() {
  if (firstScreen)
    return;
  for (Rect button : buttons)
    button.mousePressed();
}

// Keyboard controls. Space flips between the recorder and sequencer
// screens (muting playback on the recorder). '1'/'2'/'3' start recording
// a custom sample and 's'/'d'/'f' stop it, by writing the same values
// into sensorValues that the Arduino buttons would.
void keyReleased() {
  switch (key) {
    case ' ':
      firstScreen = !firstScreen;
      if (firstScreen) {
        out.mute();
        status = "Not recording any samples.";
      } else {
        out.unmute();
      }
      break;
    // start recording samples 1-3
    case '1': sensorValues[0] = 1; break;
    case '2': sensorValues[1] = 1; break;
    case '3': sensorValues[2] = 1; break;
    // stop and save samples 1-3
    case 's': sensorValues[0] = 0; break;
    case 'd': sensorValues[1] = 0; break;
    case 'f': sensorValues[2] = 0; break;
  }
}

Lab 12 Documentation

By: Kelvin Liu
Professor: Dan

For this lab, we were asked to create an arduino-based media controller for a processing sketch. I decided to leverage the Minim library for this lab, since I had already used Sound library for the midterm project. While looking through the examples, I found a basic drum machine (Minim/Advanced/DrumMachine). It only has three sounds — kick, snare, and hi-hat — and requires the user to use the mouse to click rectangles to create sound. I decided that this would be a great project for which to create a controller. My controller includes three buttons (one for each instrument) as well as a potentiometer to control the playback tempo.

I began by creating the controller and programming the arduino. This did not take very long, since my “sensors” were very basic — just buttons and a potentiometer. The early weeks of interaction lab prepared me well 🙂 I had originally planned to use two potentiometers — another for volume, however when I tried to change the volume on the AudioOutput in Minim, I got a warning saying volume control was not supported.

humble beginnings

humble beginnings

making connections

making connections

all connected

all connected

Then, I moved on to modify the drum machine example code. First I created the necessary variables and methods to communicate over serial. To do this, I adapted the code Dan shared a few weeks back when we were first learning how to pass data between arduino and processing. Next, I wrote the functionality to actually use these values. Each button corresponds to the kick, snare, or hi-hat. So I wrote code that checks the value of the buttons, and if any are 1, the corresponding instrument at the current beat is toggled. Additionally, the value of the potentiometer is map‘d to an integer between 40 and 208. I chose these values because it is the standard range of a metronome. For the instrument samples, I used some old 808 samples that I made a few years ago.

making beats using the controller

making beats using the controller

code:

// simple808ard.ino -- a controller for a simple 808 drum machine
// Reads three instrument buttons and a tempo potentiometer, reporting
// them over serial as a CSV line -- but only when something changed.

// one digital pin per instrument button
#define BD_BUTTON 13
#define SD_BUTTON 12
#define HH_BUTTON 11
// analog pin for the tempo potentiometer
#define TEMPO_POT A0

// saved sensor values
// last values sent over serial (globals start zero-initialized)
bool bd, sd, hh;
int tempo;

void setup() {
  // setup serial communication
  Serial.begin(9600);
  // set buttons as input
  // NOTE(review): plain INPUT assumes external pull-down resistors
  pinMode(BD_BUTTON, INPUT);
  pinMode(SD_BUTTON, INPUT);
  pinMode(HH_BUTTON, INPUT);
  // set potentiometers as input
  pinMode(TEMPO_POT, INPUT);
}

// Polls the inputs ~20 times per second and emits one CSV line
// ("bd,sd,hh,tempo\n") whenever any value differs from the last report.
void loop() {
  // sample each input once per iteration
  bool newBd = digitalRead(BD_BUTTON);
  bool newSd = digitalRead(SD_BUTTON);
  bool newHh = digitalRead(HH_BUTTON);
  int newTempo = analogRead(TEMPO_POT);
  // did anything change since the last report?
  bool changed = (newBd != bd)
    || (newSd != sd)
    || (newHh != hh)
    || (newTempo != tempo);
  if (changed) {
    // remember the latest values
    bd = newBd;
    sd = newSd;
    hh = newHh;
    tempo = newTempo;
    // emit one comma-separated line, newline terminated
    Serial.print(bd);
    Serial.print(",");
    Serial.print(sd);
    Serial.print(",");
    Serial.print(hh);
    Serial.print(",");
    Serial.print(tempo);
    Serial.println();
  }
  // throttle polling to ~20 Hz
  delay(50);
}

// simple808proc.pde -- a simple 808 drum machine that can be controlled
// adapted from `minim/examples/Advanced/DrumMachine`
// (Processing sketch; the original header said .ino by mistake)

// imports
import processing.opengl.*;
import processing.serial.*;
import ddf.minim.*;
import ddf.minim.ugens.*;

// variables
// serial communication
int NUM_OF_VALUES = 4;   // bd, sd, hh, tempo
int[] sensorValues;      // allocated in setupSerial()
Serial myPort;
String myString;         // last raw line read from serial

// drum machine
AudioOutput out;
Minim minim;
Sampler kick;
Sampler snare;
Sampler hat;
// one on/off row per instrument, 16 steps each
boolean[] hatRow = new boolean[16];
boolean[] snrRow = new boolean[16];
boolean[] kikRow = new boolean[16];
ArrayList<Rect> buttons = new ArrayList<Rect>();
int bpm = 120;  // playback tempo
int beat;       // current step index, 0..15
int pbeat;      // last step on which a controller button toggled a cell

// sets up serial communication
void setupSerial() {
  // create Serial object
  // NOTE(review): hard-coded port index 2 -- adjust to match the entry
  // printed by Serial.list() on the machine running the sketch
  myPort = new Serial(this, Serial.list()[2], 9600);
  // throw away potentially garbage initial reading
  myPort.clear();
  myString = myPort.readStringUntil(10);  // 10 = '\n'
  myString = null;
  // create array to hold the values
  sensorValues = new int[NUM_OF_VALUES];
}

// reads newline-terminated CSV lines from serial and places the parsed
// integers in sensorValues; drains the buffer so the newest complete
// line wins
void updateSerial() {
  while (myPort.available() > 0) {
    myString = myPort.readStringUntil(10);  // 10 = '\n'
    if (myString != null) {
      String[] serialInArray = split(trim(myString), ",");
      // ignore partial or malformed lines
      if (serialInArray.length == NUM_OF_VALUES) {
        for (int i=0; i<serialInArray.length; i++) {
          sensorValues[i] = int(serialInArray[i]);
        }
      }
    }
  }
}

// here's an Instrument implementation that we use
// to trigger Samplers every sixteenth note.
// Notice how we get away with using only one instance
// of this class to have endless beat making by
// having the class schedule itself to be played
// at the end of its noteOff method.
class Tick implements Instrument {
  // fire every instrument whose row is active at the current beat
  void noteOn( float dur ) {
    if (hatRow[beat]) hat.trigger();
    if (snrRow[beat]) snare.trigger();
    if (kikRow[beat]) kick.trigger();
  }

  void noteOff() {
    // next beat
    beat = (beat+1)%16;
    // set the new tempo
    out.setTempo( bpm );
    // play this again right now, with a sixteenth note duration
    out.playNote( 0, 0.25f, this );
  }
}

// simple class for drawing the gui
// One clickable step cell bound to a slot in an instrument's step row.
class Rect {
  int x, y, w, h;
  boolean[] steps;
  int stepId;

  public Rect(int _x, int _y, boolean[] _steps, int _id) {
    x = _x;
    y = _y;
    w = 14;
    h = 30;
    steps = _steps;
    stepId = _id;
  }

  // paint the cell green when active, red when inactive
  public void draw() {
    fill(steps[stepId] ? color(0, 255, 0) : color(255, 0, 0));
    rect(x, y, w, h);
  }

  // toggle when the cursor falls inside this cell
  public void mousePressed() {
    boolean inside = mouseX >= x && mouseX <= x + w
      && mouseY >= y && mouseY <= y + h;
    if (inside)
      toggle();
  }

  // flip this cell's step on or off
  public void toggle() {
    steps[stepId] = !steps[stepId];
  }
}

// One-time initialization: window, audio output, the three 808 samplers,
// the 16x3 grid of step buttons, the self-rescheduling sequencer tick,
// and the serial link to the Arduino controller.
void setup() {
  size(395, 200);
  minim = new Minim(this);
  out = minim.getLineOut();

  // load all of our samples, using 4 voices for each.
  // this will help ensure we have enough voices to handle even
  // very fast tempos.
  kick  = new Sampler( "808BD.wav", 4, minim );
  snare = new Sampler( "808SD.wav", 4, minim );
  hat   = new Sampler( "808HH.wav", 4, minim );

  // patch samplers to the output
  kick.patch( out );
  snare.patch( out );
  hat.patch( out );

  // one column of three instrument cells per step
  for (int i = 0; i < 16; i++) {
    buttons.add( new Rect(10+i*24, 50, hatRow, i ) );
    buttons.add( new Rect(10+i*24, 100, snrRow, i ) );
    buttons.add( new Rect(10+i*24, 150, kikRow, i ) );
  }

  beat = 0;

  // start the sequencer
  out.setTempo( bpm );
  out.playNote( 0, 0.25f, new Tick() );
  // setup serial
  setupSerial();
}

// Per-frame update: read the controller, toggle instrument cells at the
// current step while their buttons are held, map the pot to tempo, and
// render the grid plus the beat marker.
void draw() {
  background(0);
  fill(255);
  // read serial
  updateSerial();
  // current controller button states (nonzero = pressed)
  boolean bd = boolean(sensorValues[0]);
  boolean sd = boolean(sensorValues[1]);
  boolean hh = boolean(sensorValues[2]);
  // Toggle the cell at the current step for each held button. All three
  // branches are guarded by `pbeat != beat` so a held button toggles at
  // most once per step; the original only guarded the kick branch, so a
  // held snare/hi-hat button flickered its cell on every frame.
  if (pbeat != beat && (bd || sd || hh)) {
    if (bd)
      buttons.get(beat*3 + 2).toggle();
    if (sd)
      buttons.get(beat*3 + 1).toggle();
    if (hh)
      buttons.get(beat*3 + 0).toggle();
    pbeat = beat;
  }
  // map tempo to standard metronome range (40-208 bpm)
  bpm = int(map(sensorValues[3], 0, 1023, 40, 208));
  // draw buttons
  for(int i = 0; i < buttons.size(); ++i)
    buttons.get(i).draw();
  stroke(128);
  // beat marker: red on downbeats, green otherwise
  if ( beat % 4 == 0 )
    fill(200, 0, 0);
  else
    fill(0, 200, 0);
  rect(10+beat*24, 35, 14, 9);
}

// Forwards mouse clicks to every step cell so the grid can also be
// edited without the controller.
void mousePressed() {
  for (Rect button : buttons)
    button.mousePressed();
}

Lab 11 Documentation

By: Kelvin Liu
Partner: Fernando Nunez
Professor: Dan

For this lab, we were asked to create a “drawing robot”. The build process had two main parts: (1) creating a circuit to drive a stepper motor and (2) creating the arm mechanism to hold a pen. The former required an arduino, some jumper cables, a stepper motor, an SN754410NE integrated circuit, and a DC adapter. And the latter required a pen, some metal pins, and some digitally fabricated parts (see below).

We started by creating the circuit according to the following diagram.

the schematic for our motor

the schematic for our motor

It didn’t take very long to get this set up, however we did run into a minor hiccup when building the circuit. We tried our best to follow the colors shown in the schematic to make handling the wires simpler, but we somehow still managed to mix up some wires on our first attempt. We were able to fix this with a few minutes of simple debugging. To make sure everything was working, we used the stepper_oneRevolution example in the stepper library for arduino.

the beginnings

the beginnings

completed and working circuit!

completed and working circuit!


Assembling the arm was very simple. The fabricated pieces had cutouts for the pins as well as the pen. After finishing the arm, we attached it to the motor.

the parts

the parts

the assembled arm

the assembled arm

the attached arm

the attached arm

with a pen

with a pen

we need another group!

we need another group!

Finally, we teamed up with another group to create a basic working drawing robot.

Additionally, we each attached a potentiometer to control the speed at which the motor ran. This allowed us to gain more control over what the robot drew. Demo here.

Interaction Lab Final Project Concept

By: Kelvin Liu
Professor: Dan

The word interaction, though commonplace, carries significant and profound meaning. It is the result of prefixing the base action with the stem inter-. Action typically refers to a process that creates some effect. And the Latin origins of inter- imply that it means “between” or “among”. Then, interaction encapsulates the effects of many processes on one another. Simply put, the word describes reciprocal influences between multiple things, objects, or processes.

For the final project, I will be working with Fernando to extend the Processing-based sound visualization we created for our midterm project. To refresh, our midterm visualized environmental sound as a stack of 3D cylinders. We leveraged Processing’s Sound library to apply a fast Fourier transform (FFT) to sound input from a computer microphone. This gave us data on how much of each frequency was represented in the sound over a set of 512 frequency bands. We visualized these values as the radii of the cylinders.

Originally, we planned to take this visualization and put it into physical space. That is, we wanted to make something like a kinetic sculpture that would react to sound within its environment. Our design involved an array of linear actuators, each representing a band of frequencies. A particular actuator would be used to raise or lower a column depending on the amplitude value of the corresponding frequency given by the FFT.

However, after a couple iterations of prototyping a single linear actuator, we realized the difficulties that this idea introduces. To do it right, we would need a linear actuator for each frequency band we wanted to visualize. And each actuator would require a rack and pinion, housing, and some sort of motor to drive it. This in turn creates issues in powering the actuators as well as resource limitations in terms of motors and available pins on Arduino boards. Our original visualization uses 512 bands, and while it is very simple to decrease this number, the smaller the number, the less representative the visualization becomes. All in all, this idea did not seem to scale very well.

So we pivoted. After playing around with a Leap Motion, we quickly saw how incorporating this device would add a layer of immersion and interactivity that our previous visualization lacked. Instead of having to directly manipulate the mouse and keyboard to change parameters of the visualization such as color, users would be able to do this via hand gestures. Additionally, we now plan to use an Arduino with an attached speaker to emit sounds in response to other Leap Motion gestures. As a result, the user would be able to manipulate not only how the visualization looks, but also the visualized sound itself.

When it comes to sound visualization, the de facto standard seems to be music visualization. This is understandable — it adds a nice layer of allure and entertainment — but why stop at music? Visualizing everyday sounds can be just as compelling and interesting and may even expose slabs of sound that usually go unnoticed. Furthermore, most music visualizations are not always informative. More often than not, they simply show blobs and waves of color reacting to aspects of the track such as dynamics or tempo. While this can certainly result in something beautiful (but not always, i.e. milkdrop user presets), it is not very telling. Overall, we see our project to be purposeful and valuable in that it allows the user to be an active participant and visualizes sound — not just music — in an elegant, yet informative fashion.

Lab 9 Documentation

By: Kelvin Liu
Partner: Fernando Nunez
Professor: Dan

For this lab, we were asked to use tinkercad to prototype a 3D-printable object that incorporated a sensor. The sensors available included accelerometers, digital displays, joysticks, and stepper motors. We took this as an opportunity to prototype a crucial part of our final project — fabricating racks and pinions. To fulfill the parameters of this lab, it was clear that we needed to use the stepper motor.

Using tinkercad was relatively straightforward. We used a customizable gear model for the pinion, and another model for the rack. The most difficult part was creating a hole through which to mount the stepper motor. Tyler was able to help us out with this, and it only took a few extra minutes.

almost done modeling in tinkercad

almost done modeling in tinkercad

with the hole completed, we exported as an stl

with the hole completed, we exported as an stl

Lab 8 Documentation

By: Kelvin Liu
Professor: Dan

The Stamp

For my stamp, I decided to create a maze stamp. Instead of manually creating a maze in illustrator, I found a maze generator online. This generator has a lot of options, and it also shows the solution to the maze. After playing around with the settings for a bit, I found a maze that I liked.

the generated maze and solution

the generated maze and solution

I downloaded the generated maze as an SVG so I’d be able to edit the paths in illustrator. Then, I made the paths white, and created a red background. It was a straightforward and simple-enough process.

Process

the maze svg in illustrator

the maze svg in illustrator

colors set for the laser cutter

colors set for the laser cutter

Finished

the final stamp

the final stamp

The Trip

Instead of our usual friday lab session in the IMA studio, we took a field trip to the Autodesk offices on 浦电路. After listening to a presentation about Autodesk’s products and services, we were free to go around the presentation room. Out of what was in the room, I thought the Under Armour shoes with the 3D-printed soles were most interesting. It reminds me of how Nike and Adidas have developed ways of 3D printing fibers for better-fitting cleats and spikes. This was a cool trip overall.

Midterm//Sound Visualization Documentation

By: Kelvin Liu
Partner: Fernando
Professor: Dan

For our midterm project, Fernando and I created a sound visualizer. It is a Processing sketch that takes sound input from the computer’s microphone and visualizes its sound spectrum as a vertical stack of 3D cylinders. The result is a clean and simple to use sound visualizer. Since it creates visual representations of sound in the user’s environment, it can be understood as not only a tool, but also a source of entertainment.

with / without sound

with / without sound

When Fernando and I teamed up to work on the midterm, we didn’t have any concrete ideas. However, we did both want to work on a project somehow related to music. Eventually, we decided that a sound visualizer could fit the bill. After getting all of our ideas down, the project did not take long to finish. We worked together for the duration of the project.

We had originally planned to also make an arduino-based controller for the visualizer. We talked about using potentiometers to control colors, radii, and sensitivity. Ultimately, we decided against doing so, as having a customized controller didn’t really make sense for our use case. As a result, our project requires no additional hardware.

some quick math and sketches

some quick math and sketches

The sketch uses processing-sound to apply a fast-fourier transform to the microphone input. This produces amplitudes for 512 frequency bands over the input. Each of these bands was visualized as a cylinder in the stack. The cylinders are laid out by frequency, with lower frequencies at the top and higher frequencies at the bottom. The radius of a particular cylinder is determined by the amplitude of the cylinder’s corresponding frequency band.

The stack of cylinders is colored with a list of colors. During setup, the sketch generates two random colors to start the list. Each cylinder’s color is the result of linearly interpolating two adjacent colors in the list, as well as mixing in some white proportional to the radius. Users can add or remove colors by using the up or down arrow keys, respectively. Additionally, all colors in the list can be regenerated by pressing the mouse.

// sound visualization in processing
// Visualizes the microphone's frequency spectrum as a vertical stack of
// 3D cylinders, one per FFT band, colored along a user-editable palette.

import processing.sound.AudioIn;
import processing.sound.FFT;

// constants
int NUM_BANDS = 512;  // FFT bands; also the pixel height of the stack

// variables
ArrayList<Integer> colors;  // gradient anchor colors (always >= 2 entries)
AudioIn in;                 // microphone input
FFT fft;                    // spectrum analyzer attached to `in`

// One-time initialization: 3D canvas, microphone capture, FFT analysis,
// and a two-color starting palette.
void setup() {
  // defaults
  size(200, 800, P3D);
  noStroke();
  // use mic as input
  in = new AudioIn(this, 0);
  in.start();
  // apply fft to input
  fft = new FFT(this, NUM_BANDS);
  fft.input(in);
  // initial colors
  // the gradient needs at least two anchors
  colors = new ArrayList<Integer>();
  colors.add(randomColor());
  colors.add(randomColor());
}

// Per-frame render: analyze the current spectrum and draw one 1px-tall
// cylinder per band, stacked top (low frequencies) to bottom (high),
// with radius driven by band amplitude and color blended along the
// palette. Also handles live palette edits from the mouse and keyboard.
void draw() {
  // white background
  background(255);
  // centering
  translate(width/2, (height - NUM_BANDS)/2);
  // analyze current input
  fft.analyze();
  // calculate color chunk size
  // (how many consecutive bands blend between one pair of anchor colors)
  int chunk = NUM_BANDS/(colors.size() - 1) + 1;
  // draw a cylinder for each band
  for (int i = 0; i < NUM_BANDS; i++) {
    // start color index
    int cInd = i / chunk;
    // interpolation amount
    float amt = map(i % chunk, 0, chunk - 1, 0, 1);
    // gradient between colors
    color fill = lerpColor(colors.get(cInd), colors.get(cInd + 1), amt);
    // cylinder radius
    float r = fft.spectrum[i]*width*50;
    // ensure it is between 5 and 100
    r = max(r, 5);
    r = min(r, 100);
    // add white to color based on radius
    // louder bands (larger radius) wash toward white
    amt = map(r, 5, 100, 0, 1);
    fill = lerpColor(fill, #FFFFFF, amt);
    // draw cylinder
    // each band sits 1px below the previous one
    translate(0, 1);
    pushMatrix();
    fill(fill);
    cylinder(r, r, 1, 50);
    popMatrix();
  }

  // generate new colors on click
  if (mousePressed)
    newColors();
  // add/remove colors on UP/DOWN
  // NOTE: checked every frame, so a held key adds/removes several colors
  if (keyPressed) {
    if (keyCode == UP && colors.size() < NUM_BANDS/4)
      colors.add(randomColor());
    else if (keyCode == DOWN && colors.size() > 2)
      colors.remove(colors.size() - 1);
  }
}

// returns a fully opaque color with uniformly random RGB channels
color randomColor() {
  float r = random(0, 255);
  float g = random(0, 255);
  float b = random(0, 255);
  return color(r, g, b);
}

// replaces every entry in the palette with a fresh random color,
// keeping the palette length unchanged
void newColors() {
  int n = colors.size();
  for (int i = 0; i < n; i++)
    colors.set(i, randomColor());
}

// creates a 3d cylinder
// from https://forum.processing.org/one/topic/draw-a-cone-cylinder-in-p3d.html
// bottom/top: radii of the two end caps (unequal values give a cone
// frustum); h: height along the y axis; sides: segments approximating
// the circle. Drawn about the current origin, caps at y = -h/2 and h/2.
void cylinder(float bottom, float top, float h, int sides)
{
  pushMatrix();

  translate(0, h/2, 0);

  float angle;
  // ring of points for the bottom cap
  float[] x = new float[sides+1];
  float[] z = new float[sides+1];

  // ring of points for the top cap
  float[] x2 = new float[sides+1];
  float[] z2 = new float[sides+1];

  //get the x and z position on a circle for all the sides
  for (int i=0; i < x.length; i++) {
    angle = TWO_PI / (sides) * i;
    x[i] = sin(angle) * bottom;
    z[i] = cos(angle) * bottom;
  }

  for (int i=0; i < x.length; i++) {
    angle = TWO_PI / (sides) * i;
    x2[i] = sin(angle) * top;
    z2[i] = cos(angle) * top;
  }

  noStroke();

  //draw the bottom of the cylinder
  // triangle fan from the cap center out to the ring
  beginShape(TRIANGLE_FAN);

  vertex(0, -h/2, 0);

  for (int i=0; i < x.length; i++) {
    vertex(x[i], -h/2, z[i]);
  }

  endShape();

  //draw the center of the cylinder
  // quad strip joining the bottom ring to the top ring
  beginShape(QUAD_STRIP);

  for (int i=0; i < x.length; i++) {
    vertex(x[i], -h/2, z[i]);
    vertex(x2[i], h/2, z2[i]);
  }

  endShape();

  //draw the top of the cylinder
  beginShape(TRIANGLE_FAN);

  vertex(0, h/2, 0);

  for (int i=0; i < x.length; i++) {
    vertex(x2[i], h/2, z2[i]);
  }

  endShape();

  popMatrix();
}

Lab 6 Documentation

By: Kelvin Liu
Partner: Nathalia Lin
Professor: Dan

For this lab we were asked to create something that used serial communication between arduino and processing. Nathalia and I did not immediately know what we wanted to make, so we started throwing ideas around. Eventually, we decided that we wanted to incorporate an led turning on with something onscreen. We started by assembling the materials: a pushbutton switch, a green led, a 10k resistor, a 220 resistor, and a whole lot of jumper cables.

humble beginnings

humble beginnings

with the led connected, the circuitry was complete

with the led connected, the circuitry was complete

The pushbutton switch controls whether or not the led is turned on. We used arduino to read when the switch was closed by connecting one end to digital pin 2. We weren’t exactly sure how to do this at first, and ended up finding this tutorial really helpful. It took us a while to work out issues with our circuit, but we got it eventually. The code writes to serial whenever the button’s state changes. On the processing side, this is used to determine what shape will be drawn. If 0 is sent, then a blue circle will be drawn, otherwise, a red square will be drawn. Overall, we made a simple project that makes very simple use of serial communication.

humble beginnings

humble beginnings

with the led connected, the circuitry was complete

with the led connected, the circuitry was complete

code:

// serial-communication-ard.ino -- sends the state of a button

// constants
#define BUTTON_PIN 2

// variables
bool buttonState, prevButtonState;

void setup() {
  // set button as input
  // (the external 10k resistor in the circuit provides the pull-down)
  pinMode(BUTTON_PIN, INPUT);
  // serial communication with the processing sketch
  Serial.begin(9600);
}

void loop() {
  // read the button (HIGH when pressed, LOW when released)
  buttonState = digitalRead(BUTTON_PIN);
  // send state on state change only, so the serial stream stays quiet
  // while the button is idle or held
  if (buttonState != prevButtonState)
    Serial.write(buttonState); // raw byte: 0 or 1
  // update for the next comparison
  prevButtonState = buttonState;
  // loop delay -- also spaces out reads (acts as a crude debounce)
  delay(50);
}

// serial-communication-proc.pde -- draws shapes based on the state of a button

import processing.serial.*;

// variables
Serial myPort;
int valueFromArduino;

void setup() {
  size(500, 500);
  noStroke();
  // draw both shapes from their centers so they overlap exactly
  ellipseMode(CENTER);
  rectMode(CENTER);
  // list available ports so the index below can be checked
  printArray(Serial.list());
  // NOTE(review): port index 1 is hardcoded -- verify it matches the
  // arduino's port on the machine running this sketch
  myPort = new Serial(this, Serial.list()[ 1 ], 9600);
}

// Runs every frame: drains the serial buffer down to the newest byte
// from the arduino and renders a blue circle (0) or a red square (non-0).
void draw() {
  background(255);
  // keep only the most recent byte the arduino sent
  while (myPort.available() > 0) {
    valueFromArduino = myPort.read();
  }
  println(valueFromArduino);
  boolean buttonReleased = valueFromArduino == 0;
  if (buttonReleased) {
    // blue circle while the button is not pressed
    fill(10, 20, 150);
    ellipse(width/2, height/2, 200, 200);
  } else {
    // red square while it is
    fill(150, 20, 10);
    rect(width/2, height/2, 200, 200);
  }
}

Assignment 6: The Waseda Flutist Robot

This article focuses on the development of a robot that can play flute at Waseda University. The so-called flutist robot WF-4RII is the seventh iteration of flute-playing robots at the university. The WF-4RII attempts to emulate all the human organs involved with playing the flute, and the article goes into detail about the technical and mechanical components that enable the robot. While this description was interesting, I found the motivation behind this robot far more powerful.

The researchers hope to further their understanding of what goes into musical expression and “intuition”. The WF-4RII takes into account its own and previous human data when it plays. This makes sense as a way to emulate how humans play, but does not necessarily create the same emotional response. The authors mention that they may explore neural networks in the future. Given the developments in convolutional networks and reinforcement learning in recent years, I think there’s high application potential for the flutist robot. Furthermore, using these techniques could deepen the researchers’ understanding, depending on the emergent characteristics that the robot exhibits.

Deliverable 5: Detailed Documentation

Introduction

I frequently misplace everyday items like my keys, phone, or glasses. Whenever this happens, the thing I’m looking for is almost always in my room. It takes a really long time to find, and the time could be better spent doing something productive. That’s where my robot comes in; my final project is a robot that finds misplaced items for its user, allowing them to focus on more important matters.

Materials

  • DFRobot RoMeo BLE V1.0
  • Raspberry Pi 3 Model B
  • Raspberry Pi Picamera
  • TowerPro SG90 servo
  • Digital IR obstacle sensor
  • HC-SR04 ultrasonic sensor (2)
  • 12V 1500mAh battery
  • 5V 7800mAh usb powerbank
  • Jumper wire (a lot)
  • 3mm MDF lasercut platform
  • M3 machine screw (16)
  • 10mm M3 nylon standoff (4)
  • 20mm M3 nylon standoff (5)
  • M3 hex nut (8)
  • Plastic twist ties (4)
  • DFRobot Cherokey 4WD base
    • DC motor (4)
    • Side chassis (2)
    • Chassis plate (4)
    • Wheel (4)
    • M2.5 machine screw (8)
    • M2.5 washer (8)
    • M2.5 split lock washer (8)
    • M2.5 hex nut (8)

Electronics

My robot requires both a Raspberry Pi (pi) and a DFRobot RoMeo (arduino). The boards communicate with one another using I2C; in this configuration, the pi is the master and the arduino is a slave. The pi is in charge of controlling the camera and servo, as well as running image classification. The arduino is in charge of controlling movement. It also uses the attached IR and ultrasonic sensors to avoid obstacles in the environment.

Mechanics

My robot utilizes the base of the DFRobot Cherokey 4WD mobile robot. I chose to use this assembly instead of creating my own because it was very stable and simple. The Cherokey kit also comes with a platform that is mounted onto the base. This platform provides a motor controller as well as some mount holes for a single arduino microcontroller. However, I chose to fabricate my own because the RoMeo board has an integrated motor controller and I needed to mount both the pi and arduino.

I started the process of designing my own mounting platform by sketching scale versions of each component individually. I was able to find some mechanical diagrams of the pi, but for the most part, I had to measure each component myself. This gave me detailed measurements of all of the mount holes I needed to create, as well as an idea of how large the platform itself would be.

sketch and measurements of individual components

sketch and measurements of individual components

Afterwards, I started on the layout. I made sure to orient the components in a way that would make connecting all of the electronics simple. Since it was such a tight fit, I decided to use different height standoffs for the pi and arduino. Additionally, the batteries that power the boards are mounted underneath the platform. I sketched the layout with top, left, and right views.

full layout sketch

full layout sketch

Finally, I created a blueprint for use with the IMA laser cutter in Illustrator. I used cardboard for my first laser cut to ensure that my design was compatible with the components. I had to make a few minor adjustments to the spacing of the arduino mount holes. After fixing that, I used 3mm MDF, and my platform was complete.

blueprint-preview

blueprint-preview

Software

To reason about how my robot operates, I created a set of finite state machines: RESTING, CLASSIFYING, NOTIFYING, MOVING, and AVOIDING. The following diagram illustrates each of the possible states and events that cause transitions between states.

a diagram of the robot's state machines

a diagram of the robot’s state machines

Movement + IR + Ultrasonic Example

This example is a prototype of the movement logic used by the arduino. It uses an IR sensor to detect obstacles in front of the robot. If an object is detected, it will get distance readings from the ultrasonic sensors mounted on both sides of the robot. The robot will turn in the direction with the larger distance reading, on the assumption that there will be fewer obstacles in that direction. To make the main file less cluttered, I created a tiny library for the specifics of the DFRobot RoMeo board. It provides a nice API to utilize the onboard buttons (which I probably won’t use) and motor controllers.

// romeo.h - defines a simple interface for the romeo motor controller

#ifndef ROMEO_H
#define ROMEO_H

// BUTTONS
// NOTE(review): BUTTON_PIN is read with analogRead() (see romeo.cpp), so 7
// refers to analog pin A7 and should not clash with digital pin 7 used for
// M2_DIRECTION_PIN below -- confirm against the RoMeo schematic
#define BUTTON_PIN 7
// upper analog-reading bounds for buttons S1-S5 (S1 reads as exactly 0)
#define S1_VAL 0
#define S2_VAL 144
#define S3_VAL 334
#define S4_VAL 520
#define S5_VAL 744

// button api
// returns 1-5 for a pressed onboard button, 0 when none is pressed
int getButton();

// MOTOR CONTROLLERS
// direction pins select forward/reverse; speed pins are driven with PWM
#define M1_DIRECTION_PIN 4
#define M1_SPEED_PIN 5
#define M2_SPEED_PIN 6
#define M2_DIRECTION_PIN 7

// a direction type
// enum values double as digitalWrite levels: REVERSE == 0 (LOW),
// FORWARD == 1 (HIGH)
typedef enum {
  REVERSE,
  FORWARD
} direction;

// car api
// assumes the motor controllers and power are at the back of the board
// so M1 controls the left motors and M2 controls the right motors
// all speeds are PWM duty values (0-255)
void carForward(int leftSpeed, int rightSpeed);
void carReverse(int leftSpeed, int rightSpeed);
void carStop();
void carTurnLeft(int leftSpeed, int rightSpeed);
void carTurnRight(int leftSpeed, int rightSpeed);
#endif
// romeo.cpp - implements a simple api for interfacing with the romeo board

#include <Arduino.h>
#include "romeo.h"

// BUTTONS
// returns an integer from 1 to 5 corresponding to a button press, otherwise 0
int getButton() {
  unsigned int reading = analogRead(BUTTON_PIN);
  // S1 pulls the analog line all the way down to 0
  if (reading == S1_VAL)
    return 1;
  // the remaining buttons sit at increasing voltage levels;
  // walk their upper bounds in order and report the first match
  const unsigned int limits[] = { S2_VAL, S3_VAL, S4_VAL, S5_VAL };
  for (int i = 0; i < 4; i++) {
    if (reading <= limits[i])
      return i + 2;
  }
  return 0; // no button pressed
}

// MOTOR CONTROLLERS
// sets the direction for the left and right motor controllers
void setDirection(direction l, direction r) {
  // direction enum values map straight onto digitalWrite levels
  digitalWrite(M1_DIRECTION_PIN, l);
  digitalWrite(M2_DIRECTION_PIN, r);
}
// sets the speed for the left and right motor controllers
void setSpeed(int l, int r) {
  // PWM duty cycle, 0 (stopped) to 255 (full speed)
  analogWrite(M1_SPEED_PIN, l);
  analogWrite(M2_SPEED_PIN, r);
}
// makes the car move forward
void carForward(int leftSpeed, int rightSpeed) {
  // drive both sides forward at the given PWM speeds
  setDirection(FORWARD, FORWARD);
  setSpeed(leftSpeed, rightSpeed);
}
// makes the car move reverse
void carReverse(int leftSpeed, int rightSpeed) {
  // drive both sides backward at the given PWM speeds
  setDirection(REVERSE, REVERSE);
  setSpeed(leftSpeed, rightSpeed);
}
// makes the car stop
void carStop() {
  // zero both PWM outputs; direction pins are left unchanged
  setSpeed(0, 0);
}
// makes the car turn left
void carTurnLeft(int leftSpeed, int rightSpeed) {
  // pivot left: left side backward, right side forward
  setDirection(REVERSE, FORWARD);
  setSpeed(leftSpeed, rightSpeed);
}
// makes the car turn right
void carTurnRight(int leftSpeed, int rightSpeed) {
  // pivot right: left side forward, right side backward
  setDirection(FORWARD, REVERSE);
  setSpeed(leftSpeed, rightSpeed);
}
// combo.ino -- a combined movement, ir sensor, and ultrasonic sensors example

#include "romeo.h"

#define IR_PIN 2
#define ECHO_L_PIN 11
#define ECHO_R_PIN 9
#define TRIG_L_PIN 12
#define TRIG_R_PIN 8

void setup() {
  // set pin modes
  pinMode(IR_PIN, INPUT);      // digital IR obstacle sensor
  pinMode(ECHO_L_PIN, INPUT);  // ultrasonic echo inputs
  pinMode(ECHO_R_PIN, INPUT);
  pinMode(TRIG_L_PIN, OUTPUT); // ultrasonic trigger outputs
  pinMode(TRIG_R_PIN, OUTPUT);
  // enable serial monitor for debugging output
  Serial.begin(9600);
}

// Main drive loop: go forward until the IR sensor reports an obstacle,
// then back off and pivot toward whichever side has more clearance.
void loop() {
  if (digitalRead(IR_PIN) != LOW) {
    // path is clear -- keep driving forward
    carForward(100, 100);
  } else {
    Serial.println("car is blocked.");
    // range both sides with the ultrasonic sensors
    int leftCm = getDistance(TRIG_L_PIN, ECHO_L_PIN);
    int rightCm = getDistance(TRIG_R_PIN, ECHO_R_PIN);
    Serial.print("left:  ");
    Serial.println(leftCm);
    Serial.print("right: ");
    Serial.println(rightCm);
    // pick the side with the larger clearance
    bool goLeft = leftCm > rightCm;
    Serial.println(goLeft ? "turning left" : "turning right");
    // back away from the obstacle before pivoting
    carReverse(100, 100);
    delay(500);
    carStop();
    delay(1000);
    if (goLeft)
      carTurnLeft(100, 100);
    else
      carTurnRight(100, 100);
    delay(1500);
    carStop();
  }
  // loop delay
  delay(1000);
}

// returns a range reading in cm for the given trigger and echo pins
// (HC-SR04: the echo duration covers the round trip, and sound travels
// roughly 29.1 us per cm, hence the divide by 2 and by 29.1)
int getDistance(int trig, int echo) {
  // activate sensors: a 10us HIGH pulse on trig starts a measurement
  digitalWrite(trig, LOW);
  delayMicroseconds(5);
  digitalWrite(trig, HIGH);
  delayMicroseconds(10);
  digitalWrite(trig, LOW);
  // get duration for echo (microseconds the echo pin stays HIGH)
  unsigned long duration = pulseIn(echo, HIGH);
  // halve the duration and convert to cm
  // (the (int) cast binds only to duration / 2; the floating-point
  // result of the division by 29.1 is then truncated on return)
  return (int) (duration / 2) / 29.1;
}

I2C Example

This example is a prototype of the communication between the pi and arduino. The code begins by having the robot in the default RESTING state, but eventually puts it in the MOVING state. This causes the arduino to “pretend” to move and then transition to the CLASSIFYING state. It demonstrates how the pi and arduino will keep the robot’s state in sync and initiate state transitions from either side.

// State.h -- defines the possible states of the robot

// Enum order matters: the rpi script (i2c.py) derives each state's numeric
// value from its position in this list, and its parser keeps only the
// all-uppercase lines of this file -- so do NOT add trailing comments to
// the member lines below.
// 0 RESTING, 1 CLASSIFYING, 2 NOTIFYING are rpi-driven; 3 MOVING and
// 4 AVOIDING are handled on the arduino.
typedef enum {
  RESTING,
  CLASSIFYING,
  NOTIFYING,
  MOVING,
  AVOIDING
} State;
#!/usr/bin/env python3

"""i2c.py -- testing rpi-arduino communication via i2c"""

import time
import smbus

# arduino slave address
ADDR = 0x77
# connect to /dev/i2c-1
BUS = smbus.SMBus(1)

def clean(s):
  """Return *s* with surrounding whitespace and every comma removed."""
  stripped = s.strip()
  return ''.join(ch for ch in stripped if ch != ',')

def rec():
  """returns a byte received from arduino"""
  # single-byte i2c read from the arduino slave at address ADDR
  return BUS.read_byte(ADDR)

def send(byte):
  """sends the given byte to the arduino"""
  # single-byte i2c write to the arduino slave at address ADDR
  BUS.write_byte(ADDR, byte)

def main():
  """Run a short sync demo: rest, start moving, then mirror the arduino."""
  # derive {index: NAME} from State.h -- after stripping whitespace and
  # commas, the enum members are the only all-uppercase lines in the file
  with open('State.h') as fin:
    names = [line for line in map(clean, fin) if line.isupper()]
  STATES = {i: name for i, name in enumerate(names)}
  # both sides start in state 0 (RESTING)
  state = 0
  print("my state is", STATES[state])
  # push our state so the arduino starts in sync
  send(state)
  # give it a moment
  time.sleep(1)
  # kick off a pretend search by switching to state 3 (MOVING)
  state = 3
  send(state)
  # poll the arduino's state for roughly 3 seconds
  deadline = time.time() + 3
  while time.time() < deadline:
    arduino_state = rec()
    print("the arduino's state is", STATES[arduino_state])
    # adopt the arduino's state whenever it differs from ours
    if arduino_state != state:
      state = arduino_state
      print("my state is now", STATES[state])
    # sleep for about 250 ms between polls
    time.sleep(250/1000)

if __name__ == '__main__':
  main()
// i2c.ino - testing rpi-arduino communication via i2c

#include <Wire.h>
#include "State.h"

#define ADDR 0x77

State state;

// arduino will always accept the rpi's state
void rec(int numBytes) {
  state = Wire.read();
  Serial.print("rpi told me to change: ");
  Serial.println(state);
}

void send() {
  // i2c request callback: reply with our current state as a single byte
  Wire.write(state);
}

void setup() {
  // set as slave and define callbacks
  Wire.begin(ADDR);
  Wire.onReceive(rec);  // rpi pushes a new state to us
  Wire.onRequest(send); // rpi asks for our current state
  // enable serial monitor for debugging output
  Serial.begin(9600);
  // the robot will always start in RESTING state
  state = RESTING;
}

// Prints the current state each pass; for arduino-owned states it fakes
// a couple seconds of work and then hands control back by switching to
// CLASSIFYING. RESTING / CLASSIFYING / NOTIFYING are driven by the rpi.
void loop() {
  Serial.println(state);
  if (state == MOVING || state == AVOIDING) {
    // pretend like something is happening
    delay(2000);
    state = CLASSIFYING;
  }
  delay(100);
}

Image Classification Example

The robot uses tensorflow to do offline image classification. Tensorflow provides many open source models for classification, and I plan to use a modified version of this script. The script uses a model called Inception-v3, which is trained for ImageNet. The result is fairly decent classification for a variety of general items.

At the moment, the robot uses python to do image classification. However, this is somewhat slow (on the order of seconds). Inception-v3 can be used with tensorflow’s C++ API, which should provide more performant results. However, I have not tested this yet, as it takes considerable time to build the tools required to use C++ tensorflow on Raspberry Pi.