Ranga Yogeshwar

EYE

Spätestens seit der NSA-Affäre wird deutlich, dass wir ständig überwacht werden. Ich protestiere dagegen und habe in Processing ein kleines Programm geschrieben. Es nutzt dabei die im Bildschirm integrierte Kamera. Die Augen verfolgen dich...

 

Zum Funktionsprinzip:

Das Programm nutzt die integrierte Webcam des Laptops. Aus zwei aufeinanderfolgenden Bildern wird durch Subtraktion der Bereich der Veränderung ermittelt. Dann bestimmt das Programm den Schwerpunkt dieser Veränderungswolke. Hieraus ergibt sich die Blickrichtung der Augen.

Um dieses Programm zu starten, benötigt ihr Processing. Mehr dazu unter: https://processing.org

 

Anbei der Code:

eye_basic2.pde
01/10/15 08:41:32 /Users/rangayogeshwar/Documents/CODING/Processing/AugeNSA/Eye basic2/eye_basic2/eye_basic2.pde
//  *********************************************
//                    eye
//  *********************************************
//  Uses a webcam to identify the center of movement within
//  a region.
//  Uses simple motion detection as described at: http://www.learningprocessing.com
//  Example 16-13.
//  Subtracting two consecutive video frames lets you determine the area of change.
//  As a result, black pixels are generated where the picture changes.
//  This is the input for a distribution along the x- and y-axis.
//  I then use the arithmetic mean to determine the center-x and center-y values.
//  The function "eye" returns the normalized center values (0-1) in x and y.
//  Use the following methods:
//  eyeStart   initializes the screen (place it in the setup area)
//  eyeCenter  this function returns the normalized center as float[0] = x-axis, float[1] = y-axis
//  Version 1 (October 2013) by Ranga Yogeshwar  (ranga@yogeshwar.de)

import processing.video.*;     // import this library as we request using the cam
  22 
 
  23 
float xc, yc, oxc, oyc;        // centervalue with respect to width and height
  24 
eyeSchirm auge[] = new eyeSchirm[2]; // auge is an array with 2 single eyes
  25 
color augenfarbe[] = new color[2];   // augenfarbe defines the iris color
  26 
 
  27 
void setup() {
  28 
  size(1024, 768);
  29 
  eyeStart();            // setup the eyes
  30 
 
  31 
  augenfarbe[0] = color(#04B45F);
  32 
 
  33 
  auge[0] = new eyeSchirm ( 341, 384,augenfarbe[0]);
  34 
  auge[1] = new eyeSchirm ( 682, 384, augenfarbe[0]);
  35 
 
  36 
}
  37 
 
  38 
 
  39 
void draw() {
  40 
  fill(50);
  41 
  rect(0, 0, width, height);// Background and Clear screen
  42 
  ncenter = eyeCenter();// ermitteltes Zentrum der Bewegung
  43 
  xc=ncenter[0]*width;
  44 
  yc=ncenter[1]*height;
  45 
 
  46 
  auge[0].eyepaint(ncenter[0], ncenter[1]);
  47 
  auge[1].eyepaint(ncenter[0], ncenter[1]);
  48 
 
  49 
 
  50 
  // ********** now we know the new values, so we can do something.
  51 
 
  52 
  //(0)  You can paint the bw video output (uncomment the next line
  53 
  //updatePixels();
  54 
 
  55 
  //(1) overlay a small square with the center of movement on the video image.
  56 
 
  57 
 
  58 
 
  59 
  //(2) paint new eyes that follow the movement
  60 
  // schirmAugen(ncenterx, ncentery);// Darstellung auf dem Bildschirm
  61 
}
  62 
 
  63 
 
  64 
 
  65 
 
  66 
 
  67 
 
  68 
Capture video;                 // Variable for capture device
  69 
 
  70 
PImage prevFrame;              // Previous Frame (as we compare changes)
  71 
float threshold = 30;          // How different must a pixel be to be a "motion" pixel
  72 
 
  73 
float centerx = 0;             // center of motion in x
  74 
float oldcenterx = 0;          // old center of motion in x
  75 
float centery = 0;              // center of motion in y
  76 
float oldcentery = 0;           // old center of motion in y
  77 
float[] ncenter = new float[2];// normalized returned centervalue for y (0-1)
  78 
 
  79 
float schwelle = 100;           // Schwelle ab welche die Augen die Position ändern
  80 
int n = 0;// Gesamtzahl der schwarzen Punkte (= Mass für die Bewegung im Bild)
  81 
float activity =0; // entspricht einem normierten Wert für Aktivität.
  82 
 
  83 
 
  84 
 
  85 
float[]  eyeCenter(){
  86 
 
  87 
  // **** Capture video
  88 
  if (video.available()) {
  89 
    // Save previous frame for motion detection!!
  90 
    prevFrame.copy(video, 0, 0, video.width, video.height, 0, 0, video.width, video.height); // Before we read the new frame, we always save the previous frame for comparison!
  91 
    prevFrame.updatePixels();
  92 
    video.read();
  93 
  }
  94 
 
  95 
  loadPixels();            // load the pixeldata from the image into the pixels array (See Ref PImage)
  96 
  video.loadPixels();
  97 
  prevFrame.loadPixels();
  98 
 
  99 
  //**** Hier wird die Intensität entlang der Breite x ermittelt.
 100 
  float[] intensityh= new float[video.width]; // setup the array horizontal
 101 
  float[] intensityv= new float[video.height]; // setup the array vertical
 102 
  // first reset the arrays
 103 
  for (int x = 0; x < video.width; x ++ ) {
 104 
    intensityh[x] =0;
 105 
  }
 106 
  for (int y = 0; y < video.height; y ++ ) {
 107 
    intensityv[y] =0;
 108 
  }
 109 
  n = 0;  // Reset n (Number of dark pixels showing movement)
 110 
 
 111 
  // Begin loop to walk through every pixel
 112 
  for (int x = 0; x < video.width; x ++ ) {
 113 
    for (int y = 0; y < video.height; y ++ ) {
 114 
 
 115 
      int loc = x + y*video.width;            // Step 1, what is the 1D pixel location from the 2d picture to the 1d array
 116 
      color current = video.pixels[loc];      // Step 2, what is the current color
 117 
      color previous = prevFrame.pixels[loc]; // Step 3, what is the previous color
 118 
 
 119 
      // Step 4, compare colors (previous vs. current)
 120 
      float r1 = red(current);
 121 
      float g1 = green(current);
 122 
      float b1 = blue(current);
 123 
      float r2 = red(previous);
 124 
      float g2 = green(previous);
 125 
      float b2 = blue(previous);
 126 
      float diff = dist(r1, g1, b1, r2, g2, b2);
 127 
 
 128 
      // Step 5, How different are the colors?
 129 
      // If the color at that pixel has changed, then there is motion at that pixel.
 130 
      if (diff > threshold) {
 131 
 
 132 
        pixels[loc] = color(0);           // If motion, display black
 133 
        intensityh[x] = intensityh[x]+1;  // update the distribution along x-axis
 134 
        intensityv[y] = intensityv[y]+1;  // update the distribution along y-axis
 135 
        n=n+1;
 136 
      }
 137 
      else {
 138 
        pixels[loc] = color(255);  // If change is too small, display white
 139 
      }
 140 
    }
 141 
  }
 142 
 
 143 
 
 144 
  // **** Now determine the mean values along x- and y axis to find the center.
 145 
 
 146 
  if (n > schwelle) {       // was there enough change within the picture?
 147 
  print (n);
 148 
    centerx = 0;            // reset the values before you start
 149 
    centery = 0;
 150 
    float c = 0;            // local variable used to count the mean along x
 151 
    float d = 0;            // local variable used to count the mean along y
 152 
    float mittex = video.width/2;    // center of the picture in x
 153 
    float mittey = video.height/2;   // center of the picture in y
 154 
 
 155 
    // *** calculate the arithmetic mean in x and y
 156 
    for (int x = 0; x < video.width; x ++ ) {
 157 
      c = c + intensityh[x]*(x-mittex);
 158 
    }
 159 
    for (int y = 0; y < video.height; y ++ ) {
 160 
      d = d + intensityv[y]*(y-mittey);
 161 
    }
 162 
    centerx = c/n + mittex;  // arithmetic mean in x as absolute value
 163 
    oldcenterx = centerx;
 164 
    centery = d/n + mittey;  // arithmetic mean in y as absolute value
 165 
    oldcentery = centery;
 166 
  }
 167 
  else
 168 
  {
 169 
    centerx = oldcenterx;
 170 
    centery = oldcentery;
 171 
  }
 172 
  ncenter[0] = centerx/video.width;  // normalized center in x (value between 0-1)
 173 
  ncenter[1] = centery/video.height; // normalized center in y (value between 0-1)
 174 
  activity = n/schwelle;
 175 
  activity = map(activity, 0, 10, 1,2);
 176 
activity = constrain(activity, 1,2);
 177 
  return ncenter;
 178 
 
 179 
}
 180 
 181 
void eyeStart(){
 182 
  
 183 
  video = new Capture(this, 160, 100, 30);// the camera must not be hires, so things work faster
 184 
  video.start();
 185 
  prevFrame = createImage(video.width, video.height, RGB);// Create an empty image the same size as the video
 186 
}
 187 
 188 
 189 
 
 190 
class eyeSchirm {
 191 
  float eyePosx = width/2;
 192 
  float eyePosy = height/2; // Position des Augenzentrums auf dem Schirm
 193 
  float eyeSize = width/6; // Durchmesser des Auges
 194 
  float eyePointx =0;
 195 
  float eyePointy = 0;// Blickrichtung des Auges (Wert -1 - 1)
 196 
  color eyeColor = color(100, 255, 100); // Farbe der Iris
 197 
 
 198 
  eyeSchirm(float x, float y, float d, float px, float py, color c) {
 199 
    eyePosx = x;
 200 
    eyePosy = y;
 201 
    eyeSize = d;
 202 
    eyePointx = 2*(0.5-px);
 203 
    eyePointy = 2*(0.5-py);
 204 
    eyeColor = c;
 205 
  }
 206 
 
 207 
  eyeSchirm(float x, float y, color c) {
 208 
    eyePosx = x;
 209 
    eyePosy = y;
 210 
    eyeColor = c;
 211 
  }
 212 
  eyeSchirm(float x, float y, float d) {
 213 
    eyePosx = x;
 214 
    eyePosy = y;
 215 
    eyeSize = d;
 216 
  }
 217 
 
 218 
  eyeSchirm(float x, float y) {
 219 
    eyePosx = x;
 220 
    eyePosy = y;
 221 
  }
 222 
 
 223 
 
 224 
  void eyepaint(float x, float y)
 225 
  {
 226 
    eyePointx=2*(0.5-x);
 227 
    eyePointy =2*(y-0.5);
 228 
    float ex = eyePosx+(eyeSize/3.2)*eyePointx;// Verschiebung der Pupille in x und y
 229 
    float ey = eyePosy+(eyeSize/3.2)*eyePointy;
 230 
    float di = eyeSize/2.5; // relativer Durchmesser Iris
 231 
    float dp = eyeSize/8; // relativer Durchmesser Pupille
 232 
   
 233 
 
 234 
    fill(255);
 235 
    stroke(50);
 236 
    ellipse(eyePosx, eyePosy, eyeSize, eyeSize); // Weisser Teil.
 237 
    fill(eyeColor);
 238 
    strokeWeight(1);
 239 
    noStroke();
 240 
    ellipse(ex, ey, di, di); // Iris
 241 
    fill(20);
 242 
    strokeWeight(1);
 243 
    noStroke();
 244 
    ellipse(ex, ey, dp*activity, dp*activity); // Pupille
 245 
  }
 246 
}
 247