Main Page | Namespace List | Class Hierarchy | Alphabetical List | Class List | File List | Namespace Members | Class Members | File Members | Related Pages

Modules/ImageProcessor/GT2004ImageProcessor/GT2004ImageProcessor.cpp

Go to the documentation of this file.
00001 /**
00002 * @file GT2004ImageProcessor.cpp
00003 *
00004 * Implementation of class GT2004ImageProcessor
00005 *
00006 * @author <a href="mailto:juengel@informatik.hu-berlin.de">Matthias Juengel</a>
00007 * @author <a href="mailto:roefer@tzi.de">Thomas Röfer</a>
00008 */
00009 
00010 #include "GT2004ImageProcessor.h"
00011 #include "Tools/Debugging/Debugging.h"
00012 #include "Tools/Debugging/GenericDebugData.h"
00013 #include "Tools/FieldDimensions.h"
00014 #include "Tools/Math/Geometry.h"
00015 #include "GT2004ImageProcessorTools.h"
00016 
00017 //~ #define EDGES
00018 #define PLOT(p,c) COMPLEX_DRAWING(imageProcessor_general,plot(p,c))
00019 
const int Y = 0, /**< Relative offset of Y component. Offsets suggest row-interleaved YUV storage (one row width apart) — TODO confirm against Image layout. */
          U = cameraResolutionWidth_ERS7, /**< Relative offset of U component. */
          V = 2 * cameraResolutionWidth_ERS7; /**< Relative offset of V component. */
00023 
00024 GT2004ImageProcessor::GT2004ImageProcessor(const ImageProcessorInterfaces& interfaces)
00025 : ImageProcessor(interfaces),
00026 beaconDetector(image, cameraMatrix, prevCameraMatrix, imageInfo, colorTable, colorCorrector, landmarksPercept),
00027 goalRecognizer(image, cameraMatrix, prevCameraMatrix, colorTable, colorCorrector, obstaclesPercept, landmarksPercept),
00028 ballSpecialist(colorCorrector)
00029 {
00030   yThreshold = 15;
00031   vThreshold = 8;
00032   beaconDetector.analyzeColorTable();
00033   double c = cos(-3*pi/4);
00034   double s = sin(-3*pi/4);
00035   rotation2x2 = Matrix2x2<double>(
00036     Vector2<double>(c,-s),
00037     Vector2<double>(s,c)
00038   );
00039 }
00040 
/**
 * Central per-frame image-processing routine.
 * Resets all percepts, estimates camera motion from the current and previous
 * camera matrices, computes the horizon geometry, scans the image in columns
 * and rows, runs the ball/beacon/goal specialists, and generates a number of
 * debug images and drawings. Results are written into the percept members
 * (landmarksPercept, linesPercept, ballPercept, playersPercept,
 * obstaclesPercept).
 */
void GT2004ImageProcessor::execute()
{
  ColorCorrector::load(); // (re)load the color correction tables
  INIT_DEBUG_IMAGE(imageProcessorPlayers, image);

  xFactor = yFactor = image.cameraInfo.focalLength;
  
  numberOfScannedPixels = 0;

  // reset everything
  landmarksPercept.reset(image.frameNumber);
  linesPercept.reset(image.frameNumber);
  ballPercept.reset(image.frameNumber);
  playersPercept.reset(image.frameNumber);
  obstaclesPercept.reset(image.frameNumber);
#ifdef EDGES
  edgesPercept.reset(image.frameNumber);
  edgeSpecialist.reset();
#endif

  // variations of the camera matrix for different object heights
  cmTricot = cameraMatrix;
  cmTricot.translation.z -= 100; // upper tricot border

  // On the first valid frame, seed the "previous" matrices so the motion
  // estimate below never compares against an uninitialized matrix.
  static bool prevCameraMatrixUninitialized = true;
  if (prevCameraMatrixUninitialized && cameraMatrix.frameNumber != 0)
  {
    prevCameraMatrix = cameraMatrix;
    prevCmTricot = cmTricot;
    prevCameraMatrixUninitialized = false;
  }

  longestBallRun = 0;

//-----------------------------------------------------
  // Estimate camera motion: compare the direction-of-view vector of the
  // current frame with that of the previous frame, both in world coordinates.
  Vector3<double> vectorInDirectionOfViewCamera(1,0,0);
  Vector3<double> vectorInDirectionOfViewWorld;
  vectorInDirectionOfViewWorld = cameraMatrix.rotation * vectorInDirectionOfViewCamera;

  const int visualizationScale = 20;
  Vector3<double> vectorInDirectionOfViewWorldOld;
  vectorInDirectionOfViewWorldOld = prevCameraMatrix.rotation * vectorInDirectionOfViewCamera;
  Vector3<double> cameraMotionVectorWorld = vectorInDirectionOfViewWorld - vectorInDirectionOfViewWorldOld;
  long frameNumber = cameraMatrix.frameNumber, 
    prevFrameNumber = prevCameraMatrix.frameNumber;
  const RobotDimensions& r = getRobotConfiguration().getRobotDimensions();
  double timeDiff = (frameNumber - prevFrameNumber) * r.motionCycleTime; // in seconds
  // Project both view directions onto the ground plane to measure panning.
  Vector2<double> currentOnGround(vectorInDirectionOfViewWorld.x, vectorInDirectionOfViewWorld.y), 
      oldOnGround(vectorInDirectionOfViewWorldOld.x, vectorInDirectionOfViewWorldOld.y);
  currentOnGround.normalize();
  oldOnGround.normalize();
  Vector3<double> cameraSpeedVectorWorld(0, 0, 0);
  double panningVelocity = 0;
  if (timeDiff > 0) //should never happen otherwise, but better safe than sorry
  {
    cameraSpeedVectorWorld = cameraMotionVectorWorld/timeDiff;
    // dot product of unit vectors = cos of the pan angle; clamp against
    // rounding errors before acos
    double cosAng = currentOnGround*oldOnGround;
    if (cosAng > 1.0)
      cosAng = 1.0;
    else
    if (cosAng <-1.0)
      cosAng = -1.0;
    panningVelocity = normalize(acos(cosAng))/timeDiff; // rad/s
  }
  // Transform camera speed into camera coordinates and project to the image
  // plane for visualization.
  Vector3<double> cameraSpeedVectorCamera = cameraMatrix.invert().rotation * cameraSpeedVectorWorld;
  Vector2<double> cameraSpeedVectorImg(cameraSpeedVectorCamera.y,
        cameraSpeedVectorCamera.z);
  cameraSpeedVectorImg /= cameraSpeedVectorCamera.x + // perspective projection, 
    image.cameraInfo.focalLength; // focal length is added to translate the vector away from the lens in case the distance
                                    // is too small and hence the visualization would be greatly magnified
  cameraSpeedVectorImg *= image.cameraInfo.focalLength*visualizationScale;
  // Draw the motion arrow; red when panning faster than pi/6 rad/s.
  Drawings::Color arrowColor = Drawings::blue;
  if (fabs(panningVelocity) > pi/6)
    arrowColor = Drawings::red;
  ARROW(imageProcessor_horizon,
    image.cameraInfo.opticalCenter.x, image.cameraInfo.opticalCenter.y, 
    int(image.cameraInfo.opticalCenter.x-cameraSpeedVectorImg.x), int(image.cameraInfo.opticalCenter.y-cameraSpeedVectorImg.y),
    3, Drawings::ps_solid, arrowColor);
//-----------------------------------------------------
  // Tilt of the view direction relative to the ground plane, in degrees.
  angleBetweenDirectionOfViewAndGround = 
    toDegrees(
    atan2(
    vectorInDirectionOfViewWorld.z,
    sqrt(sqr(vectorInDirectionOfViewWorld.x) + sqr(vectorInDirectionOfViewWorld.y))
    )
    );
  
  // Compute additional information about the image
  imageInfo.maxImageCoordinates.x = image.cameraInfo.resolutionWidth - 1;
  imageInfo.maxImageCoordinates.y = image.cameraInfo.resolutionHeight - 1;
  imageInfo.horizon = Geometry::calculateHorizon(cameraMatrix, image.cameraInfo);
  imageInfo.horizonInImage = Geometry::getIntersectionPointsOfLineAndRectangle(
  Vector2<int>(0,0), imageInfo.maxImageCoordinates, imageInfo.horizon, 
          imageInfo.horizonStart, imageInfo.horizonEnd);
  // vertLine: the horizon rotated by 90 degrees (perpendicular direction).
  imageInfo.vertLine = imageInfo.horizon;
  imageInfo.vertLine.direction.x = -imageInfo.vertLine.direction.y;
  imageInfo.vertLine.direction.y = imageInfo.horizon.direction.x;
  imageInfo.vertLine.normalizeDirection();

  // Scan through image
  scanColumns();
  scanRows();

#ifdef EDGES
  // get line-point percepts from specialist
  //edgeSpecialist.getEdgePointPercepts(linesPercept, cameraMatrix, prevCameraMatrix, image);
  // get edge percepts from specialist
  edgeSpecialist.getEdgesPercept(edgesPercept, cameraMatrix, prevCameraMatrix, image);
#endif

  // Analyze results
  // The ball specialist is only invoked when the scans found a run of more
  // than 2 orange pixels (longestBallRun is set during scanning).
  if (longestBallRun > 2)
    ballSpecialist.searchBall(image, colorTable, cameraMatrix, prevCameraMatrix,
      ballCandidate.x, ballCandidate.y, ballPercept);
  filterPercepts();
  
  landmarksPercept.cameraOffset = cameraMatrix.translation;
  // Beacons can only be detected when the horizon is visible in the image.
  if(imageInfo.horizonInImage)
  {
    beaconDetector.execute();
  }
  goalRecognizer.execute();

  // Drawing stuff
  if(imageInfo.horizonInImage)
  {
    LINE(imageProcessor_horizon,
     imageInfo.horizonStart.x, imageInfo.horizonStart.y, imageInfo.horizonEnd.x, 
     imageInfo.horizonEnd.y, 1, Drawings::ps_solid, Drawings::white );
  }
  DEBUG_DRAWING_FINISHED(imageProcessor_horizon);
  DEBUG_DRAWING_FINISHED(imageProcessor_scanLines);
  DEBUG_DRAWING_FINISHED(imageProcessor_general);
  DEBUG_DRAWING_FINISHED(imageProcessor_edges);

  SEND_DEBUG_IMAGE(imageProcessorPlayers);
  // Debug image: the color-corrected camera image.
  GENERATE_DEBUG_IMAGE(imageProcessorGeneral,
    Image correctedImage(image);
    ColorCorrector::correct(correctedImage);
    INIT_DEBUG_IMAGE(imageProcessorGeneral, correctedImage);
  )
  SEND_DEBUG_IMAGE(imageProcessorGeneral);

  // Debug image: color-class segmentation of the corrected image.
  GENERATE_DEBUG_IMAGE(segmentedImage1,
    Image correctedImage(image);
    ColorCorrector::correct(correctedImage);
    colorTable.generateColorClassImage(correctedImage, segmentedImage1ColorClassImage);
  )
  SEND_DEBUG_COLOR_CLASS_IMAGE(segmentedImage1);

  // Debug image: SUSAN edge detection on the U and V channels; edge pixels
  // are recolored by which channel(s) triggered.
  GENERATE_DEBUG_IMAGE(imageProcessorGradients,
    SUSANEdgeDetectionLite edgeDetectionU(GT2004BeaconDetector::edgeThresholdU);
    SUSANEdgeDetectionLite edgeDetectionV(GT2004BeaconDetector::edgeThresholdV);
    Image edgeDetectionImage;
    bool edgeU;
    bool edgeV;
    edgeDetectionImage.cameraInfo = image.cameraInfo;
    for (int y=1; y<edgeDetectionImage.cameraInfo.resolutionHeight-1; y++) 
    {
      for (int x=1; x<edgeDetectionImage.cameraInfo.resolutionWidth-1; x++) 
      {
        edgeU = edgeDetectionU.isEdgePoint(image, x, y, SUSANEdgeDetectionLite::componentB);
        edgeV = edgeDetectionV.isEdgePoint(image, x, y, SUSANEdgeDetectionLite::componentC);
        if (edgeU&&edgeV)
        {
          edgeDetectionImage.image[y][0][x] = 210;
          edgeDetectionImage.image[y][1][x] = 127;
          edgeDetectionImage.image[y][2][x] = 127;
        }
        else if (edgeU)
        {
          edgeDetectionImage.image[y][0][x] = 130;
          edgeDetectionImage.image[y][1][x] = 170;
          edgeDetectionImage.image[y][2][x] = 80;
        }
        else if (edgeV)
        {
          edgeDetectionImage.image[y][0][x] = 130;
          edgeDetectionImage.image[y][1][x] = 80;
          edgeDetectionImage.image[y][2][x] = 170;
        }
        else 
        {
          // non-edge pixels: darkened original for contrast
          edgeDetectionImage.image[y][0][x] = image.image[y][0][x]/2;
          edgeDetectionImage.image[y][1][x] = image.image[y][1][x];
          edgeDetectionImage.image[y][2][x] = image.image[y][2][x];
        }
        edgeDetectionImage.image[y][3][x] = 128;
        edgeDetectionImage.image[y][4][x] = 128;
        edgeDetectionImage.image[y][5][x] = 128;
      }
    }
    INIT_DEBUG_IMAGE(imageProcessorGradients, edgeDetectionImage);
  )
  SEND_DEBUG_IMAGE(imageProcessorGradients);

  // Debug image: per-pixel similarity to the ball color (orange), banded
  // into U/V values by similarity thresholds 30 and 60.
  GENERATE_DEBUG_IMAGE(imageProcessorBall,
    Image correctedImage(image);
    ColorCorrector::correct(correctedImage);
    for (int iPBix=0; iPBix<correctedImage.cameraInfo.resolutionWidth; iPBix++) 
    {
      for (int iPBiy=0; iPBiy<correctedImage.cameraInfo.resolutionHeight; iPBiy++) 
      {
        correctedImage.image[iPBiy][0][iPBix]=ballSpecialist.getSimilarityToOrange(correctedImage.image[iPBiy][0][iPBix],correctedImage.image[iPBiy][1][iPBix],correctedImage.image[iPBiy][2][iPBix]);
        if (correctedImage.image[iPBiy][0][iPBix]==0)
        {
          correctedImage.image[iPBiy][1][iPBix]=127;
          correctedImage.image[iPBiy][2][iPBix]=127;
        }
        else
        {
          if (correctedImage.image[iPBiy][0][iPBix]>30)
            correctedImage.image[iPBiy][1][iPBix]=0;
          else
            correctedImage.image[iPBiy][1][iPBix]=255;
          if (correctedImage.image[iPBiy][0][iPBix]>60)
            correctedImage.image[iPBiy][2][iPBix]=0;
          else
            correctedImage.image[iPBiy][2][iPBix]=255;
        }
      }
    }
    INIT_DEBUG_IMAGE(imageProcessorBall, correctedImage);
  )
  SEND_DEBUG_IMAGE(imageProcessorBall);
  DEBUG_DRAWING_FINISHED(imageProcessor_ball4);

  //for(i = 0; i < LinesPercept::numberOfLineTypes; ++i)
  //    numberOfPoints[i] = linesPercept.numberOfPoints[i];
  // output number of found percepts
  /*OUTPUT(idText,text,"GT2004ImageProcessor: percept-counting " << endl
    << "\tflags: " << landmarksPercept.numberOfFlags << endl
    << "\tgoals: " << landmarksPercept.numberOfGoals << endl
    << "\tlines: " << linesPercept.numberOfPoints[1] << endl
    << "\tballs: " << ballPercept.ballWasSeen << endl
    << "\tblue:  " << playersPercept.numberOfBluePlayers << endl
    << "\tred:   " << playersPercept.numberOfRedPlayers << endl
    << "\tobst:  " << obstaclesPercept.numberOfPoints
  );*/

  // Remember the current matrices for next frame's motion estimate.
  prevCameraMatrix = cameraMatrix;
  prevCmTricot = cmTricot;
  /*OUTPUT(idText, text, "---");
  for(int ii = 0; ii < linesPercept.numberOfPoints[1]; ++ii)
    OUTPUT(idText, text, int(linesPercept.points[1][ii].angle * 180 / pi));*/
}
00287 
00288 void GT2004ImageProcessor::scanColumns()
00289 {
00290   const int scanLineSpacing(4);
00291   const double verticalScanLineOffset(15.0);
00292                //verticalScanLineLength(50.0);
00293   Geometry::Line scanLine;
00294   Vector2<double> intersection;
00295   Vector2<int> topPoint,
00296                bottomPoint,
00297                topBorderPoint,
00298                bottomBorderPoint;
00299   double bottomBorderDist, topBorderDist;
00300   int i;
00301   bool noLines = false,
00302        skipLine;
00303 
00304   // Reset horizontal counters
00305   noRedCount = noBlueCount = 100;
00306   closestBottom = closestBottom = 40000;
00307   goalAtBorder = goalAtBorder = false;
00308 
00309   
00310   // scan lines are perpendicular to horizon
00311   scanLine.direction = imageInfo.vertLine.direction;
00312 
00313   // calculate and scan lines
00314   for(int dir = -1; dir <= 1; dir += 2)
00315   {
00316     for(i = 0; true; i++)
00317     {
00318       if (dir == 1 && i == 0) i = 1;
00319       scanLine.base.x = image.cameraInfo.resolutionWidth / 2 + imageInfo.horizon.direction.x * scanLineSpacing * dir * i;
00320       scanLine.base.y = image.cameraInfo.resolutionHeight / 2 + imageInfo.horizon.direction.y * scanLineSpacing * dir * i;
00321 
00322       //Does the scan line intersect with the image?
00323       if(!Geometry::getIntersectionPointsOfLineAndRectangle(
00324                                           Vector2<int>(0,0),
00325                                           imageInfo.maxImageCoordinates,
00326                                           scanLine, topBorderPoint, bottomBorderPoint))
00327       {
00328         break;
00329       }
00330       Geometry::getIntersectionOfLines(imageInfo.horizon, scanLine, intersection);
00331 
00332       if (fabs(scanLine.direction.y) > 0.001)
00333       {
00334         topBorderDist = (topBorderPoint.y - intersection.y) / scanLine.direction.y;
00335         bottomBorderDist = (bottomBorderPoint.y - intersection.y) / scanLine.direction.y;
00336       } else {
00337         topBorderDist = (topBorderPoint.x - intersection.x) / scanLine.direction.x;
00338         bottomBorderDist = (bottomBorderPoint.x - intersection.x) / scanLine.direction.x;
00339       }
00340 
00341       skipLine = false;
00342 
00343       // upper boundary for scan lines
00344       if (topBorderDist > -10)
00345         topPoint = topBorderPoint;
00346       else if (bottomBorderDist < -10)
00347         skipLine = true;
00348       else
00349       {
00350         topPoint.x = (int)(intersection.x - scanLine.direction.x * verticalScanLineOffset);
00351         topPoint.y = (int)(intersection.y - scanLine.direction.y * verticalScanLineOffset);
00352         if (topPoint.x < 0) topPoint.x = 0;
00353         if (topPoint.x >= image.cameraInfo.resolutionWidth) topPoint.x = image.cameraInfo.resolutionWidth - 1;
00354         if (topPoint.y < 0) topPoint.y = 0;
00355         if (topPoint.y >= image.cameraInfo.resolutionHeight) topPoint.y = image.cameraInfo.resolutionHeight - 1;
00356       }
00357         
00358       // lower boundaries for scan lines
00359       switch (i & 3)
00360       {
00361         case 1:
00362         case 3:
00363           if (bottomBorderDist < 30)
00364             bottomPoint = bottomBorderPoint;
00365           else if (topBorderDist > 30)
00366             skipLine = true;
00367           else
00368           {
00369             bottomPoint.x = (int)(intersection.x + scanLine.direction.x * 40);
00370             bottomPoint.y = (int)(intersection.y + scanLine.direction.y * 40);
00371             if (bottomPoint.x < 0) bottomPoint.x = 0;
00372             if (bottomPoint.x >= image.cameraInfo.resolutionWidth) bottomPoint.x = image.cameraInfo.resolutionWidth - 1;
00373             if (bottomPoint.y < 0) bottomPoint.y = 0;
00374             if (bottomPoint.y >= image.cameraInfo.resolutionHeight) bottomPoint.y = image.cameraInfo.resolutionHeight - 1;
00375           }
00376           noLines = true;
00377           break;
00378         case 2:
00379           if (bottomBorderDist < 60)
00380             bottomPoint = bottomBorderPoint;
00381           else if (topBorderDist > 60)
00382             skipLine = true;
00383           else
00384           {
00385             bottomPoint.x = (int)(intersection.x + scanLine.direction.x * 60);
00386             bottomPoint.y = (int)(intersection.y + scanLine.direction.y * 60);
00387             if (bottomPoint.x < 0) bottomPoint.x = 0;
00388             if (bottomPoint.x >= image.cameraInfo.resolutionWidth) bottomPoint.x = image.cameraInfo.resolutionWidth - 1;
00389             if (bottomPoint.y < 0) bottomPoint.y = 0;
00390             if (bottomPoint.y >= image.cameraInfo.resolutionHeight) bottomPoint.y = image.cameraInfo.resolutionHeight - 1;
00391           }
00392           noLines = true;
00393           break;
00394         case 0:
00395         default:
00396           bottomPoint = bottomBorderPoint;
00397           noLines = false;
00398           break;
00399       }
00400       if (!skipLine)
00401         scan(topPoint, bottomPoint, true, noLines);
00402     }
00403   }
00404   // finish clustering
00405   for(i = 0; i < 3; ++i)
00406   {
00407     clusterRobots(0,false,false);
00408   }
00409 }
00410 
00411 void GT2004ImageProcessor::scanRows()
00412 {
00413   Geometry::Line scanLine(imageInfo.horizon);
00414   double horizontalScanLineSpacing(20.0);
00415   Vector2<double> scanOffset(imageInfo.vertLine.direction * horizontalScanLineSpacing);
00416   scanLine.base += (scanOffset*2.0);
00417   Vector2<int> startPoint,
00418                  endPoint;
00419   //Test if the horizon is inside the image
00420   if(!Geometry::getIntersectionPointsOfLineAndRectangle(Vector2<int>(0,0),
00421                                                         imageInfo.maxImageCoordinates,
00422                                                         scanLine, startPoint, endPoint))
00423   {
00424     scanLine.base.x = (double)imageInfo.maxImageCoordinates.x / 2.0;
00425     scanLine.base.y = 1;
00426   }
00427   while(true)
00428   { 
00429     if(Geometry::getIntersectionPointsOfLineAndRectangle(
00430                                         Vector2<int>(0,0),
00431                                         imageInfo.maxImageCoordinates,
00432                                         scanLine, startPoint, endPoint))
00433     {
00434       LINE(imageProcessor_general,startPoint.x,startPoint.y,endPoint.x,endPoint.y,
00435        1,Drawings::ps_solid, Drawings::green);
00436       if(rand() > RAND_MAX / 2)
00437         scan(startPoint, endPoint, false, false);
00438       else
00439         scan(startPoint, endPoint, false, false);
00440       scanLine.base += scanOffset;
00441     }
00442     else
00443     {
00444       return;
00445     }
00446   }
00447 }
00448 
00449 void GT2004ImageProcessor::scan(const Vector2<int>& start, const Vector2<int>& end,
00450                                 bool vertical, bool noLines)
00451 {
00452 #ifdef EDGES
00453   edgeSpecialist.resetLine();
00454 #endif
00455 
00456   LinesPercept::LineType typeOfLinesPercept = LinesPercept::field;
00457   int pixelsSinceGoal = 0;
00458   int pixelsBetweenGoalAndObstacle = 0;
00459   
00460   Vector2<int> actual = start,
00461                diff = end - start,
00462                step(sgn(diff.x),sgn(diff.y)),
00463                size(abs(diff.x),abs(diff.y));
00464   bool isVertical = abs(diff.y) > abs(diff.x);
00465   int count,
00466       sum;
00467   double scanAngle = diff.angle();
00468 
00469   LINE(imageProcessor_scanLines,
00470     start.x, start.y, end.x, end.y,
00471     1, Drawings::ps_solid, Drawings::gray );
00472 
00473   // init Bresenham
00474   if(isVertical)
00475   {
00476     count = size.y;
00477     sum = size.y / 2;
00478   }
00479   else
00480   {
00481     count = size.x;
00482     sum = size.x / 2;
00483   }
00484 
00485   if(count > 7)
00486   {
00487     int numberOfPoints[LinesPercept::numberOfLineTypes],
00488         i;
00489     for(i = 0; i < LinesPercept::numberOfLineTypes; ++i)
00490       numberOfPoints[i] = linesPercept.numberOfPoints[i];
00491     RingBuffer<const unsigned char*,7> pixelBuffer;
00492     RingBuffer<const unsigned char*,7> pixelBufferFromUp;
00493     RingBuffer<unsigned char, 7> yBuffer;
00494     RingBuffer<unsigned char, 7> vBuffer;
00495     RingBuffer<colorClass, 7> colorClassBuffer;
00496 
00497     //the image is scanned vertically
00498     for(i = 0; i < 6; ++i) // fill buffer
00499     {
00500       unsigned char y,u,v;
00501       pixelBuffer.add(&image.image[actual.y][0][actual.x]);
00502       if (actual.x < 0 || actual.x >= image.cameraInfo.resolutionWidth || actual.y < 0 || actual.y >= image.cameraInfo.resolutionHeight)
00503         y = u = v = 0;
00504       else
00505       {
00506         y = colorCorrector.correct(actual.x, actual.y, 0, image.image[actual.y][0][actual.x]);
00507         u = colorCorrector.correct(actual.x, actual.y, 1, image.image[actual.y][1][actual.x]);
00508         v = colorCorrector.correct(actual.x, actual.y, 2, image.image[actual.y][2][actual.x]);
00509       }
00510       yBuffer.add(y);
00511       vBuffer.add(v);
00512       colorClass color = COLOR_CLASS(y, u, v);
00513       colorClassBuffer.add(color);
00514       
00515 #ifdef EDGES
00516       edgeSpecialist.checkPoint(actual, color, cameraMatrix, prevCameraMatrix, image);
00517 #endif
00518 
00519       // Bresenham
00520       if(isVertical)
00521       {
00522         actual.y += step.y;
00523         sum += size.x;
00524         if(sum >= size.y)
00525         {
00526           sum -= size.y;
00527           actual.x += step.x;
00528         }
00529       }
00530       else        
00531       {
00532         actual.x += step.x;
00533         sum += size.y;
00534         if(sum >= size.x)
00535         {
00536           sum -= size.x;
00537           actual.y += step.y;
00538         }
00539       }
00540     }
00541     lineColor = Drawings::numberOfColors;
00542 
00543     int redCount = 0,
00544         blueCount = 0,
00545         greenCount = 0,
00546         noGreenCount = 100,
00547         pinkCount = 0,
00548         noPinkCount = 100,
00549         yellowCount = 0,
00550         noYellowCount = 100,
00551         skyblueCount = 0,
00552         noSkyblueCount = 100,
00553         orangeCount = 0,
00554         noOrangeCount = 100;
00555     const unsigned char* firstRed = 0,
00556                        * lastRed = 0,
00557                        * firstBlue = 0,
00558                        * lastBlue = 0,
00559                        * firstGreen = 0,
00560                        * firstOrange = 0,
00561                        * lastOrange = 0,
00562                        * firstPink = 0,
00563                        * lastPink = 0,
00564                        * pFirst = pixelBuffer[2],
00565                        * up = pFirst;
00566     double upAngle = 0;
00567     enum{justUP, greenAndUP, down} state = justUP;
00568     bool borderFound = false; // there can be only one border point per scan line
00569 
00570     // obstacles
00571     enum{noGroundFound, foundBeginOfGround, foundEndOfGround} groundState = noGroundFound;
00572     Vector2<int> firstGround(0,0);
00573     int numberOfPixelsSinceFirstOccurrenceOfGroundColor = 0;
00574     int numberOfPixelsSinceFirstOccurrenceOfNonGroundColor = 0;
00575     int numberOfPixelsSinceLastGround = 0;
00576     int numberOfWhitePixelsSinceLastGround = 0;
00577     int numberOfGroundPixels = 0;
00578     int numberOfWhiteObstaclePixels = 0;
00579     int numberOfNotWhiteObstaclePixels = 0;
00580     bool beginOfGroundIsAtTheTopOfTheImage = false;
00581 
00582     struct BorderCandidate
00583     {
00584       bool found;
00585       Vector2<int> point;
00586       double angle;
00587     };
00588     BorderCandidate borderCandidate;
00589     borderCandidate.found = false;
00590 
00591     for(; i <= count; ++i)
00592     {
00593       pixelsSinceGoal++;
00594       numberOfScannedPixels++;
00595       unsigned char y,u,v;
00596       pixelBuffer.add(&image.image[actual.y][0][actual.x]);
00597       if (actual.x < 0 || actual.x >= image.cameraInfo.resolutionWidth || actual.y < 0 || actual.y >= image.cameraInfo.resolutionHeight)
00598         y = u = v = 0;
00599       else
00600       {
00601         y = colorCorrector.correct(actual.x, actual.y, 0, image.image[actual.y][0][actual.x]);
00602         u = colorCorrector.correct(actual.x, actual.y, 1, image.image[actual.y][1][actual.x]);
00603         v = colorCorrector.correct(actual.x, actual.y, 2, image.image[actual.y][2][actual.x]);
00604       }
00605       yBuffer.add(y);
00606       vBuffer.add(v);
00607       colorClass color = COLOR_CLASS(y, u, v);
00608       colorClassBuffer.add(color);
00609       
00610 #ifdef EDGES
00611       edgeSpecialist.checkPoint(actual, color, cameraMatrix, prevCameraMatrix, image);
00612 #endif
00613 
00614       // line point state machine
00615       const unsigned char* p3 = pixelBuffer[3];
00616       //UP: y-component increases ////////////////////////////////////////
00617       if(yBuffer[3] > yBuffer[4] + yThreshold)
00618       {
00619         up = p3;
00620         if(colorClassBuffer[6] == green)
00621         {
00622           Vector2<int> coords = getCoords(p3),
00623                        pointOnField; //position on field, relative to robot
00624           pixelBufferFromUp.init();
00625           if(Geometry::calculatePointOnField(coords.x, coords.y, cameraMatrix, prevCameraMatrix, image.cameraInfo, pointOnField))
00626             for(int k = pixelBuffer.getNumberOfEntries() - 1; k >= 0; --k)
00627               pixelBufferFromUp.add(pixelBuffer.getEntry(k));
00628           state = greenAndUP;
00629         }
00630         else 
00631           state = justUP;
00632       }
00633     
00634       //DOWN: y-component decreases //////////////////////////////////////
00635       else if(yBuffer[3] < 
00636               yBuffer[4] - yThreshold || 
00637               vBuffer[3] < 
00638               vBuffer[4] - vThreshold)
00639       {
00640         // is the pixel in the field ??
00641         if(state != down && // was an "up" pixel above?
00642            colorClassBuffer[0] == green) // is there green 3 pixels below ?
00643         {
00644           colorClass c = colorClassBuffer[6];
00645           if(!noLines || c == skyblue || c == yellow)
00646           {
00647             Vector2<int> coords = getCoords(p3),
00648                          pointOnField; //position on field, relative to robot
00649 
00650             if(Geometry::calculatePointOnField(coords.x, coords.y, cameraMatrix, prevCameraMatrix, image.cameraInfo, pointOnField))
00651             {
00652               Vector2<int> upCoords = getCoords(up);
00653               int height = (coords - upCoords).abs();
00654               int distance = (int) sqrt(sqr(cameraMatrix.translation.z) + sqr(pointOnField.abs())),
00655                 lineHeight = (int) Geometry::getSizeByDistance(image.cameraInfo, 25, distance);
00656 
00657               //The bright region is assumed to be on the ground. If it is small enough, it is a field line.
00658               if(height < 3 * lineHeight && state == greenAndUP &&
00659                  (colorClassBuffer[5] == white ||
00660                   colorClassBuffer[4] == white))
00661               {
00662                 double edgeAngle = calcEdgeAngle(coords, pointOnField, scanAngle, pixelBuffer);
00663                 Vector2<int> pointOnField2;
00664                 if(edgeAngle != LinesPercept::UNDEF && pixelBufferFromUp.getNumberOfEntries() > 0 &&
00665                    Geometry::calculatePointOnField(upCoords.x, upCoords.y, cameraMatrix, prevCameraMatrix, image.cameraInfo, pointOnField2))
00666                 {
00667                   upAngle = calcEdgeAngle(upCoords, pointOnField2, scanAngle, pixelBufferFromUp, true);
00668                   if(edgeAngle != LinesPercept::UNDEF && upAngle != LinesPercept::UNDEF)
00669                   {
00670                     // only add the closest field line. This makes filtering easier.
00671                     linesPercept.numberOfPoints[LinesPercept::field] = numberOfPoints[LinesPercept::field];
00672                     linesPercept.add(LinesPercept::field, pointOnField2, upAngle);
00673                     linesPercept.add(LinesPercept::field, pointOnField, edgeAngle);
00674                   }
00675                 }
00676               }
00677               else if(height >= 3 && !borderFound)
00678               {
00679                 switch(c)
00680                 {
00681                   case skyblue:
00682                     if(vertical)
00683                     {
00684                       borderFound = true;
00685                       if(getPlayer().getTeamColor() == Player::red && !linesPercept.numberOfPoints[LinesPercept::skyblueGoal])
00686                         goalAtBorder = pointOnField.abs() < closestBottom;
00687                       closestBottom = 40000;
00688                       double edgeAngle = calcEdgeAngle(coords, pointOnField, scanAngle, pixelBuffer, false, 2);
00689                       if(edgeAngle != LinesPercept::UNDEF)
00690                         linesPercept.add(LinesPercept::skyblueGoal, pointOnField, edgeAngle);
00691                       typeOfLinesPercept = LinesPercept::skyblueGoal;
00692                       pixelsSinceGoal = 0;
00693 
00694                       lastRed = lastBlue = 0;
00695                       redCount = blueCount = 0;
00696                     }
00697                     break;
00698                   case yellow:
00699                     if(vertical)
00700                     {
00701                       borderFound = true;
00702                       if(getPlayer().getTeamColor() == Player::blue && !linesPercept.numberOfPoints[LinesPercept::yellowGoal])
00703                         goalAtBorder = pointOnField.abs() < closestBottom;
00704                       closestBottom = 40000;
00705                       double edgeAngle = calcEdgeAngle(coords, pointOnField, scanAngle, pixelBuffer);
00706                       if(edgeAngle != LinesPercept::UNDEF)
00707                         linesPercept.add(LinesPercept::yellowGoal, pointOnField, edgeAngle);
00708                       typeOfLinesPercept = LinesPercept::yellowGoal;
00709                       pixelsSinceGoal = 0;
00710 
00711                       lastRed = lastBlue = 0;
00712                       redCount = blueCount = 0; 
00713                     }
00714                     break;
00715                   case white:
00716                     if(height > lineHeight * 3 && (vertical || height > 30))
00717                     {
00718                       //borderFound = true;
00719                       double edgeAngle = calcEdgeAngle(coords, pointOnField, scanAngle, pixelBuffer);
00720                       if(edgeAngle != LinesPercept::UNDEF)
00721                       {
00722                         //linesPercept.add(LinesPercept::border,pointOnField, edgeAngle);
00723                         borderCandidate.found = true;
00724                         borderCandidate.point = pointOnField;
00725                         borderCandidate.angle = edgeAngle;
00726                       }
00727                       typeOfLinesPercept = LinesPercept::border;
00728                       lastRed = lastBlue = 0;
00729                       redCount = blueCount = 0;
00730                     }
00731                     break;
00732                 }
00733               }
00734             }
00735           }
00736           state = down;
00737         }
00738       }
00739       
00740       colorClass c3 = colorClassBuffer[3];
00741       ++noOrangeCount;
00742       if (c3 == orange)
00743       {
00744         if(noOrangeCount > 1)
00745         {
00746           if (orangeCount > longestBallRun)
00747           {
00748             longestBallRun = orangeCount;
00749             ballCandidate = (getCoords(firstOrange) + getCoords(lastOrange)) / 2;
00750           }
00751           orangeCount = 1;
00752           firstOrange = p3;
00753           lastOrange = p3;
00754         }
00755         else
00756         {
00757           ++orangeCount;
00758           if(orangeCount > 2) // ignore robot if ball is below, distance would be wrong
00759           {
00760             lastRed = lastBlue = 0;
00761             redCount = blueCount = 0; 
00762           }
00763           lastOrange = p3;
00764         }
00765         firstGreen = 0;
00766         noOrangeCount = 0;
00767       }
00768       if(vertical)
00769       {
00770         // in scanColumns, recognize player and ball points
00771         ++noGreenCount;
00772         ++noPinkCount;
00773         ++noYellowCount;
00774         ++noSkyblueCount;
00775         switch(c3)
00776         {
00777           case blue: // count blue, remember last
00778             if(blueCount == 3)
00779               firstBlue = pixelBuffer[6];
00780             ++blueCount;
00781             lastBlue = p3;
00782             firstGreen = 0;
00783             break;
00784           case gray: // drop green above gray (i.e. robot legs)
00785             if(firstGreen && noGreenCount < 8)
00786               firstGreen = 0;
00787             break;
00788           case green: // find first green below a robot
00789             if(!firstGreen)
00790             { 
00791               firstGreen = p3;
00792             }
00793             if(noGreenCount > 6)
00794               greenCount = 1;
00795             else
00796               ++greenCount;
00797             noGreenCount = 0;
00798             break;
00799           case red:
00800             if(orangeCount < 6 || noOrangeCount > 4 || redCount > orangeCount)
00801             { // count red, remember last
00802               if(redCount == 3)
00803                 firstRed = pixelBuffer[6];
00804               ++redCount;
00805               lastRed = p3;
00806               firstGreen = 0;
00807               break;
00808             }
00809             // no break, red below orange is interpreted as orange
00810           case pink:
00811             if(noPinkCount > 6)
00812             {
00813               pinkCount = 1;
00814               firstPink = p3;
00815             }
00816             else
00817             {
00818               ++pinkCount;
00819             }
00820             lastPink = p3;
00821             noPinkCount = 0;
00822             break;
00823           case yellow:
00824             if(noYellowCount > 6)
00825               yellowCount = 1;
00826             else
00827               ++yellowCount;
00828             noYellowCount = 0;
00829             break;
00830           case skyblue:
00831             if(noSkyblueCount > 6)
00832               skyblueCount = 1;
00833             else
00834               ++skyblueCount;
00835             noSkyblueCount = 0;
00836             break;
00837           default:
00838             break;
00839           
00840         }//switch(c3)
00841 
00842         // obstacles state machine
00843         if(count > 90 && cameraMatrix.isValid)
00844         {
00845           DEBUG_IMAGE_SET_PIXEL_BLACK(imageProcessorPlayers, getCoords(pixelBuffer[0]).x, getCoords(pixelBuffer[0]).y);
00846           colorClass color = colorClassBuffer[0];
00847           if(groundState == noGroundFound)
00848           {
00849             DEBUG_IMAGE_SET_PIXEL_YELLOW(imageProcessorPlayers, getCoords(pixelBuffer[0]).x, getCoords(pixelBuffer[0]).y);
00850             if(color == green || color == orange)
00851             {
00852               if(numberOfPixelsSinceFirstOccurrenceOfGroundColor == 0)
00853               {
00854                 firstGround = getCoords(pixelBuffer[0]);
00855               }
00856               numberOfPixelsSinceFirstOccurrenceOfGroundColor++;
00857               if(numberOfPixelsSinceFirstOccurrenceOfGroundColor > 3 || color == orange)
00858               {
00859                 Vector2<int> coords = getCoords(pixelBuffer[0]);
00860                 int lineHeight = Geometry::calculateLineSize(coords, cameraMatrix, image.cameraInfo);
00861                 if(i < lineHeight * 4) beginOfGroundIsAtTheTopOfTheImage = true;
00862                 groundState = foundBeginOfGround;
00863                 pixelsBetweenGoalAndObstacle = pixelsSinceGoal;
00864                 numberOfPixelsSinceFirstOccurrenceOfNonGroundColor = 0;
00865               }
00866             }
00867             else if (color != noColor)
00868             {
00869               numberOfPixelsSinceFirstOccurrenceOfGroundColor = 0;
00870               if(color == white) numberOfWhiteObstaclePixels++;
00871               else numberOfNotWhiteObstaclePixels++;
00872             }
00873           }
00874           else if(groundState == foundBeginOfGround)
00875           {
00876             DEBUG_IMAGE_SET_PIXEL_GREEN(imageProcessorPlayers, getCoords(pixelBuffer[0]).x, getCoords(pixelBuffer[0]).y);
00877             if(color != green && color != orange && color != noColor)
00878             {
00879               numberOfPixelsSinceFirstOccurrenceOfNonGroundColor++;
00880               if(numberOfPixelsSinceFirstOccurrenceOfNonGroundColor > 3)
00881               {
00882                 groundState = foundEndOfGround;
00883                 numberOfPixelsSinceLastGround = 0;
00884                 numberOfWhitePixelsSinceLastGround = 0;
00885                 numberOfPixelsSinceFirstOccurrenceOfGroundColor = 0;
00886               }
00887             }
00888             else if (color != noColor)
00889             {
00890               numberOfGroundPixels++;
00891               numberOfPixelsSinceFirstOccurrenceOfNonGroundColor = 0;
00892             }
00893           }
00894           else if(groundState == foundEndOfGround)
00895           {
00896             numberOfPixelsSinceLastGround++;
00897             if(color == white) numberOfWhitePixelsSinceLastGround++;
00898             DEBUG_IMAGE_SET_PIXEL_RED(imageProcessorPlayers, getCoords(pixelBuffer[0]).x, getCoords(pixelBuffer[0]).y);
00899             if(color == green || color == orange)
00900             {
00901               numberOfPixelsSinceFirstOccurrenceOfGroundColor++;
00902               if(numberOfPixelsSinceFirstOccurrenceOfGroundColor > 3 || color == orange)
00903               {
00904                 numberOfPixelsSinceFirstOccurrenceOfNonGroundColor = 0;
00905                 groundState = foundBeginOfGround;
00906                 Vector2<int> coords = getCoords(pixelBuffer[0]),
00907                              pointOnField; //position on field, relative to robot
00908                 int lineHeight = Geometry::calculateLineSize(coords, cameraMatrix, image.cameraInfo);
00909                 // * 4 wenn 80% weiß (schräge linie). sonst * 1.5 (keine linie)
00910                 if(numberOfPixelsSinceLastGround > lineHeight * (numberOfPixelsSinceLastGround * 4 < numberOfWhitePixelsSinceLastGround * 5 ? 4 : 1.5))
00911                 {
00912                   DEBUG_IMAGE_SET_PIXEL_PINK(imageProcessorPlayers, getCoords(pixelBuffer[0]).x, getCoords(pixelBuffer[0]).y);
00913                   firstGround = getCoords(pixelBuffer[0]);
00914                   numberOfGroundPixels = 0;
00915                   beginOfGroundIsAtTheTopOfTheImage = false;
00916                 }
00917               }
00918             }
00919             else if (color != noColor)
00920             {
00921               numberOfPixelsSinceFirstOccurrenceOfGroundColor = 0;  
00922               if(color == white) 
00923                 numberOfWhiteObstaclePixels++;
00924               else 
00925                 numberOfNotWhiteObstaclePixels++;
00926             }
00927           }
00928         } // if(count > 100)
00929 
00930       }//if(vertical)
00931       
00932       PLOT(p3,ColorClasses::colorClassToDrawingsColor(c3));
00933 
00934       // Bresenham
00935       if(isVertical)
00936       {
00937         actual.y += step.y;
00938         sum += size.x;
00939         if(sum >= size.y)
00940         {
00941           sum -= size.y;
00942           actual.x += step.x;
00943         }
00944       }
00945       else        
00946       {
00947         actual.x += step.x;
00948         sum += size.y;
00949         if(sum >= size.x)
00950         {
00951           sum -= size.x;
00952           actual.y += step.y;
00953         }
00954       }
00955     }
00956 
00957     if(!borderFound && borderCandidate.found)
00958     {
00959       linesPercept.add(LinesPercept::border,borderCandidate.point, borderCandidate.angle);
00960     }
00961 
00962     if (orangeCount > longestBallRun)
00963     {
00964       longestBallRun = orangeCount;
00965       ballCandidate = (getCoords(firstOrange) + getCoords(lastOrange)) / 2;
00966     }
00967 
00968     const unsigned char* pLast = pixelBuffer[3];
00969     PLOT(pLast,Drawings::numberOfColors);
00970 
00971     if(vertical)
00972     { // line scanning finished, analyze results (only in scanColumns)
00973       int goal = getPlayer().getTeamColor() == Player::red ? LinesPercept::skyblueGoal
00974                                                            : LinesPercept::yellowGoal;
00975       if(linesPercept.numberOfPoints[goal] == numberOfPoints[goal]) // no goal point found in this column
00976       {
00977         Vector2<int> coords = getCoords(pLast),
00978                      pointOnField;
00979         if(Geometry::calculatePointOnField(coords.x, coords.y, cameraMatrix, prevCameraMatrix, image.cameraInfo, pointOnField))
00980         {
00981           int dist = pointOnField.abs();
00982           if(dist < closestBottom)
00983             closestBottom = dist;
00984         }
00985       }
00986               
00987       bool redFound = false,
00988            blueFound = false;
00989 
00990       // red robot point found?
00991       if(redCount > blueCount && (firstGreen && redCount > 2 || redCount > 20))
00992       { // drop border and goal points
00993         for(i = 1; i < LinesPercept::numberOfLineTypes; ++i)
00994           linesPercept.numberOfPoints[i] = numberOfPoints[i];
00995         if(redCount > 20)
00996         { // if robot is close, use upper boundary of tricot to calculate distance
00997           Vector2<int> coords = getCoords(firstRed),
00998                        pointOnField; //position on field, relative to robot
00999           if(Geometry::calculatePointOnField(coords.x, coords.y, cmTricot, prevCmTricot, image.cameraInfo, pointOnField) &&
01000              pointOnField.abs() < 500)
01001           {
01002             DOT(imageProcessor_general, coords.x, coords.y, Drawings::red, Drawings::white);
01003             linesPercept.add(LinesPercept::redRobot,pointOnField);
01004             typeOfLinesPercept = LinesPercept::redRobot;
01005             redFound = true;
01006           }
01007         }
01008         if(!redFound)
01009         { // otherwise, use foot point
01010           Vector2<int> coords = getCoords(firstGreen),
01011                        pointOnField; //position on field, relative to robot
01012           if(Geometry::calculatePointOnField(coords.x, coords.y, cameraMatrix, prevCameraMatrix, image.cameraInfo, pointOnField))
01013           {
01014             Vector2<int> redCoords = getCoords(lastRed);
01015             if((redCoords - coords).abs() < Geometry::getSizeByDistance(image.cameraInfo, 100, pointOnField.abs()))
01016             {
01017               linesPercept.add(LinesPercept::redRobot,pointOnField);
01018               typeOfLinesPercept = LinesPercept::redRobot;
01019               redFound = true;
01020             }
01021           }
01022         }
01023       }
01024       // blue robot point found?
01025       else if(blueCount > redCount && (firstGreen && blueCount > 2 || blueCount > 10))
01026       { // drop border and goal points
01027         for(i = 1; i < LinesPercept::numberOfLineTypes; ++i)
01028           linesPercept.numberOfPoints[i] = numberOfPoints[i];
01029         if(blueCount > 10)
01030         { // if robot is close, use upper boundary of tricot to calculate distance
01031           Vector2<int> coords = getCoords(firstBlue),
01032                        pointOnField; //position on field, relative to robot
01033           if(Geometry::calculatePointOnField(coords.x, coords.y, cmTricot, prevCmTricot, image.cameraInfo, pointOnField) &&
01034              pointOnField.abs() < 500)
01035           {
01036             DOT(imageProcessor_general, coords.x, coords.y, Drawings::pink, Drawings::white);
01037             linesPercept.add(LinesPercept::blueRobot,pointOnField);
01038             typeOfLinesPercept = LinesPercept::blueRobot;
01039             blueFound = true;
01040           }
01041         }
01042         if(!blueFound)
01043         { // otherwise, use foot point
01044           Vector2<int> coords = getCoords(firstGreen),
01045                        pointOnField; //position on field, relative to robot
01046           if(Geometry::calculatePointOnField(coords.x, coords.y, cameraMatrix, prevCameraMatrix, image.cameraInfo, pointOnField))
01047           {
01048             Vector2<int> blueCoords = getCoords(lastBlue);
01049             if((blueCoords - coords).abs() < Geometry::getSizeByDistance(image.cameraInfo, 100, pointOnField.abs()))
01050             {
01051               linesPercept.add(LinesPercept::blueRobot,pointOnField);
01052               typeOfLinesPercept = LinesPercept::blueRobot;
01053               blueFound = true;
01054             }
01055           }
01056         }
01057       }
01058       // cluster the robot points found
01059       clusterRobots(pLast, redFound, blueFound);
01060 
01061       // obstacles found?
01062       bool addObstaclesPoint = false;
01063       Vector2<int> newObstaclesNearPoint;
01064       Vector2<int> newObstaclesFarPoint;
01065       bool newObstaclesFarPointIsOnImageBorder = false;
01066       if(count > 90 && cameraMatrix.isValid)
01067       {
01068         Vector2<int> firstGroundOnField;
01069         bool firstGroundOnFieldIsOnField = 
01070           Geometry::calculatePointOnField(firstGround.x, firstGround.y, cameraMatrix, prevCameraMatrix, image.cameraInfo, firstGroundOnField);
01071 
01072         Vector2<int> imageBottom = getCoords(pixelBuffer[0]);
01073         Vector2<int> imageBottomOnField;
01074         bool imageBottomIsOnField = 
01075           Geometry::calculatePointOnField(imageBottom.x, imageBottom.y, cameraMatrix, prevCameraMatrix, image.cameraInfo, imageBottomOnField);
01076 
01077         if(groundState == foundBeginOfGround)
01078         {
01079           if(firstGroundOnFieldIsOnField)
01080           {
01081             addObstaclesPoint = true;
01082             newObstaclesNearPoint = imageBottomOnField;
01083             newObstaclesFarPoint = firstGroundOnField;
01084             newObstaclesFarPointIsOnImageBorder = beginOfGroundIsAtTheTopOfTheImage;
01085           }
01086         }
01087         else if(groundState == foundEndOfGround)
01088         {
01089           int lineHeight = Geometry::calculateLineSize(imageBottom, cameraMatrix, image.cameraInfo);
01090           if(numberOfPixelsSinceLastGround < lineHeight * 4)
01091           {
01092             if(firstGroundOnFieldIsOnField)
01093             {
01094               addObstaclesPoint = true;
01095               newObstaclesNearPoint = imageBottomOnField;
01096               newObstaclesFarPoint = firstGroundOnField;
01097               newObstaclesFarPointIsOnImageBorder = beginOfGroundIsAtTheTopOfTheImage;
01098             }
01099           }
01100           else if(imageBottomIsOnField)
01101           {
01102             addObstaclesPoint = true;
01103             newObstaclesNearPoint = imageBottomOnField;
01104             newObstaclesFarPoint = imageBottomOnField;
01105           }
01106         }
01107         else if(groundState == noGroundFound)
01108         {
01109           if(imageBottomIsOnField &&
01110             // the carpet often is not green under the robot
01111             imageBottomOnField.abs() > 150)
01112           {
01113             addObstaclesPoint = true;
01114             newObstaclesNearPoint = imageBottomOnField;
01115             newObstaclesFarPoint = imageBottomOnField;
01116           }
01117         }
01118       }// if(count > 90 && cameraMatrix.isValid)
01119       if(addObstaclesPoint)
01120       {
01121         if(
01122           angleBetweenDirectionOfViewAndGround > -80.0 &&
01123           !(newObstaclesFarPoint.x == 0 && newObstaclesFarPoint.y == 0)
01124           )
01125 
01126         {
01127           ObstaclesPercept::ObstacleType obstacleType = ObstaclesPercept::unknown;
01128           switch(typeOfLinesPercept)
01129           {
01130           case LinesPercept::skyblueGoal: 
01131             if(pixelsBetweenGoalAndObstacle < 15) 
01132               obstacleType = ObstaclesPercept::goal; 
01133             break;
01134           case LinesPercept::yellowGoal: 
01135             if(pixelsBetweenGoalAndObstacle < 15) 
01136               obstacleType = ObstaclesPercept::goal; 
01137             break;
01138           case LinesPercept::blueRobot:
01139             if(getPlayer().getTeamColor() == Player::blue)
01140               obstacleType = ObstaclesPercept::teammate;
01141             else 
01142               obstacleType = ObstaclesPercept::opponent;
01143             break;
01144           case LinesPercept::redRobot: 
01145             if(getPlayer().getTeamColor() == Player::red) 
01146               obstacleType = ObstaclesPercept::teammate;
01147             else 
01148               obstacleType = ObstaclesPercept::opponent;
01149             break;
01150           case LinesPercept::border:
01151             obstacleType = ObstaclesPercept::border;
01152           }
01153           obstaclesPercept.add(
01154             newObstaclesNearPoint, 
01155             newObstaclesFarPoint,
01156             newObstaclesFarPointIsOnImageBorder,
01157             obstacleType
01158             );
01159         }
01160       }
01161     }//if(vertical)
01162   }
01163 }
01164 
/**
* Clusters the robot points collected per scan column into player percepts.
* Called once per scanned column; the clustering state (first/last/closest
* robot point and the counters of columns without robot points for each
* team color) lives in member variables and is carried over between calls.
* A percept is emitted when the third consecutive column without robot
* points of a color is seen, i.e. when a run of robot columns just ended.
* @param bottomPoint The lowest pixel of the current scan column (may be 0).
* @param redFound Whether a red robot point was added for this column.
* @param blueFound Whether a blue robot point was added for this column.
*/
void GT2004ImageProcessor::clusterRobots(const unsigned char* bottomPoint,
                                         bool redFound, bool blueFound)
{
  // NOTE(review): getCoords is applied to bottomPoint even when it is 0;
  // this is only safe if getCoords does pure address arithmetic and coords
  // is never used in that case (the || chains below short-circuit) — verify.
  Vector2<int> coords = getCoords(bottomPoint),
               pointOnField;
  // Snapshot the red gap counter: the red clustering below may reset it,
  // but the blue clustering at the end still needs the previous value.
  int noRedCount2 = noRedCount;

  if(redFound)
  {
    // Extend the current run of red columns with the newest percept point.
    lastRed = 
      linesPercept.points[LinesPercept::redRobot][linesPercept.numberOfPoints[LinesPercept::redRobot] - 1];
    if(noRedCount > 2)
      firstRed = closestRed = lastRed; // gap was too long: start a new run
    else if(closestRed.abs() > lastRed.abs())
      closestRed = lastRed; // track the run's point closest to the robot
    noRedCount = 0;
  }
  // count number of columns without robot points, but ignore columns that are
  // not long enough to contain the robot. Drop single robot points of the other 
  // robot color.
  else if(noRedCount > 2 || !bottomPoint ||
          (Geometry::calculatePointOnField(coords.x, coords.y, cameraMatrix, prevCameraMatrix, image.cameraInfo, pointOnField) &&
          closestRed.abs() > pointOnField.abs()))
  {
    // Exactly the third empty column ends the red run: emit one percept,
    // unless the run was a single point while blue points were around.
    if(++noRedCount == 3 && (firstRed != lastRed || noBlueCount > 4))
    {
      SinglePlayerPercept percept;
      // Bearing: mean of the bearings of the first and last run points.
      percept.direction = (atan2((double)firstRed.y,(double)firstRed.x) + atan2((double)lastRed.y,(double)lastRed.x)) / 2;
      double distance = closestRed.abs();
      // NOTE(review): unlike the blue case below, these assignments carry no
      // explicit int(...) cast — confirm which conversion is intended.
      percept.offset.x = cos(percept.direction) * distance;
      percept.offset.y = sin(percept.direction) * distance;
      percept.validity = 1;
      playersPercept.addRedPlayer(percept);
    }
  }

  if(blueFound)
  {
    // Same clustering as above, for the blue team color.
    lastBlue = 
      linesPercept.points[LinesPercept::blueRobot][linesPercept.numberOfPoints[LinesPercept::blueRobot] - 1];
    if(noBlueCount > 2)
      firstBlue = closestBlue = lastBlue; // gap was too long: start a new run
    else if(closestBlue.abs() > lastBlue.abs())
      closestBlue = lastBlue; // track the run's point closest to the robot
    noBlueCount = 0;
  }
  // count number of columns without robot points, but ignore columns that are
  // not long enough to contain the robot. Drop single robot points of the other 
  // robot color.
  else if(noBlueCount > 2 || !bottomPoint ||
          (Geometry::calculatePointOnField(coords.x, coords.y, cameraMatrix, prevCameraMatrix, image.cameraInfo, pointOnField) &&
          closestBlue.abs() > pointOnField.abs()))
  {
    // Uses the snapshot noRedCount2 so a red reset in this very call does
    // not suppress the single-point check for blue.
    if(++noBlueCount == 3 && (firstBlue != lastBlue || noRedCount2 > 4))
    {
      SinglePlayerPercept percept;
      percept.direction = (atan2((double)firstBlue.y,(double)firstBlue.x) + atan2((double)lastBlue.y,(double)lastBlue.x)) / 2;
      double distance = closestBlue.abs();
      percept.offset.x = int(cos(percept.direction) * distance);
      percept.offset.y = int(sin(percept.direction) * distance);
      percept.validity = 1;
      playersPercept.addBluePlayer(percept);
    }
  }
}
01230  
/**
* Post-processes the percepts generated during image scanning:
* removes far "ghost" robots hidden behind closer ones, drops line point
* types with too few points, and filters outliers from the border and
* field line percepts.
*/
void GT2004ImageProcessor::filterPercepts()
{
  // Close robots produce far ghost robots. Remove them.
  int i;
  // Red and blue players are treated as one virtual list: indices
  // [0, numberOfRedPlayers) are red, the remainder are blue.
  for(i = 0; i < playersPercept.numberOfRedPlayers + playersPercept.numberOfBluePlayers; ++i)
  {
    SinglePlayerPercept& pi = i < playersPercept.numberOfRedPlayers 
                      ? playersPercept.redPlayers[i]
                      : playersPercept.bluePlayers[i - playersPercept.numberOfRedPlayers];
    // A farther percept within +-ignoreAngle of pi's bearing is considered
    // occluded by pi. atan2(150, iDist) shrinks the window with distance
    // (presumably a 150 mm half-width at pi's distance — TODO confirm units).
    double iDist = pi.offset.abs(),
           iAngle = atan2(pi.offset.y, pi.offset.x),
           ignoreAngle = atan2(150, iDist);
    for(int j = 0; j < playersPercept.numberOfRedPlayers + playersPercept.numberOfBluePlayers; ++j)
      if(i != j)
      {
        SinglePlayerPercept& pj = j < playersPercept.numberOfRedPlayers 
                            ? playersPercept.redPlayers[j]
                            : playersPercept.bluePlayers[j - playersPercept.numberOfRedPlayers];
        // NOTE(review): the bearing difference is not normalized to
        // [-pi, pi], so pairs straddling the +-pi wrap may be missed.
        if(iDist < pj.offset.abs() &&
           fabs(atan2(pj.offset.y, pj.offset.x) - iAngle) < ignoreAngle)
        {
          // Remove pj by overwriting it with the last percept of its color.
          if(j < playersPercept.numberOfRedPlayers)
            pj = playersPercept.redPlayers[--playersPercept.numberOfRedPlayers];
          else
            pj = playersPercept.bluePlayers[--playersPercept.numberOfBluePlayers];
          if(i > j)
          {
            i = j; // check pi again
            break;
          }
          else
            --j; // check pj again
        }
      }
  }

  // If there are too few line points of a certain type, remove them all, they may be misreadings
  for(i = 0; i < LinesPercept::numberOfLineTypes; ++i)
    if(linesPercept.numberOfPoints[i] < 3)
      linesPercept.numberOfPoints[i] = 0;

  // Remove outliers in the border
  filterLinesPercept(linesPercept, LinesPercept::border, cameraMatrix, prevCameraMatrix, image);
  // The disabled block below is the previous border outlier filter,
  // superseded by filterLinesPercept above; kept for reference.
  /*
  // First run: find them
  int prev = 0;
  for(i = 1; i < linesPercept.numberOfPoints[LinesPercept::border] - 1; ++i)
  {
    LinesPercept::LinePoint& pPrev = linesPercept.points[LinesPercept::border][prev],
                           & p = linesPercept.points[LinesPercept::border][i],
                           & pNext = linesPercept.points[LinesPercept::border][i + 1];

    // Field wall is convex. So, no point can be closer than its two neighbors
    int dist = p.abs();
    if(pPrev.abs() > dist && pNext.abs() > dist)
      p.angle = LinesPercept::UNDEF;
    else
      ++prev;
  }

  // second run: remove marked points
  prev = 0;
  for(i = 1; i < linesPercept.numberOfPoints[LinesPercept::border] - 1; ++i)
    if(linesPercept.points[LinesPercept::border][i].angle != LinesPercept::UNDEF)
      linesPercept.points[LinesPercept::border][prev++] = linesPercept.points[LinesPercept::border][i];
  linesPercept.numberOfPoints[LinesPercept::border] = prev;
  */

  // Remove outliers in the field lines
  filterLinesPercept(linesPercept, LinesPercept::field, cameraMatrix, prevCameraMatrix, image);
  // The disabled block below is the previous field line outlier filter,
  // superseded by filterLinesPercept above; kept for reference.
  /*
  // First run: find them
  prev = 0;
  for(i = 2; i <= linesPercept.numberOfPoints[LinesPercept::field]; i += 2)
  {
    LinesPercept::LinePoint& pPrev = linesPercept.points[LinesPercept::field][i - 2],
                           & p = linesPercept.points[LinesPercept::field][i];

    // end of line?
    double angle = fabs(((Vector2<int>&) pPrev).angle() - ((Vector2<int>&) p).angle());
    int dist1 = pPrev.abs(),
        dist2 = p.abs();
    if(i == linesPercept.numberOfPoints[LinesPercept::field] ||
       angle > 0.08 ||
       abs(dist1 - dist2) > 2 * fabs(tan(angle / 2) * (dist1 + dist2)))
    {
      if(i - prev < 6)
        while(prev < i)
          linesPercept.points[LinesPercept::field][prev++].angle = LinesPercept::UNDEF;
      prev = i;
    }
  }

  // second run: remove marked points
  prev = 0;
  for(i = 0; i < linesPercept.numberOfPoints[LinesPercept::field]; ++i)
    if(linesPercept.points[LinesPercept::field][i].angle != LinesPercept::UNDEF)
      linesPercept.points[LinesPercept::field][prev++] = linesPercept.points[LinesPercept::field][i];
  linesPercept.numberOfPoints[LinesPercept::field] = prev;
  */
}
01332 
/**
* Calculates the angle of an edge (e.g. a line or border) at a point on the
* current scan line, based on the strongest image gradient found around it.
* @param pointInImage The edge point in image coordinates.
* @param pointOnField The same point projected on the field, relative to the robot.
* @param scanAngle The direction of the scan line.
* @param pixelBuffer The last seven pixels of the scan line; gradients are
*        sampled at the buffered pixels around the middle one.
* @param againstScanline Whether to accept gradients pointing against the
*        scan direction instead of along it (defaults declared elsewhere).
* @param channel The channel index into image.image the gradient is computed
*        on (presumably 0 = Y; callers pass 2 for the skyblue goal — verify).
* @return The edge angle on the field, or LinesPercept::UNDEF if no gradient
*         with an admissible direction was found, the edge is too parallel to
*         the scan line, or the projection to the field failed.
*/
double GT2004ImageProcessor::calcEdgeAngle(
  const Vector2<int>& pointInImage, 
  const Vector2<int>& pointOnField,
  double scanAngle,
  const RingBuffer<const unsigned char*,7>& pixelBuffer,
  const bool againstScanline,
  int channel) const
{
  // find maximum gradient around point on the scan line
  // Sample order: middle of the buffer first, then outwards.
  int indices[] = {3, 4, 2, 5, 1};
  Vector2<double> maxGrad;
  double maxLength = -1;
  for(unsigned int i = 0; i < sizeof(indices) / sizeof(indices[0]); ++i)
  {
    Vector2<int> point = getCoords(pixelBuffer[indices[i]]);
    // Gradient from diagonal pixel differences on the selected channel.
    // NOTE(review): reads neighbors at x-1 and y+1 without bounds checks —
    // presumably the scan never delivers border pixels here; verify.
    Vector2<double> grad = Vector2<double>(double(image.image[point.y][channel][point.x]) - image.image[point.y + 1][channel][point.x - 1] ,
      double(image.image[point.y + 1][channel][point.x]) - image.image[point.y][channel][point.x - 1]);
    double length = grad.abs();

    // Keep the strongest gradient whose direction is admissible.
    if(length > maxLength)
    {
      // rotation2x2 is a rotation by -3*pi/4 (set up in the constructor);
      // together with the y sign flips it presumably maps the diagonal
      // gradient into the image coordinate frame — confirm the convention.
      Vector2<double> gradRotated = (rotation2x2 * Vector2<double>(grad.x, -grad.y)).normalize();
      gradRotated.y *= -1;
      // filter gradient which are in wrong direction (belong to other side of line)
      double angle = gradRotated.angle();
      bool similarDirection = (angle > scanAngle && angle < scanAngle + pi) || (angle < scanAngle - pi && angle > scanAngle - pi2);
      if(againstScanline != similarDirection)
      {
        maxGrad = grad;
        maxLength = length;
      }
    }
  }

  if(maxLength > 0)
  {
    // Re-derive the rotated, normalized direction of the winning gradient.
    Vector2<double> gradRotated = (rotation2x2 * Vector2<double>(maxGrad.x, -maxGrad.y)).normalize();
    gradRotated.y *= -1;
    
    // check whether edge direction differs too much from scan direction
    // (edges nearly parallel to the scan line give unreliable angles)
    double diff = fabs(scanAngle - gradRotated.angle());
    if(diff > pi)
      diff = pi2 - diff;
    if(diff < pi / 6 || diff > 5 * pi / 6)
      return LinesPercept::UNDEF;

    // Step 10 pixels along the gradient and project that point to the field;
    // the edge direction is perpendicular (+pi_2) to the resulting vector.
    Vector2<int> endPoint(int(pointInImage.x + gradRotated.x * 10), int(pointInImage.y + gradRotated.y * 10));
    ARROW(imageProcessor_edges, 
      pointInImage.x,
      pointInImage.y,
      endPoint.x,
      endPoint.y,
      1, 1, (againstScanline ? Drawings::red : Drawings::pink)
    );

    Vector2<int> point;
    if(Geometry::calculatePointOnField(endPoint.x, endPoint.y, cameraMatrix, prevCameraMatrix, image.cameraInfo, point))
      return normalize((point - pointOnField).angle() + pi_2);
  }
  return LinesPercept::UNDEF;
}
01394 
01395 void GT2004ImageProcessor::plot(const unsigned char* p,Drawings::Color color)
01396 {
01397   if(lineColor == Drawings::numberOfColors)
01398   {
01399     last = p;
01400     lineColor = color;
01401   }
01402   else if(color != lineColor)
01403   {
01404     Vector2<int> p1 = getCoords(last),
01405                  p2 = getCoords(p);
01406     LINE(imageProcessor_general,p1.x,p1.y,p2.x,p2.y,0,Drawings::ps_solid,lineColor);
01407     last = p;
01408     lineColor = color;
01409   }
01410 }
01411 
01412 bool GT2004ImageProcessor::handleMessage(InMessage& message)
01413 {
01414   switch(message.getMessageID())
01415   {
01416 /*
01417     case idLinesImageProcessorParameters:
01418     {
01419       GenericDebugData d;
01420       message.bin >> d;
01421       yThreshold = (int)d.data[1];
01422       vThreshold = (int)d.data[2];
01423       execute();
01424       return true;
01425     }
01426 */
01427     case idColorTable64: beaconDetector.analyzeColorTable();
01428       return true;
01429   }
01430   return false;
01431 }
01432 
01433 void GT2004ImageProcessor::filterLinesPercept(LinesPercept& percept, int type, const CameraMatrix& cameraMatrix, const CameraMatrix& prevCameraMatrix, const Image& image)
01434 {
01435   const int maxNumberOfEdgePoints = 200;
01436   double maxAngle = 0.25; // 8.6°
01437   double maxDist = 0.187; // 80°
01438 
01439   int i, j;
01440 
01441   // for all lines, calculate how many other lines are similar/near in sense of this special norm
01442   bool similar[maxNumberOfEdgePoints][maxNumberOfEdgePoints];
01443   for(i = 0; i < percept.numberOfPoints[type]; i++)
01444   {
01445     Vector2<double> dir1 = Vector2<double>(cos(percept.points[type][i].angle),sin(percept.points[type][i].angle));
01446 
01447     //int lineHeightI = Geometry::calculateLineSize(percept.points[type][i], cameraMatrix, image.cameraInfo);
01448     for(j = i + 1; j < percept.numberOfPoints[type]; j++)
01449     {
01450       // similar/near if distance is small and normal is in the same direction
01451       bool sim = false;
01452       // test projection first - to remove unneeded calculations of "expensive" distance
01453       double angleDiff = fabs(normalize(percept.points[type][i].angle - percept.points[type][j].angle));
01454       if(angleDiff < maxAngle)
01455       {
01456         // distance point to line:
01457         // http://mo.mathematik.uni-stuttgart.de/kurse/kurs8/seite44.html
01458         // NOTE: this is because in fact, the line.direction is here the *normal* to the real line
01459         Vector2<double> v1 = Vector2<double>((double)(percept.points[type][j] - percept.points[type][i]).x,(double)(percept.points[type][j] - percept.points[type][i]).y).normalize();
01460         double distance1 = fabs(v1 * dir1);
01461         if(distance1 < maxDist)
01462         {
01463           Vector2<double> dir2 = Vector2<double>(cos(percept.points[type][j].angle),sin(percept.points[type][j].angle));
01464           double distance2 = fabs(-v1 * dir2);
01465           sim = (distance2 < maxDist);
01466         }
01467       }
01468       similar[i][j] = sim;
01469       similar[j][i] = sim;
01470       // draw similarity-lines
01471       /*Vector2<int> p1,p2;
01472       Geometry::calculatePointInImage(percept.points[type][i], cameraMatrix, image.cameraInfo, p1);
01473       Geometry::calculatePointInImage(percept.points[type][j], cameraMatrix, image.cameraInfo, p2);
01474       if(sim) LINE(imageProcessor_edges, 
01475         p1.x, 
01476         p1.y, 
01477         p2.x, 
01478         p2.y,
01479         0, 0, 1
01480       );*/
01481     }
01482   }
01483 
01484   // remove points with low number of similar points
01485   int nextFree = 0;
01486 
01487   // for all points, calculate how many other are similar
01488   for(i = 0; i < percept.numberOfPoints[type]; i++)
01489   {
01490     int countSim = 0;
01491     //if(edgePoints[i].belongsToLineNo == -1) // only if point is not yet matched to an edge
01492     for (j = 0; j < percept.numberOfPoints[type]; j++)
01493     {
01494       //if(edgePoints[j].belongsToLineNo == -1) // only if point is not yet matched to an edge
01495       if(similar[i][j])
01496       {
01497         countSim++;
01498       }
01499     }
01500     if(countSim >= 2) // keep percept
01501     {
01502       percept.points[type][nextFree++] = percept.points[type][i];
01503     }
01504   }
01505   percept.numberOfPoints[type] = nextFree;
01506 }
01507 
01508 /*
01509 * $Log: GT2004ImageProcessor.cpp,v $
01510 * Revision 1.30  2004/06/30 17:36:58  thomas
01511 * modified obstacle-states to handle white robots while allowing field-lines
01512 *
01513 * Revision 1.29  2004/06/30 09:27:50  nistico
01514 * *** empty log message ***
01515 *
01516 * Revision 1.28  2004/06/29 17:11:27  thomas
01517 * modified to make border-percept better if near border and seeing outside-green and white-wall
01518 *
01519 * Revision 1.27  2004/06/29 10:09:12  thomas
01520 * added new algorithm for filtering border- and field-line-percepts
01521 *
01522 * Revision 1.26  2004/06/28 09:48:14  roefer
01523 * SelfLocator
01524 *
01525 * Revision 1.25  2004/06/22 09:43:17  nistico
01526 * The radial component of the measurement variance is
01527 * also affected by panning velocity now
01528 *
01529 * Revision 1.24  2004/06/22 08:24:38  nistico
01530 * Reintroduced clipping between [-1; 1] before acos()
01531 *
01532 * Revision 1.23  2004/06/21 08:15:57  nistico
01533 * The dot product of normalized vectors (versors) is guaranteed to be within [-1, 1] ;
01534 * the angle was not normalized, however. (-pi, pi)
01535 *
01536 * Revision 1.22  2004/06/20 21:55:36  roefer
01537 * Bug fixed in line point filtering
01538 *
01539 * Revision 1.21  2004/06/19 19:06:31  roefer
01540 * Filtering field lines and border points that might be parts of a robot
01541 *
01542 * Revision 1.20  2004/06/18 12:29:17  nistico
01543 * Added (redundant?) check for division_by_zero
01544 *
01545 * Revision 1.19  2004/06/18 10:24:51  nistico
01546 * Kalman measurement covariance matrix now adjusted also based on
01547 * head panning angular velocity
01548 *
01549 * Revision 1.18  2004/06/18 10:19:15  roefer
01550 * Bug removed
01551 *
01552 * Revision 1.17  2004/06/17 21:13:20  thomas
01553 * modified gradient of line-percepts now have accurate direction
01554 *
01555 * Revision 1.16  2004/06/17 15:52:05  nistico
01556 * Added visualization of camera motion vector on image
01557 * Fixed cameraMatrix.frameNumber=0 problem when playing logfiles
01558 * (the image frame number is copied)
01559 *
01560 * Revision 1.15  2004/06/17 13:34:02  risler
01561 * warning removed
01562 *
01563 * Revision 1.14  2004/06/17 10:50:22  roefer
01564 * Visualization of line orientation fixed
01565 *
01566 * Revision 1.13  2004/06/17 09:24:43  roefer
01567 * Lines percept includes line orientation
01568 *
01569 * Revision 1.12  2004/06/16 18:58:39  roefer
01570 * LinesPercept with angles, does not work yet
01571 *
01572 * Revision 1.11  2004/06/16 13:39:14  thomas
01573 * update edge-specialist
01574 *
01575 * Revision 1.10  2004/06/16 10:50:24  risler
01576 * added border obstacle type
01577 *
01578 * Revision 1.9  2004/06/15 15:40:57  roefer
01579 * edge detection commented out
01580 *
01581 * Revision 1.8  2004/06/15 10:58:26  thomas
01582 * added edge-specialist, edges-percept, debug-drawings etc. (not yet called from image-processor)
01583 *
01584 * Revision 1.7  2004/06/14 23:20:10  spranger
01585 * -changed some functions in Geometry from int to double including ballradius(fieldimensions)
01586 * -maybe all Geometric-functions in Geometry should be as precise as possible, avoiding int to double conversion errors
01587 *
01588 * Revision 1.6  2004/06/13 11:29:35  nistico
01589 * Improved visualization of edges used by BeaconDetector
01590 *
01591 * Revision 1.5  2004/06/05 19:48:45  nistico
01592 * Added one more special situation to BeaconDetector
01593 * imageProcessorGradients now visualized edges used by
01594 * BeaconDetector, for debugging
01595 *
01596 * Revision 1.4  2004/06/05 07:58:21  roefer
01597 * Compensation for motion distortions of images
01598 *
01599 * Revision 1.3  2004/06/01 12:00:33  nistico
01600 * - used Tim's Bresenham for scanForBeaconPart
01601 * - changed edge detection
01602 * - still work in progress
01603 *
01604 * Revision 1.2  2004/05/22 20:44:49  juengel
01605 * Removed manual calibration.
01606 *
01607 * Revision 1.1.1.1  2004/05/22 17:19:46  cvsadm
01608 * created new repository GT2004_WM
01609 *
01610 * Revision 1.4  2004/05/07 15:16:24  nistico
01611 * All geometry calculations are making use of intrinsic functions.
01612 * I updated most of the images processors to make correct use of this.
01613 * Please test if there are any problems, because i'm going to remove the
01614 * old code soon.
01615 *
01616 * Revision 1.3  2004/05/06 16:03:56  nistico
01617 * Supports ColorTable32K through CT32K_LAYOUT switch located into
01618 * GT2004ImageProcessorTools.h
01619 *
01620 * Revision 1.2  2004/05/05 13:27:38  tim
01621 * removed unnecessary lines of code
01622 *
01623 * Revision 1.1  2004/05/04 13:40:19  tim
01624 * added GT2004ImageProcessor
01625 *
01626 */

Generated on Thu Sep 23 19:57:29 2004 for GT2004 by doxygen 1.3.6