OpenCVWidget — Qt OpenGL widget that captures and displays OpenCV images (opencvwidget.cpp)
00001 #include <QtGui> 00002 #include <QtOpenGL> 00003 00004 #include "opencvwidget.h" 00005 00006 using namespace cv; 00007 00008 OpenCVWidget::OpenCVWidget(QWidget *parent, QGLWidget *shareWidget) 00009 : QGLWidget(parent, shareWidget) 00010 { 00011 timer = new QTimer(); 00012 frameRate = -1; 00013 autoDisplay = true; 00014 } 00015 00016 OpenCVWidget::~OpenCVWidget() 00017 { 00018 } 00019 00020 QSize OpenCVWidget::sizeHint() const 00021 { 00022 return QSize(400, 300); 00023 } 00024 00025 void OpenCVWidget::startCapture() 00026 { 00027 if(captureSource.isNull() || captureSource.isEmpty()) { 00028 QMessageBox::critical(this,"Error","No capture source specified."); 00029 return; 00030 } 00031 00032 bool intConvert; 00033 captureSource.toInt(&intConvert); 00034 if(intConvert) { 00035 int source = captureSource.toInt(); 00036 capture.open(source); 00037 } else { 00038 QByteArray ba = captureSource.toLocal8Bit(); 00039 const char *source = ba.data(); 00040 capture.open(source); 00041 } 00042 00043 if(!capture.isOpened()) { 00044 QMessageBox::critical(this,"Error","Error initializing capture."); 00045 return; 00046 } 00047 00048 //Get an initial frame from the webcam 00049 capture >> image; 00050 00051 resizeGL(this->width(), this->height()); // Adjust the viewport for the new image. 00052 00053 //Connect the timer signal with the capture action 00054 connect(timer, SIGNAL(timeout()), this, SLOT(captureFrame())); 00055 00056 // Start the timer scheduled for firing according to the frame rate 00057 // If the frame rate is 0, the timer will not be started. 00058 if(frameRate < 0) { 00059 // Attempt to set the frame rate automatically. 
00060 timer->start(floor(1.0f/(float)capture.get(CV_CAP_PROP_FPS)*1000.0f)); 00061 } else if(frameRate > 0) { 00062 timer->start(floor(1.0f/(float)frameRate*1000.0f)); 00063 } 00064 00065 // Since the initial frame was already captured, emit the signal and auto update, if specified: 00066 if(autoDisplay) { 00067 updateDisplay(); 00068 } 00069 emit frameCaptured(); 00070 } 00071 00072 void OpenCVWidget::startCapture(QString source, int frameRate, bool autoDisplay) 00073 { 00074 setSource(source); 00075 setFrameRate(frameRate); 00076 enableAutoDisplay(autoDisplay); 00077 startCapture(); 00078 } 00079 00080 void OpenCVWidget::setSource(QString source) 00081 { 00082 captureSource = source; 00083 } 00084 00085 QString OpenCVWidget::getSource() 00086 { 00087 return captureSource; 00088 } 00089 00090 void OpenCVWidget::setFrameRate(int rate) 00091 { 00092 frameRate = rate; 00093 if(rate == 0) { 00094 pauseCapture(); 00095 } 00096 } 00097 00098 int OpenCVWidget::getFrameRate() 00099 { 00100 return frameRate; 00101 } 00102 00103 void OpenCVWidget::stopCapture() 00104 { 00105 timer->stop(); 00106 capture.release(); 00107 } 00108 00109 void OpenCVWidget::pauseCapture() 00110 { 00111 timer->stop(); 00112 } 00113 00114 void OpenCVWidget::resumeCapture() 00115 { 00116 //Start the timer scheduled for firing according to the frame rate 00117 if(frameRate < 0) { 00118 // Attempt to set the frame rate automatically. 
00119 timer->start(floor(1.0f/(float)capture.get(CV_CAP_PROP_FPS)*1000.0f)); 00120 } else { 00121 timer->start(floor(1.0f/(float)frameRate*1000.0f)); 00122 } 00123 } 00124 00125 void OpenCVWidget::advanceCapture() 00126 { 00127 if(frameRate == 0 && capture.isOpened()) { 00128 captureFrame(); 00129 } 00130 } 00131 00132 void OpenCVWidget::grab() 00133 { 00134 if(frameRate == 0 && capture.isOpened()) { 00135 capture.grab(); 00136 } 00137 } 00138 00139 void OpenCVWidget::loadImage(int loadFlag) 00140 { 00141 stopCapture(); 00142 00143 QByteArray ba = captureSource.toLocal8Bit(); 00144 const char *source = ba.data(); 00145 image = cv::imread(source, loadFlag); 00146 resizeGL(this->width(), this->height()); 00147 if(autoDisplay) { 00148 updateDisplay(); 00149 } 00150 emit frameCaptured(); 00151 00152 //Draw the scene 00153 glDraw(); 00154 } 00155 00156 void OpenCVWidget::loadImage(QString source, bool autoDisplay, int loadFlag) 00157 { 00158 setSource(source); 00159 enableAutoDisplay(autoDisplay); 00160 loadImage(loadFlag); 00161 } 00162 00163 Mat OpenCVWidget::getImage() 00164 { 00165 return image; 00166 } 00167 00168 void OpenCVWidget::enableAutoDisplay(bool value) 00169 { 00170 autoDisplay = value; 00171 } 00172 00173 bool OpenCVWidget::getAutoDisplay() 00174 { 00175 return autoDisplay; 00176 } 00177 00178 void OpenCVWidget::updateDisplay() 00179 { 00180 displayImage = image.clone(); 00181 } 00182 00183 void OpenCVWidget::initializeGL() 00184 { 00185 // Create the surface we will use for the texture: 00186 static const int coords[4][3] = { { +1, -1 }, { -1, -1 }, { -1, +1 }, { +1, +1 } }; 00187 for (int j = 0; j < 4; ++j) { 00188 /* A note about texture coordinates: 00189 OpenCV uses a nice, sane coordinate system with origin in the upper left corner. 00190 Just like any other image processing tool (let's just forget the fact that math-wise 00191 that is silly). 00192 OpenGL, however, uses a math-inspired coordinate system with origin in the lower 00193 left. 
00194 Right here, the texture is mapped, so the image is automatically flipped in the y- 00195 direction. Better do it here, than actually flipping the image elsewhere. 00196 */ 00197 texCoords.append(QVector2D(j == 0 || j == 3, j == 2 || j == 3)); 00198 vertices.append(QVector2D(coords[j][0], coords[j][1])); 00199 } 00200 00201 glEnable(GL_DEPTH_TEST); 00202 glEnable(GL_CULL_FACE); 00203 glEnable(GL_TEXTURE_2D); 00204 } 00205 00206 void OpenCVWidget::paintGL() 00207 { 00208 if(displayImage.empty()) { 00209 displayImage = Mat::zeros(1, 1, CV_8UC3); // Paint a black background until we have something to show. 00210 } 00211 00212 qglClearColor(Qt::black); // Create a nice, black background for the parts of the widget with no image. 00213 glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT); 00214 00215 glLoadIdentity(); 00216 00217 glVertexPointer(2, GL_FLOAT, 0, vertices.constData()); 00218 glTexCoordPointer(2, GL_FLOAT, 0, texCoords.constData()); 00219 glEnableClientState(GL_VERTEX_ARRAY); 00220 glEnableClientState(GL_TEXTURE_COORD_ARRAY); 00221 00222 00223 // Below are three different methods of mapping the image as a texture. Use only one. 00224 // Basically, stick to method 3 unless you have very good reason for not doing that. 00225 00226 // Method 1: 00227 /* 00228 This method binds the texture using mipmaps for smooth resizing. However, this means that a new 00229 mipmap must be calculated for each frame. This is slow (OpenGL is built for using static textures 00230 where the mipmaps are simple pre-calculated at the start of the program), so while it works it is 00231 so slow that I cannot recommend this approach. 00232 00233 // Additions from http://www.nullterminator.net/gltexture.html: 00234 glGenTextures(1, &texture); // Allocate a texture name. 00235 glBindTexture(GL_TEXTURE_2D, texture); // Select our current texture. 00236 glTexEnvf(GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE, GL_MODULATE); // Select modulate to mix texture with color for shading. 
00237 glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST_MIPMAP_NEAREST); // When texture area is small, use the closest mipmap. 00238 glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST); // When texture area is large, use the first mipmap. 00239 gluBuild2DMipmaps( GL_TEXTURE_2D, 3, displayImage.cols, displayImage.rows, GL_BGR, GL_UNSIGNED_BYTE, displayImage.data); // Build our texture mipmaps from the raw OpenCV image data. 00240 // End of additions 00241 */ 00242 00243 // Method 2: 00244 /* 00245 Alternative way, going via a QPixmap. Do not use, it is slow. 00246 Remaining here to maintain my sanity, should the other solutions break. 00247 texture = bindTexture(QPixmap(QString("side1.png")), GL_TEXTURE_2D); 00248 glBindTexture(GL_TEXTURE_2D, texture); 00249 */ 00250 00251 // Method 3: 00252 // Non-mipmap way of mapping the texture (fast and clean): 00253 glGenTextures(1, &texture); // Allocate a texture name. 00254 glBindTexture(GL_TEXTURE_2D, texture); // Select our current texture. 00255 glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR); // When the texture area is larger then the image, upscale using linear interpolation. 00256 glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR); // When the texture area is smaller than the image, downsample using linear interpolation. 00257 glTexImage2D(GL_TEXTURE_2D, 0, 3, displayImage.cols, displayImage.rows, 0, GL_BGR, GL_UNSIGNED_BYTE, displayImage.data); 00258 00259 // End of different methods, the last few lines are common for all methods. 00260 00261 glDrawArrays(GL_TRIANGLE_FAN, 0, 4); // Draw it! 
00262 00263 glDeleteTextures(1, &texture); 00264 00265 } 00266 00267 void OpenCVWidget::resizeGL(int width, int height) 00268 { 00269 // Make sure the image keeps its aspect ratio, regardless of widget size: 00270 // (also, center it in the widget) 00271 float imgRatio = (float)image.cols/(float)image.rows; 00272 float windowRatio = (float)width/(float)height; 00273 if(windowRatio < imgRatio) { 00274 glViewport(0, (height-width/imgRatio)/2, width, width/imgRatio); 00275 } else { 00276 glViewport((width-height*imgRatio)/2, 0, height*imgRatio, height); 00277 } 00278 00279 glMatrixMode(GL_PROJECTION); 00280 glLoadIdentity(); 00281 glOrtho(-1.0, +1.0, +1.0, -1.0, 0.0, 1.0); 00282 00283 glMatrixMode(GL_MODELVIEW); 00284 } 00285 00286 void OpenCVWidget::captureFrame() 00287 { 00288 //Get an image from the webcam 00289 capture >> image; 00290 if(image.empty()) { 00291 stopCapture(); 00292 return; 00293 } 00294 if(autoDisplay) { 00295 updateDisplay(); 00296 } 00297 00298 emit frameCaptured(); 00299 00300 //Draw the scene 00301 glDraw(); 00302 } 00303 00304 double OpenCVWidget::getCaptureProperty(int propId) 00305 { 00306 return this->capture.get(propId); 00307 } 00308 00309 bool OpenCVWidget::setCaptureProperty(int propId, double value) 00310 { 00311 return this->capture.set(propId, value); 00312 } 00313 00314 bool OpenCVWidget::isOpened() 00315 { 00316 return capture.isOpened(); 00317 } 00318 00319 void OpenCVWidget::mousePressEvent(QMouseEvent* event) 00320 { 00321 emit mouseClicked(event); 00322 } 00323 00324 void OpenCVWidget::mouseMoveEvent(QMouseEvent* event) 00325 { 00326 emit mouseMoved(event); 00327 } 00328 00329 cv::Point OpenCVWidget::mapPoint(cv::Point widgetCoords) 00330 { 00331 if(image.empty()) { // Bail out if we have no image to map to. 
00332 return cv::Point(0,0); 00333 } 00334 00335 cv::Point mappedPoint; 00336 float viewportRatio = (float)this->width()/(float)this->height(); 00337 float imageRatio = (float)image.cols/(float)image.rows; 00338 00339 mappedPoint.x = (int)((float)widgetCoords.x/(float)this->width()*(float)image.cols); 00340 mappedPoint.y = (int)((float)widgetCoords.y/(float)this->height()*(float)image.rows); 00341 if(viewportRatio > imageRatio) { // The viewport has black bars on the sides. 00342 int imageWidth = image.cols*(float)this->height()/(float)image.rows; 00343 int offset = (this->width()-imageWidth)/2; 00344 mappedPoint.x = image.cols*((float)(widgetCoords.x-offset)/(float)imageWidth); 00345 } else if(viewportRatio < imageRatio) { // The viewport has black bars on top and bottom. 00346 int imageHeight = image.rows*(float)this->width()/(float)image.cols; 00347 int offset = (this->height()-imageHeight)/2; 00348 mappedPoint.y = image.rows*((float)(widgetCoords.y-offset)/(float)imageHeight); 00349 } 00350 00351 return mappedPoint; 00352 }