So, I know there are lots of posts concerning the difference between these two widgets, like here: https://blog.qt.io/blog/2014/09/10/q...qopenglwidget/
But I don't get why some code works on QGLWidget and not on QOpenGLWidget. I'm using an OpenGL extension to capture the video stream from a camera and put it in a texture. This works with a QGLWidget but returns GL_FAILURE_NV when I change the widget to a QOpenGLWidget. I'm trying to create a widget I can use in a QApplication without creating a new window — that's why I'm trying to use a QOpenGLWidget.

The OpenGL context doesn't seem to be the problem, because when the OpenGL extension needs it to retrieve the video stream configuration, it succeeds. It only fails when it tries to capture the stream, and I really don't get why.

Here is the widget code:
Qt Code:
  1. #include "widget.h"
  2. #include "ui_widget.h"
  3.  
// Widget: a QOpenGLWidget that captures an SDI video stream into a GL
// texture via the NV_video_capture extension and draws it as a textured quad.
// NOTE(review): the parent parameter is typed QOpenGLWidget*; the usual Qt
// Designer convention is QWidget* — confirm this matches the header.
Widget::Widget(QOpenGLWidget *parent) :
QOpenGLWidget(parent),
ui(new Ui::Widget)  // owned by this widget; released in the destructor
{
ui->setupUi(this);
}
  10.  
Widget::~Widget()
{
// Re-activate the GL context captured in initializeGL() so the NV
// video-capture teardown runs against the right context. Raw wgl is used
// instead of QOpenGLWidget::makeCurrent(), presumably because
// cleanupVideo() needs the native HDC anyway — TODO confirm.
wglMakeCurrent(wglGetCurrentDC(), this->contextGL);
nvGl.cleanupVideo(wglGetCurrentDC());
delete ui;
}
  17.  
// Called once by Qt with the widget's GL context current: resolves GL entry
// points, initializes the NV video-capture configuration, and starts the
// self-re-arming Tick() timer loop (~16 ms, i.e. roughly 60 Hz).
void Widget::initializeGL(){
initializeOpenGLFunctions();  // Qt's own GL function resolver
glewInit();                   // resolves the NV/WGL extension entry points
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
nvC = NVConfig();
nvC.GlobalInit();
// Remember the native context so the destructor can re-activate it.
this->contextGL = wglGetCurrentContext();
nvGl = NVGL(nvC.l_vioConfig,wglGetCurrentDC());
// Tick() re-arms itself with another singleShot, so this starts the loop.
// NOTE(review): SLOT(Tick()) requires Tick to be declared as a slot in
// widget.h — confirm, otherwise the timer silently never fires.
QTimer::singleShot(16,this,SLOT(Tick()));
}
  28.  
  29. void Widget::resizeGL(int w, int h){
  30. if(h==0)
  31. h =1;
  32. glViewport(0,0,w,h);
  33. glMatrixMode(GL_PROJECTION);
  34. glLoadIdentity();
  35. perspectiveGL(45.0f,w/h,0.1f,100.0f);
  36. glMatrixMode(GL_MODELVIEW);
  37. glLoadIdentity();
  38.  
  39. }
  40. void Widget::Tick(){
  41. nvC.DetectIStatus();
  42. nvC.setCaptureDevice();
  43. Capture();
  44. paintGL();
  45. QTimer::singleShot(16,this,SLOT(Tick()));
  46. }
  47.  
// Grabs the next video frame from the NV capture object into its GL texture.
// Must be called with this widget's GL context current.
void Widget::Capture(){
nvGl.StartCapture();
}
  51.  
  52.  
  53. void Widget::paintGL(){
  54. glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
  55. glLoadIdentity();
  56. glTranslatef(-0.5f,-.5f,-1.1f);
  57. glBegin(GL_QUADS);
  58. glTexCoord2f(0,0);
  59. glVertex3f(0,0,0);
  60. glTexCoord2f(0,1.0f);
  61. glVertex3f(0,1.0f,0);
  62. glTexCoord2f(1.0f,1.0f);
  63. glVertex3f(1.0f,1.0f,0);
  64. glTexCoord2f(1.0f,0);
  65. glVertex3f(1.0f,0,0);
  66. glEnd();
  67. }
  68.  
  69. void Widget::perspectiveGL(GLdouble fovY, GLdouble aspect, GLdouble zNear, GLdouble zFar){
  70. const GLdouble pi = 3.1415926535897932384626433832795;
  71. GLdouble fW, fH;
  72. fH = tan( fovY / 360 * pi) * zNear;
  73. fW = fH * aspect;
  74. glFrustum( -fW,fW,-fH,fH,zNear,zFar);
  75. }
To copy to clipboard, switch view to plain text mode 

And here is the code of the class capturing the stream:
Qt Code:
  1. #include "nvgl.h"
  2. #include <stdio.h>
  3. #include <assert.h>
  4.  
  5.  
// Default constructor so NVGL can live as a value member; performs no setup.
// NOTE(review): members (including m_device) are left uninitialized here —
// LockDevice() later compares m_device against NULL, so a default-constructed
// NVGL that never runs LockDevice holds an indeterminate handle. Verify.
NVGL::NVGL()
{

}
  10.  
// Full setup: stores the SDI input configuration, locks an NV video-capture
// device on the given device context, configures per-stream color conversion,
// and creates the destination texture.
// @param _l_vioConfig  SDI input configuration (queried by NVConfig)
// @param widget        native device context (HDC) of the rendering widget
NVGL::NVGL(NVVIOCONFIG_V2 &_l_vioConfig,HDC widget)
{
l_vioConfig=_l_vioConfig;
LockDevice(widget);
InitStream();
InitBuffer();
}
  18. void NVGL::LockDevice(HDC widget){
  19. UINT numDevices = wglEnumerateVideoCaptureDevicesNV(widget, NULL);
  20. if (numDevices <= 0) {
  21. printf("No devices found");
  22. }
  23. HVIDEOINPUTDEVICENV* m_videoDevices = (HVIDEOINPUTDEVICENV*)malloc(numDevices *
  24. sizeof(m_videoDevices[0]));
  25. if (!m_videoDevices) {
  26. fprintf(stderr, "malloc failed. OOM?");
  27. }
  28. if (numDevices != wglEnumerateVideoCaptureDevicesNV(widget,m_videoDevices)) {
  29. free(m_videoDevices);
  30. }
  31. // Find an available device we can lock
  32. for (UINT i=0; i< numDevices; ++i) {
  33. BOOL bLocked;
  34. bLocked = wglLockVideoCaptureDeviceNV(widget, m_videoDevices[i]);
  35. if (bLocked) {
  36. m_device = m_videoDevices[i];
  37. break;
  38. }
  39. }
  40. free(m_videoDevices);
  41. if (m_device == NULL) {
  42. printf("no lockable decives found");
  43. }
  44. // wglBindVideoCaptureDeviceNV needs a context current
  45. bool bRet = wglBindVideoCaptureDeviceNV(1, m_device);
  46. assert(bRet && "Failed trying to bind the video capture device!");
  47. }
  48.  
// Captures one frame on video slot 1 and logs the driver's result code.
// NOTE(review): begin/end per frame is unusual — NVIDIA's samples call
// glBeginVideoCaptureNV once at setup and glVideoCaptureNV once per frame;
// confirm whether restarting the capture on every tick is intended.
void NVGL::StartCapture(){
GLuint sequenceNum=0;      // frame sequence number filled in by the driver
GLuint64EXT timestamp=0;   // capture timestamp filled in by the driver

glBeginVideoCaptureNV(1);  // slot 1 — must match wglBindVideoCaptureDeviceNV
GLenum eval = glVideoCaptureNV(1, &sequenceNum, &timestamp);
// Log which of the three NV_video_capture outcomes occurred.
switch(eval) {
case GL_SUCCESS_NV:
qDebug("\n************************************************");
break;
case GL_PARTIAL_SUCCESS_NV:
qDebug("\n2ooooooooooooooooooooooooooooooooooooooooooooooooo");

break;
case GL_FAILURE_NV:
qDebug("\n3##################################################");

break;
default:
qDebug("\nkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkk");

break;
}
glEndVideoCaptureNV(1);
// Reads (and clears) the GL error flag; the value is currently unused.
GLenum test= glGetError();
}
  75.  
// Configures the YCbCr -> RGB color-space conversion (matrix, min/max range,
// offset) for every input stream on video slot 1 — the slot must match the
// one bound in LockDevice().
void NVGL::InitStream(){
// Setup CSC for each stream.
GLfloat mat[4][4];
float scale = 1.0f;
GLfloat max[] = {5000, 5000, 5000, 5000};;
GLfloat min[] = {0, 0, 0, 0};
UINT m_videoSlot= 1;
// Initialize matrix to the identity.
// NOTE(review): these identity stores are dead — every element is
// overwritten by the CSC coefficients just below.
mat[0][0] = scale; mat[0][1] = 0; mat[0][2] = 0; mat[0][3] = 0;
mat[1][0] = 0; mat[1][1] = scale; mat[1][2] = 0; mat[1][3] = 0;
mat[2][0] = 0; mat[2][1] = 0; mat[2][2] = scale; mat[2][3] = 0;
mat[3][0] = 0; mat[3][1] = 0; mat[3][2] = 0; mat[3][3] = scale;
GLfloat offset[] = {0, 0, 0, 0};
// ITU-R BT.601-style YCbCr -> RGB coefficients (1.164/1.596/-0.813/-0.392/
// 2.017), scaled by `scale` — presumably for SDI video levels; TODO confirm
// the expected matrix layout (row vs column major) against the NV docs.
mat[0][0] = 1.164f *scale;
mat[0][1] = 1.164f *scale;
mat[0][2] = 1.164f *scale;
mat[0][3] = 0;
mat[1][0] = 0;
mat[1][1] = -0.392f *scale;
mat[1][2] = 2.017f *scale;
mat[1][3] = 0;
mat[2][0] = 1.596f *scale;
mat[2][1] = -0.813f *scale;
mat[2][2] = 0.f;
mat[2][3] = 0;
mat[3][0] = 0;
mat[3][1] = 0;
mat[3][2] = 0;
mat[3][3] = 1;
// Offsets applied after the matrix multiply (black-level compensation).
offset[0] =-0.87f;
offset[1] = 0.53026f;
offset[2] = -1.08f;
offset[3] = 0;
// Apply the same conversion parameters to every configured input stream.
for (int i=0; i < l_vioConfig.vioConfig.inConfig.numStreams; i++) {
glVideoCaptureStreamParameterfvNV(m_videoSlot, i,GL_VIDEO_COLOR_CONVERSION_MATRIX_NV,&mat[0][0]);
glVideoCaptureStreamParameterfvNV(m_videoSlot, i,GL_VIDEO_COLOR_CONVERSION_MAX_NV,&max[0]);
glVideoCaptureStreamParameterfvNV(m_videoSlot, i,GL_VIDEO_COLOR_CONVERSION_MIN_NV,&min[0]);
glVideoCaptureStreamParameterfvNV(m_videoSlot, i,GL_VIDEO_COLOR_CONVERSION_OFFSET_NV,&offset[0]);
}
}
  116.  
  117. void NVGL::InitBuffer(){
  118. UINT m_videoSlot= 1;
  119. GLint bufferPitch;
  120.  
  121. /*GLint videoBufferFormat = GL_YCBYCR8_422_NV;
  122.   // Create video buffer objects
  123.   glGenBuffersARB(l_vioConfig.vioConfig.inConfig.numStreams,&m_videoBuffer);
  124.   // Allocate space in the buffer objects.
  125.   for (NvU32 i = 0; i < l_vioConfig.vioConfig.inConfig.numStreams; i++) {
  126.   // Set the buffer object capture data format for each stream.
  127.   glVideoCaptureStreamParameterivNV(m_videoSlot, i,GL_VIDEO_BUFFER_INTERNAL_FORMAT_NV,&videoBufferFormat);
  128.   // Bind the buffer.
  129.   glBindBufferARB(GL_VIDEO_BUFFER_NV, m_videoBuffer);
  130.   // Get the video buffer pitch
  131.   glGetVideoCaptureStreamivNV(m_videoSlot, i,GL_VIDEO_BUFFER_PITCH_NV,&bufferPitch);
  132.   // Allocate space in the buffer object
  133.   glBufferDataARB(GL_VIDEO_BUFFER_NV, bufferPitch * 720, NULL,GL_STREAM_READ_ARB);
  134.   // Bind the buffer object to the video capture stream.
  135.   glBindVideoCaptureStreamBufferNV(m_videoSlot, i, GL_FRAME_NV, m_videoBuffer);
  136.   }*/
  137.  
  138. // Create and initialize video texture objects.
  139. glEnable(GL_TEXTURE_2D);
  140. glGenTextures(l_vioConfig.vioConfig.inConfig.numStreams, &m_videoTexture);
  141. for (UINT i = 0; i < l_vioConfig.vioConfig.inConfig.numStreams; i++) {
  142. glBindTexture(GL_TEXTURE_2D, m_videoTexture);
  143. // Set texture parameters
  144. glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
  145. if(l_vioConfig.vioConfig.inConfig.signalFormat==NVVIOSIGNALFORMAT_720P_50_00_SMPTE296){
  146. // Set texture format and size.
  147. glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA8, 1280, 720, 0,GL_RGBA, GL_UNSIGNED_BYTE, NULL);
  148. }else{
  149. glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA8, 1920, 1080, 0,GL_RGBA, GL_UNSIGNED_BYTE, NULL);
  150.  
  151. }
  152. // Bind the outputs for the stream
  153. glBindVideoCaptureStreamTextureNV(m_videoSlot, i, GL_FRAME_NV,GL_TEXTURE_RECTANGLE_NV,m_videoTexture);
  154. }
  155. }
  156.  
// Unbinds and releases the locked NV capture device. Called from the
// widget's destructor with the capture GL context made current.
// @param m_hDC  native device context used when the device was locked
// @return S_OK unconditionally (failures only trip the asserts in debug)
HRESULT NVGL::cleanupVideo(HDC m_hDC)
{
BOOL bRet;
// Unbind and release the capture device (needs a current context).
bRet = wglBindVideoCaptureDeviceNV(1, NULL);
assert(bRet && "Failed trying to unbind the video capture device!");
// wglReleaseVideoCaptureDeviceNV should work even without a context current
wglMakeCurrent(m_hDC, NULL);
bRet = wglReleaseVideoCaptureDeviceNV(m_hDC, m_device);
assert(bRet && "Failed trying to release the video capture device!");
return S_OK;
}
To copy to clipboard, switch view to plain text mode 

If you have any ideas, thanks.