#include "widget.h"
#include "ui_widget.h"
// Construct the widget and build the Designer-generated UI.
// GL-related setup is deferred to initializeGL(), which Qt calls once
// a context is current.
Widget::Widget(QOpenGLWidget *parent)
    : QOpenGLWidget(parent)
    , ui(new Ui::Widget)
{
    ui->setupUi(this);
}
// Destructor: re-select the GL context saved in initializeGL() so the
// NVIDIA capture resources can be released against the right context,
// then free the Designer UI.
Widget::~Widget()
{
// NOTE(review): assumes wglGetCurrentDC() still returns this widget's DC
// during destruction -- verify; Qt may already have torn the context down.
wglMakeCurrent(wglGetCurrentDC(), this->contextGL);
nvGl.cleanupVideo(wglGetCurrentDC());
delete ui;
}
// One-time GL setup: load function pointers, clear the framebuffer,
// initialise the NVIDIA SDI configuration, remember the native WGL
// context for teardown, and arm the first frame tick.
void Widget::initializeGL(){
    initializeOpenGLFunctions();
    glewInit();
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
    nvC = NVConfig();
    nvC.GlobalInit();
    this->contextGL = wglGetCurrentContext();
    nvGl = NVGL(nvC.l_vioConfig, wglGetCurrentDC());
    // Kick off the capture/render loop (~16 ms); Tick() re-arms itself.
    QTimer::singleShot(16, this, SLOT(Tick()));
}
void Widget::resizeGL(int w, int h){
if(h==0)
h =1;
glViewport(0,0,w,h);
glMatrixMode(GL_PROJECTION);
glLoadIdentity();
perspectiveGL(45.0f,w/h,0.1f,100.0f);
glMatrixMode(GL_MODELVIEW);
glLoadIdentity();
}
void Widget::Tick(){
nvC.DetectIStatus();
nvC.setCaptureDevice();
Capture();
paintGL();
QTimer::singleShot(16,
this,
SLOT(Tick
()));
}
// Pull one frame from the NVIDIA SDI capture stream.
void Widget::Capture()
{
    nvGl.StartCapture();
}
void Widget::paintGL(){
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
glLoadIdentity();
glTranslatef(-0.5f,-.5f,-1.1f);
glBegin(GL_QUADS);
glTexCoord2f(0,0);
glVertex3f(0,0,0);
glTexCoord2f(0,1.0f);
glVertex3f(0,1.0f,0);
glTexCoord2f(1.0f,1.0f);
glVertex3f(1.0f,1.0f,0);
glTexCoord2f(1.0f,0);
glVertex3f(1.0f,0,0);
glEnd();
}
// gluPerspective() replacement built on glFrustum(): converts a vertical
// field of view (degrees) plus aspect ratio into a symmetric frustum.
void Widget::perspectiveGL(GLdouble fovY, GLdouble aspect, GLdouble zNear, GLdouble zFar){
    const GLdouble pi = 3.1415926535897932384626433832795;
    // fovY/360*pi == (fovY/2) in radians: half-height at the near plane.
    const GLdouble halfHeight = tan(fovY / 360 * pi) * zNear;
    const GLdouble halfWidth  = halfHeight * aspect;
    glFrustum(-halfWidth, halfWidth, -halfHeight, halfHeight, zNear, zFar);
}
#include "widget.h"
#include "ui_widget.h"
// Construct the widget and build the Designer-generated UI.
// GL-related setup is deferred to initializeGL(), which Qt calls once
// a context is current.
Widget::Widget(QOpenGLWidget *parent)
    : QOpenGLWidget(parent)
    , ui(new Ui::Widget)
{
    ui->setupUi(this);
}
// Destructor: re-select the GL context saved in initializeGL() so the
// NVIDIA capture resources can be released against the right context,
// then free the Designer UI.
Widget::~Widget()
{
// NOTE(review): assumes wglGetCurrentDC() still returns this widget's DC
// during destruction -- verify; Qt may already have torn the context down.
wglMakeCurrent(wglGetCurrentDC(), this->contextGL);
nvGl.cleanupVideo(wglGetCurrentDC());
delete ui;
}
// One-time GL setup: load function pointers, clear the framebuffer,
// initialise the NVIDIA SDI configuration, remember the native WGL
// context for teardown, and arm the first frame tick.
void Widget::initializeGL(){
    initializeOpenGLFunctions();
    glewInit();
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
    nvC = NVConfig();
    nvC.GlobalInit();
    this->contextGL = wglGetCurrentContext();
    nvGl = NVGL(nvC.l_vioConfig, wglGetCurrentDC());
    // Kick off the capture/render loop (~16 ms); Tick() re-arms itself.
    QTimer::singleShot(16, this, SLOT(Tick()));
}
void Widget::resizeGL(int w, int h){
if(h==0)
h =1;
glViewport(0,0,w,h);
glMatrixMode(GL_PROJECTION);
glLoadIdentity();
perspectiveGL(45.0f,w/h,0.1f,100.0f);
glMatrixMode(GL_MODELVIEW);
glLoadIdentity();
}
void Widget::Tick(){
nvC.DetectIStatus();
nvC.setCaptureDevice();
Capture();
paintGL();
QTimer::singleShot(16,this,SLOT(Tick()));
}
// Pull one frame from the NVIDIA SDI capture stream.
void Widget::Capture()
{
    nvGl.StartCapture();
}
void Widget::paintGL(){
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
glLoadIdentity();
glTranslatef(-0.5f,-.5f,-1.1f);
glBegin(GL_QUADS);
glTexCoord2f(0,0);
glVertex3f(0,0,0);
glTexCoord2f(0,1.0f);
glVertex3f(0,1.0f,0);
glTexCoord2f(1.0f,1.0f);
glVertex3f(1.0f,1.0f,0);
glTexCoord2f(1.0f,0);
glVertex3f(1.0f,0,0);
glEnd();
}
// gluPerspective() replacement built on glFrustum(): converts a vertical
// field of view (degrees) plus aspect ratio into a symmetric frustum.
void Widget::perspectiveGL(GLdouble fovY, GLdouble aspect, GLdouble zNear, GLdouble zFar){
    const GLdouble pi = 3.1415926535897932384626433832795;
    // fovY/360*pi == (fovY/2) in radians: half-height at the near plane.
    const GLdouble halfHeight = tan(fovY / 360 * pi) * zNear;
    const GLdouble halfWidth  = halfHeight * aspect;
    glFrustum(-halfWidth, halfWidth, -halfHeight, halfHeight, zNear, zFar);
}
// (forum paste artifact, commented out so the file parses:
//  "To copy to clipboard, switch view to plain text mode")
#include "nvgl.h"
#include <stdio.h>
#include <assert.h>
// Default constructor: leaves members untouched.  Needed so NVGL can be
// held by value in Widget before initializeGL() runs; real setup happens
// in the (NVVIOCONFIG_V2&, HDC) constructor.
NVGL::NVGL()
{
}
// Initialising constructor: copies the SDI input configuration, then
// locks and binds a capture device on the given DC, configures the
// colour-conversion parameters per stream, and creates the destination
// textures.  Order matters: LockDevice() must bind the device before
// InitStream()/InitBuffer() touch the capture slot.
NVGL::NVGL(NVVIOCONFIG_V2 &_l_vioConfig,HDC widget)
{
l_vioConfig=_l_vioConfig;
LockDevice(widget);
InitStream();
InitBuffer();
}
// Enumerate the NVIDIA video-capture devices reachable through `widget`'s
// DC, lock the first available one into m_device, and bind it to video
// slot 1 of the current GL context.  On any failure m_device is NULL and
// the function returns without binding.
void NVGL::LockDevice(HDC widget){
    m_device = NULL;                 // defined state even on early return
    UINT numDevices = wglEnumerateVideoCaptureDevicesNV(widget, NULL);
    // BUG FIX: numDevices is unsigned, so `<= 0` was really `== 0`; and
    // the original fell through after printing, mallocing 0 bytes.
    if (numDevices == 0) {
        fprintf(stderr, "No devices found\n");
        return;
    }
    HVIDEOINPUTDEVICENV* videoDevices = (HVIDEOINPUTDEVICENV*)
        malloc(numDevices * sizeof(videoDevices[0]));
    if (!videoDevices) {
        // BUG FIX: original continued and dereferenced the NULL pointer.
        fprintf(stderr, "malloc failed. OOM?\n");
        return;
    }
    if (numDevices != wglEnumerateVideoCaptureDevicesNV(widget, videoDevices)) {
        // BUG FIX: original freed the array here and then kept iterating
        // over the freed memory (use-after-free).
        free(videoDevices);
        return;
    }
    // Find an available device we can lock.
    for (UINT i = 0; i < numDevices; ++i) {
        if (wglLockVideoCaptureDeviceNV(widget, videoDevices[i])) {
            m_device = videoDevices[i];
            break;
        }
    }
    free(videoDevices);
    if (m_device == NULL) {
        // BUG FIX: original fell through and bound a NULL device (typo
        // "decives" also fixed).
        fprintf(stderr, "no lockable devices found\n");
        return;
    }
    // wglBindVideoCaptureDeviceNV needs a context current.
    BOOL bRet = wglBindVideoCaptureDeviceNV(1, m_device);
    assert(bRet && "Failed trying to bind the video capture device!");
}
void NVGL::StartCapture(){
GLuint sequenceNum=0;
GLuint64EXT timestamp=0;
glBeginVideoCaptureNV(1);
GLenum eval = glVideoCaptureNV(1, &sequenceNum, ×tamp);
switch(eval) {
case GL_SUCCESS_NV:
qDebug("\n************************************************");
break;
case GL_PARTIAL_SUCCESS_NV:
qDebug("\n2ooooooooooooooooooooooooooooooooooooooooooooooooo");
break;
case GL_FAILURE_NV:
qDebug("\n3##################################################");
break;
default:
qDebug("\nkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkk");
break;
}
glEndVideoCaptureNV(1);
GLenum test= glGetError();
}
void NVGL::InitStream(){
// Setup CSC for each stream.
GLfloat mat[4][4];
float scale = 1.0f;
GLfloat max[] = {5000, 5000, 5000, 5000};;
GLfloat min[] = {0, 0, 0, 0};
UINT m_videoSlot= 1;
// Initialize matrix to the identity.
mat[0][0] = scale; mat[0][1] = 0; mat[0][2] = 0; mat[0][3] = 0;
mat[1][0] = 0; mat[1][1] = scale; mat[1][2] = 0; mat[1][3] = 0;
mat[2][0] = 0; mat[2][1] = 0; mat[2][2] = scale; mat[2][3] = 0;
mat[3][0] = 0; mat[3][1] = 0; mat[3][2] = 0; mat[3][3] = scale;
GLfloat offset[] = {0, 0, 0, 0};
mat[0][0] = 1.164f *scale;
mat[0][1] = 1.164f *scale;
mat[0][2] = 1.164f *scale;
mat[0][3] = 0;
mat[1][0] = 0;
mat[1][1] = -0.392f *scale;
mat[1][2] = 2.017f *scale;
mat[1][3] = 0;
mat[2][0] = 1.596f *scale;
mat[2][1] = -0.813f *scale;
mat[2][2] = 0.f;
mat[2][3] = 0;
mat[3][0] = 0;
mat[3][1] = 0;
mat[3][2] = 0;
mat[3][3] = 1;
offset[0] =-0.87f;
offset[1] = 0.53026f;
offset[2] = -1.08f;
offset[3] = 0;
for (int i=0; i < l_vioConfig.vioConfig.inConfig.numStreams; i++) {
glVideoCaptureStreamParameterfvNV(m_videoSlot, i,GL_VIDEO_COLOR_CONVERSION_MATRIX_NV,&mat[0][0]);
glVideoCaptureStreamParameterfvNV(m_videoSlot, i,GL_VIDEO_COLOR_CONVERSION_MAX_NV,&max[0]);
glVideoCaptureStreamParameterfvNV(m_videoSlot, i,GL_VIDEO_COLOR_CONVERSION_MIN_NV,&min[0]);
glVideoCaptureStreamParameterfvNV(m_videoSlot, i,GL_VIDEO_COLOR_CONVERSION_OFFSET_NV,&offset[0]);
}
}
// Create the destination texture(s) for the capture streams on video
// slot 1 and bind them to each stream.  The alternative buffer-object
// capture path is kept below, commented out.
void NVGL::InitBuffer(){
UINT m_videoSlot= 1;
GLint bufferPitch;  // only used by the disabled buffer-object path below
/*GLint videoBufferFormat = GL_YCBYCR8_422_NV;
// Create video buffer objects
glGenBuffersARB(l_vioConfig.vioConfig.inConfig.numStreams,&m_videoBuffer);
// Allocate space in the buffer objects.
for (NvU32 i = 0; i < l_vioConfig.vioConfig.inConfig.numStreams; i++) {
// Set the buffer object capture data format for each stream.
glVideoCaptureStreamParameterivNV(m_videoSlot, i,GL_VIDEO_BUFFER_INTERNAL_FORMAT_NV,&videoBufferFormat);
// Bind the buffer.
glBindBufferARB(GL_VIDEO_BUFFER_NV, m_videoBuffer);
// Get the video buffer pitch
glGetVideoCaptureStreamivNV(m_videoSlot, i,GL_VIDEO_BUFFER_PITCH_NV,&bufferPitch);
// Allocate space in the buffer object
glBufferDataARB(GL_VIDEO_BUFFER_NV, bufferPitch * 720, NULL,GL_STREAM_READ_ARB);
// Bind the buffer object to the video capture stream.
glBindVideoCaptureStreamBufferNV(m_videoSlot, i, GL_FRAME_NV, m_videoBuffer);
}*/
// Create and initialize video texture objects.
glEnable(GL_TEXTURE_2D);
// NOTE(review): glGenTextures() is asked for numStreams names but is given
// the address of what appears to be a single GLuint (m_videoTexture) --
// if numStreams > 1 this writes past it; verify the member's declaration
// and use an array if needed.
glGenTextures(l_vioConfig.vioConfig.inConfig.numStreams, &m_videoTexture);
for (UINT i = 0; i < l_vioConfig.vioConfig.inConfig.numStreams; i++) {
// NOTE(review): every stream binds the SAME texture object; presumably
// only single-stream input is expected -- confirm.
glBindTexture(GL_TEXTURE_2D, m_videoTexture);
// Set texture parameters
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
// Pick texture dimensions from the detected signal: 720p50 -> 1280x720,
// everything else -> 1920x1080.
if(l_vioConfig.vioConfig.inConfig.signalFormat==NVVIOSIGNALFORMAT_720P_50_00_SMPTE296){
// Set texture format and size.
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA8, 1280, 720, 0,GL_RGBA, GL_UNSIGNED_BYTE, NULL);
}else{
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA8, 1920, 1080, 0,GL_RGBA, GL_UNSIGNED_BYTE, NULL);
}
// Bind the outputs for the stream
// NOTE(review): storage was allocated on the GL_TEXTURE_2D target but the
// stream is bound as GL_TEXTURE_RECTANGLE_NV -- per-target texture objects
// differ, so the rectangle texture may have no storage; verify which
// target the capture extension requires here.
glBindVideoCaptureStreamTextureNV(m_videoSlot, i, GL_FRAME_NV,GL_TEXTURE_RECTANGLE_NV,m_videoTexture);
}
}
// Release the NVIDIA capture resources: unbind video slot 1, drop the
// current context, then release the locked device.  Always returns S_OK.
HRESULT NVGL::cleanupVideo(HDC m_hDC)
{
    // Unbind and release the capture device.
    BOOL bRet = wglBindVideoCaptureDeviceNV(1, NULL);
    assert(bRet && "Failed trying to unbind the video capture device!");
    // wglReleaseVideoCaptureDeviceNV should work even without a context current.
    wglMakeCurrent(m_hDC, NULL);
    bRet = wglReleaseVideoCaptureDeviceNV(m_hDC, m_device);
    assert(bRet && "Failed trying to release the video capture device!");
    return S_OK;
}
#include "nvgl.h"
#include <stdio.h>
#include <assert.h>
// Default constructor: leaves members untouched.  Needed so NVGL can be
// held by value in Widget before initializeGL() runs; real setup happens
// in the (NVVIOCONFIG_V2&, HDC) constructor.
NVGL::NVGL()
{
}
// Initialising constructor: copies the SDI input configuration, then
// locks and binds a capture device on the given DC, configures the
// colour-conversion parameters per stream, and creates the destination
// textures.  Order matters: LockDevice() must bind the device before
// InitStream()/InitBuffer() touch the capture slot.
NVGL::NVGL(NVVIOCONFIG_V2 &_l_vioConfig,HDC widget)
{
l_vioConfig=_l_vioConfig;
LockDevice(widget);
InitStream();
InitBuffer();
}
// Enumerate the NVIDIA video-capture devices reachable through `widget`'s
// DC, lock the first available one into m_device, and bind it to video
// slot 1 of the current GL context.  On any failure m_device is NULL and
// the function returns without binding.
void NVGL::LockDevice(HDC widget){
    m_device = NULL;                 // defined state even on early return
    UINT numDevices = wglEnumerateVideoCaptureDevicesNV(widget, NULL);
    // BUG FIX: numDevices is unsigned, so `<= 0` was really `== 0`; and
    // the original fell through after printing, mallocing 0 bytes.
    if (numDevices == 0) {
        fprintf(stderr, "No devices found\n");
        return;
    }
    HVIDEOINPUTDEVICENV* videoDevices = (HVIDEOINPUTDEVICENV*)
        malloc(numDevices * sizeof(videoDevices[0]));
    if (!videoDevices) {
        // BUG FIX: original continued and dereferenced the NULL pointer.
        fprintf(stderr, "malloc failed. OOM?\n");
        return;
    }
    if (numDevices != wglEnumerateVideoCaptureDevicesNV(widget, videoDevices)) {
        // BUG FIX: original freed the array here and then kept iterating
        // over the freed memory (use-after-free).
        free(videoDevices);
        return;
    }
    // Find an available device we can lock.
    for (UINT i = 0; i < numDevices; ++i) {
        if (wglLockVideoCaptureDeviceNV(widget, videoDevices[i])) {
            m_device = videoDevices[i];
            break;
        }
    }
    free(videoDevices);
    if (m_device == NULL) {
        // BUG FIX: original fell through and bound a NULL device (typo
        // "decives" also fixed).
        fprintf(stderr, "no lockable devices found\n");
        return;
    }
    // wglBindVideoCaptureDeviceNV needs a context current.
    BOOL bRet = wglBindVideoCaptureDeviceNV(1, m_device);
    assert(bRet && "Failed trying to bind the video capture device!");
}
void NVGL::StartCapture(){
GLuint sequenceNum=0;
GLuint64EXT timestamp=0;
glBeginVideoCaptureNV(1);
GLenum eval = glVideoCaptureNV(1, &sequenceNum, ×tamp);
switch(eval) {
case GL_SUCCESS_NV:
qDebug("\n************************************************");
break;
case GL_PARTIAL_SUCCESS_NV:
qDebug("\n2ooooooooooooooooooooooooooooooooooooooooooooooooo");
break;
case GL_FAILURE_NV:
qDebug("\n3##################################################");
break;
default:
qDebug("\nkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkk");
break;
}
glEndVideoCaptureNV(1);
GLenum test= glGetError();
}
void NVGL::InitStream(){
// Setup CSC for each stream.
GLfloat mat[4][4];
float scale = 1.0f;
GLfloat max[] = {5000, 5000, 5000, 5000};;
GLfloat min[] = {0, 0, 0, 0};
UINT m_videoSlot= 1;
// Initialize matrix to the identity.
mat[0][0] = scale; mat[0][1] = 0; mat[0][2] = 0; mat[0][3] = 0;
mat[1][0] = 0; mat[1][1] = scale; mat[1][2] = 0; mat[1][3] = 0;
mat[2][0] = 0; mat[2][1] = 0; mat[2][2] = scale; mat[2][3] = 0;
mat[3][0] = 0; mat[3][1] = 0; mat[3][2] = 0; mat[3][3] = scale;
GLfloat offset[] = {0, 0, 0, 0};
mat[0][0] = 1.164f *scale;
mat[0][1] = 1.164f *scale;
mat[0][2] = 1.164f *scale;
mat[0][3] = 0;
mat[1][0] = 0;
mat[1][1] = -0.392f *scale;
mat[1][2] = 2.017f *scale;
mat[1][3] = 0;
mat[2][0] = 1.596f *scale;
mat[2][1] = -0.813f *scale;
mat[2][2] = 0.f;
mat[2][3] = 0;
mat[3][0] = 0;
mat[3][1] = 0;
mat[3][2] = 0;
mat[3][3] = 1;
offset[0] =-0.87f;
offset[1] = 0.53026f;
offset[2] = -1.08f;
offset[3] = 0;
for (int i=0; i < l_vioConfig.vioConfig.inConfig.numStreams; i++) {
glVideoCaptureStreamParameterfvNV(m_videoSlot, i,GL_VIDEO_COLOR_CONVERSION_MATRIX_NV,&mat[0][0]);
glVideoCaptureStreamParameterfvNV(m_videoSlot, i,GL_VIDEO_COLOR_CONVERSION_MAX_NV,&max[0]);
glVideoCaptureStreamParameterfvNV(m_videoSlot, i,GL_VIDEO_COLOR_CONVERSION_MIN_NV,&min[0]);
glVideoCaptureStreamParameterfvNV(m_videoSlot, i,GL_VIDEO_COLOR_CONVERSION_OFFSET_NV,&offset[0]);
}
}
// Create the destination texture(s) for the capture streams on video
// slot 1 and bind them to each stream.  The alternative buffer-object
// capture path is kept below, commented out.
void NVGL::InitBuffer(){
UINT m_videoSlot= 1;
GLint bufferPitch;  // only used by the disabled buffer-object path below
/*GLint videoBufferFormat = GL_YCBYCR8_422_NV;
// Create video buffer objects
glGenBuffersARB(l_vioConfig.vioConfig.inConfig.numStreams,&m_videoBuffer);
// Allocate space in the buffer objects.
for (NvU32 i = 0; i < l_vioConfig.vioConfig.inConfig.numStreams; i++) {
// Set the buffer object capture data format for each stream.
glVideoCaptureStreamParameterivNV(m_videoSlot, i,GL_VIDEO_BUFFER_INTERNAL_FORMAT_NV,&videoBufferFormat);
// Bind the buffer.
glBindBufferARB(GL_VIDEO_BUFFER_NV, m_videoBuffer);
// Get the video buffer pitch
glGetVideoCaptureStreamivNV(m_videoSlot, i,GL_VIDEO_BUFFER_PITCH_NV,&bufferPitch);
// Allocate space in the buffer object
glBufferDataARB(GL_VIDEO_BUFFER_NV, bufferPitch * 720, NULL,GL_STREAM_READ_ARB);
// Bind the buffer object to the video capture stream.
glBindVideoCaptureStreamBufferNV(m_videoSlot, i, GL_FRAME_NV, m_videoBuffer);
}*/
// Create and initialize video texture objects.
glEnable(GL_TEXTURE_2D);
// NOTE(review): glGenTextures() is asked for numStreams names but is given
// the address of what appears to be a single GLuint (m_videoTexture) --
// if numStreams > 1 this writes past it; verify the member's declaration
// and use an array if needed.
glGenTextures(l_vioConfig.vioConfig.inConfig.numStreams, &m_videoTexture);
for (UINT i = 0; i < l_vioConfig.vioConfig.inConfig.numStreams; i++) {
// NOTE(review): every stream binds the SAME texture object; presumably
// only single-stream input is expected -- confirm.
glBindTexture(GL_TEXTURE_2D, m_videoTexture);
// Set texture parameters
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
// Pick texture dimensions from the detected signal: 720p50 -> 1280x720,
// everything else -> 1920x1080.
if(l_vioConfig.vioConfig.inConfig.signalFormat==NVVIOSIGNALFORMAT_720P_50_00_SMPTE296){
// Set texture format and size.
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA8, 1280, 720, 0,GL_RGBA, GL_UNSIGNED_BYTE, NULL);
}else{
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA8, 1920, 1080, 0,GL_RGBA, GL_UNSIGNED_BYTE, NULL);
}
// Bind the outputs for the stream
// NOTE(review): storage was allocated on the GL_TEXTURE_2D target but the
// stream is bound as GL_TEXTURE_RECTANGLE_NV -- per-target texture objects
// differ, so the rectangle texture may have no storage; verify which
// target the capture extension requires here.
glBindVideoCaptureStreamTextureNV(m_videoSlot, i, GL_FRAME_NV,GL_TEXTURE_RECTANGLE_NV,m_videoTexture);
}
}
// Release the NVIDIA capture resources: unbind video slot 1, drop the
// current context, then release the locked device.  Always returns S_OK.
HRESULT NVGL::cleanupVideo(HDC m_hDC)
{
    // Unbind and release the capture device.
    BOOL bRet = wglBindVideoCaptureDeviceNV(1, NULL);
    assert(bRet && "Failed trying to unbind the video capture device!");
    // wglReleaseVideoCaptureDeviceNV should work even without a context current.
    wglMakeCurrent(m_hDC, NULL);
    bRet = wglReleaseVideoCaptureDeviceNV(m_hDC, m_device);
    assert(bRet && "Failed trying to release the video capture device!");
    return S_OK;
}
// (forum paste artifacts, commented out so the file parses:
//  "To copy to clipboard, switch view to plain text mode" / "Bookmarks")