How can I convert uvmap to IplImage (opencv) ?

How can I convert uvmap to IplImage (opencv) ?

Hello

I'm baeckgoo.

I'm trying to use the Perceptual Computing SDK. However, I have one problem: the image I convert to IplImage is not the same as the image your example displays.

I used your camera_uvmap sample as a starting point.

Could you help me correct my source code?

Specifically, I'd like to understand the following lines dealing with the uvmap and the depth map.

I can see that these lines run when the mouse is clicked in my display window.

I want to know the exact data type and memory layout. This code treats the image as 32-bit. How can I convert this 32-bit image to an 8-bit IplImage in OpenCV?

I sincerely look forward to your reply.

Thank you for your time.

baeckgoo.

//

// Excerpt from the camera_uvmap sample: highlights the depth column/row under
// the mouse cursor and the corresponding color pixels looked up via the UV map.
if (depth_render.m_mouse.x<pdepth.imageInfo.width && depth_render.m_mouse.y<pdepth.imageInfo.height) {
// Mouse is inside the depth window: walk the full depth COLUMN through the cursor.
for (int y=0;y<(int)pdepth.imageInfo.height;y++) {
// Depth plane 0 is an array of 16-bit samples ('short'); OR-ing 0x45FF in
// overwrites bits of the depth value — presumably so the renderer draws the
// column in a distinct tone (only the low 16 bits of the constant survive).
((short*)ddepth.planes[0])[y*pdepth.imageInfo.width+depth_render.m_mouse.x]|=0x000045FF;
// The uvmap (planes[2], float) holds one (u,v) pair per depth pixel — hence
// the "*2+0"/"*2+1" indexing. u,v appear normalized to [0,1]; scaling by the
// color resolution (+0.5 to round) yields integer color-pixel coordinates.
int xx=(int)(uvmap[(y*pdepth.imageInfo.width+depth_render.m_mouse.x)*2+0]*pcolor.imageInfo.width+0.5);
int yy=(int)(uvmap[(y*pdepth.imageInfo.width+depth_render.m_mouse.x)*2+1]*pcolor.imageInfo.height+0.5);
// Out-of-range coordinates mean "no valid mapping" for that depth pixel — skip.
if (xx>=0 && xx<(int)pcolor.imageInfo.width && yy>=0 && yy<(int)pcolor.imageInfo.height)
// Color plane 0 is 32-bit-per-pixel RGB32 (pxcU32); OR 0x00FF0000 forces one
// 8-bit channel (red, per the usual BGRA byte order — confirm) to 255.
((pxcU32*)dcolor.planes[0])[yy*pcolor.imageInfo.width+xx]|=0x00FF0000;
}
// Same idea for the depth ROW through the cursor (marker value 0xFFFF here).
for (int x=0;x<(int)pdepth.imageInfo.width;x++) {
((short*)ddepth.planes[0])[depth_render.m_mouse.y*pdepth.imageInfo.width+x]|=0x0000FFFF;
int xx=(int)(uvmap[(depth_render.m_mouse.y*pdepth.imageInfo.width+x)*2+0]*pcolor.imageInfo.width+0.5);
int yy=(int)(uvmap[(depth_render.m_mouse.y*pdepth.imageInfo.width+x)*2+1]*pcolor.imageInfo.height+0.5);
if (xx>=0 && xx<(int)pcolor.imageInfo.width && yy>=0 && yy<(int)pcolor.imageInfo.height)
((pxcU32*)dcolor.planes[0])[yy*pcolor.imageInfo.width+xx]|=0x00FF0000;
}
} else {
// Mouse outside the depth window: mark the color pixel that EVERY depth pixel
// maps to, visualizing the full UV-map coverage on the color image.
for (int y=0;y<(int)pdepth.imageInfo.height;y++) {
for (int x=0;x<(int)pdepth.imageInfo.width;x++) {
int xx=(int)(uvmap[(y*pdepth.imageInfo.width+x)*2+0]*pcolor.imageInfo.width+0.5);
int yy=(int)(uvmap[(y*pdepth.imageInfo.width+x)*2+1]*pcolor.imageInfo.height+0.5);
if (xx>=0 && xx<(int)pcolor.imageInfo.width && yy>=0 && yy<(int)pcolor.imageInfo.height)
((pxcU32 *)dcolor.planes[0])[yy*pcolor.imageInfo.width+xx]|=0x00FF0000;
}
}
}

//

my source code is following.

/*******************************************************************************

INTEL CORPORATION PROPRIETARY INFORMATION
This software is supplied under the terms of a license agreement or nondisclosure
agreement with Intel Corporation and may not be copied or disclosed except in
accordance with the terms of that agreement
Copyright(c) 2011-2012 Intel Corporation. All Rights Reserved.

*******************************************************************************/
#include <stdio.h>
#include <conio.h>
#include <windows.h>
#include <wchar.h>
#include <vector>
#include <cv.h>
#include <highgui.h>

#include "pxcsession.h"
#include "pxcsmartptr.h"
#include "pxccapture.h"
#include "util_render.h"
#include "util_capture.h"
#include "util_cmdline.h"

int wmain(int argc, WCHAR* argv[]) {
PXCSmartPtr<PXCSession> session;
pxcStatus sts=PXCSession_Create(&session);
if (sts<PXC_STATUS_NO_ERROR || session==NULL) {
wprintf_s(L"Failed to create a session\n");
return 3;
}

UtilCmdLine cmdl(session);
if (!cmdl.Parse(L"-nframes-csize-sdname",argc,argv)) return 3;

UtilCapture capture(session);
for (std::list<PXCSizeU32>::iterator itr=cmdl.m_csize.begin();itr!=cmdl.m_csize.end();itr++)
capture.SetFilter(PXCImage::IMAGE_TYPE_COLOR,*itr);
if (cmdl.m_sdname) capture.SetFilter(cmdl.m_sdname);

PXCCapture::VideoStream::DataDesc request;
memset(&request, 0, sizeof(request));
request.streams[0].format=PXCImage::COLOR_FORMAT_RGB32;
request.streams[1].format=PXCImage::COLOR_FORMAT_DEPTH;
sts = capture.LocateStreams (&request);
if (sts<PXC_STATUS_NO_ERROR) {
wprintf_s(L"Failed to locate video stream(s)\n");
return 1;
}

PXCCapture::VideoStream::ProfileInfo pcolor;
capture.QueryVideoStream(0)->QueryProfile(&pcolor);
PXCCapture::VideoStream::ProfileInfo pdepth;
capture.QueryVideoStream(1)->QueryProfile(&pdepth);

pxcCHAR line[64];
swprintf_s(line,64,L"Depth %dx%d", pdepth.imageInfo.width, pdepth.imageInfo.height);
UtilRender depth_render(line);
swprintf_s(line,64,L"UV %dx%d", pcolor.imageInfo.width, pcolor.imageInfo.height);
UtilRender uv_render(line);

////////////////////////////////////////////////////////////////OPENCV_CONFIGURATION////////////////////////////////////
//IplImage *img_rgb=0;
CvSize size_rgb;
size_rgb.height=480;
size_rgb.width=640;
//img_rgb=cvCreateImage(size_rgb,8,3);

//IplImage *img_depth=0;
CvSize size_depth;
size_depth.height=240;
size_depth.width=320;
//img_depth=cvCreateImage(size_depth,8,1);

short *depth_data;
unsigned char *rgb_data;

////////////////////////////////////////////////////////////////OPENCV_CONFIGURATION////////////////////////////////////

for (int f=0;f<(int)cmdl.m_nframes;f++) {
PXCSmartArray<PXCImage> images(2);
PXCSmartSP sp;

sts=capture.ReadStreamAsync(images, &sp);
if (sts<PXC_STATUS_NO_ERROR) break;
sp->Synchronize();

PXCImage::ImageData dcolor;
PXCImage::ImageData ddepth;
images[0]->AcquireAccess(PXCImage::ACCESS_READ_WRITE,PXCImage::COLOR_FORMAT_RGB32,&dcolor);
images[1]->AcquireAccess(PXCImage::ACCESS_READ,&ddepth);
float *uvmap=(float*)ddepth.planes[2];
if (!uvmap) break;

if (depth_render.m_mouse.x<pdepth.imageInfo.width && depth_render.m_mouse.y<pdepth.imageInfo.height) {
for (int y=0;y<(int)pdepth.imageInfo.height;y++) {
((short*)ddepth.planes[0])[y*pdepth.imageInfo.width+depth_render.m_mouse.x]|=0x000045FF;
int xx=(int)(uvmap[(y*pdepth.imageInfo.width+depth_render.m_mouse.x)*2+0]*pcolor.imageInfo.width+0.5);
int yy=(int)(uvmap[(y*pdepth.imageInfo.width+depth_render.m_mouse.x)*2+1]*pcolor.imageInfo.height+0.5);
if (xx>=0 && xx<(int)pcolor.imageInfo.width && yy>=0 && yy<(int)pcolor.imageInfo.height)
((pxcU32*)dcolor.planes[0])[yy*pcolor.imageInfo.width+xx]|=0x00FF0000;
}
for (int x=0;x<(int)pdepth.imageInfo.width;x++) {
((short*)ddepth.planes[0])[depth_render.m_mouse.y*pdepth.imageInfo.width+x]|=0x0000FFFF;
int xx=(int)(uvmap[(depth_render.m_mouse.y*pdepth.imageInfo.width+x)*2+0]*pcolor.imageInfo.width+0.5);
int yy=(int)(uvmap[(depth_render.m_mouse.y*pdepth.imageInfo.width+x)*2+1]*pcolor.imageInfo.height+0.5);
if (xx>=0 && xx<(int)pcolor.imageInfo.width && yy>=0 && yy<(int)pcolor.imageInfo.height)
((pxcU32*)dcolor.planes[0])[yy*pcolor.imageInfo.width+xx]|=0x00FF0000;
}
} else {
for (int y=0;y<(int)pdepth.imageInfo.height;y++) {
for (int x=0;x<(int)pdepth.imageInfo.width;x++) {
int xx=(int)(uvmap[(y*pdepth.imageInfo.width+x)*2+0]*pcolor.imageInfo.width+0.5);
int yy=(int)(uvmap[(y*pdepth.imageInfo.width+x)*2+1]*pcolor.imageInfo.height+0.5);
if (xx>=0 && xx<(int)pcolor.imageInfo.width && yy>=0 && yy<(int)pcolor.imageInfo.height)
((pxcU32 *)dcolor.planes[0])[yy*pcolor.imageInfo.width+xx]|=0x00FF0000;
}
}
}
////////////////////////////////////////////////////////////////OPENCV/////////////////////////////////////////////////

rgb_data=dcolor.planes[0];
IplImage* rgb_img=cvCreateImage(size_rgb,8,3);

for(int y=0; y<480; y++)
{
for(int x=0; x<640; x++)
{
for(int k=0; k<3 ; k++)
{
rgb_img->imageData[y*640*3+x*3+k]=rgb_data[y*640*3+x*3+k];
}
}
}

depth_data=(short *)ddepth.planes[0];
IplImage* depth_img=cvCreateImage(size_depth,8,1);
short temp=0;
for(int y=0; y<240; y++)
{
for(int x=0; x<320; x++)
{
// temp=*depth_data>>2;

// depth_img->imageData[y*320+x]=temp;
depth_img->imageData[y*320+x]=depth_data[y*320+x];
}
}

cvShowImage("test_rgb",rgb_img);
cvWaitKey(1);
cvReleaseImage(&rgb_img);

cvShowImage("test_depth",depth_img);
cvWaitKey(1);
cvReleaseImage(&depth_img);

/*
depth_data=(short *)ddepth.planes[0];
IplImage* d_img=cvCreateImage(size_depth,16,1);
memcpy(d_img->imageData,depth_data,sizeof(short)*320*240);
cvShowImage("test",d_img);
cvWaitKey(1);
cvReleaseImage(&d_img);
*/

////////////////////////////////////////////////////////////////OPENCV/////////////////////////////////////////////////

images[0]->ReleaseAccess(&dcolor);
images[1]->ReleaseAccess(&ddepth);

if (!depth_render.RenderFrame(images[1])) break;
if (!uv_render.RenderFrame(images[0])) break;
}

return 0;
}

1 post / 0 new
For more complete information about compiler optimizations, see our Optimization Notice.