#include "yWebcam.h"
#include "Engine/yEngineServices.h"
#include "Textures/yTexture.h"
#include "Textures/yTextureManager.h"
#include "Macros/yMacros.h"
#include "OgreStagingTexture.h"
#include "OgreTextureGpu.h"
#include "OgrePixelFormatGpuUtils.h"
#include "OgreImage2.h"
#include "OgreRoot.h"
#include <QTime>
#include <QVariant>
#include <dshow.h>
#include <iostream>


yWebcam::yWebcam()
{
    yAssert( yUtils::isLogicThread(), "new yWebcam must be called in Logic thread" );

    mCapturing = false;
    mCaptureReady = false;
    mDeviceOpen = false;
    mDeviceIndex = -1;

    mTexture = nullptr;

    mDatabase = nullptr;

    mDistanceToObect = 1.0;
}

yWebcam::~yWebcam(){
}

bool yWebcam::openDevice( std::string deviceName ){

    yAssert( yUtils::isLogicThread(), "openDevice must be called in Logic thread" );

    std::vector<std::string> deviceList;
    deviceList = getVideoDevices();

    if( deviceName.empty() && deviceList.size() > 0 ){
        mDeviceIndex = 0;
    }
    else{
        for(size_t i=0; i<deviceList.size(); ++i){
            if( deviceList.at(i) == deviceName ){
                mDeviceIndex = i;
                break;
            }
        }
    }

    if( mDeviceIndex >= 0 ){ // specified device found

        if( !mCvCapture.open( mDeviceIndex ) ){
            std::cout<<yWarning<<"Could not open webcam at index "<<mDeviceIndex<<std::endl;
        }
        else{

            std::cout<<"yWebcam. Opened device: "<<deviceList.at(mDeviceIndex)<<std::endl;

            mDeviceOpen = true;

//            mCvCapture.set(cv::CAP_PROP_FRAME_WIDTH,1280);
//            mCvCapture.set(cv::CAP_PROP_FRAME_HEIGHT,720);
//            mCvCapture.set(cv::CAP_PROP_FPS,30);

//            mCvCapture.set(cv::CAP_PROP_WHITE_BALANCE_BLUE_U,7000);
//            mCvCapture.set(cv::CAP_PROP_WHITE_BALANCE_RED_V,7000);
//            mCvCapture.set(cv::CAP_PROP_AUTO_EXPOSURE,0.75);
//            mCvCapture.set(cv::CAP_PROP_AUTO_WB,1);

            // Grab one frame to learn the native capture resolution.
            mCvCapture >> mCvFrame;

            // The GPU texture is allocated at twice the capture size because
            // runCaptureThread() upscales each frame by 2x before upload.
            mTexture = yEngineServices::getTextureManager().createTexture( "WebcamCapture_"+std::to_string( mDeviceIndex ),
                                                                           mCvFrame.size().width * 2.0f, mCvFrame.size().height * 2.0f, 0,
                                                                           Ogre::PixelFormatGpu::PFG_RGBA8_UNORM_SRGB,
                                                                           Ogre::TextureFlags::ManualTexture,
                                                                           Ogre::TextureTypes::Type2DArray );

        }
    }

    return mDeviceOpen;
}

int yWebcam::loadConfigParameter( QString parameterName ){
    // parameterName is concatenated directly into the SQL statement; the function
    // is only ever called with internal literals (see processImage()).
    QSqlQuery query = mDatabase->exec("SELECT Value FROM CONFIG WHERE ParameterName='" + parameterName + "'; ");
    query.next();
    return query.value(0).toInt();
}


void yWebcam::closeDevice(){
    yAssert( yUtils::isLogicThread(), "closeDevice must be called in Logic thread" );

    stopCapturing();
    mCvCapture.release();
}

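// Worker-thread loop: grabs frames, converts BGR->RGBA, upscales them 2x and
// composites the stereo overlay. mCapturing and mCaptureReady are simple flags
// shared with the render thread; ideally they would be std::atomic<bool>
// (assumption: they are declared as plain bools in the header, not shown here).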
void yWebcam::runCaptureThread(){
    while( mCapturing ){
        if( !mCaptureReady ){ // only grab once the render thread has consumed the previous frame
            mCvCapture >> mCvFrame;
            if( !mCvFrame.empty() ){
                cv::cvtColor( mCvFrame, mCvFrame, cv::COLOR_BGR2RGBA, 0 );
                cv::resize( mCvFrame, mCvFrame, cv::Size( mCvFrame.cols * 2.0f, mCvFrame.rows * 2.0f ), 0, 0, cv::INTER_LINEAR);
                resizeStereoImage();
                //processImage();
                mCaptureReady = true;
            }
        }
    }
}

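// Called by Ogre on the render thread each frame: when a new frame is ready,
// wrap it in an Image2 (which takes ownership of the staging buffer) and
// upload it to the webcam texture.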
bool yWebcam::frameRenderingQueued(const Ogre::FrameEvent &evt){

    if( mCaptureReady ){
        Ogre::Image2 image;

        size_t sizeBytes = Ogre::PixelFormatGpuUtils::calculateSizeBytes(
                    mTexture->mOgreTexture->getWidth(), mTexture->mOgreTexture->getHeight(), 1u, 1u, mTexture->mOgreTexture->getPixelFormat(), 1u, 4u );
        // CPU-side staging buffer; ownership passes to 'image' (autoDelete = true below).
        Ogre::uint8 *data = reinterpret_cast<Ogre::uint8*>(
                    OGRE_MALLOC_SIMD( sizeBytes, Ogre::MEMCATEGORY_GENERAL ) );
        image.loadDynamicImage( data, mTexture->mOgreTexture->getWidth(), mTexture->mOgreTexture->getHeight(), 1u,
                                Ogre::TextureTypes::Type2DArray, mTexture->mOgreTexture->getPixelFormat(),
                                true, 1u );

        // The frame was upscaled to exactly the texture size in runCaptureThread(),
        // so a single memcpy of the full mip-0 slice is sufficient.
        memcpy( data, (Ogre::uint8*)mCvFrame.data, sizeBytes );
        image.uploadTo( mTexture->mOgreTexture, 0, mTexture->mOgreTexture->getNumMipmaps() - 1u );

        mCaptureReady = false;
    }

    return true;
}

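// Splits the side-by-side stereo frame into left/right eye images, scales and
// offsets each one according to the current distance to the object, applies a
// slight perspective correction and composites them onto a fully transparent
// background. The numeric scale/offset constants appear to be hand-tuned for
// this particular camera/display layout.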
void yWebcam::resizeStereoImage(){

    cv::Mat eyeImage[2];

    eyeImage[0] = mCvFrame( cv::Rect( 0, 0, mCvFrame.cols/2, mCvFrame.rows ) );
    eyeImage[1] = mCvFrame( cv::Rect( mCvFrame.cols/2, 0, mCvFrame.cols/2, mCvFrame.rows ) );

    // Fully transparent background image the eye images are composited onto.
    cv::Mat bgImage;
    mCvFrame.copyTo( bgImage );
    std::vector<cv::Mat> rgbaChannels(4);
    cv::split( bgImage, rgbaChannels );
    rgbaChannels[3].setTo(0);
    cv::merge( rgbaChannels, bgImage );

    float clampedDistance = Ogre::Math::Clamp( mDistanceToObect, 0.1f, 0.6f );
    float mainScale = 0.29f + clampedDistance*0.5f;

    float scaleX = mainScale;
    float scaleY = mainScale*0.65f; // 0.65 gives the required aspect ratio
    float offsetX[2];
    offsetX[0] = 215.0f - clampedDistance*150;
    offsetX[1] = 230.0f - clampedDistance*150;

    float offsetY = 275.0f - clampedDistance*110;

    for( size_t i=0; i<2; ++i ){
        cv::Mat resizedEyeImage;
        cv::resize( eyeImage[i], resizedEyeImage, cv::Size( eyeImage[i].cols * scaleX, eyeImage[i].rows * scaleY ), 0, 0, cv::INTER_LINEAR);

        cv::Point2f src_p[4];
        cv::Point2f dst_p[4];

        float w = (float)resizedEyeImage.cols;
        float h = (float)resizedEyeImage.rows;

        // The 4 points that select the quadrilateral on the input, from top-left in clockwise order.
        float perspectiveOffset = 45.0f;
        src_p[0] = cv::Point2f( -perspectiveOffset, 0.0f);
        src_p[1] = cv::Point2f( w + perspectiveOffset, 0.0f);
        src_p[2] = cv::Point2f( w, h);
        src_p[3] = cv::Point2f(0.0f, h);

        // The 4 points where the mapping is to be done, from top-left in clockwise order.
        dst_p[0] = cv::Point2f(0.0f, 0.0f);
        dst_p[1] = cv::Point2f( w, 0.0f);
        dst_p[2] = cv::Point2f( w, h);
        dst_p[3] = cv::Point2f(0.0f, h);

        cv::Mat trans_mat33 = cv::getPerspectiveTransform(src_p, dst_p);
        cv::warpPerspective( resizedEyeImage, resizedEyeImage, trans_mat33, resizedEyeImage.size(), cv::INTER_LINEAR);

        resizedEyeImage.copyTo( bgImage( cv::Rect( i*mCvFrame.cols/2 + offsetX[i], offsetY, resizedEyeImage.cols, resizedEyeImage.rows) ) );
    }

    mCvFrame = bgImage;
}

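// Chroma-key style masking (currently disabled in runCaptureThread): loads
// HSV/RGB thresholds from an SQLite config database on first use, builds a
// binary mask from them and writes the inverted mask into the frame's alpha
// channel, so pixels inside the configured color range become transparent.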
void yWebcam::processImage(){

    if( !mDatabase ){
        mDatabase = new QSqlDatabase( QSqlDatabase::addDatabase("QSQLITE") );
        mDatabase->setDatabaseName( "Config.db" );
        mDatabase->open();

        mDatabase->exec("CREATE TABLE IF NOT EXISTS CONFIG( ParameterName TEXT UNIQUE, Value TEXT );");

        // If the table was just created these queries find no rows and every
        // threshold defaults to 0 (QVariant::toInt() on an invalid value).
        mConfig.hsvMin[0] = loadConfigParameter( "hMin"+QString::number(0) );
        mConfig.hsvMax[0] = loadConfigParameter( "hMax"+QString::number(0) );

        mConfig.hsvMin[1] = loadConfigParameter( "sMin"+QString::number(0) );
        mConfig.hsvMax[1] = loadConfigParameter( "sMax"+QString::number(0) );

        mConfig.hsvMin[2] = loadConfigParameter( "vMin"+QString::number(0) );
        mConfig.hsvMax[2] = loadConfigParameter( "vMax"+QString::number(0) );

        mConfig.rgbMin[0] = loadConfigParameter( "rMin"+QString::number(0) );
        mConfig.rgbMax[0] = loadConfigParameter( "rMax"+QString::number(0) );

        mConfig.rgbMin[1] = loadConfigParameter( "gMin"+QString::number(0) );
        mConfig.rgbMax[1] = loadConfigParameter( "gMax"+QString::number(0) );

        mConfig.rgbMin[2] = loadConfigParameter( "bMin"+QString::number(0) );
        mConfig.rgbMax[2] = loadConfigParameter( "bMax"+QString::number(0) );
    }

    std::vector<cv::Mat> hsvChannels(3);
    std::vector<cv::Mat> rgbaChannels(4);

    cv::Mat hsvImage;
    cv::Mat maskHSV;
    cv::Mat maskRGB;
    cv::Mat frameMask;

    // Note: cv::COLOR_BGR2HSV expects a 3-channel frame, while the split/merge
    // below assumes 4 channels (RGBA). The function is currently disabled and
    // would need a consistent input format before being re-enabled.
    cv::cvtColor( mCvFrame, hsvImage, cv::COLOR_BGR2HSV, 0 );
    cv::split( hsvImage, hsvChannels );
    cv::split( mCvFrame, rgbaChannels );

    cv::inRange( hsvImage, mConfig.hsvMin, mConfig.hsvMax, maskHSV);
    cv::inRange( mCvFrame, mConfig.rgbMin, mConfig.rgbMax, maskRGB );

    // Combine the HSV and RGB masks; both are 0/255, so the saturated product
    // acts as a logical AND.
    cv::multiply( maskHSV, maskRGB, frameMask );

    // Remove small speckles and holes from the mask.
    cv::Mat kernel = cv::getStructuringElement(cv::MORPH_RECT, cv::Size(3, 3));
    cv::morphologyEx(frameMask, frameMask, cv::MORPH_OPEN, kernel);
    cv::morphologyEx(frameMask, frameMask, cv::MORPH_CLOSE, kernel);

    // Invert the mask and use it as the alpha channel: matched pixels become transparent.
    cv::threshold(frameMask,frameMask,0, 255, cv::THRESH_BINARY_INV);

    rgbaChannels[3] = frameMask;
    cv::merge( rgbaChannels, mCvFrame );
}

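// Enumerates DirectShow video capture devices and returns their friendly names.
// openDevice() passes the index into this list to cv::VideoCapture::open(), so
// the code relies on the enumeration order matching OpenCV's backend. Assumes
// COM has already been initialized on the calling thread (e.g. via CoInitializeEx
// elsewhere in the engine).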
std::vector<std::string> yWebcam::getVideoDevices()
{
    std::vector<std::string> deviceList;

    IEnumMoniker *pEnum = NULL;

    // Create the System Device Enumerator.
    ICreateDevEnum *pDevEnum;
    HRESULT hr = CoCreateInstance(CLSID_SystemDeviceEnum, NULL,
                                  CLSCTX_INPROC_SERVER, IID_PPV_ARGS(&pDevEnum));

    if (SUCCEEDED(hr))
    {
        // Create an enumerator for the video input device category.
        hr = pDevEnum->CreateClassEnumerator(CLSID_VideoInputDeviceCategory, &pEnum, 0);
        if (hr == S_FALSE)
        {
            hr = VFW_E_NOT_FOUND; // The category is empty. Treat as an error.
        }
        pDevEnum->Release();
    }

    if( !pEnum ) return deviceList;

    IMoniker *pMoniker = NULL;

    while (pEnum->Next(1, &pMoniker, NULL) == S_OK)
    {
        IPropertyBag *pPropBag;
        HRESULT hr = pMoniker->BindToStorage(0, 0, IID_PPV_ARGS(&pPropBag));
        if (FAILED(hr))
        {
            pMoniker->Release();
            continue;
        }

        VARIANT var;
        VariantInit(&var);

        // Get description or friendly name.
        hr = pPropBag->Read(L"Description", &var, 0);
        if (FAILED(hr))
        {
            hr = pPropBag->Read(L"FriendlyName", &var, 0);
        }
        if (SUCCEEDED(hr))
        {
            // Naive narrowing of the wide string; adequate for ASCII device names.
            std::wstring ws(var.bstrVal, SysStringLen(var.bstrVal));
            std::string str( ws.begin(), ws.end() );
            deviceList.push_back( str );
            VariantClear(&var);
        }

        hr = pPropBag->Write(L"FriendlyName", &var);

        // WaveInID applies only to audio capture devices.
        hr = pPropBag->Read(L"WaveInID", &var, 0);
        if (SUCCEEDED(hr))
        {
            //printf("WaveIn ID: %d\n", var.lVal);
            VariantClear(&var);
        }

//        hr = pPropBag->Read(L"DevicePath", &var, 0);
//        if (SUCCEEDED(hr))
//        {
//            // The device path is not intended for display.
//            printf("Device path: %S\n", var.bstrVal);
//            VariantClear(&var);
//        }

        pPropBag->Release();
        pMoniker->Release();
    }

    pEnum->Release(); // release the enumerator once iteration is done

    return deviceList;
}

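// Starts/stops the capture worker thread and (un)registers this object as an
// Ogre frame listener so frameRenderingQueued() gets called.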
void yWebcam::startCapturing(){
    if( !mCapturing && mDeviceOpen ){
        mCapturing = true;
        mCaptureThread = std::thread(&yWebcam::runCaptureThread,this);
        Ogre::Root::getSingletonPtr()->addFrameListener(this);
    }
}

void yWebcam::stopCapturing(){
    if( mCapturing && mDeviceOpen ){
        mCapturing = false;
        Ogre::Root::getSingletonPtr()->removeFrameListener(this);
        mCaptureThread.join();
    }
}

void yWebcam::notifyDistanceToObject( float distance ){
    // Read by resizeStereoImage() on the capture thread; no explicit synchronization.
    mDistanceToObect = distance;
}