Project

General

Profile

BitmapToolkit Scol plugin
NeuralNetwork.cpp
Go to the documentation of this file.
1/*
2-----------------------------------------------------------------------------
3This source file is part of OpenSpace3D
4For the latest info, see http://www.openspace3d.com
5
6Copyright (c) 2012 I-maginer
7
8This program is free software; you can redistribute it and/or modify it under
9the terms of the GNU Lesser General Public License as published by the Free Software
10Foundation; either version 2 of the License, or (at your option) any later
11version.
12
13This program is distributed in the hope that it will be useful, but WITHOUT
14ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
15FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more details.
16
17You should have received a copy of the GNU Lesser General Public License along with
18this program; if not, write to the Free Software Foundation, Inc., 59 Temple
19Place - Suite 330, Boston, MA 02111-1307, USA, or go to
20http://www.gnu.org/copyleft/lesser.txt
21
22-----------------------------------------------------------------------------
23*/
24
25#include <iostream>
26#include <sstream>
27#include <fstream>
28#include <algorithm>
29
30#include "NeuralNetwork.h"
31#include "tinyxml2.h"
32
33// this is here only to know where to send a finished training message
34// defined in PluginBT
35#include "Prerequisites.h"
36
37/********************* CONSTRUCTOR AND DESTRUCTOR *************************************/
38NeuralNetwork::NeuralNetwork(int featurePerSample, float sensibility, MlMode mode, MlType type)
39{
40 m_bayes = cv::ml::NormalBayesClassifier::create();
41 m_svm = cv::ml::SVM::create();
42 m_dt = cv::ml::DTrees::create();
43 m_rt = cv::ml::RTrees::create();
44 m_boost = cv::ml::Boost::create();
45 m_ann = cv::ml::ANN_MLP::create();
46 m_knn = cv::ml::KNearest::create();
47 // Parameters
48 // KNN
49 m_knn_p = 3;
50
51 // SVM
52 m_svm->setType(cv::ml::SVM::C_SVC); // C_SVC o NU_SVC
53 m_svm->setKernel(cv::ml::SVM::RBF); // SIGMOID or RBF;
54 m_svm->setGamma(1.0); // for poly/rbf/sigmoid
55 m_svm->setCoef0(0.0); // for poly/sigmoid
56 m_svm->setC(0.5);
57 m_svm->setNu(0.5);
58
59 // DT
60 m_dt->setMaxCategories(6);
61
62 // Boost
63 m_boost->setBoostType(cv::ml::Boost::DISCRETE);
64 m_boost->setWeakCount(20);
65 m_boost->setWeightTrimRate(0);
66
67 // RT
68 m_rt->setMaxCategories(6);
69
70 // initialize variables
71 m_mode = mode;
72 m_type = type;
73 m_newHistoryData = false;
74 m_featurePerSample = featurePerSample;
75 m_nbSamples = 0;
76 m_nbMaxSamples = 0;
77 m_nbAnchorPoints = 0;
78 m_sensibility = sensibility;
79 m_lastInputDate = 0;
80 m_averageTime = 0;
81
82 m_trainingThread = 0;
83 m_detectionThread = 0;
84 SetTrainingState(NOT_TRAINED);
85}
86
// ~NeuralNetwork (the signature line is missing from this extract; this is
// the destructor body).
// Shutdown order matters:
//  1. join/delete the training thread so no model is mid-train,
//  2. set NOT_TRAINED, which makes DetectionThread's
//     while (GetTrainingState() == TRAINED) loop terminate,
//  3. join/delete the detection thread, then release the stored data.
88{
89 if (m_trainingThread != 0)
90 {
91 m_trainingThread->join();
92 delete m_trainingThread;
93 m_trainingThread = 0;
94 }
95
// Flipping the state here is what unblocks the detection thread's loop so
// the join below can return.
96 SetTrainingState(NOT_TRAINED);
97
98 if (m_detectionThread != 0)
99 {
100 m_detectionThread->join();
101 delete m_detectionThread;
102 m_detectionThread = 0;
103 }
104
105 m_samples.clear();
106 m_categories.clear();
107 m_dataHistory.clear();
108}
109
110
111/********************* ACCESSOR *************************************/
// GetTrainingState (signature line missing from this extract): thread-safe
// read of the training state, shared between the main, training and
// detection threads via m_trainingCriticalSection.
113{
114 boost::mutex::scoped_lock lock(m_trainingCriticalSection);
115 return m_trainingState;
116}
117
118void NeuralNetwork::SetTrainingState(NeuralNetwork::TrainingState state)
119{
120 boost::mutex::scoped_lock lock(m_trainingCriticalSection);
121 m_trainingState = state;
122}
123
124int NeuralNetwork::GetCategoryPos(std::string name)
125{
126 int iCategory = -1;
127 for (unsigned int i = 0; i < m_categories.size(); i++)
128 {
129 if (m_categories[i].compare(name) == 0)
130 iCategory = i;
131 }
132 return iCategory;
133}
134
135std::string NeuralNetwork::GetCategory(unsigned int pos)
136{
137 if(pos < m_categories.size())
138 return m_categories[pos];
139 else
140 {
141 return std::string("none");
142 }
143}
144
145std::vector<std::string> NeuralNetwork::GetCategories()
146{
147 std::vector<std::string> names;
148 for(unsigned int i = 0; i < m_categories.size(); i++)
149 {
150 names.push_back(m_categories[i]);
151 }
152
153 return names;
154}
155
156/********************* TRAINING INTERFACE *************************************/
157void NeuralNetwork::AddTrainingData(std::vector<cv::Point3d> input, std::string label)
158{
159 if ((GetTrainingState() == TRAINING) || ((int)input.size() < m_featurePerSample) || ((m_lastInputDate != 0) && ((cv::getTickCount() - m_lastInputDate) < 30)))
160 return;
161
162 // reset data
163 /*if (GetTrainingState() == TRAINED)
164 {
165 SetTrainingState(NOT_TRAINED);
166 m_categories.clear();
167 m_samples.clear();
168 m_dataHistory.clear();
169 m_lastDetected.clear();
170 }*/
171
172 // get the category pos
173 int cat = GetCategoryPos(label);
174 if (cat == -1)
175 {
176 // add the new category
177 m_categories.push_back(label);
178 cat = m_categories.size() -1;
179 }
180
181 // for each sample
182 for (unsigned int i = 0; i < input.size(); i = i + m_featurePerSample)
183 {
184 if ((i + m_featurePerSample) > (int)input.size())
185 continue;
186
187 // create the sample vector with m_featurePerSample size
188 std::vector<MlFeature> sample;
189 for (int k = 0; k < (int)m_featurePerSample; k++)
190 {
191 sample.push_back(MlFeature(input[i + k], cv::getTickCount()));
192 }
193
194 // look for the correct category
195 SamplesMap::iterator catIt = m_samples.find(cat);
196 if (catIt == m_samples.end())
197 m_samples[cat].push_back(sample);
198 else
199 catIt->second.push_back(sample);
200 }
201
202 m_lastInputDate = cv::getTickCount();
203}
204
// Normalises raw samples into fixed-length "anchor point" paths, one path
// per feature.
//
// Input layout:  ldata[sample][feature]   (samples in capture order)
// Output layout: result[feature][sample]  (one resampled path per feature)
//
// Stages:
//  1. transpose from samples<features> to features<samples>, padding short
//     samples with default MlFeature() entries;
//  2. resample each path to m_nbAnchorPoints + 1 points: upsample by
//     inserting segment midpoints, downsample by merging the two closest
//     points into their median;
//  3. ML_POSE mode returns the positional paths; otherwise (motion mode)
//     each path is converted to per-step angles in degrees.
205std::vector<std::vector<MlFeature>> NeuralNetwork::ComputeData(std::vector<std::vector<MlFeature>> ldata)
206{
207 std::vector<std::vector<MlFeature>> sortedData;
208 std::vector<std::vector<MlFeature>> outData;
209 sortedData.resize(m_featurePerSample);
210
211 //debug
// NOTE(review): lsize is only kept for debugging and is otherwise unused.
212 unsigned int lsize = ldata.size();
213
214 // sort data from vector samples<features> to vector features<samples>
215 for(unsigned int i = 0; i < ldata.size(); i++)
216 {
// NOTE(review): this local is never used; the transpose writes directly
// into sortedData below.
217 std::vector<MlFeature> samples;
218
219 for (unsigned int k = 0; k < m_featurePerSample; k++)
220 {
221 // look if we have enough features in the sample
222 if(k < ldata[i].size())
223 sortedData[k].push_back(ldata[i][k]);
224 else
225 {
// Pad missing features with a default (zeroed) feature.
226 sortedData[k].push_back(MlFeature());
227 }
228 }
229 }
230
231 // add middle points if there is not enough data
232 for(unsigned int k = 0; k < sortedData.size(); k++)
233 {
234 if ((int)sortedData[k].size() < (m_nbAnchorPoints + 1))
235 {
236 // add a point to interlpolate
// Interpolation needs at least two points to form a segment.
237 if (sortedData[k].size() < 2)
238 sortedData[k].insert(sortedData[k].begin(), MlFeature());
239
240 //add interpolated points
// Each pass roughly doubles the point count by inserting the midpoint of
// every segment; the inner j++ after the insert skips over the point that
// was just added so segments are not re-split in the same pass.
241 while ((int)sortedData[k].size() <= (m_nbAnchorPoints + 1))
242 {
243 for (int j = 0; j < ((int)sortedData[k].size() - 1); j++)
244 {
245 MlFeature p1 = sortedData[k][j];
246 MlFeature p2 = sortedData[k][j + 1];
247
// Midpoint = p1 + (p2 - p1) / 2, computed component-wise.
248 MlFeature pn = (p2 - p1);
249 pn.x /= 2.0f;
250 pn.y /= 2.0f;
251 pn.z /= 2.0f;
252 pn.t /= 2;
253 j++;
254 sortedData[k].insert(sortedData[k].begin() + j, (p1 + pn));
255 }
256
257 //add null point
258 //sortedData[k].push_back(MlFeature());
259 }
260 }
261
262 // smooth path
263 // finds smallest interval and replaces two points on median point
// Downsampling: repeatedly find the pair of consecutive interior points
// with the smallest 3D distance, replace the second with their median and
// erase the first. The first and last points are never candidates, so the
// path endpoints are preserved.
264 while ((int)sortedData[k].size() > (m_nbAnchorPoints + 1))
265 {
266 double d;
267 double d_min = std::numeric_limits<double>::max();
268
269 std::vector<MlFeature>::iterator p_min = sortedData[k].begin();
270 ++p_min;
271
272 std::vector<MlFeature>::iterator p = p_min;
273 std::vector<MlFeature>::iterator i = p_min;
274 ++i;
275
276 std::vector<MlFeature>::iterator last = sortedData[k].end();
277 --last;
278
279 for (; i != last; ++i)
280 {
281 d = sqrt(pow((*p).x - (*i).x, 2) + pow((*p).y - (*i).y, 2) + pow((*p).z - (*i).z, 2));
282 if (d < d_min)
283 {
284 d_min = d;
285 p_min = p;
286 }
287 p = i;
288 }
289
// p/i now bracket the closest pair: p = first point, i = second.
290 p = p_min;
291 i = ++p_min;
292
293 MlFeature pt;
294 pt.x = ((*p).x + (*i).x) / 2;
295 pt.y = ((*p).y + (*i).y) / 2;
296 pt.z = ((*p).z + (*i).z) / 2;
297 pt.t = ((*p).t + (*i).t) / 2;
298
299 *i = pt; // changes coord of a base point
300 sortedData[k].erase(p); // erases an odd point
301 }
302 }
303
304 if (m_mode == ML_POSE)
305 {
// Pose mode: absolute positions are the classifier input.
306 return sortedData;
307 }
308 else
309 {
310 // change data to angles
// Motion mode: each consecutive point pair becomes one output feature
// holding the XY-plane angle (x), the YZ-plane angle (y) and the time
// delta, all derived from the step between the two points.
311 for(unsigned int k = 0; k < sortedData.size(); k++)
312 {
313 std::vector<MlFeature>::iterator i = sortedData[k].begin();
314 std::vector<MlFeature>::iterator p = i++;
315 std::vector<MlFeature> ndata;
316
317 for (; i != sortedData[k].end(); ++i)
318 {
319 MlFeature pt2 = (*i);
320 MlFeature pt1 = (*p);
321 MlFeature np;
322 np.t = pt2.t - pt1.t;
323
324 //np.x = pt2.x - pt1.x;
325 //np.y = pt2.y - pt1.y;
326 //np.z = pt2.z - pt1.z;
327 //np.x *= np.t;
328 //np.y *= np.t;
329 //np.z *= np.t;
330
331 float xangles = 0.0f;
332 float yangles = 0.0f;
333 float zangles = 0.0f;
334
// Angles are normalised into [0, 360) degrees.
335 xangles = (float) (atan2((pt2.y - pt1.y), (pt2.x - pt1.x)) * 180.0 / (double)SCOL_PI);
336 if (xangles < 0)
337 xangles = 360.f + xangles;
338
339 yangles = (float) (atan2((pt2.y - pt1.y), (pt2.z - pt1.z)) * 180.0 / (double)SCOL_PI);
340 if (yangles < 0)
341 yangles = 360.f + yangles;
342
343 /*zangles = (float) (atan2((pt2.z - pt1.z), (pt2.x - pt1.x)) * 180.0 / (double)SCOL_PI);
344 if (zangles < 0)
345 zangles = 360.f + zangles;*/
346
347 np.x = xangles;
348 np.y = yangles;
// NOTE(review): np.z carries the time delta, not a z angle — the z-angle
// code above is commented out. Presumably intentional; confirm before
// changing, since TrainingThread feeds np.z into the train matrix.
349 np.z = np.t;
350 ndata.push_back(np);
351
352 p = i;
353 }
354
355 outData.push_back(ndata);
356 }
357
358 // change data to relative
359 /*
360 for(unsigned int k = 0; k < sortedData.size(); k++)
361 {
362 std::vector<MlFeature>::iterator i = sortedData[k].begin();
363 std::vector<MlFeature>::iterator p = i++;
364 std::vector<MlFeature> ndata;
365
366 for (; i != sortedData[k].end(); ++i)
367 {
368 MlFeature pt2 = (*i);
369 MlFeature pt1 = (*p);
370
371 MlFeature np;
372 int tl = pt2.t - pt1.t;
373
374 // mutiply by time interval
375 np.x = (pt2.x - pt1.x);// * tl;
376 np.y = (pt2.y - pt1.y);// * tl;
377 np.z = (pt2.z - pt1.z);// * tl;
378 np.t = tl <= 0 ? 0 : tl;
379 ndata.push_back(np);
380
381 p = i;
382 }
383
384 outData.push_back(ndata);
385 }*/
386
387 return outData;
388 }
389}
390
// Train (the signature line is missing from this extract; this is the body
// of the public Train() entry point).
// Starts an asynchronous training pass: refuses to start while a pass is
// already running or when no category has been registered, otherwise joins
// any previous worker threads and spawns TrainingThread() on a fresh
// boost::thread.
392{
393 if (GetTrainingState() != TRAINING && (m_categories.size() > 0))
394 {
395 // prepare training
// Set TRAINING before joining the detection thread: the detection loop
// only runs while the state is TRAINED, so this guarantees it exits.
396 SetTrainingState(TRAINING);
397 if (m_trainingThread != 0)
398 {
399 m_trainingThread->join();
400 delete m_trainingThread;
401 m_trainingThread = 0;
402 }
403
404 if (m_detectionThread != 0)
405 {
406 m_detectionThread->join();
407 delete m_detectionThread;
408 m_detectionThread = 0;
409 }
410
411 // launch the training in another thread
412 m_trainingThread = new boost::thread(boost::bind(&NeuralNetwork::TrainingThread, this));
413 }
414}
415
416// Called by the trainingthread, never by the main thread!
417void NeuralNetwork::TrainingThread()
418{
419 m_nbMaxSamples = 0;
420 m_nbMinSamples = std::numeric_limits<int>::max();
421 m_nbSamples = 0;
422 m_averageTime = 0;
423
424 //calculate the max samples
425 for(SamplesMap::iterator i = m_samples.begin(); i != m_samples.end(); i++)
426 {
427 m_nbMaxSamples = std::max(m_nbMaxSamples, (int)i->second.size());
428 m_nbMinSamples = std::min(m_nbMinSamples, (int)i->second.size());
429 int lasttime = 0;
430 for (unsigned int j = 0; j < i->second.size(); j++)
431 {
432 if (lasttime != 0)
433 m_averageTime += i->second[j][0].t - lasttime;
434 lasttime = i->second[j][0].t;
435
436 m_nbSamples++;
437 }
438 }
439
440 // speed average
441 m_averageTime /= m_nbSamples;
442 // set a minimum
443 if (m_averageTime == 0)
444 m_averageTime = 30;
445
446 // nb samples average
447 m_nbAnchorPoints = m_nbSamples; //(int)((float)(m_nbSamples / m_samples.size())/* * ((float)m_averageTime / 100.0f)*/);
448
449 //compute SamplesMap to cv::Mat trainIn
450 cv::Mat trainIn(m_nbAnchorPoints * m_categories.size(), m_featurePerSample * 3, CV_32FC1);
451
452 cv::Mat trainOut(m_nbAnchorPoints * m_categories.size(), 1, CV_32FC1);
453 // init the categories to -1
454 trainOut.setTo(-1);
455
456 cv::Mat var_type(trainIn.cols + 1, 1, CV_8U);
457 var_type.setTo(cv::ml::VAR_NUMERICAL);
458 var_type.at<uchar>(trainIn.cols, 0) = cv::ml::VAR_CATEGORICAL;
459
460 int sampleIdx = 0;
461 try
462 {
463 for(SamplesMap::iterator i = m_samples.begin(); i != m_samples.end(); i++)
464 {
465 int featureIdx = 0;
466
467 //not enough data for motion
468 if ((i->second.size() < 2) && (m_mode == ML_MOTION))
469 continue;
470
471 std::vector<std::vector<MlFeature>> data = ComputeData(i->second);
472
473 //features
474 for (unsigned int j = 0; j < data.size(); j++)
475 {
476 //samples
477 for (unsigned int k = 0; (k < m_nbAnchorPoints) && (k < data[j].size()); k++)
478 {
479 // repeat the last data top complete the train mat
480 trainIn.at<float>(k + (i->first * m_nbAnchorPoints), featureIdx) = (float)data[j][k].x;
481 trainIn.at<float>(k + (i->first * m_nbAnchorPoints), featureIdx +1) = (float)data[j][k].y;
482 trainIn.at<float>(k + (i->first * m_nbAnchorPoints), featureIdx +2) = (float)data[j][k].z;
483
484 // increment sample number only on the first feature
485 if(j == 0)
486 {
487 trainOut.at<float>(sampleIdx) = data[j][k].t == 0 ? -1 : (float)i->first;
488 sampleIdx ++;
489 }
490 }
491
492 featureIdx = featureIdx + 3;
493 }
494 }
495
496 cv::Ptr<cv::ml::TrainData> traindata = cv::ml::TrainData::create(trainIn, cv::ml::ROW_SAMPLE, trainOut);
497 switch (m_type)
498 {
499 case KNN:
500 m_knn->clear();
501 m_knn->train(traindata);
502 break;
503
504 case Bayes:
505 m_bayes->clear();
506 m_bayes->train(traindata);
507 break;
508
509 case SVM:
510 m_svm->clear();
511 m_svm->train(traindata);
512 break;
513
514 case DT:
515 m_dt->clear();
516 m_dt->train(traindata);
517 break;
518
519 case Boost:
520 m_boost->clear();
521 m_boost->train(traindata);
522 break;
523
524 case RT:
525 m_rt->clear();
526 m_rt->train(traindata);
527 break;
528
529 default:
530 break;
531 }
532
533 trainIn.release();
534 trainOut.release();
535 SetTrainingState(TRAINED);
536
537 // launch the detection in another thread
538 m_detectionThread = new boost::thread(boost::bind(&NeuralNetwork::DetectionThread, this));
539
540 // post the scol event
541 OBJpostEvent(WM_ML_TRAINING_FINISHED, SCOL_PTR this, 0);
542 }
543 catch(cv::Exception &e)
544 {
545 MMechostr(MSKRUNTIME, "Machine learning - Trainning Error : %s", e.what());
546 trainIn.release();
547 trainOut.release();
548 SetTrainingState(NOT_TRAINED);
549 }
550}
551
// Decides whether the accumulated detections should be published.
// When the history is full, or no input arrived for three average sample
// intervals, the most recent detected category is posted to Scol as a
// WM_ML_DETECTION event and the history is reset; with no detection and a
// stale input, only the history is reset. Always sleeps one average sample
// interval afterwards, which paces the DetectionThread loop.
552void NeuralNetwork::ValidateDetectedData()
553{
// Inner scope bounds the lock so the sleep below runs unlocked.
554 {
555 boost::mutex::scoped_lock lock(m_detectionCriticalSection);
556 bool tooLong = (m_lastInputDate != 0) && (((int)cv::getTickCount() - m_lastInputDate) > (m_averageTime * 3));
557 if (tooLong || ((int)m_dataHistory.size() >= m_nbMaxSamples))
558 {
559 //post the last category found
560 if(m_lastDetected.size() != 0)
561 {
// m_lastDetected rows are {category, accuracy}; take the newest category.
562 unsigned int catId = (unsigned int)m_lastDetected[m_lastDetected.size() - 1][0];
// NOTE(review): ownership of this heap string transfers to the Scol event
// consumer via OBJpostEvent — presumably freed on the receiving side;
// confirm, otherwise this leaks once per detection.
563 std::string* catname = new std::string(GetCategory(catId));
564 m_lastDetected.clear();
565 m_dataHistory.clear();
566 m_newHistoryData = false;
567 m_lastInputDate = 0;
568 OBJpostEvent(WM_ML_DETECTION, SCOL_PTR this, SCOL_PTR catname);
569 }
570 else
571 {
572 // if the last input is too old we reset the history
573 if(tooLong)
574 {
575 m_dataHistory.clear();
576 m_newHistoryData = false;
577 m_lastInputDate = 0;
578 }
579 }
580 }
581 }
582 boost::this_thread::sleep_for(boost::chrono::milliseconds(m_averageTime));
583}
584
// Detection worker loop, started by TrainingThread() after a successful
// train. Runs while the state stays TRAINED: snapshots the input history
// under the lock, resamples it through ComputeData(), classifies it with
// Reconize()/Evaluate(), records the result and lets
// ValidateDetectedData() decide when to publish and how long to sleep.
585void NeuralNetwork::DetectionThread()
586{
587 while (GetTrainingState() == TRAINED)
588 {
589 std::vector<std::vector<MlFeature>> data;
590 {
591 boost::mutex::scoped_lock lock(m_detectionCriticalSection);
592
593 // need a minimum of data
// NOTE(review): this continue has no sleep on its path, so the loop
// busy-spins while the history is short — consider a small sleep here.
594 if (m_dataHistory.empty() || ((int)m_dataHistory.size() < m_nbMinSamples))
595 continue;
596
597 // this revert the vector to features<samples>
598 data = ComputeData(m_dataHistory);
599 }
600
// Without fresh input there is nothing new to classify; just give
// ValidateDetectedData() a chance to flush/expire the history.
601 if (!m_newHistoryData)
602 {
603 ValidateDetectedData();
604 continue;
605 }
606 else
607 {
608 m_newHistoryData = false;
609 }
610
// One row per anchor point, 3 columns (x,y,z) per feature — the same
// layout TrainingThread used for the train matrix.
611 cv::Mat recognizeIn(m_nbAnchorPoints, m_featurePerSample * 3, CV_32FC1);
612 cv::Mat recognizeOut(m_nbAnchorPoints, 1, CV_32FC1);
613 recognizeOut.setTo(-1);
614
615 //features
616 int featureIdx = 0;
617 for (unsigned int i = 0; i < data.size(); i++)
618 {
619 //samples
620 for (unsigned int k = 0; (k < m_nbAnchorPoints) && (k < data[i].size()); k++)
621 {
622 recognizeIn.at<float>(k, featureIdx) = (float)data[i][k].x;
623 recognizeIn.at<float>(k, featureIdx+1) = (float)data[i][k].y;
624 recognizeIn.at<float>(k, featureIdx+2) = (float)data[i][k].z;
625 }
626 featureIdx = featureIdx + 3;
627 }
628
629 int cat = -1;
630 float accuracy = 0.0f;
631 try
632 {
633 Reconize(recognizeIn, recognizeOut);
634 cat = Evaluate(recognizeOut, accuracy);
635 }
636 catch(cv::Exception &e)
637 {
638 MMechostr(MSKRUNTIME, "Machine learning - Recognized Error : %s", e.what());
639 ValidateDetectedData();
640 continue;
641 }
642
// -1 means the best vote stayed below m_sensibility: nothing to record.
643 if (cat == -1)
644 {
645 ValidateDetectedData();
646 continue;
647 }
648
649 std::vector<float> result;
650 result.push_back((float)cat);
651 result.push_back(accuracy);
652
653 //keep the detected categories
654 m_lastDetected.push_back(result);
655
656 ValidateDetectedData();
657 }
658}
659
// Serialises the raw training samples (not the trained model) to an XML
// file shaped as MLDATA > CATEGORY(name) > SAMPLE > FEATURE(x,y,z,time),
// the format read back by Load(). Does nothing when no samples exist.
660void NeuralNetwork::Save(std::string saveFile)
661{
662 if(m_samples.empty())
663 return;
664
// NOTE(review): the declaration of xmlDoc (original line 665, presumably
// "tinyxml2::XMLDocument xmlDoc;") is missing from this extract.
666 tinyxml2::XMLNode* rootnode = xmlDoc.NewElement("MLDATA");
667 xmlDoc.InsertEndChild(rootnode);
668
669 for(SamplesMap::iterator i = m_samples.begin(); i != m_samples.end(); i++)
670 {
671 std::string catname = GetCategory((unsigned int)i->first);
672 std::vector<std::vector<MlFeature>> data = i->second;
673
674 tinyxml2::XMLElement* catnode = xmlDoc.NewElement("CATEGORY");
675 catnode->SetAttribute("name", catname.c_str());
676
677 //samples
678 for (unsigned int j = 0; j < data.size(); j++)
679 {
680 tinyxml2::XMLElement* samplenode = xmlDoc.NewElement("SAMPLE");
681
682 //features
683 for (unsigned int k = 0; k < data[j].size(); k++)
684 {
685 tinyxml2::XMLElement* featurenode = xmlDoc.NewElement("FEATURE");
686 featurenode->SetAttribute("x", data[j][k].x);
687 featurenode->SetAttribute("y", data[j][k].y);
688 featurenode->SetAttribute("z", data[j][k].z);
689 featurenode->SetAttribute("time", data[j][k].t);
690 samplenode->InsertEndChild(featurenode);
691 }
692
693 catnode->InsertEndChild(samplenode);
694 }
695
696 rootnode->InsertEndChild(catnode);
697 }
698
699 xmlDoc.SaveFile(saveFile.c_str());
700}
701
// Loads training samples from an XML file written by Save(): clears the
// current categories/samples, rebuilds them from the document (consuming
// nodes with DeleteChild as it goes), grows m_featurePerSample to the
// widest sample found, then retrains via Train().
702void NeuralNetwork::Load(std::string filename)
703{
// NOTE(review): the declaration of xmlDoc (original line 704, presumably
// "tinyxml2::XMLDocument xmlDoc;") is missing from this extract.
705 if (xmlDoc.LoadFile(filename.c_str()) != tinyxml2::XML_NO_ERROR)
706 return;
707
708 // reset training samples
709 m_samples.clear();
710 m_categories.clear();
711
712 SetTrainingState(NOT_TRAINED);
713
714 tinyxml2::XMLElement* rootnode = xmlDoc.FirstChildElement("MLDATA");
715 if (rootnode)
716 {
717 //get name + id
718 tinyxml2::XMLElement* catnode = 0;
// Each iteration consumes (deletes) the first CATEGORY child, so
// FirstChildElement eventually returns null and the loop terminates.
719 while ((catnode = rootnode->FirstChildElement("CATEGORY")) != 0)
720 {
// NOTE(review): Attribute("name") returns null when the attribute is
// absent, which would crash the std::string constructor — the format is
// assumed well-formed here; confirm inputs are always Save() output.
721 std::string catname = catnode->Attribute("name");
722 int catid = GetCategoryPos(catname);
723 if (catid == -1)
724 {
725 // add the new category
726 m_categories.push_back(catname);
727 catid = m_categories.size() -1;
728 }
729
730 tinyxml2::XMLElement* samplenode = 0;;
731 while ((samplenode = catnode->FirstChildElement("SAMPLE")) != 0)
732 {
733 std::vector<MlFeature> sample;
734 tinyxml2::XMLElement* featurenode = 0;
735 int numFeatures = 0;
736
737 while ((featurenode = samplenode->FirstChildElement("FEATURE")) != 0)
738 {
739 MlFeature feature;
740 feature.x = featurenode->FloatAttribute("x");
741 feature.y = featurenode->FloatAttribute("y");
742 feature.z = featurenode->FloatAttribute("z");
743 feature.t = featurenode->IntAttribute("time");
744
745 sample.push_back(feature);
746 samplenode->DeleteChild(featurenode);
747 numFeatures++;
748 }
749
// Keep the widest sample width seen across the whole file.
750 m_featurePerSample = std::max((int)m_featurePerSample, numFeatures);
751
752 SamplesMap::iterator catIt = m_samples.find(catid);
753 if (catIt == m_samples.end())
754 m_samples[catid].push_back(sample);
755 else
756 catIt->second.push_back(sample);
757
758 catnode->DeleteChild(samplenode);
759 }
760
761 rootnode->DeleteChild(catnode);
762 }
763
// Retrain immediately so the loaded data is usable for detection.
764 Train();
765 }
766}
767
768void NeuralNetwork::Reconize(cv::Mat reconizeIn, cv::Mat reconizeOut)
769{
770 if (GetTrainingState() != TRAINED)
771 return;
772
773 cv::Mat sample;
774 cv::Mat out;
775 cv::ml::DTrees::Node* n = 0;
776 for (int r = 0; r < reconizeIn.rows; r++)
777 {
778 sample = reconizeIn.row(r);
779 float v = 0.0f;
780 switch (m_type)
781 {
782 case KNN:
783 v = m_knn->findNearest(sample, m_knn_p, out);
784 break;
785
786 case Bayes:
787 v = m_bayes->predict(sample);
788 break;
789
790 case SVM:
791 v = m_svm->predict(sample);
792 break;
793
794 case DT:
795 v = m_dt->predict(sample);
796 //n = m_dt->predict(sample);
797 //if(n)
798 //v = (float)n->value;
799 break;
800
801 case Boost:
802 v = m_boost->predict(sample);
803 break;
804
805 case RT:
806 v = m_rt->predict(sample);
807 break;
808
809 default:
810 break;
811 }
812
813 reconizeOut.at<float>(r, 0) = v;
814 }
815}
816
817int NeuralNetwork::Evaluate(cv::Mat& predicted, float &accuracy)
818{
819 std::vector<int> countCategories;
820 countCategories.resize(m_categories.size());
821 for (unsigned int i = 0; i < countCategories.size(); i++)
822 countCategories[i] = 0;
823
824 // look for category index found
825 for (int i = 0; i < predicted.rows; i++)
826 {
827 float p = predicted.at<float>(i, 0);
828 if (p >= 0.0)
829 {
830 // increment the category count
831 countCategories[(int)p]++;
832 }
833 }
834
835 int maxCategory = -1;
836 int nbCorrelFound = 0;
837 accuracy = 0.0f;
838
839 for (unsigned int i = 0; i < countCategories.size(); i++)
840 {
841 /*float catWeight = 1.0f;
842 float catAccuracy = 1.0f;
843 SamplesMap::iterator catIt = m_samples.find(i);
844 if(catIt != m_samples.end())
845 {
846 //catAccuracy = (float)m_dataHistory.size() / (float)catIt->second.size();
847 catAccuracy = (float)catIt->second.size() / (float)m_nbAnchorPoints;
848 catWeight = ((float)countCategories[i] / (float)m_nbAnchorPoints) / catAccuracy;
849 }
850
851 if (catWeight > accuracy)
852 {
853 accuracy = catWeight;
854 maxCategory = i;
855 }*/
856
857 if (countCategories[i] > nbCorrelFound)
858 {
859 nbCorrelFound = countCategories[i];
860 maxCategory = i;
861 }
862 }
863
864 accuracy = (float)nbCorrelFound / (float)m_nbAnchorPoints;
865 if (accuracy < m_sensibility)
866 return -1;
867
868 return maxCategory;
869}
870
871void NeuralNetwork::AddDetectionData(std::vector<cv::Point3d> input)
872{
873 if ((GetTrainingState() != TRAINED) || (input.size() == 0) || ((m_lastInputDate != 0) && ((int)(cv::getTickCount() - m_lastInputDate) < m_averageTime)))
874 return;
875
876 boost::mutex::scoped_lock lock(m_detectionCriticalSection);
877
878 // decompose input to several samples and add to the history
879 for (unsigned int i = 0; i < input.size(); i += m_featurePerSample)
880 {
881 if ((i + m_featurePerSample) > (int)input.size())
882 continue;
883
884 std::vector<MlFeature> sample;
885 for (unsigned int k = 0; k < m_featurePerSample; k++)
886 {
887 sample.push_back(MlFeature(input[i+k], cv::getTickCount()));
888 }
889
890 m_dataHistory.push_back(sample);
891
892 m_newHistoryData = true;
893 }
894
895 //remove old data
896 while ((int)m_dataHistory.size() > m_nbMaxSamples)
897 {
898 m_dataHistory.erase(m_dataHistory.begin());
899 }
900 m_lastInputDate = cv::getTickCount();
901}
#define SCOL_PI
int WM_ML_TRAINING_FINISHED
Definition MlToolkit.cpp:34
int WM_ML_DETECTION
Definition MlToolkit.cpp:35
void Save(std::string saveFile)
NeuralNetwork(int featurePerSample, float sensibility, MlMode mode=ML_POSE, MlType type=KNN)
std::string GetCategory(unsigned int pos)
std::vector< std::string > GetCategories()
void Reconize(cv::Mat reconizeIn, cv::Mat reconizeOut)
int GetCategoryPos(std::string name)
TrainingState GetTrainingState()
void Load(std::string filename)
void AddTrainingData(std::vector< cv::Point3d > input, std::string label)
void AddDetectionData(std::vector< cv::Point3d >)
XMLError LoadFile(const char *filename)
XMLElement * NewElement(const char *name)
XMLError SaveFile(const char *filename, bool compact=false)
void SetAttribute(const char *name, const char *value)
Sets the named attribute to value.
Definition tinyxml2.h:1194
const char * Attribute(const char *name, const char *value=0) const
float FloatAttribute(const char *name) const
See IntAttribute()
Definition tinyxml2.h:1134
int IntAttribute(const char *name) const
Definition tinyxml2.h:1110
void DeleteChild(XMLNode *node)
Definition tinyxml2.cpp:646
const XMLElement * FirstChildElement(const char *value=0) const
Definition tinyxml2.cpp:722
XMLNode * InsertEndChild(XMLNode *addThis)
Definition tinyxml2.cpp:653
@ XML_NO_ERROR
Definition tinyxml2.h:924