RM: added into dispatcher structure
This commit is contained in:
parent ddd5d80750
commit d91dded858
@@ -9,10 +9,11 @@
 //sets relations of speed for the different layers
 map<int,float> DestructionPower_Properties::SpeedTable =
 {
-{0,0.99}
+{0,0.99},
+{1,0.7},
+{2,0.7}
 };


 bool DestructionPower::PreProcessing(coor mySize,const vector<Part*>* partArray)
 {
 cout << "DestructionPower Preprocessing... ";
@@ -55,7 +56,7 @@ void DestructionPower::DestructionOfSurrounding(const coor constraintCoordinate)
 m_constraintMatrix[constraintCoordinate.col][constraintCoordinate.row].DestructionArray[i] /=divisor;
 else
 //create default destructionPower //TODO find some better solution for default
-m_constraintMatrix[constraintCoordinate.col][constraintCoordinate.row].DestructionArray[i] =1-m_constraintMatrix[constraintCoordinate.col][constraintCoordinate.row].SpeedTable[i];
+m_constraintMatrix[constraintCoordinate.col][constraintCoordinate.row].DestructionArray[i] = m_constraintMatrix[constraintCoordinate.col][constraintCoordinate.row].SpeedTable[i];
 }
 }
@@ -102,6 +103,6 @@ DestructionPower_Properties::DestructionPower_Properties() {

 {
 DestructionArray.emplace_back((DestructionPower_Properties::SpeedTable[i]*DESTRUCTION_INIT));
-DestructionArray.back()<0.99 ? DestructionArray.back()*=aging:DestructionArray.back();
+DestructionArray.back()<0.8 ? DestructionArray.back()=aging*DestructionArray.back()+(float)0.01:DestructionArray.back();
 }
 }
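Aside: the changed line above relies on a conditional expression evaluated only for its side effect. A minimal standalone sketch of the same aging rule written with a plain if statement; the SpeedTable values come from the first hunk, while DESTRUCTION_INIT and aging are assumed placeholder constants, not the project's actual values.

    #include <iostream>
    #include <map>
    #include <vector>

    int main()
    {
        std::map<int, float> SpeedTable = {{0, 0.99f}, {1, 0.7f}, {2, 0.7f}};
        const float DESTRUCTION_INIT = 1.0f; // assumed placeholder
        const float aging = 0.9f;            // assumed placeholder

        std::vector<float> DestructionArray;
        for (int i = 0; i < (int)SpeedTable.size(); ++i)
        {
            DestructionArray.emplace_back(SpeedTable[i] * DESTRUCTION_INIT);
            float& d = DestructionArray.back();
            if (d < 0.8f)              // same condition as the new line
                d = aging * d + 0.01f; // same update as the new line
        }

        for (float d : DestructionArray)
            std::cout << d << '\n';
    }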
@@ -3,8 +3,8 @@
 //

 #pragma once

-#define DESTRUCTION_COUNT 1
+//TODO!! increase Destructioncount
+#define DESTRUCTION_COUNT 2

 #include "DestructionPower_Properties.h"
 #include "../AbstraktionLayer_Base.h"
@@ -71,7 +71,7 @@ HistogramComparer localImage;
 calcHist(&hsv_img1, 1, channels, Mat(), hist_img1, 2, histSize, ranges, true, false);
 // normalize(hist_img1, hist_img1, 0, 1, NORM_MINMAX, -1, Mat());

-ref_partArray[iterator]->m_aHistogram.image=hsv_img1;
+ref_partArray[iterator]->m_Histogram.image=hsv_img1;
 iterator++;

 }
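Aside: the calcHist call above builds a 2D hue/saturation histogram per part. A minimal sketch of that pattern using the standard OpenCV API (cv::calcHist, cv::normalize, cv::compareHist); the bin counts and ranges here are illustrative, not necessarily the project's settings.

    #include <opencv2/opencv.hpp>

    // build a 2D hue/saturation histogram of a BGR image
    cv::Mat hsHistogram(const cv::Mat& bgr)
    {
        cv::Mat hsv;
        cv::cvtColor(bgr, hsv, cv::COLOR_BGR2HSV);

        int channels[] = {0, 1};          // hue and saturation channels
        int histSize[] = {50, 60};        // bins per channel (illustrative)
        float hRange[] = {0, 180}, sRange[] = {0, 256};
        const float* ranges[] = {hRange, sRange};

        cv::Mat hist;
        cv::calcHist(&hsv, 1, channels, cv::Mat(), hist, 2, histSize, ranges, true, false);
        cv::normalize(hist, hist, 0, 1, cv::NORM_MINMAX, -1, cv::Mat());
        return hist;
    }

    // correlation in [-1,1]; values near 1 mean the two images look alike
    double histogramSimilarity(const cv::Mat& a, const cv::Mat& b)
    {
        return cv::compareHist(hsHistogram(a), hsHistogram(b), cv::HISTCMP_CORREL);
    }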
@@ -89,7 +89,7 @@ bool AbstractionLayer_Histogram::EvaluateQuality (const coor constraintCoordinat
 //evaluateQuality = evaluateProbabilaty
 for(int i = 0;i < qVector.size();i++)
 {
-if(PlaceOfPartGood(constraintCoordinate, qVector[i].second->m_aHistogram.image))
+if(PlaceOfPartGood(constraintCoordinate, qVector[i].second->m_Histogram.image))
 {
 qVector[i].first=1;

@@ -147,7 +147,7 @@ bool HistogramComparer::CompareHistogram(Mat hist_img1,Mat hist_img2)

 bool AbstractionLayer_Histogram::SetConstraintOnPosition(const coor constraintCoordinate, const AbstractionLayer_Histogram_Properties constraint)
 {
-m_constraintMatrix[constraintCoordinate.col+1][constraintCoordinate.row+1].image=constraint.image;
+m_constraintMatrix[constraintCoordinate.col][constraintCoordinate.row].image=constraint.image;
 //m_constraintMatrix[constraintCoordinate.col+1][constraintCoordinate.row+1].m_connections=constraint.m_connections;
 }

@@ -156,5 +156,5 @@ bool AbstractionLayer_Histogram::SetConstraintOnPosition(const coor constraintCo
 bool AbstractionLayer_Histogram::RemoveConstraintOnPosition(const coor constraintCoordinate)
 {
 Mat dummy(1,1,0);
-m_constraintMatrix[constraintCoordinate.col+1][constraintCoordinate.row+1].image = dummy;
+m_constraintMatrix[constraintCoordinate.col][constraintCoordinate.row].image = dummy;
 }
@@ -46,7 +46,8 @@ void Puzzle::putIntoBox()
 for(int rotations=0;rotations<4;rotations++)
 {
 tmpPart.m_a1.shift(1);
-//TODO! add all other layers with their rotaionvariance here
+//TODO! add all other layer with their rotaionvariance into "tmpPart"
+//if it piece is roation invariant no need to do anything
 myBox.emplace_back(tmpPart);

 }
@@ -65,6 +66,8 @@ void Puzzle::shuffle()
 void Puzzle::removeConstrains(coor removeCoordinates)
 {
 this->a1.RemoveConstraintOnPosition(removeCoordinates);
+this->a3.RemoveConstraintOnPosition(removeCoordinates);
+//TODO!! Add other layer remove here
 }
 void Puzzle::setConstraints(coor setConstraints, Part* constraintPiece)
 {
@@ -75,6 +78,10 @@ void Puzzle::setConstraints(coor setConstraints, Part* constraintPiece)

+//a1
 this->a1.SetConstraintOnPosition(setConstraints,constraintPiece->m_a1);
+//a3
+this->a3.SetConstraintOnPosition(setConstraints,constraintPiece->m_Histogram);

+//TODO!! Add other layer remove here
 }

 void Puzzle::createRandomPuzzle()
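Aside: setConstraints and removeConstrains now fan the same coordinate out to every abstraction layer the puzzle owns. A minimal standalone sketch of that fan-out pattern with stand-in types; the class and member names are illustrative, not the project's.

    #include <iostream>

    struct Coor { int col, row; };

    struct DummyLayer
    {
        const char* name;
        void SetConstraintOnPosition(Coor c)    { std::cout << name << " set "    << c.col << "," << c.row << '\n'; }
        void RemoveConstraintOnPosition(Coor c) { std::cout << name << " remove " << c.col << "," << c.row << '\n'; }
    };

    struct DemoPuzzle
    {
        DummyLayer a1{"a1"}, a3{"a3 (histogram)"};

        void setConstraints(Coor c)
        {
            a1.SetConstraintOnPosition(c);
            a3.SetConstraintOnPosition(c);   // every new layer adds one more call here
        }
        void removeConstrains(Coor c)
        {
            a1.RemoveConstraintOnPosition(c);
            a3.RemoveConstraintOnPosition(c);
        }
    };

    int main()
    {
        DemoPuzzle p;
        p.setConstraints({2, 3});
        p.removeConstrains({2, 3});
    }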
@@ -35,7 +35,10 @@ bool next(vector<LogEntry>& log,Puzzle& puzzleMat)
 setsolution(log,puzzleMat);
 else
 setsolution(log,puzzleMat);
+if(log.back().myCoor.row==26)
+cout << log.back().myCoor.row << ", " << log.back().myCoor.col << endl;
 return true;

 }

 void createNextLogElement(vector<LogEntry>& log, Puzzle& puzzleMat)
@@ -55,7 +58,6 @@ coor calculateNextCoor(vector<LogEntry>& log, Puzzle& puzzleMat)
 {
 //level 1:
 //go left to right, then increase current row

 if (log.size() == 1)
 return {0,0};

@@ -73,11 +75,15 @@ void solve(vector<LogEntry>& log,Puzzle& puzzleMat)
 {
 log.back().abstractionLevel = puzzleMat.dp.getNextAbstractionLayer(log.back().myCoor,log.back().abstractionLevel); //sets in abstractionLevel
 //status(log,p_Box,puzzleMat);
+//TODO!! Add more layers here
 switch(log.back().abstractionLevel)
 {
 case 0://pömpel
 puzzleMat.a1.EvaluateQuality(log.back().myCoor, log.back().PieceCollector);
 break;
+case 1://
+puzzleMat.a3.EvaluateQuality(log.back().myCoor,log.back().PieceCollector);
+break;
 case -1://random
 setsolution(log,puzzleMat);
 return;
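Aside: this switch is the dispatcher structure the commit title refers to; getNextAbstractionLayer picks a layer index and each case forwards EvaluateQuality to that layer. A minimal standalone sketch of the pattern with stand-in layer types; only the case numbers and the -1 fallback mirror the hunk, everything else is illustrative.

    #include <iostream>

    // stand-ins for the project's abstraction layers
    struct Layer1 { void EvaluateQuality() { std::cout << "a1 evaluates\n"; } };
    struct Layer3 { void EvaluateQuality() { std::cout << "a3 (histogram) evaluates\n"; } };

    int main()
    {
        Layer1 a1;
        Layer3 a3;

        for (int abstractionLevel : {0, 1, -1}) // as if returned by getNextAbstractionLayer
        {
            switch (abstractionLevel)
            {
            case 0:  a1.EvaluateQuality(); break;  // existing layer
            case 1:  a3.EvaluateQuality(); break;  // layer added by this commit
            case -1: std::cout << "fall back to a random choice\n"; break;
            // each further layer gets one more case here
            }
        }
    }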
@@ -105,6 +111,8 @@ void setsolution(vector<LogEntry>& log, Puzzle& puzzleMat)
 puzzleMat.combinedQualityVector.clear(); //clear data from temp variable
 //tell log entry that it is set
 log.back().Set();
+if(log.back().myCoor.row==27)
+cout << "hello" << endl;
 puzzleMat.setConstraints(log.back().myCoor,log.back().PieceCollector.begin()->second);
 //cout << "set:" << log.back().myCoor.col << "," << log.back().myCoor.row << endl;
@@ -187,6 +195,9 @@ float capLogElements(vector<LogEntry>& log)
 break;
 }
 int newid=0;
+//check if all over
+if(id==log.back().PieceCollector.size())
+return 0;
 if(id>0)
 newid = --id; //set to the one just over limit

@@ -241,6 +252,7 @@ bool SetBestOrMoreLayersArithmetical(vector<LogEntry>& log, qualityVector& cqVec
 case 4: threshold = 0.60; break;
 default: threshold = 0.5; break;
 }
+//TODO!! add more layers here!

 // check Quality of current Puzzle Piece in combinedQualityVector with Threshold value
 for (qualityVector::iterator it = cqVector.begin(); it != cqVector.end(); it++)
@@ -269,6 +281,7 @@ void CalculateNewCombinedQuality(vector<LogEntry>& log, qualityVector& qVector,
 {
 bool summarizedVectors = false;
 int countSummarizedVectors = 0;
+bool removePart=true;

 // check if both qualityVectors are not empty
 if(qVector.empty())
@@ -287,19 +300,25 @@
 for (unsigned int i = 0; i < cqVector.size(); i++) {
 for (unsigned int j = 0; j < qVector.size(); j++) {
 // search same PuzzlePart of qualityVector and combinedQualityVector
-if (&cqVector.at(i).second == &qVector.at(j).second) {
+if (cqVector.at(i).second->GetPartID() == qVector.at(j).second->GetPartID() && cqVector.at(i).second->GetNumOfRotations() == qVector.at(j).second->GetNumOfRotations()) {
 // sum Quality of PieceCollector (qualityVector) to combinedQualityVector
 cqVector.at(j).first += qVector.at(i).first;
 countSummarizedVectors++;
+removePart=false;
 break; // skip remaining for loop => save time!
 }
 // remove element at poisition X in combinedQualityVector, because it was not summarized
 // inefficient way to delete element X
 //cqVector->erase(cqVector->begin()+i);
 // efficient way, but no sorted cqVector => wayne //echt? lol
-swap(cqVector.at(i), cqVector.back());
-cqVector.pop_back();

 }
+if(removePart)
+{
+swap(cqVector.at(i), cqVector.back());
+cqVector.pop_back();
+}

 }

 // cqVector should have the same size now as newest qVector
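Aside: the added if(removePart) block uses the swap-and-pop idiom the comments mention: constant-time removal from a vector at the cost of element order. A minimal standalone sketch, independent of the project's types.

    #include <algorithm>
    #include <iostream>
    #include <vector>

    int main()
    {
        std::vector<int> v = {10, 20, 30, 40};

        std::size_t i = 1;             // index of the element to drop
        std::swap(v.at(i), v.back());  // move the last element into slot i
        v.pop_back();                  // shrink by one; the tail is never shifted

        for (int x : v) std::cout << x << ' '; // prints: 10 40 30 (order changed)
        std::cout << '\n';
    }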
@@ -43,7 +43,7 @@ public:

 bool set;
 AbstractionLayer_1_Properties m_a1;
-AbstractionLayer_Histogram_Properties m_aHistogram;
+AbstractionLayer_Histogram_Properties m_Histogram;
 private:
 int32_t m_partID;
 uint8_t m_numOfRotations;