beforemerge

This commit is contained in:
Raphael Maenle 2017-12-22 22:55:55 +01:00
parent bfed4aba05
commit edd3c8620b
7 changed files with 195 additions and 22 deletions

View File

@ -26,7 +26,7 @@ public:
* @brief pure virtual method for the pre-processing of the layer
* @param [in] mySize - size of the puzzle, used to dimension the constraint matrix
* @param [in] partArray - references to all Parts into which the properties of the Layer will be written
*/
virtual void PreProcessing(const vector<Part*>* partArray) = 0;
virtual void PreProcessing(coor mySize, const vector<Part*>* partArray) = 0;
/**
* @brief pure virtual method for the quality evaluation of the layer
@ -58,7 +58,7 @@ public:
* @param [in] collumns - desired number of columns of the m_constraintMatrix
* @param [in] rows - desired number of rows of the m_constraintMatrix
*/
virtual void InitialiseConstraintMatrixSize(const int32_t collumns, const int32_t rows)
void InitialiseConstraintMatrixSize(const int32_t collumns, const int32_t rows)
{
m_constraintMatrix = vector<vector<T>>(collumns, vector<T>(rows));
}
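//Editor's sketch (assumed layer name, not part of this commit): a derived layer is expected to
//forward the puzzle size it receives in PreProcessing to the helper above, e.g.
//void ExampleLayer::PreProcessing(coor mySize, const vector<Part*>* partArray)
//{
//InitialiseConstraintMatrixSize(mySize.row, mySize.col);//one matrix entry per puzzle position
//}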

View File

@ -11,9 +11,9 @@ map<int,float> DestructionPower_Properties::SpeedTable =
};
void DestructionPower::PreProcessing(const vector<Part*>* partArray)
void DestructionPower::PreProcessing(coor mySize,const vector<Part*>* partArray)
{
InitialiseConstraintMatrixSize(32,28);
InitialiseConstraintMatrixSize(mySize.row,mySize.col);
}
//iterates through qualityVector and removes all entries that do not trigger PlaceOfPartGood
@ -48,6 +48,35 @@ void DestructionPower::DestructionOfSurrounding(const coor constraintCoordinate)
newDestructionArray[i] /=divisor;
}
}
//gets the next-highest-valued abstraction layer below the current one (on the first call, returns the strongest)
int DestructionPower::getNextAbstractionLayer(coor newCoordinate, int currentAbstractionLayer)
{
float currentPower=-1;
int nextLayer=-1;
float nextLayerPower=0;
if (currentAbstractionLayer>=0)
currentPower = m_constraintMatrix[newCoordinate.row][newCoordinate.col].DestructionArray[currentAbstractionLayer];
int i=0;
for(float it:m_constraintMatrix[newCoordinate.row][newCoordinate.col].DestructionArray)
{
//on the first call (currentAbstractionLayer<0) every layer is still a candidate
if(currentAbstractionLayer<0 || it <= currentPower)
{
if(it == currentPower)
{
//equal strength: only a layer further to the right counts as "next" (layers activate left to right)
if(i>currentAbstractionLayer)
return i;
}
else if(it>nextLayerPower)
{
//strictly weaker than the current layer, but the strongest candidate seen so far
nextLayerPower=it;
nextLayer=i;
}
}
i++;//count every element, not only the candidates
}
return nextLayer;
}
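//Editor's sketch (assumed usage, not part of this commit): starting from -1, repeated calls walk
//the layers of one coordinate from the strongest to the weakest; -1 means no layer is left.
//int layer = -1;
//while((layer = destructionPower.getNextAbstractionLayer(someCoor, layer)) >= 0)
//evaluateLayer(layer);//hypothetical consumer of the selected layer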
DestructionPower_Properties::DestructionPower_Properties() {
float aging=1.001;
@ -57,4 +86,4 @@ DestructionPower_Properties::DestructionPower_Properties() {
DestructionArray.emplace_back((DestructionPower_Properties::SpeedTable[i]*DESTRUCTION_INIT));
if(DestructionArray.back()<0.99) DestructionArray.back()*=aging;
}
}
}

View File

@ -17,12 +17,14 @@
class DestructionPower : public AbstractionLayer_Base<DestructionPower_Properties>
{
public:
void PreProcessing(const vector<Part*>* partArray) override;
void PreProcessing(coor mySize,const vector<Part*>* partArray)override;
bool EvaluateQuality (coor constraintCoordinate, qualityVector& qVector) override;
bool SetConstraintOnPosition(coor constraintCoordinate, AbstractionLayer_1_Properties constraint);
bool RemoveConstraintOnPosition(coor constraintCoordinate)override;
void DestructionOfSurrounding(coor constraintCoordinate);
int getNextAbstractionLayer(coor newCoordinate, int currentAbstractionLayer);
private:
};

View File

@ -7,9 +7,9 @@
#include <iostream>
void AbstractionLayer_1::PreProcessing(const vector<Part*>* partArray)
void AbstractionLayer_1::PreProcessing(coor mySize, const vector<Part*>* partArray)
{
InitialiseConstraintMatrixSize(32+2, 28+2);
InitialiseConstraintMatrixSize(mySize.row+2, mySize.col+2);
setEdgeZero();
}
@ -100,7 +100,7 @@ void AbstractionLayer_1::setEdgeZero()
{
for(int col=0;col<m_constraintMatrix.size();col++)
for(int row=0;row<m_constraintMatrix[col].size();row++)
if(col ==0 || col == m_constraintMatrix.size() || row == 0 || row == m_constraintMatrix[col].size())
if(col ==0 || col == m_constraintMatrix.size()-1 || row == 0 || row == m_constraintMatrix[col].size()-1)
m_constraintMatrix[col][row].m_connections=0b00000000;
}
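//Editor's note: PreProcessing above allocates the matrix with a one-cell border (+2 per dimension);
//setEdgeZero clears m_connections on that outermost ring, so pieces on the puzzle edge see
//"no neighbour" outside the playing field.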

View File

@ -16,7 +16,7 @@
class AbstractionLayer_1 : public AbstractionLayer_Base<AbstractionLayer_1_Properties>
{
public:
void PreProcessing(const vector<Part*>* partArray);//override
void PreProcessing(coor mySize, const vector<Part*>* partArray) final;
bool EvaluateQuality (const coor constraintCoordinate, qualityVector& qVector);
bool SetConstraintOnPosition(const coor constraintCoordinate, const AbstractionLayer_1_Properties constraint);
bool RemoveConstraintOnPosition(const coor constraintCoordinate);

View File

@ -1,6 +1,8 @@
#include "../../header.h"
void status(vector<LogEntry>& log, vector<Part*>& p_Box);
bool setBestOrMoreLayers(vector<LogEntry>& log);
void calculateTrueDestructionPower(vector<LogEntry>& log, Puzzle& puzzleMat, float Layerworth);
void capLogElements(vector<LogEntry>& log);
bool next(vector<LogEntry>& log, vector<Part*>& p_Box,Puzzle& puzzleMat)
{
@ -15,9 +17,9 @@ bool next(vector<LogEntry>& log, vector<Part*>& p_Box,Puzzle& puzzleMat)
//case last log element has multiple entries
else if(log.back().PieceCollector.size() > 1)
{
//moreLayers is 0, setbest is 1
//if(SetBestorMoreLayers()) setsolution(log,p_Box,puzzleMat);
//else solve(log,p_Box,puzzleMat);
//setBestOrMoreLayers(): 0 (false) means evaluate more layers, 1 (true) means set the best piece
if(setBestOrMoreLayers(log)) setsolution(log,p_Box,puzzleMat);
else solve(log,p_Box,puzzleMat);
}
//case last log element has exactly one solution
else if(log.back().PieceCollector.size() == 1)
@ -42,8 +44,8 @@ void createNextLogElement(vector<LogEntry>& log, vector<Part*>& p_Box, Puzzle& p
{
log.emplace_back(LogEntry(coor(0, 0)));
log.back().myCoor = calculateNextCoor(log, p_Box,puzzleMat);
//getLayerDestructionPowerfromSurrounding();
solve(log, p_Box,puzzleMat);
puzzleMat.dp->DestructionOfSurrounding(log.back().myCoor);//calculate dp from surrounding
solve(log, p_Box,puzzleMat);
}
@ -63,12 +65,11 @@ coor calculateNextCoor(vector<LogEntry>& log, vector<Part*>& p_Box, Puzzle& puzz
if(m<puzzleMat.getSizeAsCoor().col-1) m++;
else if(n<puzzleMat.getSizeAsCoor().row-1){ m=0; n++;}
return {m,n};
//return nextCoor;
}
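//Editor's note (illustrative trace): the column index m advances first and wraps to 0 once the
//last column is reached, then the row index n increases, e.g. ...,{cols-2,0},{cols-1,0},{0,1},...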
void solve(vector<LogEntry>& log, vector<Part*>& p_Box, Puzzle& puzzleMat)
{
//getNextHighestLayerworth(puzzleMat); //sets in abstractionLevel
log.back().abstractionLevel = puzzleMat.dp->getNextAbstractionLayer(log.back().myCoor,log.back().abstractionLevel); //sets the next abstraction layer in abstractionLevel
//status(log,p_Box,puzzleMat);
switch(log.back().abstractionLevel)
{
@ -160,4 +161,140 @@ void calculateTrueDestructionPower(vector<LogEntry>& log, Puzzle& puzzleMat, flo
//TODO: the place where the time of the abstraction levels is stored still has to go in here
float destructionPower=sqrt(Layerworth * log.back().abstractionLevel);
//puzzleMat.setdestructionPower(log.back().myCoor,log.back().abstractionLevel,destructionPower);
}
}
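//Editor's note (illustrative values): with Layerworth = 0.5 and abstractionLevel = 2 the formula
//above gives destructionPower = sqrt(0.5 * 2) = 1.0.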
// PART RAUER_WEIDINGER
/*
void sort()
{
}
void cut()
{
}
void capLogElements(vector<LogEntry>& log)
{
// Till now only the basic structure -> variable and vector names are not final
double limit = 0.6;
double diff = 0;
double maxdiff = 0;
int vectorsizeBefore = 0;
int vectorsizeAfter = 0;
double destroyed = 0; // destroyed parts in %
double worth = 0;
vectorsizeBefore = log.back().PieceCollector.size();
sort(); // sort the PieceCollector by probability, descending
// start no earlier than the first piece whose probability drops below the limit
auto idxcut = log.back().PieceCollector.begin();
while(idxcut != log.back().PieceCollector.end() && idxcut->second >= limit)
++idxcut;
// from there on, cut at the largest gap between neighbouring probabilities
for(auto it = idxcut; it != log.back().PieceCollector.end() && std::next(it) != log.back().PieceCollector.end(); ++it)
{
diff = it->second - std::next(it)->second;
if(diff > maxdiff)
{
maxdiff = diff;
idxcut = std::next(it);
}
}
cut(); // erase everything from idxcut to the end of the PieceCollector
vectorsizeAfter = log.back().PieceCollector.size();
destroyed = (vectorsizeBefore - vectorsizeAfter) / (double)vectorsizeBefore;
worth = sqrt(destroyed*maxdiff);
//return worth;
} */
//partdavid
bool setBestOrMoreLayers(vector<LogEntry>& log)
{
int countBest = 0;
float tempBest = 0.0;
// count how many Pieces are greater than the threshold value
for(auto it:log.back().PieceCollector)
{
// check Probability of current Puzzle Piece in this vector
if (it.second >= 0.90) // 0.90 as threshold
countBest++;
else
if (it.second > tempBest)
tempBest = it.second;
}
// return true only if exactly one piece is above the threshold
if (1 == countBest)
{
return true;
}
//else if (countBest > 1 && countBest < 10) // TODO: add possible constraints
else
{
return false;
}
}
void calculateNewCombinedProbabilityForPuzzlePiecesArithmetic(vector<LogEntry>& log)
{
float totalValue = 0.0;
// sum the probability of every puzzle piece in the PieceCollector vector
for(auto it:log.back().PieceCollector)
totalValue += it.second;
// arithmetic mean over all collected pieces; returning/storing it is still open
//return totalValue / log.back().PieceCollector.size();
}
/*
//PartDavid
void calculateNewCombinedProbabilityForPuzzlePiecesTopK(vector<LogEntry>& log, int executedLayers)
{
float TopK[executedLayers][2] = {0.0}; // store this in the log?
float sumTopK[executedLayers] = {0.0};
float HighestProbability = 0.0;
// searching for Top2 probability values in PieceCollector for each layer
for (int currentLayer = 0; currentLayer < executedLayers; currentLayer++)
{
// searching for Top2 probabilities in currentLayer
for(int i = 0; i < log.back().PieceCollector.size() && log.back().abstractionLevel == currentLayer; i++)
{
if (log.back().PieceCollector[i].second > TopK[currentLayer][0])
{
TopK[currentLayer][0] = log.back().PieceCollector[i].second;
}
else if (log.back().PieceCollector[i].second > TopK[currentLayer][1])
{
TopK[currentLayer][1] = log.back().PieceCollector[i].second;
}
else
{
// think about the special case of 0 here
}
}
sumTopK[currentLayer] = TopK[currentLayer][0] + TopK[currentLayer][1];
}
// searching for the highest summed probability over all layers for the designated position
for (int currentLayer = 0; currentLayer < executedLayers; currentLayer++)
{
if (sumTopK[currentLayer] > HighestProbability)
{
HighestProbability = sumTopK[currentLayer];
}
}
}
*/

View File

@ -27,8 +27,8 @@ public:
void decreaseRandomed() { randomed--;}
int hasRandomed(){return randomed;}
LogEntry(coor newCoor = coor(0,0)): myCoor(newCoor){
abstractionLevel=0;
explicit LogEntry(coor newCoor = coor(0,0)): myCoor(newCoor){
abstractionLevel=-1;
set=false;
}
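//Editor's note: abstractionLevel now starts at -1, i.e. "no layer evaluated yet";
//getNextAbstractionLayer() treats that as the signal to return the strongest layer for the coordinate first.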
private:
@ -39,7 +39,12 @@ private:
class Puzzle
{
public:
Puzzle(unsigned int newcols,unsigned int newrows):rows(newrows),cols(newcols){}
Puzzle(unsigned int newcols,unsigned int newrows,DestructionPower* newdp,AbstractionLayer_1* newa1):rows(newrows),cols(newcols)
{
dp=newdp;
a1=newa1;
a1->PreProcessing({rows,cols}, nullptr);//nullptr could not be made the default argument in the override
dp->PreProcessing({rows,cols},nullptr);
}
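//Editor's sketch (assumed usage, not part of this commit): constructing a Puzzle now pre-processes
//both layers with the puzzle size, e.g.
//DestructionPower dp;
//AbstractionLayer_1 a1;
//Puzzle puzzleMat(32, 28, &dp, &a1);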
coor getSizeAsCoor()
{return {cols,rows};}