Rolling 20220526

jomjol
2022-05-26 20:31:26 +02:00
parent cce812ff11
commit 00028010ee
203 changed files with 12003 additions and 1226 deletions

View File

@@ -19,7 +19,6 @@ void ClassFlow::SetInitialParameter(void)
std::vector<string> ClassFlow::ZerlegeZeile(std::string input, std::string delimiter)
{
std::vector<string> Output;
// std::string delimiter = " =,";
input = trim(input, delimiter);
size_t pos = findDelimiterPos(input, delimiter);

View File

@@ -26,7 +26,6 @@ struct HTMLInfo
class ClassFlow
{
protected:
// std::vector<string> ZerlegeZeile(string input);
std::vector<string> ZerlegeZeile(string input, string delimiter = " =, \t");
bool isNewParagraph(string input);
bool GetNextParagraph(FILE* pfile, string& aktparamgraph);
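For context, a minimal usage sketch of the tokenizer with its new default delimiter set; the input line and the expected tokens are illustrative assumptions, not taken from this commit:

// Sketch only: split a config line on any of the default delimiter characters " =, \t".
std::vector<string> zerlegt = ZerlegeZeile("ModelInputSize = 20, 32");
// Assumed result: zerlegt[0] == "ModelInputSize", zerlegt[1] == "20", zerlegt[2] == "32"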

View File

@@ -197,33 +197,6 @@ int ClassFlowCNNGeneral::ZeigerEvalHybrid(float zahl, float zahl_vorgaenger, int
return ((int) trunc(zahl) + 10) % 10;
}
/*
int ClassFlowCNNGeneral::ZeigerEvalHybrid_NEU(float zahl, float zahl_vorgaenger)
{
int ergebnis_nachkomma = ((int) floor(zahl * 10) + 10) % 10;
int ergebnis_vorkomma = ((int) floor(zahl) + 10) % 10;
int ergebnis, ergebnis_rating;
if (zahl_vorgaenger < 0)
return ergebnis_vorkomma % 10;
ergebnis_rating = ergebnis_nachkomma - zahl_vorgaenger;
if (ergebnis_nachkomma >= 5)
ergebnis_rating-=5;
else
ergebnis_rating+=5;
ergebnis = (int) round(zahl);
if (ergebnis_rating < 0)
ergebnis-=1;
if (ergebnis == -1)
ergebnis+=10;
ergebnis = (ergebnis + 10) % 10;
return ergebnis;
}
*/
int ClassFlowCNNGeneral::ZeigerEval(float zahl, int ziffer_vorgaenger)
@@ -309,11 +282,12 @@ bool ClassFlowCNNGeneral::ReadParameter(FILE* pfile, string& aktparamgraph)
{
CNNGoodThreshold = std::stof(zerlegt[1]);
}
if ((toUpper(zerlegt[0]) == "MODELINPUTSIZE") && (zerlegt.size() > 2))
/* if ((toUpper(zerlegt[0]) == "MODELINPUTSIZE") && (zerlegt.size() > 2))
{
this->modelxsize = std::stoi(zerlegt[1]);
this->modelysize = std::stoi(zerlegt[2]);
}
*/
if (zerlegt.size() >= 5)
{
general* _analog = GetGENERAL(zerlegt[0], true);
@@ -334,11 +308,14 @@ bool ClassFlowCNNGeneral::ReadParameter(FILE* pfile, string& aktparamgraph)
}
}
if (!getNetworkParameter())
return false;
for (int _ana = 0; _ana < GENERAL.size(); ++_ana)
for (int _ana = 0; _ana < GENERAL.size(); ++_ana)
for (int i = 0; i < GENERAL[_ana]->ROI.size(); ++i)
{
GENERAL[_ana]->ROI[i]->image = new CImageBasis(modelxsize, modelysize, 3);
GENERAL[_ana]->ROI[i]->image = new CImageBasis(modelxsize, modelysize, modelchannel);
GENERAL[_ana]->ROI[i]->image_org = new CImageBasis(GENERAL[_ana]->ROI[i]->deltax, GENERAL[_ana]->ROI[i]->deltay, 3);
}
@@ -499,13 +476,11 @@ void ClassFlowCNNGeneral::DrawROI(CImageBasis *_zw)
}
}
bool ClassFlowCNNGeneral::doNeuralNetwork(string time)
bool ClassFlowCNNGeneral::getNetworkParameter()
{
if (disabled)
return true;
string logPath = CreateLogFolder(time);
CTfLiteClass *tflite = new CTfLiteClass;
string zwcnn = "/sdcard" + cnnmodelfile;
zwcnn = FormatFileName(zwcnn);
@@ -513,7 +488,6 @@ bool ClassFlowCNNGeneral::doNeuralNetwork(string time)
if (!tflite->LoadModel(zwcnn)) {
printf("Can't read model file /sdcard%s\n", cnnmodelfile.c_str());
LogFile.WriteToFile("Cannot load model");
delete tflite;
return false;
}
@@ -521,6 +495,11 @@ bool ClassFlowCNNGeneral::doNeuralNetwork(string time)
if (CNNType == AutoDetect)
{
tflite->GetInputDimension(false);
modelxsize = tflite->ReadInputDimenstion(0);
modelysize = tflite->ReadInputDimenstion(1);
modelchannel = tflite->ReadInputDimenstion(2);
int _anzoutputdimensions = tflite->GetAnzOutPut();
switch (_anzoutputdimensions)
{
@@ -549,6 +528,30 @@ bool ClassFlowCNNGeneral::doNeuralNetwork(string time)
}
}
delete tflite;
return true;
}
bool ClassFlowCNNGeneral::doNeuralNetwork(string time)
{
if (disabled)
return true;
string logPath = CreateLogFolder(time);
CTfLiteClass *tflite = new CTfLiteClass;
string zwcnn = "/sdcard" + cnnmodelfile;
zwcnn = FormatFileName(zwcnn);
printf("%s\n", zwcnn.c_str());
if (!tflite->LoadModel(zwcnn)) {
printf("Can't read model file /sdcard%s\n", cnnmodelfile.c_str());
LogFile.WriteToFile("Cannot load model");
delete tflite;
return false;
}
tflite->MakeAllocate();
for (int _ana = 0; _ana < GENERAL.size(); ++_ana)
{
for (int i = 0; i < GENERAL[_ana]->ROI.size(); ++i)
@@ -581,14 +584,15 @@ bool ClassFlowCNNGeneral::doNeuralNetwork(string time)
if (isLogImage)
{
string _imagename = GENERAL[_ana]->name + "_" + GENERAL[_ana]->ROI[i]->name;
if (isLogImageSelect)
{
if (LogImageSelect.find(GENERAL[_ana]->ROI[i]->name) != std::string::npos)
LogImage(logPath, GENERAL[_ana]->ROI[i]->name, NULL, &GENERAL[_ana]->ROI[i]->result_klasse, time, GENERAL[_ana]->ROI[i]->image_org);
LogImage(logPath, _imagename, NULL, &GENERAL[_ana]->ROI[i]->result_klasse, time, GENERAL[_ana]->ROI[i]->image_org);
}
else
{
LogImage(logPath, GENERAL[_ana]->ROI[i]->name, NULL, &GENERAL[_ana]->ROI[i]->result_klasse, time, GENERAL[_ana]->ROI[i]->image_org);
LogImage(logPath, _imagename, NULL, &GENERAL[_ana]->ROI[i]->result_klasse, time, GENERAL[_ana]->ROI[i]->image_org);
}
}
} break;
@@ -617,7 +621,18 @@ bool ClassFlowCNNGeneral::doNeuralNetwork(string time)
if (debugdetailgeneral) LogFile.WriteToFile(_zwres);
if (isLogImage)
LogImage(logPath, GENERAL[_ana]->ROI[i]->name, &GENERAL[_ana]->ROI[i]->result_float, NULL, time, GENERAL[_ana]->ROI[i]->image_org);
{
string _imagename = GENERAL[_ana]->name + "_" + GENERAL[_ana]->ROI[i]->name;
if (isLogImageSelect)
{
if (LogImageSelect.find(GENERAL[_ana]->ROI[i]->name) != std::string::npos)
LogImage(logPath, _imagename, NULL, &GENERAL[_ana]->ROI[i]->result_klasse, time, GENERAL[_ana]->ROI[i]->image_org);
}
else
{
LogImage(logPath, _imagename, NULL, &GENERAL[_ana]->ROI[i]->result_klasse, time, GENERAL[_ana]->ROI[i]->image_org);
}
}
} break;
case DigitalHyprid10:
{
@@ -641,7 +656,18 @@ bool ClassFlowCNNGeneral::doNeuralNetwork(string time)
if (debugdetailgeneral) LogFile.WriteToFile(_zwres);
if (isLogImage)
LogImage(logPath, GENERAL[_ana]->ROI[i]->name, &GENERAL[_ana]->ROI[i]->result_float, NULL, time, GENERAL[_ana]->ROI[i]->image_org);
{
string _imagename = GENERAL[_ana]->name + "_" + GENERAL[_ana]->ROI[i]->name;
if (isLogImageSelect)
{
if (LogImageSelect.find(GENERAL[_ana]->ROI[i]->name) != std::string::npos)
LogImage(logPath, _imagename, NULL, &GENERAL[_ana]->ROI[i]->result_klasse, time, GENERAL[_ana]->ROI[i]->image_org);
}
else
{
LogImage(logPath, _imagename, NULL, &GENERAL[_ana]->ROI[i]->result_klasse, time, GENERAL[_ana]->ROI[i]->image_org);
}
}
} break;
case DoubleHyprid10:
@@ -649,6 +675,7 @@ bool ClassFlowCNNGeneral::doNeuralNetwork(string time)
int _num, _numplus, _numminus;
float _val, _valplus, _valminus;
float _fit;
float _result_save_file;
tflite->LoadInputImageBasis(GENERAL[_ana]->ROI[i]->image);
tflite->Invoke();
@@ -680,10 +707,13 @@ bool ClassFlowCNNGeneral::doNeuralNetwork(string time)
if (result < 0)
result = result + 10;
_result_save_file = result;
if (_fit < CNNGoodThreshold)
{
GENERAL[_ana]->ROI[i]->isReject = true;
result = -1;
_result_save_file += 100; // If the fit is not sufficient, the result should still be saved as "-10x.y".
string zw = "Value Rejected due to Threshold (Fit: " + to_string(_fit) + "Threshold: " + to_string(CNNGoodThreshold);
printf("Value Rejected due to Threshold (Fit: %f, Threshold: %f\n", _fit, CNNGoodThreshold);
LogFile.WriteToFile(zw);
@@ -693,9 +723,23 @@ bool ClassFlowCNNGeneral::doNeuralNetwork(string time)
GENERAL[_ana]->ROI[i]->isReject = false;
}
GENERAL[_ana]->ROI[i]->result_float = result;
printf("Result General(Analog)%i: %f\n", i, GENERAL[_ana]->ROI[i]->result_float);
if (isLogImage)
{
string _imagename = GENERAL[_ana]->name + "_" + GENERAL[_ana]->ROI[i]->name;
if (isLogImageSelect)
{
if (LogImageSelect.find(GENERAL[_ana]->ROI[i]->name) != std::string::npos)
LogImage(logPath, _imagename, &_result_save_file, NULL, time, GENERAL[_ana]->ROI[i]->image_org);
}
else
{
LogImage(logPath, _imagename, &_result_save_file, NULL, time, GENERAL[_ana]->ROI[i]->image_org);
}
}
}
break;
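For reference, a minimal sketch of the call order introduced by this refactoring, assuming the CTfLiteClass and CImageBasis methods exactly as they appear in the hunks above; the model path is a hypothetical placeholder:

// Sketch only: auto-detect the model geometry once, then allocate ROI buffers with it.
CTfLiteClass *tflite = new CTfLiteClass;
if (tflite->LoadModel("/sdcard/config/model.tflite"))           // hypothetical path
{
    tflite->GetInputDimension(false);
    int xsize   = tflite->ReadInputDimenstion(0);               // input width
    int ysize   = tflite->ReadInputDimenstion(1);               // input height
    int channel = tflite->ReadInputDimenstion(2);               // color channels
    int classes = tflite->GetAnzOutPut();                       // output classes, used to auto-detect the CNN type
    CImageBasis *roi = new CImageBasis(xsize, ysize, channel);  // per-ROI buffer, as in ReadParameter()
    delete roi;
}
delete tflite;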

View File

@@ -24,7 +24,7 @@ protected:
float CNNGoodThreshold;
string cnnmodelfile;
int modelxsize, modelysize;
int modelxsize, modelysize, modelchannel;
bool isLogImageSelect;
string LogImageSelect;
ClassFlowAlignment* flowpostalignment;
@@ -39,6 +39,8 @@ protected:
bool doNeuralNetwork(string time);
bool doAlignAndCut(string time);
bool getNetworkParameter();
public:
ClassFlowCNNGeneral(ClassFlowAlignment *_flowalign, t_CNNType _cnntype = AutoDetect);

View File

@@ -37,6 +37,7 @@ struct NumberPost {
float PreValue; // last value that was read reliably
float Value; // last value read, incl. corrections
string ReturnRateValue; // return value: rate of change
string ReturnChangeAbsolute; // return value: absolute change vs. previous value
string ReturnRawValue; // raw value (with N & leading zeros)
string ReturnValue; // corrected return value, possibly with an error message
string ReturnPreValue; // corrected return value without an error message

View File

@@ -149,6 +149,7 @@ bool ClassFlowMQTT::doFlow(string zwtime)
std::string resultraw = "";
std::string resultrate = "";
std::string resulttimestamp = "";
std::string resultchangabs = "";
string zw = "";
string namenumber = "";
@@ -180,6 +181,7 @@ bool ClassFlowMQTT::doFlow(string zwtime)
resultraw = (*NUMBERS)[i]->ReturnRawValue;
resulterror = (*NUMBERS)[i]->ErrorMessageText;
resultrate = (*NUMBERS)[i]->ReturnRateValue;
resultchangabs = (*NUMBERS)[i]->ReturnChangeAbsolute;
resulttimestamp = (*NUMBERS)[i]->timeStamp;
namenumber = (*NUMBERS)[i]->name;
@@ -200,6 +202,10 @@ bool ClassFlowMQTT::doFlow(string zwtime)
if (resultrate.length() > 0)
MQTTPublish(zw, resultrate, SetRetainFlag);
zw = namenumber + "changeabsolut";
if (resultchangabs.length() > 0)
MQTTPublish(zw, resultchangabs, SetRetainFlag);
zw = namenumber + "raw";
if (resultraw.length() > 0)
MQTTPublish(zw, resultraw, SetRetainFlag);
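A minimal sketch of the publish pattern added here for the new absolute-change topic; the topic prefix and payload are illustrative assumptions:

// Sketch only: the absolute change is published next to the existing value/rate/raw topics.
std::string namenumber     = "watermeter/main/";   // assumed prefix taken from the flow configuration
std::string resultchangabs = "0.053";              // assumed payload, i.e. Value - PreValue, rounded
if (resultchangabs.length() > 0)
    MQTTPublish(namenumber + "changeabsolut", resultchangabs, SetRetainFlag);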

View File

@@ -77,6 +77,8 @@ void ClassFlowPostProcessing::SetPreValue(float zw, string _numbers, bool _exter
if (NUMBERS[j]->name == _numbers)
{
NUMBERS[j]->PreValue = zw;
NUMBERS[j]->ReturnPreValue = std::to_string(zw);
NUMBERS[j]->PreValueOkay = true;
if (_extern)
{
time(&(NUMBERS[j]->lastvalue));
@@ -541,7 +543,6 @@ void ClassFlowPostProcessing::InitNUMBERS()
_number->ReturnRawValue = ""; // raw value (with N & leading zeros)
_number->ReturnValue = ""; // corrected return value, possibly with an error message
// _number->ReturnValueNoError = ""; // corrected return value without an error message
_number->ErrorMessageText = ""; // error message from the consistency check
_number->ReturnPreValue = "";
_number->PreValueOkay = false;
@@ -560,7 +561,6 @@ void ClassFlowPostProcessing::InitNUMBERS()
_number->Value = 0; // last value read, incl. corrections
_number->ReturnRawValue = ""; // raw value (with N & leading zeros)
_number->ReturnValue = ""; // corrected return value, possibly with an error message
// _number->ReturnValueNoError = ""; // corrected return value without an error message
_number->ErrorMessageText = ""; // error message from the consistency check
_number->Nachkomma = _number->AnzahlAnalog;
@@ -722,7 +722,7 @@ bool ClassFlowPostProcessing::doFlow(string zwtime)
if (NUMBERS[j]->useMaxRateValue && PreValueUse && NUMBERS[j]->PreValueOkay)
{
float _ratedifference;
float _ratedifference;
if (NUMBERS[j]->RateType == RateChange)
_ratedifference = NUMBERS[j]->FlowRateAct;
else
@@ -745,6 +745,7 @@ bool ClassFlowPostProcessing::doFlow(string zwtime)
NUMBERS[j]->ReturnValue = RundeOutput(NUMBERS[j]->Value, NUMBERS[j]->Nachkomma);
NUMBERS[j]->ReturnPreValue = RundeOutput(NUMBERS[j]->PreValue, NUMBERS[j]->Nachkomma);
NUMBERS[j]->ReturnChangeAbsolute = RundeOutput(NUMBERS[j]->Value - NUMBERS[j]->PreValue, NUMBERS[j]->Nachkomma);
NUMBERS[j]->ErrorMessageText = "no error";
UpdatePreValueINI = true;
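A worked example of the new absolute-change field, assuming RundeOutput rounds to the given number of decimal places and returns a string, as its use above suggests; the sample values are hypothetical:

// Sketch only: absolute change between the current and the previous reading.
float Value     = 123.46;   // assumed current reading
float PreValue  = 123.40;   // assumed previous reading
int   Nachkomma = 2;        // decimal places used for rounding
string ReturnChangeAbsolute = RundeOutput(Value - PreValue, Nachkomma);  // -> "0.06"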