commit 1bf1657b932aa2825977696b3c3fb068b5533bbc Author: Anton Romanov Date: Tue Dec 13 12:36:06 2022 +0400 initial diff --git a/Debug/project_template.exe b/Debug/project_template.exe new file mode 100644 index 0000000..036f777 Binary files /dev/null and b/Debug/project_template.exe differ diff --git a/Debug/project_template.ilk b/Debug/project_template.ilk new file mode 100644 index 0000000..59b5ba4 Binary files /dev/null and b/Debug/project_template.ilk differ diff --git a/Debug/project_template.pdb b/Debug/project_template.pdb new file mode 100644 index 0000000..fa6a972 Binary files /dev/null and b/Debug/project_template.pdb differ diff --git a/Debug/test/1.txt b/Debug/test/1.txt new file mode 100644 index 0000000..e48db0e --- /dev/null +++ b/Debug/test/1.txt @@ -0,0 +1,51 @@ +5520 +3940 +4490 +5030 +5660 +4790 +5520 +5560 +5200 +6670 +5900 +5280 +6490 +5560 +5090 +7090 +6570 +5890 +6640 +6360 +5640 +6630 +5330 +5540 +7100 +5410 +5800 +6110 +6870 +6560 +6120 +6540 +5810 +6300 +6270 +6900 +6310 +5330 +5700 +6680 +5510 +6690 +5870 +7140 +6680 +6520 +6020 +6190 +6690 +6330 +7620 diff --git a/Debug/test/1.txtresult/15out b/Debug/test/1.txtresult/15out new file mode 100644 index 0000000..c520e5c --- /dev/null +++ b/Debug/test/1.txtresult/15out @@ -0,0 +1,30 @@ +5780 +5780 +5780 +5780 +5780 +5780 +4860 +5780 +5780 +5780 +4860 +5780 +5780 +5780 +5780 +5780 +6700 +5780 +5780 +5166,67 +6393,33 +5780 +5780 +5166,67 +5780 +5780 +5780 +6393,33 +5780 +6393,33 diff --git a/Debug/test/1.txtresult/smape b/Debug/test/1.txtresult/smape new file mode 100644 index 0000000..f0a1b7e --- /dev/null +++ b/Debug/test/1.txtresult/smape @@ -0,0 +1 @@ +Smape for 15 method: 11.2282 alpha: 0 delta 0 gamma: 0 phi: 0 countRulesIn: 1 countFuzzyParts: 2 sizeLabels: 1 p: 1 diff --git a/Release/project_template.exe b/Release/project_template.exe new file mode 100644 index 0000000..e7f6c33 Binary files /dev/null and b/Release/project_template.exe differ diff --git a/Release/project_template.pdb b/Release/project_template.pdb new file mode 100644 index 0000000..25552c0 Binary files /dev/null and b/Release/project_template.pdb differ diff --git a/libnlopt-0.dll b/libnlopt-0.dll new file mode 100644 index 0000000..1d63e4b Binary files /dev/null and b/libnlopt-0.dll differ diff --git a/libnlopt-0.lib b/libnlopt-0.lib new file mode 100644 index 0000000..8caa00f Binary files /dev/null and b/libnlopt-0.lib differ diff --git a/project_template.ncb b/project_template.ncb new file mode 100644 index 0000000..6ca2984 Binary files /dev/null and b/project_template.ncb differ diff --git a/project_template.sln b/project_template.sln new file mode 100644 index 0000000..e37b7d2 --- /dev/null +++ b/project_template.sln @@ -0,0 +1,26 @@ + +Microsoft Visual Studio Solution File, Format Version 10.00 +# Visual Studio 2008 +Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "project_template", "project_template\project_template.vcproj", "{D68B91E2-1169-4096-AC00-5992226D29DF}" +EndProject +Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Элементы решения", "Элементы решения", "{33699718-6D46-4936-8061-0EF2D6A27D62}" + ProjectSection(SolutionItems) = preProject + C:\Users\orion\Desktop\project_template.vsd = C:\Users\orion\Desktop\project_template.vsd + project_template.vsd = project_template.vsd + EndProjectSection +EndProject +Global + GlobalSection(SolutionConfigurationPlatforms) = preSolution + Debug|Win32 = Debug|Win32 + Release|Win32 = Release|Win32 + EndGlobalSection + GlobalSection(ProjectConfigurationPlatforms) = postSolution + 
{D68B91E2-1169-4096-AC00-5992226D29DF}.Debug|Win32.ActiveCfg = Debug|Win32 + {D68B91E2-1169-4096-AC00-5992226D29DF}.Debug|Win32.Build.0 = Debug|Win32 + {D68B91E2-1169-4096-AC00-5992226D29DF}.Release|Win32.ActiveCfg = Release|Win32 + {D68B91E2-1169-4096-AC00-5992226D29DF}.Release|Win32.Build.0 = Release|Win32 + EndGlobalSection + GlobalSection(SolutionProperties) = preSolution + HideSolutionNode = FALSE + EndGlobalSection +EndGlobal diff --git a/project_template.suo b/project_template.suo new file mode 100644 index 0000000..7af92a1 Binary files /dev/null and b/project_template.suo differ diff --git a/project_template.vsd b/project_template.vsd new file mode 100644 index 0000000..5219f30 Binary files /dev/null and b/project_template.vsd differ diff --git a/project_template/A.cpp b/project_template/A.cpp new file mode 100644 index 0000000..410091d --- /dev/null +++ b/project_template/A.cpp @@ -0,0 +1,35 @@ +#include "StdAfx.h" +#include "A.h" + +A::A() { +} + +A::A(double left, double right) { + this->left = left; + this->right = right; +} + + double A::getValue(double value) { + if (value == (right + left) / 2) { + return 1; + } else if ((value >= right) || (value <= left)) { + return 0; + } else if (value < (right + left) / 2) { + return (double)(value - left) / ((right + left) / 2 - left); + } else if (value > (right + left) / 2) { + return (double) -(value - right) / (right - (right + left) / 2); + } + + } + + double A::getValueAtTop() { + return (right + left) / 2; + } + + double A::getLeft() { + return left; + } + + double A::getRight() { + return right; + } diff --git a/project_template/A.h b/project_template/A.h new file mode 100644 index 0000000..b59a863 --- /dev/null +++ b/project_template/A.h @@ -0,0 +1,20 @@ +#ifndef A_H +#define A_H + +//using namespace std; + +// , +class A { +private: + double left; // + double right; // +public: + + A(); + A(double, double); + double getLeft(); + double getRight(); + double getValue(double); // + double getValueAtTop(); // = 1 +}; +#endif \ No newline at end of file diff --git a/project_template/AddTrendAddSeasonality.cpp b/project_template/AddTrendAddSeasonality.cpp new file mode 100644 index 0000000..51d5a12 --- /dev/null +++ b/project_template/AddTrendAddSeasonality.cpp @@ -0,0 +1,142 @@ +// +// : +// , +// +#include "StdAfx.h" +#include +#include "AddTrendAddSeasonality.h" +#include "Param.h" + +// +AddTrendAddSeasonality::AddTrendAddSeasonality(vector timeSeries, int countPointForecast) { + this->x = timeSeries; + this->countPointForecast = countPointForecast; + this->partition(); +} + +AddTrendAddSeasonality::~AddTrendAddSeasonality() { + // + std::vector ().swap(S); + std::vector ().swap(x); + std::vector ().swap(T); + std::vector ().swap(I); + std::vector ().swap(forecast); +} + +// , +void AddTrendAddSeasonality::init() { + S.clear(); + T.clear(); + I.clear(); + forecast.clear(); + + double sumS = 0; + double sumT = 0; + for (unsigned int t = 0; t < p; t++) { + sumS += x[t]; + sumT += x[t+p]; + } + + S.push_back(sumS / p); + T.push_back((sumT/ p - S[0]) / p); + + for (unsigned int t = 0; t < p; t++) { + I.push_back(x[t] - S[0]); + } + + forecast.push_back(S[0] + T[0] + I[0]); + +} + +// +void AddTrendAddSeasonality::setParam(string paramName, double value) { + if (paramName.compare("alpha") == 0) { + this->alpha = value; + } else if (paramName.compare("gamma") == 0) { + this->gamma = value; + } else if (paramName.compare("p") == 0) { + this->p = value; + } else if (paramName.compare("delta") == 0) { + this->delta = value; + } + +} + + 
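For orientation, the error-correction recursion that createModel() below implements (additive trend, additive seasonality) is: e = x[t] - forecast[t]; S[t+1] = S[t] + T[t] + alpha*e; T[t+1] = T[t] + alpha*gamma*e; I[t+1] = I[t] + delta*e; forecast[t+1] = S[t+1] + T[t+1] + I[t+1]. The sketch below is a minimal standalone illustration of that recursion, not project code: the function name is hypothetical and the initialization is deliberately simplified (a single seasonal index instead of the per-season averages computed in init()).

#include <cstddef>
#include <vector>

// Hypothetical sketch of the error-correction updates used by createModel().
// Assumes x has at least two points; horizon is the number of points to forecast ahead.
std::vector<double> additiveHoltWintersSketch(const std::vector<double>& x,
                                              double alpha, double gamma,
                                              double delta, std::size_t horizon) {
    std::vector<double> S, T, I, f;
    S.push_back(x[0]);            // level (init() instead averages the first p observations)
    T.push_back(x[1] - x[0]);     // trend (init() uses the difference of seasonal means)
    I.push_back(0.0);             // seasonal index, collapsed to a single value for brevity
    f.push_back(S[0] + T[0] + I[0]);
    for (std::size_t t = 0; t + 1 < x.size() + horizon; ++t) {
        double e = (t < x.size()) ? x[t] - f[t] : 0.0;   // beyond the data the error is taken as zero
        S.push_back(S[t] + T[t] + alpha * e);
        T.push_back(T[t] + alpha * gamma * e);
        I.push_back(I[t] + delta * e);
        f.push_back(S[t + 1] + T[t + 1] + I[t + 1]);
    }
    return std::vector<double>(f.end() - horizon, f.end()); // the last horizon values
}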
+// +void AddTrendAddSeasonality::createModel() { + this->init(); // + double e = 0; + + // countPointForecast + for (unsigned int t = 0; t < x.size()-1 + this->countPointForecast; t++) { + // - , + if (t < x.size()) { + e = x[t]-forecast[t]; + } else { + e = 0; + } + + S.push_back(S[t] + T[t] + alpha * e); // + T.push_back(T[t] + alpha * gamma * e); // + I.push_back(I[t] + delta * e); // + forecast.push_back(S[t+1] + T[t+1] + I[t+1]); // + } +} + + +// +void AddTrendAddSeasonality::createModelForEstimation() { + this->init(); // + double e = 0; + + // countPointForecast + for (unsigned int t = 0; t < xLearning.size()-1 + this->countPointForecast; t++) { + // - , + if (t < xLearning.size()) { + e = xLearning[t]-forecast[t]; + } else { + e = 0; + } + + S.push_back(S[t] + T[t] + alpha * e); // + T.push_back(T[t] + alpha * gamma * e); // + I.push_back(I[t] + delta * e); // + forecast.push_back(S[t+1] + T[t+1] + I[t+1]); // + } +} + + +// +vector AddTrendAddSeasonality::getForecast() { + vector result; + for (unsigned int i = forecast.size() - countPointForecast; i < forecast.size(); i++) { + result.push_back(forecast[i]); + } + return result; +} + +// +double AddTrendAddSeasonality::calcEstimation(Aic *aic) { + return aic->getValue(3, this->xEstimation, this->forecast); +} + +// +// TODO: +Param* AddTrendAddSeasonality::optimize(Estimation *est) { + Param *optimal = new Param(); + double minSmape = 99999; + for (double al = 0.1; al < 1; al+= 0.01) { + for (double del = 0.1; del < 1; del+= 0.01) { + this->setParam("alpha", al); + this->setParam("delta", del); + this->createModelForEstimation(); + double smapeValue = est->getValue(getXEstimation(), getForecast()); + if (minSmape > smapeValue) { + minSmape = smapeValue; + optimal->alpha = al; + } + } + } + return optimal; +} diff --git a/project_template/AddTrendAddSeasonality.h b/project_template/AddTrendAddSeasonality.h new file mode 100644 index 0000000..efd1ea5 --- /dev/null +++ b/project_template/AddTrendAddSeasonality.h @@ -0,0 +1,35 @@ +#ifndef ADDTRENDADDSEASONALITY_H +#define ADDTRENDADDSEASONALITY_H + +#include "Method.h" +#include "Aic.h" +#include "Param.h" + +using namespace std; + +// Method +// , +class AddTrendAddSeasonality : public Method { +public: + double alpha; // + vector S; // + double gamma; // + double delta; // + vector T; // + vector I; // + int p; // + + AddTrendAddSeasonality(vector, int); + ~AddTrendAddSeasonality(); + + vector getForecast(); + void init(); + void createModel(); + void createModelForEstimation(); + void setParam(string, double); + double calcEstimation(Aic *); + Param* optimize(Estimation *); // + vector getS(); +}; + +#endif \ No newline at end of file diff --git a/project_template/AddTrendMultSeasonality.cpp b/project_template/AddTrendMultSeasonality.cpp new file mode 100644 index 0000000..c46ca9d --- /dev/null +++ b/project_template/AddTrendMultSeasonality.cpp @@ -0,0 +1,145 @@ +// +// : +// , +// +#include "StdAfx.h" +#include +#include "AddTrendMultSeasonality.h" +#include "Param.h" + +// +AddTrendMultSeasonality::AddTrendMultSeasonality(vector timeSeries, int countPointForecast) { + this->x = timeSeries; + this->countPointForecast = countPointForecast; + this->partition(); +} + +AddTrendMultSeasonality::~AddTrendMultSeasonality() { + // + std::vector ().swap(S); + std::vector ().swap(x); + std::vector ().swap(T); + std::vector ().swap(I); + std::vector ().swap(forecast); +} + +// , +void AddTrendMultSeasonality::init() { + S.clear(); + T.clear(); + I.clear(); + forecast.clear(); + + double 
sumS = 0; + double sumT = 0; + for (unsigned int t = 0; t < p; t++) { + sumS += x[t]; + sumT += x[t+p]; + } + + S.push_back(sumS / p); + T.push_back((sumT/ p - S[0]) / p); + + for (unsigned int t = 0; t < p; t++) { + I.push_back(x[t] / S[0]); + } + + forecast.push_back((S[0] + T[0]) * I[0]); +} + +// +void AddTrendMultSeasonality::setParam(string paramName, double value) { + if (paramName.compare("alpha") == 0) { + this->alpha = value; + } else if (paramName.compare("gamma") == 0) { + this->gamma = value; + } else if (paramName.compare("p") == 0) { + this->p = value; + } else if (paramName.compare("delta") == 0) { + this->delta = value; + } + +} + +// +void AddTrendMultSeasonality::createModel() { + this->init(); // + double e = 0; + + // countPointForecast + for (unsigned int t = 0; t < x.size()-1 + this->countPointForecast; t++) { + // - , + if (t < x.size()) { + e = x[t]-forecast[t]; + } else { + e = 0; + } + + S.push_back(S[t] + T[t] + alpha * e / I[t]); // + T.push_back(T[t] + alpha * gamma * e / I[t]); // + I.push_back(I[t] + delta * e / (S[t] + T[t])); // + forecast.push_back((S[t+1] + T[t+1]) * I[t+1]); // + } +} + +// . +void AddTrendMultSeasonality::createModelForEstimation() { + this->init(); // + double e = 0; + + // countPointForecast + for (unsigned int t = 0; t < xLearning.size()-1 +this->countPointForecast; t++) { + // - , + if (t < xLearning.size()) { + e = xLearning[t]-forecast[t]; + } else { + e = 0; + } + + S.push_back(S[t] + T[t] + alpha * e / I[t]); // + T.push_back(T[t] + alpha * gamma * e / I[t]); // + I.push_back(I[t] + delta * e / (S[t] + T[t])); // + forecast.push_back((S[t+1] + T[t+1]) * I[t+1]); // + + } +} + + +// +vector AddTrendMultSeasonality::getForecast() { + vector result; + for (unsigned int i = forecast.size() - countPointForecast; i < forecast.size(); i++) { + result.push_back(forecast[i]); + } + return result; +} + +// +double AddTrendMultSeasonality::calcEstimation(Aic *aic) { + return aic->getValue(3, this->xEstimation, this->forecast); +} +// +// TODO: +Param* AddTrendMultSeasonality::optimize(Estimation *est) { + Param *optimal = new Param(); + double minSmape = 99999; + for (double al = 0.1; al < 1; al+= 0.01) { + for (double gam = 0.1; gam < 1; gam+= 0.01) { + for (double del = 0.1; del < 1;del+= 0.01) { + this->setParam("alpha", al); + this->setParam("gamma", gam); + this->setParam("delta", del); + this->createModelForEstimation(); + double smapeValue = est->getValue(getXEstimation(), getForecast()); + if (minSmape > smapeValue) { + minSmape = smapeValue; + optimal->alpha = al; + optimal->gamma = gam; + optimal->delta = del; + } + } + } + } + return optimal; +} + diff --git a/project_template/AddTrendMultSeasonality.h b/project_template/AddTrendMultSeasonality.h new file mode 100644 index 0000000..a4109f8 --- /dev/null +++ b/project_template/AddTrendMultSeasonality.h @@ -0,0 +1,34 @@ +#ifndef ADDTRENDMULTSEASONALITY_H +#define ADDTRENDMULTSEASONALITY_H + +#include "Method.h" +#include "Aic.h" +#include "Param.h" + +using namespace std; + +// Method +// , +class AddTrendMultSeasonality : public Method { +public: + double alpha; // + vector S; // + double gamma; // + double delta; // + vector T; // + vector I; // + int p; // + + AddTrendMultSeasonality(vector, int); + ~AddTrendMultSeasonality(); + + vector getForecast(); + void init(); + void createModel(); + void createModelForEstimation(); + void setParam(string, double); + double calcEstimation(Aic *); + Param* optimize(Estimation *); // +}; + +#endif \ No newline at end of file diff --git 
a/project_template/AddTrendNoSeasonality.cpp b/project_template/AddTrendNoSeasonality.cpp new file mode 100644 index 0000000..3f05244 --- /dev/null +++ b/project_template/AddTrendNoSeasonality.cpp @@ -0,0 +1,126 @@ +// +// : +// , +// +#include "StdAfx.h" +#include +#include "AddTrendNoSeasonality.h" +#include "Aic.h" +#include "Param.h" + +// +AddTrendNoSeasonality::AddTrendNoSeasonality(vector timeSeries, int countPointForecast) { + this->x = timeSeries; + this->countPointForecast = countPointForecast; + this->partition(); + this->alpha = 0; + this->gamma = 0; +} + +AddTrendNoSeasonality::~AddTrendNoSeasonality() { + // + std::vector ().swap(S); + std::vector ().swap(x); + std::vector ().swap(T); + std::vector ().swap(forecast); +} + +// +void AddTrendNoSeasonality::setParam(string paramName, double value) { + if (paramName.compare("alpha") == 0) { + this->alpha = value; + } else if (paramName.compare("gamma") == 0) { + this->gamma = value; + } +} + + +// , +void AddTrendNoSeasonality::init() { + S.clear(); + T.clear(); + forecast.clear(); + + T.push_back(x[1] - x[0]); + S.push_back(x[0]); + forecast.push_back(S[0] + T[0]); +} + +// +void AddTrendNoSeasonality::createModel() { + this->init(); // + double e = 0; + + // countPointForecast + for (unsigned int t = 0; t countPointForecast; t++) { + // - , + if (t < x.size()) { + e = x[t]-forecast[t]; + } else { + e = 0; + } + + S.push_back(S[t] + T[t] + alpha * e); // + T.push_back(T[t] + alpha * gamma * e); // + forecast.push_back(S[t+1] + T[t+1]); // + } +} + + +// +void AddTrendNoSeasonality::createModelForEstimation() { + this->init(); // + double e = 0; + + // countPointForecast + for (unsigned int t = 0; t < xLearning.size()-1 + this->countPointForecast; t++) { + // - , + if (t < xLearning.size()) { + e = xLearning[t]-forecast[t]; + } else { + e = 0; + } + + S.push_back(S[t] + T[t] + alpha * e); // + T.push_back(T[t] + alpha * gamma * e); // + forecast.push_back(S[t+1] + T[t+1]); // + } +} + + +// +vector AddTrendNoSeasonality::getForecast() { + vector result; + for (unsigned int i = forecast.size() - countPointForecast; i < forecast.size(); i++) { + result.push_back(forecast[i]); + } + return result; +} + +// +double AddTrendNoSeasonality::calcEstimation(Aic *aic) { + return aic->getValue(3, this->xEstimation, this->forecast); +} + + +// +// TODO: +Param* AddTrendNoSeasonality::optimize(Estimation *est) { + Param *optimal = new Param(); + double minSmape = 99999; + for (double al = 0.1; al < 1; al+= 0.01) { + for (double gam = 0.1; gam < 1; gam+= 0.01) { + this->setParam("alpha", al); + this->setParam("gamma", gam); + this->createModelForEstimation(); + double smapeValue = est->getValue(getXEstimation(), getForecast()); + if (minSmape > smapeValue) { + minSmape = smapeValue; + optimal->alpha = al; + optimal->gamma = gam; + } + } + } + return optimal; +} + diff --git a/project_template/AddTrendNoSeasonality.h b/project_template/AddTrendNoSeasonality.h new file mode 100644 index 0000000..f4f2164 --- /dev/null +++ b/project_template/AddTrendNoSeasonality.h @@ -0,0 +1,31 @@ +#ifndef ADDTRENDNOSEASONALITY_H +#define ADDTRENDNOSEASONALITY_H + +#include "Method.h" +#include "Aic.h" +#include "Param.h" + +using namespace std; + +// Method +// , +class AddTrendNoSeasonality : public Method { +public: + double alpha; // + vector S; // + double gamma; // + vector T; // + + AddTrendNoSeasonality(vector, int); + ~AddTrendNoSeasonality(); + + vector getForecast(); + void init(); + void createModel(); + void createModelForEstimation(); + void 
setParam(string, double); + double calcEstimation(Aic *); + Param* optimize(Estimation *); // +}; + +#endif \ No newline at end of file diff --git a/project_template/Aic.cpp b/project_template/Aic.cpp new file mode 100644 index 0000000..bb473e2 --- /dev/null +++ b/project_template/Aic.cpp @@ -0,0 +1,202 @@ +#include "StdAfx.h" +#include +#include +#include "Aic.h" +# define M_PI 3.14159265358979323846 /* pi */ + +using namespace std; + +// критерий Акаике. Наследуется от класса Estimation. +// реализует метод "получить значение критерия" + + +Aic::Aic() { +} + +Aic::~Aic(){ +} +/* +paramsCount - count of model params, +количество параетров модели +tsLen - lenght of forecasted estimated time seria, +длина оцениваемого спрогнозированного временного ряда +isSecondOrder - AIC second order modification, +модифицированный АИК второго порядка +original - values ​​of the predicted time series, +известные значения прогнозируемого временного ряда +model - forecasted values ​​of the predicted time series, +прогнозированные значения прогнозируемого временного ряда +*/ +//Надо перенести в интерфейс модели, логично если модели будут в последствии +//разных типов +//Функция правдоподобия для не линейной модели +double Aic::logLikNLS(vector forecast, vector weights){ + int n = forecast.size(); + if (weights.size() == 0){ + for (int i = 0; i < n; i++){ + weights.push_back(1); + } + } + //summ log(w) + double wls = 0; + //wetghted forecasts + double wfor = 0; + for (int i = 0; i < n; i++){ + //zero weights hack + if (weights[i] != 0){ + wls += log(weights[i]); + } + else{ + wls += log((double)1); + } + wfor += weights[i] * forecast[i] * forecast[i]; + } + double res = -n * (log((double)2 * M_PI) + 1 - log((double)n) - wls + log((double)wfor)) / 2; + return res; +} +//Функция правдоподобия для линейной модели +double Aic::logLikLM(vector forecast, vector weights){ + int n = forecast.size(); + if (weights.size() == 0){ + for (int i = 0; i < n; i++){ + weights.push_back(1); + } + } + else{ + int i = 0; + while (i < n){ + if (weights[i] == 0){ + forecast.erase(forecast.begin() + i); + weights.erase(weights.begin() + i); + n = forecast.size(); + } + i++; + } + } + //summ log(w) + double wls = 0; + //wetghted forecasts + + double wfor = 0; + for (int i = 0; i < n; i++){ + wls += log(weights[i]); + wfor += weights[i] * forecast[i] * forecast[i]; + } + double res = 0.5 * (wls - n * (log((double)2 * M_PI) + 1 - log((double)n) + log((double)wfor))); + return res; +} + +//Свойство класса модели +int Aic::getParamsCount(vector model){ +#pragma message ("Need solve where is it must do") + cout << "log-likelihood function calculate"; + return 0; +} + + +/* +models with special methods: +glm, multinom ++- maxlikeFit, merMod, unmarkedFit, vglm +*/ +double Aic::getBicValue(int paramsCount, + vector original, vector model){ + int n = (original.size() > model.size() ? 
model.size() : original.size()); + double res = 0; + double lik = 0; + double lik2 = 0; + vector e; + cout << "Bicc " << endl; + cout << " e sum(lik) sum(lik2)" << endl; + for (int i = 0; i < n; i++){ + //adderror + e.push_back(original[i] - model[i]); + cout << " " << e[i]; + lik += e[i] * e[i]; + cout << " " << lik; + lik2 += log(fabs(model[i])); + cout << " " << lik2 << endl; + // MULT lik += (original[i] - model[i])/ model[i]; + } + lik = n * log(lik); + //MULT lik +=2 *lik2; + res = lik + log((double)n) * paramsCount; + cout << " " << res << " " << endl; + return res; +} +double Aic::getAiccValue(int paramsCount, + vector original, vector model){ + int n = (original.size() > model.size() ? model.size() : original.size()); + double res = 0; + double lik = 0; + double lik2 = 0; + vector e; + cout << "Aicc " << endl; + cout << " e sum(lik) sum(lik2)" << endl; + for (int i = 0; i < n; i++){ + //adderror + e.push_back(original[i] - model[i]); + cout << " " << e[i]; + lik += e[i] * e[i]; + cout << " " << lik; + lik2 += log(fabs(model[i])); + cout << " " << lik2 << endl; + // MULT lik += (original[i] - model[i])/ model[i]; + } + lik = n * log(lik); + //MULT lik +=2 *lik2; + res = lik + (2 * paramsCount* n)/( n - paramsCount - 1); + cout << " " << res << " " << endl; + return res; +} +/* +valide for next models by package: +aov, clm, clmm, coxme, coxph, gls, lm, lme, lmeik, mer, merMod, +multinom, nlme, nls, polr, rlm, zeroinfl +*/ +double Aic::getValue(int paramsCount, // bool isSecondOrder, + vector original, vector model) { + //Validate(original, model); + int n = (original.size() > model.size() ? model.size() : original.size()); + ////// R default package + //double logLik = Aic::logLikNLS(model, RSSweights(original, model));//Aic::logLikLM(model, RSSweights(original, model)); + ////R-package version + //if (true){//isSecondOrder){ + // return -2 * logLik + 2 * paramsCount; + //} + //else + //{ + // return -2 * logLik + 2 * paramsCount *(n / (n - paramsCount - 1)); + //} + //lm version from paper + /*double rss = RSS(original, model); + double res = 2 * paramsCount + n * log(rss / (n - 2)); + return res;*/ + + ///forecast 5.6 R package + double res = 0; + double lik = 0; + double lik2 = 0; + vector e; + //cout << "Aic " << endl; + //cout << " e sum(lik) sum(lik2)" << endl; + for (int i = 0; i < n; i++){ + //adderror + e.push_back(original[i] - model[i]); + //cout << " " << e[i]; + lik += e[i] * e[i]; + //cout << " " << lik; + lik2 += log(fabs(model[i])); + //cout << " " << lik2 << endl; + // MULT lik += (original[i] - model[i])/ model[i]; + } + lik = n * log(lik); + //MULT lik +=2 *lik2; + res = lik + 2 * paramsCount; + //cout << " " << res << " " << endl; + return res; +} + +double Aic::getValue(vector original, vector model) { + return this->getValue(4, original, model); +} diff --git a/project_template/Aic.h b/project_template/Aic.h new file mode 100644 index 0000000..e88248d --- /dev/null +++ b/project_template/Aic.h @@ -0,0 +1,40 @@ +#ifndef AIC_H +#define AIC_H +#include "Estimation.h" +#include + +using namespace std; + +// . Estimation. 
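In effect, Aic::getValue, getAiccValue and getBicValue above all start from the residual sum of squares RSS = sum((original[i] - model[i])^2) over the n overlapping points and the parameter count k, and return AIC = n*ln(RSS) + 2k, AICc = n*ln(RSS) + 2kn/(n - k - 1), and BIC = n*ln(RSS) + k*ln(n) respectively. A minimal standalone restatement (hypothetical helper, not part of the project; assumes RSS > 0 and n > k + 1):

#include <algorithm>
#include <cmath>
#include <cstddef>
#include <vector>

// Hypothetical sketch: the three criteria computed above, from aligned series and k parameters.
struct Criteria { double aic; double aicc; double bic; };

Criteria informationCriteria(const std::vector<double>& original,
                             const std::vector<double>& model, int k) {
    std::size_t n = std::min(original.size(), model.size());
    double rss = 0.0;
    for (std::size_t i = 0; i < n; ++i) {
        double e = original[i] - model[i];
        rss += e * e;
    }
    double lik = static_cast<double>(n) * std::log(rss);  // n * ln(RSS), as in Aic::getValue
    Criteria c;
    c.aic  = lik + 2.0 * k;
    c.aicc = lik + 2.0 * k * static_cast<double>(n) / (static_cast<double>(n) - k - 1);
    c.bic  = lik + std::log(static_cast<double>(n)) * k;
    return c;
}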
+// " " +class Aic : public Estimation { +private: + static double getMinValue(vector AicWeights); // + //For fuzzy adoptive weights calculate + //Delta koeff + static double delta; + static double N; + static double relativeError(double value, double forecastedValue); + static double forecastedTendency(vector realSeria); + static double grayTendency(double forecastedTendency, vector timeSeria, vector> forecastedSeries); + static double grayBasicWeight(double relativeError, vector timeSeria, vector> forecastedSeries); + static double adaptiveControlCoefficient(int i); + static double beta(vector timeSeria, vector> forecastedSeries); + static double alpha(vector timeSeria, vector> forecastedSeries); +public: + Aic(); + ~Aic(); + static int getParamsCount(vector model); + static double logLikNLS(vector model, vector weights); + static double logLikLM(vector model, vector weights); + static vector calculateWeights(vector AicWeights); // + static vector calculateFuzzyAdaptiveWeights(vector timeSeria, vector> forecastedSeries); // + double getBicValue(int paramsCount, vector original, vector model); + double getAiccValue(int paramsCount, vector original, vector model); + double getValue(int paramsCount,// int tsLen, bool isSecondOrder, + vector original, vector model); // + double getValue(// int tsLen, bool isSecondOrder, + vector original, vector model); // + +}; +#endif \ No newline at end of file diff --git a/project_template/AicWeights.cpp b/project_template/AicWeights.cpp new file mode 100644 index 0000000..f801b01 --- /dev/null +++ b/project_template/AicWeights.cpp @@ -0,0 +1,126 @@ +#include "StdAfx.h" +#include +#include +#include "AicWeights.h" + +using namespace std; + +AicWeights::AicWeights(){ + delta = 0.5; + N = 0.5; +} + +AicWeights::~AicWeights(){ +} + +double AicWeights::getMinValue(vector AicValues){ + double res = AicValues[0]; + for (int i = 0; i < AicValues.size(); i++){ + if (AicValues[i] < res){ + res = AicValues[i]; + } + } + return res; +} + +vector AicWeights::calculateWeights(vector AicValues){ + vector diffs; + double minWeight = getMinValue(AicValues); + double sum = 0; + for (int i = 0; i < AicValues.size(); i++){ + diffs.push_back(AicValues[i] - minWeight); + sum += exp(-0.5 * (AicValues[i] - minWeight)); + } + vector res; + for (int i = 0; i < AicValues.size(); i++){ + res.push_back(exp(-0.5 *diffs[i]) / sum); + } + double check = 0; + for (int i = 0; i < res.size(); i++){ + check += res[i]; + } + return res; +} + +//timeSeria - Real process time seria value vector. +//forecastedSeries - Vector of vectors of the forecasted values. +//Inner vectors must have the same length that timeSeria. Usually need cut timeSeria. 
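The calculateWeights() routine above is the standard Akaike-weight transform: with delta_i = AIC_i - min_j AIC_j, each model gets weight w_i = exp(-delta_i / 2) / sum_j exp(-delta_j / 2), so the weights are positive and sum to 1. A standalone restatement for reference (hypothetical function name; assumes a non-empty input):

#include <cmath>
#include <vector>

// Hypothetical sketch of the Akaike-weight transform implemented by AicWeights::calculateWeights().
std::vector<double> akaikeWeights(const std::vector<double>& aicValues) {
    double minAic = aicValues[0];
    for (unsigned int i = 1; i < aicValues.size(); i++) {
        if (aicValues[i] < minAic) minAic = aicValues[i];
    }
    double sum = 0.0;
    for (unsigned int i = 0; i < aicValues.size(); i++) {
        sum += std::exp(-0.5 * (aicValues[i] - minAic));
    }
    std::vector<double> weights;
    for (unsigned int i = 0; i < aicValues.size(); i++) {
        weights.push_back(std::exp(-0.5 * (aicValues[i] - minAic)) / sum);
    }
    return weights;
}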
+double AicWeights::alpha(vector timeSeria, vector> forecastedSeries){ + double alphaRes = abs(timeSeria[0] - forecastedSeries[0][0]); + for (int j = 0; j < forecastedSeries.size(); j++){ + for (int i = 0; i < forecastedSeries[j].size(); i++){ + if (alphaRes < abs(timeSeria[i] - forecastedSeries[j][i])){ + alphaRes = abs(timeSeria[i] - forecastedSeries[j][i]); + } + } + } + return alphaRes; +} + +double AicWeights::beta(vector timeSeria, vector> forecastedSeries){ + double betaRes = abs(timeSeria[0] - forecastedSeries[0][0]); + for (int j = 0; j < forecastedSeries.size(); j++){ + for (int i = 0; i < forecastedSeries[j].size(); i++){ + if (betaRes > abs(timeSeria[i] - forecastedSeries[j][i])){ + betaRes = abs(timeSeria[i] - forecastedSeries[j][i]); + } + } + } + return betaRes; +} + +double AicWeights::grayBasicWeight(double relativeError, vector timeSeria, vector> forecastedSeries){ + return (alpha(timeSeria, forecastedSeries) + beta(timeSeria, forecastedSeries) * delta) / abs(relativeError) + + beta(timeSeria, forecastedSeries) * delta; +} + +double AicWeights::grayTendency(double forecastedTendency, vector timeSeria, vector> forecastedSeries){ + + return (alpha(timeSeria, forecastedSeries) + beta(timeSeria, forecastedSeries) * delta) / abs(forecastedTendency) + + beta(timeSeria, forecastedSeries) * delta; +} + +double AicWeights::delta; +double AicWeights::N; +//e(i) +double AicWeights::relativeError(double value, double forecastedValue){ + return (value - forecastedValue) / value; +} + +//c(i) +double AicWeights::forecastedTendency(vector realSeria){ + double sum = 0; + for (int i = 0; i < realSeria.size() - 1; i++){ + sum += realSeria[i]; + } + return (realSeria[realSeria.size() - 1] - (1 / (realSeria.size() - 2)) * sum) / realSeria[realSeria.size() - 1]; +} + +//n - positive number,usually n = 0.5 +//i - forecasted value number +double AicWeights::adaptiveControlCoefficient(int i){ + double acc = 0; + acc = 1 - pow(((i - 1) / i), N); + return acc; +} + +vector AicWeights::calculateFuzzyAdaptiveWeights(vector timeSeria, vector> forecastedSeries){ + vector fweights; + for (int i = 0; i < forecastedSeries.size(); i++){ + fweights.push_back(adaptiveControlCoefficient(timeSeria.size())* grayBasicWeight(relativeError(timeSeria[timeSeria.size() - 1], forecastedSeries[i][forecastedSeries[i].size() - 1]), timeSeria, forecastedSeries) + + (1 - adaptiveControlCoefficient(timeSeria.size())) * grayTendency(forecastedTendency(timeSeria), timeSeria, forecastedSeries)); + } + //normalization + double sum = 0; + for (int i = 0; i < fweights.size(); i++){ + sum += fweights[i]; + } + for (int i = 0; i < fweights.size(); i++){ + fweights[i] = fweights[i] / sum; + } + double check = 0; + for (int i = 0; i < fweights.size(); i++){ + check += fweights[i]; + } + return fweights; +} diff --git a/project_template/AicWeights.h b/project_template/AicWeights.h new file mode 100644 index 0000000..84df8a1 --- /dev/null +++ b/project_template/AicWeights.h @@ -0,0 +1,29 @@ +#ifndef AICWEIGHTS_H +#define AICWEIGHTS_H +#include + +using namespace std; + +// . Estimation. 
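How the weights are consumed is not shown in this file; presumably they blend the competing methods' forecasts into a single series. A minimal sketch of that weighted average (hypothetical name; assumes one weight per model and forecasts of equal length):

#include <vector>

// Hypothetical sketch: weighted point-by-point combination of several models' forecasts
// using weights from calculateWeights() or calculateFuzzyAdaptiveWeights().
std::vector<double> combineForecasts(const std::vector< std::vector<double> >& forecasts,
                                     const std::vector<double>& weights) {
    std::vector<double> combined(forecasts[0].size(), 0.0);
    for (unsigned int m = 0; m < forecasts.size(); m++) {
        for (unsigned int t = 0; t < forecasts[m].size(); t++) {
            combined[t] += weights[m] * forecasts[m][t];
        }
    }
    return combined;
}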
+// " " +class AicWeights { +private: + static double getMinValue(vector AicWeights); // + //For fuzzy adoptive weights calculate + //Delta koeff + static double delta; + static double N; + static double relativeError(double value, double forecastedValue); + static double forecastedTendency(vector realSeria); + static double grayTendency(double forecastedTendency, vector timeSeria, vector> forecastedSeries); + static double grayBasicWeight(double relativeError, vector timeSeria, vector> forecastedSeries); + static double adaptiveControlCoefficient(int i); + static double beta(vector timeSeria, vector> forecastedSeries); + static double alpha(vector timeSeria, vector> forecastedSeries); +public: + AicWeights(); + ~AicWeights(); + static vector calculateWeights(vector AicWeights); // + static vector calculateFuzzyAdaptiveWeights(vector timeSeria, vector> forecastedSeries); // +}; +#endif \ No newline at end of file diff --git a/project_template/BIC.cpp b/project_template/BIC.cpp new file mode 100644 index 0000000..1a7e12b --- /dev/null +++ b/project_template/BIC.cpp @@ -0,0 +1,104 @@ +#include "StdAfx.h" +#include +#include "sMAPE.h" +#include "RMSE.h" +#include "BIC.h" +#include "Math.h" +#include + + +Bic::Bic(algoritm versionBIC, int paramCount) { + pc = paramCount; + this->versionBIC = versionBIC; +} + +Bic::~Bic() { +} + + +double Bic::getValue(vector original, vector model) { + double valueBIC; + + switch (versionBIC) + { + case algoritm::Classic: + valueBIC = classicBIC(original, model, pc); + break; + case algoritm::RMSE: + valueBIC = rmseBIC(original, model, pc); + break; + case algoritm::sMAPE: + valueBIC = smapeBIC(original, model, pc); + break; + } + + return valueBIC; +} + +double Bic::getValue(int paramCount, vector original, vector model) { + + double valueBIC; + + switch (versionBIC) + { + case algoritm::Classic: + valueBIC = classicBIC(original, model, paramCount); + break; + case algoritm::RMSE: + valueBIC = rmseBIC(original, model, paramCount); + break; + case algoritm::sMAPE: + valueBIC = smapeBIC(original, model, paramCount); + break; + } + + return valueBIC; +} + + +double Bic::classicBIC(vector original, vector model, int paramCount) +{ + + double valueBIC; + + double sumLik = 0; + double logLik; + int sampleSize = original.size(); + + for (int i = 0; i < sampleSize; i++) { + sumLik += pow(original[i] - model[i], 2); + } + + logLik = log(sumLik) * sampleSize; + + valueBIC = logLik + log((double)sampleSize) * paramCount; + + return valueBIC; +} + +double Bic::rmseBIC(vector original, vector model, int paramCount) +{ + double valueBIC; + int sampleSize = original.size(); + + Estimation *RMSE = new Rmse(); + double valueRMSE = RMSE->getValue(original, model); + + valueBIC = valueRMSE * pow((double)sampleSize, paramCount / (2 * sampleSize)); + + return valueBIC; + +} + +double Bic::smapeBIC(vector original, vector model, int paramCount) +{ + double valueBIC; + int sampleSize = original.size(); + + Estimation *sMAPE = new SMape(); + double value_sMAPE = sMAPE->getValue(original, model); + + valueBIC = value_sMAPE * pow((double)sampleSize, paramCount / (2 * sampleSize)); + + return valueBIC; +} \ No newline at end of file diff --git a/project_template/BIC.h b/project_template/BIC.h new file mode 100644 index 0000000..a8bdc03 --- /dev/null +++ b/project_template/BIC.h @@ -0,0 +1,30 @@ +#ifndef BIC_H +#define BIC_H +#include "Estimation.h" +#include + +using namespace std; + +enum algoritm { + RMSE, + sMAPE, + Classic +}; + + +class Bic : public Estimation { +private: + 
algoritm versionBIC; + + double classicBIC(vector, vector, int); + double rmseBIC(vector, vector, int); + double smapeBIC(vector, vector, int); + +public: + Bic(algoritm, int); + ~Bic(); + int pc; + double getValue(vector, vector); + double getValue(int, vector, vector); +}; +#endif \ No newline at end of file diff --git a/project_template/ClassDiagram1.cd b/project_template/ClassDiagram1.cd new file mode 100644 index 0000000..addb7ba --- /dev/null +++ b/project_template/ClassDiagram1.cd @@ -0,0 +1,373 @@ + + + + + + IAAAAAAAAACAACAAAAAAAAAAAAAAACAAAAAAAAAAAAA= + Estimation.h + + + + + + AAAAAABAAAAAAAAAAAAAAAAAAAAAAAAAggAAAAAAAQg= + File.h + + + + + + AAQAAAAEAgAAAABAAAAAABAAAIBEACAkDEBEAAAAACI= + Method.h + + + + + + AABgAAAACAABAAoAAABAAAAAAAAAAAAAAAAAAAAAAAA= + Normalization.h + + + + + + AAEAAAIAAAAAAAAAAAQABAAAAAAAAGCAAAgQAEAAAAA= + Param.h + + + + + + AAEAAEAABAAAAAAEAAAAAAAAAAADAAAAAABQDAACAAA= + TrackingControlSignal.h + + + + + + AAAAAABEAECAQAIAIhAIhUAABAAhAAAEIAAQEwAMAAA= + Vovk.h + + + + + + AABgCAAACAABFAJAiQBABBACACAAAAEACQgAAAIAACA= + Preparator.h + + + + + + AAUAAAAEAgAAAAAAAAAAAAAAAIIAAAYACEAQAGCEAAI= + AddTrendAddSeasonality.h + + + + + + + + + + + + + + + + AAUAAAAEAgAAAAQAAAAAAAAAAIAAAAIACEAQCGCEAAI= + AddTrendMultSeasonality.h + + + + + + + + + + + + + + + + AAUAAAAEAgAAAABAAgAAAAAAAIAAAAIACAAQAACEAAI= + AddTrendNoSeasonality.h + + + + + + + + + + + + + + AAUAAAAEAgAAAAAAAAAAAAAAAIAAAEIACEAQAGCEAGI= + DATrendAddSeasonality.h + + + + + + + + + + + + + + + AAUgAAAEAgAAAAAAAAAAAEAAAIAAAEIACEAQAGCEAAI= + DATrendMultSeasonality.h + + + + + + AAUEAAAEAgAAAAQAAAAAAAAAAIAAAEIACAAQAACEAAI= + DATrendNoSeasonality.h + + + + + + + + + + + + + + AAUAAAAEAgAAAAAAAAAAAAAABIAAAEIACEAQAGCEACI= + DMTrendAddSeasonality.h + + + + + + + + + + + + + + + + AAUgAAAEAgAEAAAAAAAAAAAAAIAAAEIACEAQAGCEAQI= + DMTrendMultSeasonality.h + + + + + + + + + + + + + + + + AAUAAAAEBgAAAAQAAAAAAAAAAIAAAEIACAAQAACEAAI= + DMTrendNoSeasonality.h + + + + + + + + + + + + + + + + + + AAQAAAAEAgACAAAAAAAAAAAAAIAAAAKACAAAAQIAAAI= + Dsa.h + + + + + + + + + + + + + + + + AAUAAAAEAgAQAAAAAAAAAAAAAIAAAAJACEAQAGCEAAI= + MultTrendAddSeasonality.h + + + + + + AAUAAAAEAgAAAAAAAAAAAAAAAIAAAAIACEARAGCFAAI= + MultTrendMultSeasonality.h + + + + + + + + + + + + + + + + AAUAAAAEAgAAAIAAAAAAAAAAAIAAAAIACAAQAACEABI= + MultTrendNoSeasonality.h + + + + + + + + + + + + + + + + AAwAAAAEAgBAAAAAAAAAAAAAAIAAAAIACEAQAGCAAAI= + NoTrendAddSeasonality.h + + + + + + + + + + + + + + + + AAQAAAAEAgIAAABAAAAAAAAAAIAAAAIACEAQAGCAAAI= + NoTrendMultSeasonality.h + + + + + + + + + + + + + + AAQAAAAEAgAAAAAAAAAAAAAAAIAAAANACAAQAACAAAI= + NoTrendNoSeasonality.h + + + + + + AIAAAAAACwAAACIAAQCAFAAEAEAAoBAAQAAQAFCAAAA= + Aic.h + + + + + + ABAAAQAACwAAAAAAAACABAAAAAAAoAAAQAAQAFCAAAA= + AicWeights.h + + + + + + AAAAAAAAAAAAACAAAAAAAAAAAAAAAAAAAAAAAAIQAAA= + Mape.h + + + + + + AAAAAAAAAAAAACAAgAAAAAABAAAAAAAAAAAAAAAAAAA= + SMape.h + + + + + + AAAAAAAAgAAAACAAAAAAAAAACAAICAAAAAAAIAAAgAA= + A.h + + + + + + + + + + + + + + AAUAAAAEFgIGAgAEAAQkAAAAAIBEAGIhCEiQAEAAAAI= + Fuzzy.h + + + + + + + + + + + + + + + + + + AAcQAAIEBgIKAgAEQIQgAAAAAIQEAEIhCEkQAGCEAAI= + FuzzyWithSets.h + + + + + + AAAAAAAAAAAAAAAAAAAAAAAAAQAEACAgAAAAAAAAAAA= + Vovk.h + + + + \ No newline at end of file diff --git a/project_template/DATrendAddSeasonality.cpp b/project_template/DATrendAddSeasonality.cpp new file mode 100644 index 0000000..4a1ff30 --- /dev/null +++ b/project_template/DATrendAddSeasonality.cpp @@ -0,0 +1,152 @@ +// +// : +// , +// +#include "StdAfx.h" +#include +#include 
"DATrendAddSeasonality.h" +#include "Param.h" + +// +DATrendAddSeasonality::DATrendAddSeasonality(vector timeSeries, int countPointForecast) { + this->x = timeSeries; + this->countPointForecast = countPointForecast; + this->partition(); +} + +DATrendAddSeasonality::~DATrendAddSeasonality() { + // + std::vector ().swap(S); + std::vector ().swap(x); + std::vector ().swap(T); + std::vector ().swap(I); + std::vector ().swap(forecast); +} + +// , +void DATrendAddSeasonality::init() { + S.clear(); + T.clear(); + I.clear(); + forecast.clear(); + + double sumS = 0; + double sumT = 0; + for (unsigned int t = 0; t < p; t++) { + sumS += x[t]; + sumT += x[t+p]; + } + + S.push_back(sumS / p); + T.push_back((sumT/ p - S[0]) / p); + + for (unsigned int t = 0; t < p; t++) { + I.push_back(x[t] - S[0]); + } + + forecast.push_back(S[0] + T[0] * phi + I[0]); +} + +// +void DATrendAddSeasonality::setParam(string paramName, double value) { + if (paramName.compare("alpha") == 0) { + this->alpha = value; + } else if (paramName.compare("gamma") == 0) { + this->gamma = value; + } else if (paramName.compare("p") == 0) { + this->p = value; + } else if (paramName.compare("delta") == 0) { + this->delta = value; + } else if (paramName.compare("phi") == 0) { + this->phi = value; + } + +} + +// +void DATrendAddSeasonality::createModel() { + this->init(); // + double e = 0; + + // countPointForecast + for (unsigned int t = 0; t < x.size()-1 + this->countPointForecast; t++) { + // - , + if (t < x.size()) { + e = x[t]-forecast[t]; + } else { + e = 0; + } + + S.push_back(S[t] + T[t] * phi + alpha * e); // + T.push_back(T[t] * phi + alpha * gamma * e); // + I.push_back(I[t] + delta * e); // + forecast.push_back(S[t+1] + T[t+1] * phi + I[t+1]); // + } +} + + +// +void DATrendAddSeasonality::createModelForEstimation() { + this->init(); // + double e = 0; + + // countPointForecast + for (unsigned int t = 0; t < xLearning.size()-1 + this->countPointForecast; t++) { + // - , + if (t < xLearning.size()) { + e = xLearning[t]-forecast[t]; + } else { + e = 0; + } + + S.push_back(S[t] + T[t] * phi + alpha * e); // + T.push_back(T[t] * phi + alpha * gamma * e); // + I.push_back(I[t] + delta * e); // + forecast.push_back(S[t+1] + T[t+1] * phi + I[t+1]); // + } +} + +// +vector DATrendAddSeasonality::getForecast() { + vector result; + for (unsigned int i = forecast.size() - countPointForecast; i < forecast.size(); i++) { + result.push_back(forecast[i]); + } + return result; +} + +// +double DATrendAddSeasonality::calcEstimation(Aic *aic) { + return aic->getValue(3, this->xEstimation, this->forecast); +} + + +// +// TODO: +Param* DATrendAddSeasonality::optimize(Estimation *est) { + Param *optimal = new Param(); + double minSmape = 99999; + for (double al = 0.1; al < 1; al+= 0.05) { + for (double gam = 0.1; gam < 1; gam+= 0.05) { + for (double del = 0.1; del < 1;del+= 0.05) { + for (double ph = 0.1; ph < 1;ph+= 0.05) { + this->setParam("alpha", al); + this->setParam("gamma", gam); + this->setParam("delta", del); + this->setParam("ph", ph); + this->createModelForEstimation(); + double smapeValue = est->getValue(getXEstimation(), getForecast()); + if (minSmape > smapeValue) { + minSmape = smapeValue; + optimal->alpha = al; + optimal->gamma = gam; + optimal->delta = del; + optimal->phi = ph; + } + } + } + } + } + return optimal; +} + diff --git a/project_template/DATrendAddSeasonality.h b/project_template/DATrendAddSeasonality.h new file mode 100644 index 0000000..cacefbf --- /dev/null +++ b/project_template/DATrendAddSeasonality.h @@ -0,0 
+1,35 @@ +#ifndef DATRENDADDSEASONALITY_H +#define DATRENDADDSEASONALITY_H + +#include "Method.h" +#include "Aic.h" +#include "Param.h" + +using namespace std; + +// Method +// , +class DATrendAddSeasonality : public Method { +public: + double alpha; // + vector S; // + double gamma; // + double delta; // + vector T; // + vector I; // + int p; // + double phi; // + + DATrendAddSeasonality(vector, int); + ~DATrendAddSeasonality(); + + vector getForecast(); + void init(); + void createModel(); + void createModelForEstimation(); + void setParam(string, double); + double calcEstimation(Aic *); + Param* optimize(Estimation *); // +}; + +#endif \ No newline at end of file diff --git a/project_template/DATrendMultSeasonality.cpp b/project_template/DATrendMultSeasonality.cpp new file mode 100644 index 0000000..3b58fc9 --- /dev/null +++ b/project_template/DATrendMultSeasonality.cpp @@ -0,0 +1,150 @@ +// +// : +// , +// +#include "StdAfx.h" +#include +#include "DATrendMultSeasonality.h" +#include "Param.h" + +// +DATrendMultSeasonality::DATrendMultSeasonality(vector timeSeries, int countPointForecast) { + this->x = timeSeries; + this->countPointForecast = countPointForecast; + this->partition(); +} + +DATrendMultSeasonality::~DATrendMultSeasonality() { + // + std::vector ().swap(S); + std::vector ().swap(x); + std::vector ().swap(T); + std::vector ().swap(I); + std::vector ().swap(forecast); +} + +// , +void DATrendMultSeasonality::init() { + S.clear(); + T.clear(); + I.clear(); + forecast.clear(); + + double sumS = 0; + double sumT = 0; + for (unsigned int t = 0; t < p; t++) { + sumS += x[t]; + sumT += x[t+p]; + } + + S.push_back(sumS / p); + T.push_back((sumT/ p - S[0]) / p); + + for (unsigned int t = 0; t < p; t++) { + I.push_back(x[t] / S[0]); + } + + forecast.push_back((S[0] + T[0] * phi) * I[0]); +} + +// +void DATrendMultSeasonality::setParam(string paramName, double value) { + if (paramName.compare("alpha") == 0) { + this->alpha = value; + } else if (paramName.compare("gamma") == 0) { + this->gamma = value; + } else if (paramName.compare("p") == 0) { + this->p = value; + } else if (paramName.compare("delta") == 0) { + this->delta = value; + } else if (paramName.compare("phi") == 0) { + this->phi = value; + } +} + +// +void DATrendMultSeasonality::createModel() { + this->init(); // + double e = 0; + + // countPointForecast + for (unsigned int t = 0; t < x.size()-1 + this->countPointForecast; t++) { + // - , + if (t < x.size()) { + e = x[t]-forecast[t]; + } else { + e = 0; + } + + S.push_back(S[t] + T[t] * phi + alpha * e / I[t]); // + T.push_back(T[t] * phi + alpha * gamma * e / I[t]); // + I.push_back(I[t] + delta * e / (S[t] + T[t] * phi)); // + forecast.push_back((S[t+1] + T[t+1] * phi ) * I[t+1]); // + } +} + +// +void DATrendMultSeasonality::createModelForEstimation() { + this->init(); // + double e = 0; + + // countPointForecast + for (unsigned int t = 0; t < xLearning.size()-1 + this->countPointForecast; t++) { + // - , + if (t < xLearning.size()) { + e = xLearning[t]-forecast[t]; + } else { + e = 0; + } + + S.push_back(S[t] + T[t] * phi + alpha * e / I[t]); // + T.push_back(T[t] * phi + alpha * gamma * e / I[t]); // + I.push_back(I[t] + delta * e / (S[t] + T[t] * phi)); // + forecast.push_back((S[t+1] + T[t+1] * phi ) * I[t+1]); // + } +} + +// +vector DATrendMultSeasonality::getForecast() { + vector result; + for (unsigned int i = forecast.size() - countPointForecast; i < forecast.size(); i++) { + result.push_back(forecast[i]); + } + return result; +} + +// +double 
DATrendMultSeasonality::calcEstimation(Aic *aic) { + return aic->getValue(3, this->xEstimation, this->forecast); +} + + +// +// TODO: +Param* DATrendMultSeasonality::optimize(Estimation *est) { + Param *optimal = new Param(); + double minSmape = 99999; + for (double al = 0.1; al < 1; al+= 0.05) { + for (double gam = 0.1; gam < 1; gam+= 0.05) { + for (double del = 0.1; del < 1;del+= 0.05) { + for (double ph = 0.1; ph < 1;ph+= 0.05) { + this->setParam("alpha", al); + this->setParam("gamma", gam); + this->setParam("delta", del); + this->setParam("ph", ph); + this->createModelForEstimation(); + double smapeValue = est->getValue(getXEstimation(), getForecast()); + if (minSmape > smapeValue) { + minSmape = smapeValue; + optimal->alpha = al; + optimal->gamma = gam; + optimal->delta = del; + optimal->phi = ph; + } + } + } + } + } + return optimal; +} + diff --git a/project_template/DATrendMultSeasonality.h b/project_template/DATrendMultSeasonality.h new file mode 100644 index 0000000..425ca6e --- /dev/null +++ b/project_template/DATrendMultSeasonality.h @@ -0,0 +1,35 @@ +#ifndef DATRENDMULTSEASONALITY_H +#define DATRENDMULTSEASONALITY_H + +#include "Method.h" +#include "Aic.h" +#include "Param.h" + +using namespace std; + +// Method +// , +class DATrendMultSeasonality : public Method { +public: + double alpha; // + vector S; // + double gamma; // + double delta; // + vector T; // + vector I; // + int p; // + double phi; // + + DATrendMultSeasonality(vector, int); + ~DATrendMultSeasonality(); + + vector getForecast(); + void init(); + void createModel(); + void createModelForEstimation(); + void setParam(string, double); + double calcEstimation(Aic *); + Param* optimize(Estimation *); // +}; + +#endif \ No newline at end of file diff --git a/project_template/DATrendNoSeasonality.cpp b/project_template/DATrendNoSeasonality.cpp new file mode 100644 index 0000000..86cde2d --- /dev/null +++ b/project_template/DATrendNoSeasonality.cpp @@ -0,0 +1,125 @@ +// +// : +// , +// +#include "StdAfx.h" +#include +#include "DATrendNoSeasonality.h" +#include "Param.h" + +// +DATrendNoSeasonality::DATrendNoSeasonality(vector timeSeries, int countPointForecast) { + this->x = timeSeries; + this->countPointForecast = countPointForecast; + this->partition(); +} + +DATrendNoSeasonality::~DATrendNoSeasonality() { + // + std::vector ().swap(S); + std::vector ().swap(x); + std::vector ().swap(T); + std::vector ().swap(forecast); +} + +// , +void DATrendNoSeasonality::init() { + S.clear(); + T.clear(); + forecast.clear(); + + T.push_back(x[1] - x[0]); + S.push_back(x[0]); + forecast.push_back(S[0] + T[0] * phi); +} + +// +void DATrendNoSeasonality::setParam(string paramName, double value) { + if (paramName.compare("alpha") == 0) { + this->alpha = value; + } else if (paramName.compare("gamma") == 0) { + this->gamma = value; + } else if (paramName.compare("phi") == 0) { + this->phi = value; + } +} + +// +void DATrendNoSeasonality::createModel() { + this->init(); // + double e = 0; + + // countPointForecast + for (unsigned int t = 0; t < x.size()-1 + this->countPointForecast; t++) { + // - , + if (t < x.size()) { + e = x[t]-forecast[t]; + } else { + e = 0; + } + + S.push_back(S[t] + phi * T[t] + alpha * e); // + T.push_back(phi * T[t] + alpha * gamma * e); // + forecast.push_back(S[t+1] + phi * T[t+1]); // + } +} +// +void DATrendNoSeasonality::createModelForEstimation() { + this->init(); // + double e = 0; + + // countPointForecast + for (unsigned int t = 0; t < xLearning.size()-1 + this->countPointForecast; t++) { + // - , + 
if (t < xLearning.size()) { + e = xLearning[t]-forecast[t]; + } else { + e = 0; + } + + S.push_back(S[t] + phi * T[t] + alpha * e); // + T.push_back(phi * T[t] + alpha * gamma * e); // + forecast.push_back(S[t+1] + phi * T[t+1]); // + } +} + + +// +vector DATrendNoSeasonality::getForecast() { + vector result; + for (unsigned int i = forecast.size() - countPointForecast; i < forecast.size(); i++) { + result.push_back(forecast[i]); + } + return result; +} + +// +double DATrendNoSeasonality::calcEstimation(Aic *aic) { + return aic->getValue(2, this->xEstimation, this->forecast); +} + + +// +// TODO: +Param* DATrendNoSeasonality::optimize(Estimation *est) { + Param *optimal = new Param(); + double minSmape = 99999; + for (double al = 0.1; al < 1; al+= 0.05) { + for (double gam = 0.1; gam < 1; gam+= 0.05) { + for (double ph = 0.1; ph < 1; ph+= 0.05) { + this->setParam("alpha", al); + this->setParam("gamma", gam); + this->setParam("phi", ph); + this->createModelForEstimation(); + double smapeValue = est->getValue(getXEstimation(), getForecast()); + if (minSmape > smapeValue) { + minSmape = smapeValue; + optimal->alpha = al; + optimal->gamma = gam; + optimal->phi = phi; + } + } + } + } + return optimal; +} diff --git a/project_template/DATrendNoSeasonality.h b/project_template/DATrendNoSeasonality.h new file mode 100644 index 0000000..44027c8 --- /dev/null +++ b/project_template/DATrendNoSeasonality.h @@ -0,0 +1,32 @@ +#ifndef DATRENDNOSEASONALITY_H +#define DATRENDNOSEASONALITY_H + +#include "Method.h" +#include "Aic.h" +#include "Param.h" + +using namespace std; + +// Method +// , +class DATrendNoSeasonality : public Method { +public: + double alpha; // + vector S; // + double gamma; // + vector T; // + double phi; // + + DATrendNoSeasonality(vector, int); + ~DATrendNoSeasonality(); + + vector getForecast(); + void init(); + void createModel(); + void createModelForEstimation(); + void setParam(string, double); + double calcEstimation(Aic *); + Param* optimize(Estimation *); // +}; + +#endif \ No newline at end of file diff --git a/project_template/DMTrendAddSeasonality.cpp b/project_template/DMTrendAddSeasonality.cpp new file mode 100644 index 0000000..e881929 --- /dev/null +++ b/project_template/DMTrendAddSeasonality.cpp @@ -0,0 +1,155 @@ +// +// : +// , +// +#include "StdAfx.h" +#include "math.h" +#include +#include "DMTrendAddSeasonality.h" +#include "Param.h" + +// +DMTrendAddSeasonality::DMTrendAddSeasonality(vector timeSeries, int countPointForecast) { + this->x = timeSeries; + this->countPointForecast = countPointForecast; + this->partition(); +} + +DMTrendAddSeasonality::~DMTrendAddSeasonality() { + // + std::vector ().swap(S); + std::vector ().swap(x); + std::vector ().swap(T); + std::vector ().swap(I); + std::vector ().swap(forecast); +} + +// , +void DMTrendAddSeasonality::init() { + S.clear(); + T.clear(); + I.clear(); + forecast.clear(); + + double sumS = 0; + double sumT = 0; + for (unsigned int t = 0; t < p; t++) { + sumS += x[t]; + sumT += x[t+p]; + } + + S.push_back(sumS / p); + T.push_back((sumT/ p - S[0]) / p); + + for (unsigned int t = 0; t < p; t++) { + I.push_back(x[t] - S[0]); + } + + forecast.push_back(S[0] * pow(T[0], phi) + I[0]); +} + +// +void DMTrendAddSeasonality::setParam(string paramName, double value) { + if (paramName.compare("alpha") == 0) { + this->alpha = value; + } else if (paramName.compare("gamma") == 0) { + this->gamma = value; + } else if (paramName.compare("p") == 0) { + this->p = value; + } else if (paramName.compare("delta") == 0) { + this->delta = 
value; + } else if (paramName.compare("phi") == 0) { + this->phi = value; + } + +} + +// +void DMTrendAddSeasonality::createModel() { + this->init(); // + double e = 0; + + // countPointForecast + for (unsigned int t = 0; t < x.size()-1 +this->countPointForecast; t++) { + // - , + if (t < x.size()) { + e = x[t]-forecast[t]; + } else { + e = 0; + } + + S.push_back(S[t] * pow(T[t], phi) + alpha * e); // + T.push_back(pow(T[t], phi) + alpha * gamma * e / S[t]); // + I.push_back(I[t] + delta * e); // + forecast.push_back(S[t+1] * pow(T[t+1], phi) + I[t+1]); // + } +} + + +// +void DMTrendAddSeasonality::createModelForEstimation() { + this->init(); // + double e = 0; + + // countPointForecast + for (unsigned int t = 0; t < xLearning.size()-1 + this->countPointForecast; t++) { + // - , + if (t < xLearning.size()) { + e = xLearning[t]-forecast[t]; + } else { + e = 0; + } + + S.push_back(S[t] * pow(T[t], phi) + alpha * e); // + T.push_back(pow(T[t], phi) + alpha * gamma * e / S[t]); // + I.push_back(I[t] + delta * e); // + forecast.push_back(S[t+1] * pow(T[t+1], phi) + I[t+1]); // + } +} + + + +// +vector DMTrendAddSeasonality::getForecast() { + vector result; + for (unsigned int i = forecast.size() - countPointForecast; i < forecast.size(); i++) { + result.push_back(forecast[i]); + } + return result; +} + +// +double DMTrendAddSeasonality::calcEstimation(Aic *aic) { + return aic->getValue(3, this->xEstimation, this->forecast); +} + + +// +// TODO: +Param* DMTrendAddSeasonality::optimize(Estimation *est) { + Param *optimal = new Param(); + double minSmape = 99999; + for (double al = 0.1; al < 1; al+= 0.05) { + for (double gam = 0.1; gam < 1; gam+= 0.05) { + for (double del = 0.1; del < 1;del+= 0.05) { + for (double ph = 0.1; ph < 1;ph+= 0.05) { + this->setParam("alpha", al); + this->setParam("gamma", gam); + this->setParam("delta", del); + this->setParam("ph", ph); + this->createModelForEstimation(); + double smapeValue = est->getValue(getXEstimation(), getForecast()); + if (minSmape > smapeValue) { + minSmape = smapeValue; + optimal->alpha = al; + optimal->gamma = gam; + optimal->delta = del; + optimal->phi = ph; + } + } + } + } + } + return optimal; +} + diff --git a/project_template/DMTrendAddSeasonality.h b/project_template/DMTrendAddSeasonality.h new file mode 100644 index 0000000..704c880 --- /dev/null +++ b/project_template/DMTrendAddSeasonality.h @@ -0,0 +1,35 @@ +#ifndef DMTRENDADDSEASONALITY_H +#define DMTRENDADDSEASONALITY_H + +#include "Method.h" +#include "Aic.h" +#include "Param.h" + +using namespace std; + +// Method +// , +class DMTrendAddSeasonality : public Method { +public: + double alpha; // + vector S; // + double gamma; // + double delta; // + vector T; // + vector I; // + int p; // + double phi; // + + DMTrendAddSeasonality(vector, int); + ~DMTrendAddSeasonality(); + + vector getForecast(); + void init(); + void createModel(); + void createModelForEstimation(); + void setParam(string, double); + double calcEstimation(Aic *); + Param* optimize(Estimation *); // +}; + +#endif \ No newline at end of file diff --git a/project_template/DMTrendMultSeasonality.cpp b/project_template/DMTrendMultSeasonality.cpp new file mode 100644 index 0000000..b496da4 --- /dev/null +++ b/project_template/DMTrendMultSeasonality.cpp @@ -0,0 +1,149 @@ +// +// : +// , +// +#include "StdAfx.h" +#include "math.h" +#include +#include "DMTrendMultSeasonality.h" +#include "Param.h" + +// +DMTrendMultSeasonality::DMTrendMultSeasonality(vector timeSeries, int countPointForecast) { + this->x = timeSeries; + 
this->countPointForecast = countPointForecast; + this->partition(); +} + +DMTrendMultSeasonality::~DMTrendMultSeasonality() { + // + std::vector ().swap(S); + std::vector ().swap(x); + std::vector ().swap(T); + std::vector ().swap(I); + std::vector ().swap(forecast); +} + +// , +void DMTrendMultSeasonality::init() { + S.clear(); + T.clear(); + I.clear(); + forecast.clear(); + + double sumS = 0; + double sumT = 0; + for (unsigned int t = 0; t < p; t++) { + sumS += x[t]; + sumT += x[t+p]; + } + + S.push_back(sumS / p); + T.push_back((sumT/ p - S[0]) / p); + + for (unsigned int t = 0; t < p; t++) { + I.push_back(x[t] / S[0]); + } + + forecast.push_back(S[0] * pow(T[0], phi) * I[0]); +} + +// +void DMTrendMultSeasonality::setParam(string paramName, double value) { + if (paramName.compare("alpha") == 0) { + this->alpha = value; + } else if (paramName.compare("gamma") == 0) { + this->gamma = value; + } else if (paramName.compare("p") == 0) { + this->p = value; + } else if (paramName.compare("delta") == 0) { + this->delta = value; + } else if (paramName.compare("phi") == 0) { + this->phi = value; + } +} + +// +void DMTrendMultSeasonality::createModel() { +this->init(); // + double e = 0; + + // countPointForecast + for (unsigned int t = 0; t < x.size()-1 + this->countPointForecast; t++) { + // - , + if (t < x.size()) { + e = x[t]-forecast[t]; + } else { + e = 0; + } + + S.push_back(S[t] * pow(T[t], phi) + alpha * e / I[t]); // + T.push_back(pow(T[t], phi) + alpha * gamma * e / (I[t] * S[t])); // + I.push_back(I[t] + delta * e / (S[t] * pow(T[t], phi))); // + forecast.push_back(S[t+1] * pow(T[t+1], phi) * I[t+1]); // + } +} + +// . +void DMTrendMultSeasonality::createModelForEstimation() { + this->init(); // + double e = 0; + + // countPointForecast + for (unsigned int t = 0; t < xLearning.size()-1 + this->countPointForecast; t++) { + // - , + if (t < xLearning.size()) { + e = xLearning[t]-forecast[t]; + } else { + e = 0; + } + ` + S.push_back(S[t] * pow(T[t], phi) + alpha * e / I[t]); // + T.push_back(pow(T[t], phi) + alpha * gamma * e / (I[t] * S[t])); // + I.push_back(I[t] + delta * e / (S[t] * pow(T[t], phi))); // + forecast.push_back(S[t+1] * pow(T[t+1], phi) * I[t+1]); // + } +} + +// +vector DMTrendMultSeasonality::getForecast() { + vector result; + for (unsigned int i = forecast.size() - countPointForecast; i < forecast.size(); i++) { + result.push_back(forecast[i]); + } + return result; +} + +// +double DMTrendMultSeasonality::calcEstimation(Aic *aic) { + return aic->getValue(3, this->xEstimation, this->forecast); +} + +// +// TODO: +Param* DMTrendMultSeasonality::optimize(Estimation *est) { + Param *optimal = new Param(); + double minSmape = 99999; + for (double al = 0.1; al < 1; al+= 0.05) { + for (double gam = 0.1; gam < 1; gam+= 0.05) { + for (double del = 0.1; del < 1;del+= 0.05) { + for (double ph = 0.1; ph < 1;ph+= 0.05) { + this->setParam("alpha", al); + this->setParam("gamma", gam); + this->setParam("delta", del); + this->setParam("phi", ph); + this->createModelForEstimation(); + double smapeValue = est->getValue(getXEstimation(), getForecast()); + if (minSmape > smapeValue) { + minSmape = smapeValue; + optimal->alpha = al; + optimal->gamma = gam; + optimal->delta = del; + optimal->phi = ph; + } + } + } + } + } + return optimal; +} diff --git a/project_template/DMTrendMultSeasonality.h b/project_template/DMTrendMultSeasonality.h new file mode 100644 index 0000000..c8d703f --- /dev/null +++ b/project_template/DMTrendMultSeasonality.h @@ -0,0 +1,36 @@ +#ifndef 
DMTRENDMULTSEASONALITY_H +#define DMTRENDMULTSEASONALITY_H + +#include "Method.h" +#include "Aic.h" +#include "Param.h" + +using namespace std; + +// Method +// , +class DMTrendMultSeasonality : public Method { +public: + double alpha; // + vector S; // + double gamma; // + double delta; // + vector T; // + vector I; // + vector R; // + int p; // + double phi; // + + DMTrendMultSeasonality(vector, int); + ~DMTrendMultSeasonality(); + + vector getForecast(); + void init(); + void createModel(); + void createModelForEstimation(); + void setParam(string, double); + double calcEstimation(Aic *); + Param* optimize(Estimation *); // +}; + +#endif \ No newline at end of file diff --git a/project_template/DMTrendNoSeasonality.cpp b/project_template/DMTrendNoSeasonality.cpp new file mode 100644 index 0000000..eec9a7a --- /dev/null +++ b/project_template/DMTrendNoSeasonality.cpp @@ -0,0 +1,127 @@ +// +// : +// , +// +#include "StdAfx.h" +#include +#include +#include "DMTrendNoSeasonality.h" +#include "Param.h" + +// +DMTrendNoSeasonality::DMTrendNoSeasonality(vector timeSeries, int countPointForecast) { + this->x = timeSeries; + this->countPointForecast = countPointForecast; + this->partition(); +} + +DMTrendNoSeasonality::~DMTrendNoSeasonality() { + // + std::vector ().swap(S); + std::vector ().swap(x); + std::vector ().swap(T); + std::vector ().swap(forecast); +} + +// , +void DMTrendNoSeasonality::init() { + S.clear(); + T.clear(); + forecast.clear(); + + T.push_back(x[1] / x[0]); + S.push_back(x[0]); + forecast.push_back(S[0] * pow(T[0], phi)); +} + +// +void DMTrendNoSeasonality::setParam(string paramName, double value) { + if (paramName.compare("alpha") == 0) { + this->alpha = value; + } else if (paramName.compare("gamma") == 0) { + this->gamma = value; + } else if (paramName.compare("phi") == 0) { + this->phi = value; + } +} + +// +void DMTrendNoSeasonality::createModel() { + this->init(); // + double e = 0; + + // countPointForecast + for (unsigned int t = 0; t < x.size()-1 + this->countPointForecast; t++) { + // - , + if (t < x.size()) { + e = x[t]-forecast[t]; + } else { + e = 0; + } + + S.push_back(S[t] * pow(T[t], phi) + alpha * e); // + T.push_back(pow(T[t], phi) + alpha * gamma * e / S[t]); // + forecast.push_back(S[t+1] * pow(T[t+1], phi)); // + } +} + +// +void DMTrendNoSeasonality::createModelForEstimation() { + this->init(); // + double e = 0; + + // countPointForecast + for (unsigned int t = 0; t < xLearning.size()-1 + this->countPointForecast; t++) { + // - , + if (t < xLearning.size()) { + e = xLearning[t]-forecast[t]; + } else { + e = 0; + } + + S.push_back(S[t] * pow(T[t], phi) + alpha * e); // + T.push_back(pow(T[t], phi) + alpha * gamma * e / S[t]); // + forecast.push_back(S[t+1] * pow(T[t+1], phi)); // + } +} + + +// +vector DMTrendNoSeasonality::getForecast() { + vector result; + for (unsigned int i = forecast.size() - countPointForecast; i < forecast.size(); i++) { + result.push_back(forecast[i]); + } + return result; +} + +// +double DMTrendNoSeasonality::calcEstimation(Aic *aic) { + return aic->getValue(3, this->xEstimation, this->forecast); +} + + +// +// TODO: +Param* DMTrendNoSeasonality::optimize(Estimation *est) { + Param *optimal = new Param(); + double minSmape = 99999; + for (double al = 0.1; al < 1; al+= 0.05) { + for (double gam = 0.1; gam < 1; gam+= 0.05) { + for (double ph = 0.1; ph < 1; ph+= 0.05) { + this->setParam("alpha", al); + this->setParam("gamma", gam); + this->setParam("phi", ph); + this->createModelForEstimation(); + double smapeValue = 
est->getValue(getXEstimation(), getForecast()); + if (minSmape > smapeValue) { + minSmape = smapeValue; + optimal->alpha = al; + optimal->gamma = gam; + optimal->phi = phi; + } + } + } + } + return optimal; +} diff --git a/project_template/DMTrendNoSeasonality.h b/project_template/DMTrendNoSeasonality.h new file mode 100644 index 0000000..49ba840 --- /dev/null +++ b/project_template/DMTrendNoSeasonality.h @@ -0,0 +1,32 @@ +#ifndef DMTRENDNOSEASONALITY_H +#define DMTRENDNOSEASONALITY_H + +#include "Method.h" +#include "Aic.h" +#include "Param.h" + +using namespace std; + +// Method +// , +class DMTrendNoSeasonality : public Method { +public: + double alpha; // + vector S; // + double gamma; // + vector T; // + double phi; // + + DMTrendNoSeasonality(vector, int); + ~DMTrendNoSeasonality(); + + vector getForecast(); + void init(); + void createModel(); + void createModelForEstimation(); + void setParam(string, double); + double calcEstimation(Aic *); + Param* optimize(Estimation *); // +}; + +#endif \ No newline at end of file diff --git a/project_template/Dsa.cpp b/project_template/Dsa.cpp new file mode 100644 index 0000000..c73e4d1 --- /dev/null +++ b/project_template/Dsa.cpp @@ -0,0 +1,307 @@ +#include "StdAfx.h" +#include "Dsa.h" +#include +#include "Param.h" + +Dsa::Dsa(vector timeSeries, int countPointForecast){ + this->countPointForecast = countPointForecast; + this->x = timeSeries; + this->partition(); +} + +Dsa::~Dsa(void) { +} + +void Dsa::init() { + defineUniversum(); + fuzzyTs.clear(); +} + +// +// , , +void Dsa::defineUniversum() { + this->universumMax = x[0]; + this->universumMin = x[0]; + for (int i = 1; i < x.size(); i++) { + if (universumMax < x[i]) { + universumMax = x[i]; + } + if (universumMin > x[i]) { + universumMin = x[i]; + } + } + + // + // , + // , + double baseSize = (universumMax - universumMin) / countFuzzyParts; + a.resize(countFuzzyParts + 1); + for (int i=0; i < countFuzzyParts + 1;i++){ + a[i] = (A(universumMin + (i-sizeLabels) * baseSize, universumMin + (i+sizeLabels)*baseSize)); + } +} + +// +// +int Dsa::fuzzyfication(double value) { + // , + // , + int indMax = 0; + double max = a[0].getValue(value); + for (int j =0; j < a.size(); j++) { + if (a[j].getValue(value) > max) { + indMax = j; + max = a[j].getValue(value); + } + } + return indMax; +} + +void Dsa::createRules() { + rulesIn.clear(); + rulesIn.resize(fuzzyTs.size() - countRulesIn); + rulesOut.clear(); + rulesOut.resize(fuzzyTs.size() - countRulesIn); + for (int i=0; i < fuzzyTs.size() - countRulesIn; i ++) { + vector v; + v.resize(countRulesIn); + for (int j=i; j < i + countRulesIn; j++) { + v[j-i] = fuzzyTs[j]; + } + rulesIn[i] = v; + rulesOut[i] = fuzzyTs[i+countRulesIn]; + } +} + + +vector Dsa::searchRules(vector inPart) { + vector res; + for (int i = 0; i < rulesIn.size(); i++) { + bool isRuleFound = true; + for (int j = 0; j < rulesIn[i].size(); j++) { + if (rulesIn[i][j] != inPart[j]) { + isRuleFound = false; + } + } + if (isRuleFound) { + res.push_back(rulesOut[i]); + } + } + return res; +} + +vector Dsa::searchRulesForSeason(int index) { + vector res; + int i =index - p; + while (i > 0) { + res.push_back(fuzzyfication(x[i])); + i -= p; + } + return res; +} + + +void Dsa::createModelForEstimation() { + init(); + fuzzyTs.resize(xLearning.size()); + for (int t = 0; t < xLearning.size(); t++) { + fuzzyTs[t] = fuzzyfication(xLearning[t]); + } + createRules(); + + forecast.clear(); + forecast.resize(fuzzyTs.size()+countPointForecast); + + for (int i=0; i < fuzzyTs.size();i++) { + forecast[i] = 
defuzzyfication(fuzzyTs[i]); + } + int k = fuzzyTs.size(); + + for (int i=0; i < countPointForecast;i++) { + vector lastPoints; + lastPoints.resize(countRulesIn); + for (int j = countRulesIn; j > 0; j--) { + lastPoints[countRulesIn -j] = (fuzzyTs[fuzzyTs.size() - j]); + } + + double sum = 0; + + // + vector foundSeasonFuncs = searchRulesForSeason(fuzzyTs.size()); + + for (int j =0; j < foundSeasonFuncs.size(); j++) { + sum += a[foundSeasonFuncs[j]].getValueAtTop(); + } + double valueAtSeason = sum / foundSeasonFuncs.size(); + + + forecast[i+k] = (valueAtSeason); + int index = fuzzyfication(forecast[i+k]); + // + fuzzyTs.push_back(index); + xLearning.push_back(forecast[i+k]); + createRules(); + + } + for (int i=0; i < countPointForecast;i++) { + if (fuzzyTs.size() > 0) + fuzzyTs.pop_back(); + if (rulesIn.size() > 0) + rulesIn.pop_back(); + if (rulesOut.size() > 0) + rulesOut.pop_back(); + xLearning.pop_back(); + } +} + + +// , +void Dsa::createModel() { + init(); + fuzzyTs.resize(x.size()); + for (int i = 0; i < x.size(); i++) { + fuzzyTs[i] = fuzzyfication(x[i]); + } + createRules(); + + + forecast.clear(); + forecast.resize(fuzzyTs.size()+countPointForecast); + + for (int i=0; i < fuzzyTs.size();i++) { + forecast[i] = defuzzyfication(fuzzyTs[i]); + } + int k = fuzzyTs.size(); + + for (int i=0; i < countPointForecast;i++) { + vector lastPoints; + lastPoints.resize(countRulesIn); + for (int j = countRulesIn; j > 0; j--) { + lastPoints[countRulesIn -j] = (fuzzyTs[fuzzyTs.size() - j]); + } + + double sum = 0; + + // + vector foundSeasonFuncs = searchRulesForSeason(fuzzyTs.size()); + + for (int j =0; j < foundSeasonFuncs.size(); j++) { + sum += a[foundSeasonFuncs[j]].getValueAtTop(); + } + double valueAtSeason = sum / foundSeasonFuncs.size(); + + + forecast[i+k] = (valueAtSeason); + int index = fuzzyfication(forecast[i+k]); + // + fuzzyTs.push_back(index); + x.push_back(forecast[i+k]); + createRules(); + + } + for (int i=0; i < countPointForecast;i++) { + if (fuzzyTs.size() > 0) + fuzzyTs.pop_back(); + if (rulesIn.size() > 0) + rulesIn.pop_back(); + if (rulesOut.size() > 0) + rulesOut.pop_back(); + x.pop_back(); + } +} + + + +vector Dsa::getForecast() { + vector result; + for (unsigned int i = forecast.size() - countPointForecast; i < forecast.size(); i++) { + result.push_back(forecast[i]); + } + return forecast; +} + +double Dsa::defuzzyfication(int index) { + return this->a[index].getValueAtTop(); +} + +void Dsa::setParam(string paramName, double value) { + if (paramName.compare("countFuzzyParts") == 0) { + this->countFuzzyParts = value; + } else if (paramName.compare("countRulesIn") == 0) { + if (this->xLearning.size() < value) { + this->countRulesIn = this->xLearning.size(); + } else { + this->countRulesIn = value; + } + + } else if (paramName.compare("p") == 0) { + if (value <= 0) { + this->p = 1; + } else { + this->p = value; + } + } else if (paramName.compare("sizeLabels") == 0) { + this->sizeLabels = value; + } + + if (paramName.compare("0") == 0) { + this->countFuzzyParts = value * 100; + } else if (paramName.compare("1") == 0) { + if (this->xLearning.size() < value * 5) { + this->countRulesIn = this->xLearning.size(); + } else { + this->countRulesIn = value * 5; + } + } else if (paramName.compare("2") == 0) { + this->sizeLabels = value * 100; + } +} + + +// +double Dsa::calcEstimation(Aic *aic) { + return aic->getValue(3, this->xEstimation, this->forecast); +} + +// +// TODO: +Param* Dsa::optimize(Estimation *est) { + Param *optimal = new Param(); + double minSmape = 99999; + for 
(double cfp = 2; cfp < 50;cfp+= 1) { + cout << "DSA " << cfp << " 50" <<"\n";; + for (double cri = 1; cri < 5;cri+=1) { + for (double sizeLabels = 1; sizeLabels < 50; sizeLabels+= 1) { + this->setParam("countRulesIn", cri); + this->setParam("countFuzzyParts", cfp); + this->setParam("sizeLabels", sizeLabels); + double smapeValue = 0; + int maxShift = 5; + if (maxShift > this->countPointForecast) { + maxShift = this->countPointForecast-1; + } + this->countPointForecast -= maxShift; + for (int shift=0; shift <= maxShift; shift++) { + this->partition(); + this->createModelForEstimation(); + smapeValue += est->getValue(x, getForecast()); + this->countPointForecast++; + } + this->countPointForecast--; + smapeValue = smapeValue / maxShift; + if (minSmape > smapeValue) { + minSmape = smapeValue; + optimal->countFuzzyParts = cfp; + optimal->countRulesIn = cri; + optimal->estim = smapeValue; + optimal->sizeLabels = sizeLabels; + } + } + } + } + return optimal; +} + +int Dsa::getNamberParam() { + return 3; +} diff --git a/project_template/Dsa.h b/project_template/Dsa.h new file mode 100644 index 0000000..fb5267a --- /dev/null +++ b/project_template/Dsa.h @@ -0,0 +1,48 @@ +#ifndef DSA_H +#define DSA_H +#include +#include "A.h" +#include "Method.h" +#include "Param.h" + +using namespace std; + +class Dsa : public Method { +private: + vector a; // + int countFuzzyParts;// + vector> rulesIn; // + vector rulesOut; // + int countRulesIn; // + double universumMin; + double universumMax; + double sizeLabels; + vector fuzzyTs; // + double alpha; // + double gamma; // + double delta; // + int p; // + double phi; // + + void defineUniversum(); // + void createRules(); // , + int fuzzyfication(double); // + double defuzzyfication(int); // + vector searchRules(vector); + vector searchRulesForSeason(int); + + +public: + Dsa(vector, int); + ~Dsa(void); + vector getForecast(); + void init(); + void createModel(); + void createModelForEstimation(); + void setParam(string, double); + double calcEstimation(Aic *); + Param* optimize(Estimation *); // + int getNamberParam(); +}; + +#endif \ No newline at end of file diff --git a/project_template/Estimation.h b/project_template/Estimation.h new file mode 100644 index 0000000..9de2f78 --- /dev/null +++ b/project_template/Estimation.h @@ -0,0 +1,33 @@ +#ifndef ESTIMATION_H +#define ESTIMATION_H +#include + +using namespace std; + +// +// "" +// +// , +// , double +class Estimation { +public: + double value; // + virtual double getValue(int paramsCount, vector, vector) = 0; // + virtual double getValue(vector, vector) = 0; // +protected: + double RSS(vector original, vector model){ + double res = 0.0; + int commonElemsCount = original.size() > model.size() ? 
original.size() : model.size(); + for (int i = 0; i < commonElemsCount; i++) { + res += (original[i] - model[i]) * (original[i] - model[i]); + } + return res; + } + virtual void Validate(vector original, vector model){ + if (original.size() == 0) + throw exception("Original time seria is empty"); + if (model.size() == 0) + throw exception("Model time seria is empty"); + } +}; +#endif \ No newline at end of file diff --git a/project_template/File.cpp b/project_template/File.cpp new file mode 100644 index 0000000..355d9a4 --- /dev/null +++ b/project_template/File.cpp @@ -0,0 +1,117 @@ +// +// : , +// +#include "stdafx.h" +#include +#include +#include +#include +#include +#include "File.h" +#include + +using namespace std; + +// +// : , , - +vector File::readFile(string tsFileName) { + vector x; // + std::ifstream inFile; // + + inFile.open(tsFileName.c_str()); + string tsString = ""; + while(inFile >> tsString) { + string str = tsString; + int pos = tsString.find("."); + if (pos > 0) { + str = tsString.replace(tsString.find("."), 1, ","); + } + x.push_back(atof(str.c_str())); + } + inFile.close(); + return x; +} + +vector File::readKonfFile(string tsFileName) { + vector x; // + int forecastCountDots = 1; + string TSPeriod = "daily"; // day, mounth, year....? + string TSName = ""; // day, mounth, year....? + std::ifstream inFile; // + + inFile.open(tsFileName.c_str()); + string tsString = ""; + while (inFile >> tsString) { + int endpos = tsString.find(';'); + int startpos = 0; + if (endpos > 0){ + if (TSName == ""){ + //first string with params + //endpos = tsString.find(';'); + //set ts name + TSName = tsString.substr(0, endpos); + startpos = endpos; + //set ts name + endpos = tsString.find(';', startpos + 1); + forecastCountDots = atoi(tsString.substr(startpos + 1, endpos - startpos - 1).c_str()); + startpos = endpos; + //set ts name + endpos = tsString.find(';', startpos + 1); + TSPeriod = tsString.substr(startpos + 1 , endpos - startpos - 1); + startpos = endpos; + endpos = tsString.find(';', startpos + 1); + } + //parse ts data + while (endpos > 0){ + x.push_back(atof(tsString.substr(startpos + 1, endpos - startpos -1).c_str())); + startpos = endpos; + endpos = tsString.find(';', startpos + 1); + } + } + + /*string str = tsString; + int pos = tsString.find("."); + if (pos > 0) { + str = tsString.replace(tsString.find("."), 1, ","); + } + x.push_back(atof(str.c_str()));*/ + } + inFile.close(); + return x; +} + +double File::round (double value) { + return floor(value + 0.5); +} + +// +string File::myRound(double num) { + double n = ((double) round(num * 100)) / 100; + std::ostringstream sstream; + sstream << n; + string s = sstream.str(); + // + int commaIndex = s.find("."); + if (commaIndex > 0) { + s = s.replace(commaIndex, 1, ","); + } + return s; +} + + +// +void File::writeFile(string outFileName, vector result) { + ofstream outFile(outFileName.c_str(), std::ofstream::out | std::ofstream::trunc); + for (unsigned int i = 0; i < result.size(); i++) { + outFile << myRound(result[i]) << "\n"; + } + outFile.close(); +} + +// , +void File::writeFile(string outFileName, string key, double value) { + ofstream outFile(outFileName.c_str(), ios::app); + outFile << key << ";" << myRound(value) << "\n"; + outFile.close(); +} + diff --git a/project_template/File.h b/project_template/File.h new file mode 100644 index 0000000..d90a196 --- /dev/null +++ b/project_template/File.h @@ -0,0 +1,23 @@ +#ifndef FILE_H +#define FILE_H +// +// : +// , +// + +#include +#include + +using namespace std; + +class 
File { +private: + static double round(double); // + static string myRound(double num); // +public: + static vector readFile (string); // + static vector readKonfFile(string); // + static void writeFile (string, vector); // + static void writeFile (string, string, double); // +}; +#endif \ No newline at end of file diff --git a/project_template/Fuzzy.cpp b/project_template/Fuzzy.cpp new file mode 100644 index 0000000..dd2e06c --- /dev/null +++ b/project_template/Fuzzy.cpp @@ -0,0 +1,354 @@ +#include "StdAfx.h" +#include "Fuzzy.h" +#include +#include "Param.h" +#include + +Fuzzy::Fuzzy(string trendType, string seasonType, vector timeSeries, int countPointForecast){ + this->countPointForecast = countPointForecast; + this->trendType = trendType; + this->seasonType = seasonType; + this->x = timeSeries; + this->partition(); +} + +Fuzzy::~Fuzzy(void) { +} + +void Fuzzy::init() { + defineUniversum(); + fuzzyTs.clear(); +} + +// +// , , +void Fuzzy::defineUniversum() { + this->universumMax = x[0]; + this->universumMin = x[0]; + for (int i = 1; i < x.size(); i++) { + if (universumMax < x[i]) { + universumMax = x[i]; + } + if (universumMin > x[i]) { + universumMin = x[i]; + } + } + + // + // , + // , + double baseSize = (universumMax - universumMin) / countFuzzyParts; + a.resize(countFuzzyParts + 1); + for (int i=0; i < countFuzzyParts + 1;i++){ + a[i] = (A(universumMin + (i-1) * baseSize, universumMin + (i+1)*baseSize)); + } +} + +// +// +int Fuzzy::fuzzyfication(double value) { + // , + // , + int indMax = 0; + double max = a[0].getValue(value); + for (int j =0; j < a.size(); j++) { + if (a[j].getValue(value) > max) { + indMax = j; + max = a[j].getValue(value); + } + } + return indMax; +} + +void Fuzzy::createRules() { + rulesIn.clear(); + rulesIn.resize(fuzzyTs.size() - countRulesIn); + rulesOut.clear(); + rulesOut.resize(fuzzyTs.size() - countRulesIn); + for (int i=0; i < fuzzyTs.size() - countRulesIn; i ++) { + vector v; + v.resize(countRulesIn); + for (int j=i; j < i + countRulesIn; j++) { + v[j-i] = fuzzyTs[j]; + } + rulesIn[i] = v; + rulesOut[i] = fuzzyTs[i+countRulesIn]; + } +} + + +vector Fuzzy::searchRules(vector inPart) { + vector res(rulesOut.size()); + int countRes = 0; + for (int i = 0; i < rulesIn.size(); i++) { + bool isRuleFound = true; + for (int j = 0; j < rulesIn[i].size(); j++) { + if (rulesIn[i][j] != inPart[j]) { + isRuleFound = false; + } + } + if (isRuleFound) { + res[countRes++] = rulesOut[i]; + } + } + res.resize(countRes); + return res; +} + +vector Fuzzy::searchRulesForSeason(int index) { + vector res; + int i =index - p; + while (i > 0) { + res.push_back(fuzzyfication(x[i])); + i -= p; + } + return res; +} + + +void Fuzzy::createModelForEstimation() { + init(); + fuzzyTs.resize(xLearning.size()); + for (int t = 0; t < xLearning.size(); t++) { + fuzzyTs[t] = fuzzyfication(xLearning[t]); + } + createRules(); + + forecast.clear(); + forecast.resize(fuzzyTs.size()+countPointForecast); + for (int i=0; i < fuzzyTs.size();i++) { + forecast[i] = defuzzyfication(fuzzyTs[i]); + } + int k = fuzzyTs.size(); + + + for (int i=0; i < countPointForecast;i++) { + vector lastPoints; + lastPoints.resize(countRulesIn); + for (int j = countRulesIn; j > 0; j--) { + lastPoints[countRulesIn -j] = (fuzzyTs[fuzzyTs.size() - j]); + } + vector foundFuncs = searchRules(lastPoints); + if (foundFuncs.size() == 0) { + foundFuncs.push_back(fuzzyTs[fuzzyTs.size()-1]); + } + double sum = 0; + + for (int j =0; j < foundFuncs.size(); j++) { + sum += a[foundFuncs[j]].getValueAtTop(); + } + // + double 
globInfo = sum / foundFuncs.size(); + A at = this->a[fuzzyTs[fuzzyTs.size()-1]]; + A atPrev = this->a[fuzzyTs[fuzzyTs.size()-2]]; + double localInfo = at.getLeft() + (at.getRight() - at.getLeft())/2 + (at.getValueAtTop() - atPrev.getValueAtTop()) / (at.getValueAtTop() + atPrev.getValueAtTop()); + + // + vector foundSeasonFuncs = searchRulesForSeason(fuzzyTs.size()); + + sum = 0; + for (int j =0; j < foundSeasonFuncs.size(); j++) { + sum += a[foundSeasonFuncs[j]].getValueAtTop(); + } + double valueAtSeason = sum / foundSeasonFuncs.size(); + + if (trendType.compare("None") == 0) { + if (seasonType.compare("None") == 0) { + forecast[i+k] = (localInfo); + } else if (seasonType.compare("Add") == 0) { + forecast[i+k] = (gamma * localInfo + (1-gamma) * valueAtSeason); + } + } else if (trendType.compare("Add") == 0) { + if (seasonType.compare("None") == 0) { + forecast[i+k] = (delta * localInfo + (1-delta) * globInfo); + } else if (seasonType.compare("Add") == 0) { + forecast[i+k] = (gamma * delta * localInfo + (1-gamma) * valueAtSeason + (1-delta) * globInfo); + } + } + int index = fuzzyfication(forecast[i+k]); + // + fuzzyTs.push_back(index); + xLearning.push_back(forecast[i+k]); + createRules(); + } + for (int i=0; i < countPointForecast;i++) { + fuzzyTs.pop_back(); + rulesIn.pop_back(); + rulesOut.pop_back(); + xLearning.pop_back(); + } + +} + + +// , +void Fuzzy::createModel() { + init(); + fuzzyTs.resize(x.size()); + for (int i = 0; i < x.size(); i++) { + fuzzyTs[i] = fuzzyfication(x[i]); + } + createRules(); + + forecast.clear(); + forecast.resize(fuzzyTs.size()+countPointForecast); + for (int i=0; i < fuzzyTs.size();i++) { + forecast[i] = defuzzyfication(fuzzyTs[i]); + } + int k = fuzzyTs.size(); + + for (int i=0; i < countPointForecast;i++) { + vector lastPoints; + lastPoints.resize(countRulesIn); + for (int j = countRulesIn; j > 0; j--) { + lastPoints[countRulesIn -j] = (fuzzyTs[fuzzyTs.size() - j]); + } + vector foundFuncs = searchRules(lastPoints); + if (foundFuncs.size() == 0) { + foundFuncs.push_back(fuzzyTs[fuzzyTs.size()-1]); + } + double sum = 0; + + for (int j =0; j < foundFuncs.size(); j++) { + sum += a[foundFuncs[j]].getValueAtTop(); + } + // + double globInfo = sum / foundFuncs.size(); + A at = this->a[fuzzyTs[fuzzyTs.size()-1]]; + A atPrev = this->a[fuzzyTs[fuzzyTs.size()-2]]; + double localInfo = at.getLeft() + (at.getRight() - at.getLeft())/2 + (at.getValueAtTop() - atPrev.getValueAtTop()) / (at.getValueAtTop() + atPrev.getValueAtTop()); + + // + vector foundSeasonFuncs = searchRulesForSeason(fuzzyTs.size()); + + sum = 0; + for (int j =0; j < foundSeasonFuncs.size(); j++) { + sum += a[foundSeasonFuncs[j]].getValueAtTop(); + } + double valueAtSeason = sum / foundSeasonFuncs.size(); + + if (trendType.compare("None") == 0) { + if (seasonType.compare("None") == 0) { + forecast[i+k] = (localInfo); + } else if (seasonType.compare("Add") == 0) { + forecast[i+k] = (gamma * localInfo + (1-gamma) * valueAtSeason); + } + } else if (trendType.compare("Add") == 0) { + if (seasonType.compare("None") == 0) { + forecast[i+k] = (delta * localInfo + (1-delta) * globInfo); + } else if (seasonType.compare("Add") == 0) { + forecast[i+k] = (gamma * delta * localInfo + (1-gamma) * valueAtSeason + (1-delta) * globInfo); + } + } + int index = fuzzyfication(forecast[i+k]); + // + fuzzyTs.push_back(index); + x.push_back(forecast[i+k]); + createRules(); + } + for (int i=0; i < countPointForecast;i++) { + fuzzyTs.pop_back(); + rulesIn.pop_back(); + rulesOut.pop_back(); + x.pop_back(); + } +} + + + 
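// Illustrative sketch (editorial addition, not part of the original commit): the
// forecast combination used by createModel()/createModelForEstimation() above,
// factored into a stand-alone helper. gamma and delta blend the local interval
// estimate, the rule-based global estimate and the seasonal average exactly as in
// the if/else chain above; the helper name is hypothetical, and the final fallback
// to localInfo is added here for completeness (the original leaves the slot untouched).
static double combineForecast(const std::string& trendType, const std::string& seasonType,
                              double localInfo, double globInfo, double valueAtSeason,
                              double gamma, double delta) {
    if (trendType == "None") {
        if (seasonType == "None") return localInfo;
        if (seasonType == "Add")  return gamma * localInfo + (1 - gamma) * valueAtSeason;
    } else if (trendType == "Add") {
        if (seasonType == "None") return delta * localInfo + (1 - delta) * globInfo;
        if (seasonType == "Add")  return gamma * delta * localInfo
                                       + (1 - gamma) * valueAtSeason
                                       + (1 - delta) * globInfo;
    }
    return localInfo; // unsupported trend/season combination
}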
+vector Fuzzy::getForecast() { + vector result; + for (unsigned int i = forecast.size() - countPointForecast; i < forecast.size(); i++) { + result.push_back(forecast[i]); + } + return forecast; +} + +double Fuzzy::defuzzyfication(int index) { + return this->a[index].getValueAtTop(); +} + +void Fuzzy::setParam(string paramName, double value) { + if (paramName.compare("countFuzzyParts") == 0) { + this->countFuzzyParts = value; + } else if (paramName.compare("countRulesIn") == 0) { + if (this->xLearning.size() < value) { + this->countRulesIn = this->xLearning.size(); + } else { + this->countRulesIn = value; + } + } else if (paramName.compare("p") == 0) { + this->p = value; + } else if (paramName.compare("gamma") == 0) { + this->gamma = value; + } else if (paramName.compare("delta") == 0) { + this->delta = value; + } + if (paramName.compare("0") == 0) { + this->countFuzzyParts = value * 100; + } else if (paramName.compare("1") == 0) { + if (this->xLearning.size() < value * 5) { + this->countRulesIn = this->xLearning.size(); + } else { + this->countRulesIn = value * 5; + } + } else if (paramName.compare("2") == 0) { + this->gamma = value; + } else if (paramName.compare("3") == 0) { + this->delta = value; + } +} + + +// +double Fuzzy::calcEstimation(Aic *aic) { + return aic->getValue(3, this->xEstimation, this->forecast); +} + +// +// TODO: +Param* Fuzzy::optimize(Estimation *est) { + Param *optimal = new Param(); + double minSmape = 99999; + for (double gam = 0; gam < 1; gam+= 0.1) { + cout << "fuzzy " << gam << " 1" <<"\n";; + for (double del = 0; del < 1;del+= 0.1) { + for (double cri = 1; cri < 5;cri+= 1) { + for (double cfp = 2; cfp < 50;cfp+= 2) { + this->setParam("gamma", gam); + this->setParam("delta", del); + this->setParam("countRulesIn", cri); + this->setParam("countFuzzyParts", cfp); + double smapeValue = 0; + int maxShift = 5; + if (maxShift > this->countPointForecast) { + maxShift = this->countPointForecast-1; + } + this->countPointForecast -= maxShift; + for (int shift=0; shift <= maxShift; shift++) { + this->partition(); + this->createModelForEstimation(); + smapeValue += est->getValue(x, getForecast()); + this->countPointForecast++; + } + this->countPointForecast--; + smapeValue = smapeValue / maxShift; + if (minSmape > smapeValue) { + minSmape = smapeValue; + optimal->gamma = gam; + optimal->delta = del; + optimal->estim = smapeValue; + optimal->countFuzzyParts = cfp; + optimal->countRulesIn = cri; + } + } + } + } + } + return optimal; +} + + +int Fuzzy::getNamberParam() { + return 4; +} diff --git a/project_template/Fuzzy.h b/project_template/Fuzzy.h new file mode 100644 index 0000000..51b6bb4 --- /dev/null +++ b/project_template/Fuzzy.h @@ -0,0 +1,49 @@ +#ifndef FUZZY_H +#define FUZZY_H +#include +#include "A.h" +#include "Method.h" +#include "Param.h" + +using namespace std; + +class Fuzzy : public Method { +private: + vector a; // + int countFuzzyParts;// + vector> rulesIn; // + vector rulesOut; // + int countRulesIn; // + double universumMin; + double universumMax; + vector fuzzyTs; // + string trendType; + string seasonType; + double alpha; // + double gamma; // + double delta; // + int p; // + double phi; // + + void defineUniversum(); // + void createRules(); // , + int fuzzyfication(double); // + double defuzzyfication(int); // + vector searchRules(vector); + vector searchRulesForSeason(int); + + +public: + Fuzzy(string, string, vector, int); + ~Fuzzy(void); + vector getForecast(); + void init(); + void createModel(); + void createModelForEstimation(); + void 
setParam(string, double); + double calcEstimation(Aic *); + Param* optimize(Estimation *); // + int getNamberParam(); +}; + +#endif \ No newline at end of file diff --git a/project_template/FuzzyWithSets.cpp b/project_template/FuzzyWithSets.cpp new file mode 100644 index 0000000..5663b14 --- /dev/null +++ b/project_template/FuzzyWithSets.cpp @@ -0,0 +1,498 @@ +#include "StdAfx.h" +#include "FuzzyWithSets.h" +#include +#include +#include "Param.h" +#include "float.h" + +FuzzyWithSets::FuzzyWithSets(string trendType, string seasonType, vector timeSeries, int countPointForecast){ + this->countPointForecast = countPointForecast; + this->trendType = trendType; + this->seasonType = seasonType; + this->x = timeSeries; + this->partition(); +} + +FuzzyWithSets::~FuzzyWithSets(void) { +} + +void FuzzyWithSets::init() { + S.clear(); + T.clear(); + I.clear(); + if (trendType.compare("None") == 0) { + if (seasonType.compare("None") == 0) { + S.push_back(fuzzyTs[0]); + fuzzyForecast.push_back(fuzzyTs[0]); + } else if (seasonType.compare("Add") == 0) { + A sumS = fuzzyTs[0]; + for (unsigned int t = 1; t < p; t++) { + sumS = plusSet(sumS, fuzzyTs[t]); + } + S.push_back(divSet(sumS, p)); + I.resize(p); + for (unsigned int t = 0; t < p; t++) { + I[t] = minusSet(fuzzyTs[t], S[0]); + } + fuzzyForecast.push_back(plusSet(S[0], I[0])); + } else if (seasonType.compare("Mult") == 0) { + A sumS = fuzzyTs[0]; + for (unsigned int t = 1; t < p; t++) { + sumS = plusSet(sumS, fuzzyTs[t]); + } + S.push_back(divSet(sumS, p)); + I.resize(p); + for (unsigned int t = 0; t < p; t++) { + I[t] = divSet(fuzzyTs[t], S[0]); + } + fuzzyForecast.push_back(multSet(S[0], I[0])); + } + } else if (trendType.compare("Add") == 0) { + if (seasonType.compare("None") == 0) { + T.push_back(minusSet(fuzzyTs[1], fuzzyTs[0])); + S.push_back(fuzzyTs[0]); + fuzzyForecast.push_back(plusSet(S[0], T[0])); + } else if (seasonType.compare("Add") == 0) { + A sumS = fuzzyTs[0]; + A sumT = fuzzyTs[0]; + for (unsigned int t = 1; t < p; t++) { + sumS = plusSet(sumS, fuzzyTs[t]); + sumT = plusSet(sumT, fuzzyTs[t+p]); + } + S.push_back(divSet(sumS, p)); + T.push_back(divSet(minusSet(divSet(sumT, p), S[0]), p)); + I.resize(p); + for (unsigned int t = 0; t < p; t++) { + I[t] = minusSet(fuzzyTs[t], S[0]); + } + fuzzyForecast.push_back(plusSet(S[0], plusSet(T[0], I[0]))); + } else if (seasonType.compare("Mult") == 0) { + A sumS = fuzzyTs[0]; + A sumT = fuzzyTs[0]; + for (unsigned int t = 1; t < p; t++) { + sumS = plusSet(sumS, fuzzyTs[t]); + sumT = plusSet(sumT, fuzzyTs[t+p]); + } + S.push_back(divSet(sumS, p)); + T.push_back(divSet(minusSet(divSet(sumT, p), S[0]), p)); + I.resize(p); + for (unsigned int t = 0; t < p; t++) { + I[t] = divSet(fuzzyTs[t], S[0]); + } + fuzzyForecast.push_back(multSet(plusSet(S[0],T[0]), I[0])); + } + } else if (trendType.compare("Mult") == 0) { + if (seasonType.compare("None") == 0) { + T.push_back(divSet(fuzzyTs[1], fuzzyTs[0])); + S.push_back(fuzzyTs[0]); + fuzzyForecast.push_back(multSet(S[0], T[0])); + } else if (seasonType.compare("Add") == 0) { + A sumS = fuzzyTs[0]; + A sumT = fuzzyTs[0]; + for (unsigned int t = 1; t < p; t++) { + sumS = plusSet(sumS, fuzzyTs[t]); + sumT = plusSet(sumT, fuzzyTs[t+p]); + } + S.push_back(divSet(sumS, p)); + T.push_back(divSet(minusSet(divSet(sumT, p), S[0]), p)); + I.resize(p); + for (unsigned int t = 0; t < p; t++) { + I[t] = minusSet(fuzzyTs[t], S[0]); + } + fuzzyForecast.push_back(plusSet(multSet(S[0], T[0]), I[0])); + } else if (seasonType.compare("Mult") == 0) { + A sumS = fuzzyTs[0]; + A 
sumT = fuzzyTs[0]; + for (unsigned int t = 1; t < p; t++) { + sumS = plusSet(sumS, fuzzyTs[t]); + sumT = plusSet(sumT, fuzzyTs[t+p]); + } + S.push_back(divSet(sumS, p)); + T.push_back(divSet(minusSet(divSet(sumT, p), S[0]), p)); + I.resize(p); + for (unsigned int t = 0; t < p; t++) { + I[t] = divSet(fuzzyTs[t], S[0]); + } + fuzzyForecast.push_back(multSet(multSet(S[0],T[0]), I[0])); + } + } +} + +// +// , , +void FuzzyWithSets::defineUniversum() { + this->universumMax = x[0]; + this->universumMin = x[0]; + for (int i = 1; i < x.size(); i++) { + if (universumMax < x[i]) { + universumMax = x[i]; + } + if (universumMin > x[i]) { + universumMin = x[i]; + } + } + + // + // , + // , + double baseSize = (universumMax - universumMin) / countFuzzyParts; + a.resize(countFuzzyParts + 1); + for (int i=0; i < countFuzzyParts + 1;i++){ + a[i] = (A(universumMin + (i-1) * baseSize, universumMin + (i+1)*baseSize)); + } +} + +// +A FuzzyWithSets::minusSet(A a, A b) { + return A(a.getLeft()- b.getLeft(), a.getRight() - b.getRight()); +} + +// +A FuzzyWithSets::divSet(A a, A b) { + return A(a.getLeft()/b.getLeft(), a.getRight()/b.getRight()); +} + +// +A FuzzyWithSets::divSet(A a, double b) { + return A(a.getLeft()/b, a.getRight()/b); +} + +// +A FuzzyWithSets::multSetNum(double num, A a) { + return A(a.getLeft() * num, a.getRight() * num); +} + +// +A FuzzyWithSets::plusSet(A a, A b) { + return A(a.getLeft()+ b.getLeft(), a.getRight() + b.getRight()); +} + +// +A FuzzyWithSets::multSet(A a, A b) { + return A(a.getLeft()* b.getLeft(), a.getRight() * b.getRight()); +} + + +// +// +A FuzzyWithSets::fuzzyfication(double value) { + // , + // , + A aMax = a[0]; + for (int j = 0; j < a.size(); j++) { + if (a[j].getValue(value) > aMax.getValue(value)) { + aMax = a[j]; + } + } + return aMax; +} + + +void FuzzyWithSets::createModelForEstimation() { + fuzzyTs.clear(); + fuzzyForecast.clear(); + defineUniversum(); + // + fuzzyTs.resize(xLearning.size()); + for (int i = 0; i < xLearning.size();i++) { + fuzzyTs[i] = (fuzzyfication(xLearning[i])); + } + init(); + A fuzzyLast = fuzzyTs[fuzzyTs.size()-1]; + A e; + // countPointForecast + for (unsigned int t = 0; t < xLearning.size()-1+this->countPointForecast; t++) { + if (trendType.compare("None") == 0) { + // - , + if (t < fuzzyTs.size()) { + e = minusSet(fuzzyTs[t], fuzzyForecast[t]); + } else { + e = A(0,0); + } + if (seasonType.compare("None") == 0) { + S.push_back(plusSet(S[t],multSetNum(alpha, e))); + fuzzyForecast.push_back(S[t+1]); + } else if (seasonType.compare("Add") == 0) { + S.push_back(plusSet(S[t], multSetNum(alpha, e))); + I.push_back(plusSet(I[t], multSetNum(delta, e))); + fuzzyForecast.push_back(plusSet(S[t+1], I[t+1])); + } else if (seasonType.compare("Mult") == 0) { + S.push_back(plusSet(S[t], divSet(multSetNum(alpha, e), I[t]))); + I.push_back(plusSet(I[t], divSet(multSetNum(delta, e), S[t]))); + fuzzyForecast.push_back(multSet(S[t+1], I[t+1])); + } + } else if (trendType.compare("Add") == 0) { + // - , + if (t < fuzzyTs.size()) { + e = minusSet(fuzzyTs[t], fuzzyForecast[t]); + } else { + e = A(0,0); + } + if (seasonType.compare("None") == 0) { + S.push_back(plusSet(S[t], plusSet(T[t], multSetNum(alpha, e)))); + T.push_back(plusSet(T[t], multSetNum(alpha, multSetNum(gamma, e)))); + fuzzyForecast.push_back(plusSet(S[t+1], T[t+1])); + } else if (seasonType.compare("Add") == 0) { + S.push_back(plusSet(S[t], plusSet(T[t], multSetNum(alpha, e)))); + T.push_back(plusSet(T[t], multSetNum(alpha, multSetNum(gamma, e)))); + I.push_back(plusSet(I[t], 
multSetNum(delta, e))); + fuzzyForecast.push_back(plusSet(S[t+1], plusSet(T[t+1], I[t+1]))); + } else if (seasonType.compare("Mult") == 0) { + S.push_back(plusSet(S[t], plusSet(T[t], divSet(multSetNum(alpha, e), I[t])))); + T.push_back(plusSet(T[t], divSet(multSetNum(alpha, multSetNum(gamma, e)), I[t]))); + I.push_back(plusSet(I[t], divSet(multSetNum(delta, e), plusSet(S[t], T[t])))); + fuzzyForecast.push_back(multSet(plusSet(S[t+1], T[t+1]), I[t+1])); + } + } else if (trendType.compare("Mult") == 0) { + // - , + if (t < fuzzyTs.size()) { + e = minusSet(fuzzyTs[t], fuzzyForecast[t]); + } else { + e = A(0,0); + } + if (seasonType.compare("None") == 0) { + S.push_back(plusSet(multSet(S[t], T[t]), multSetNum(alpha, e))); + T.push_back(plusSet(T[t], divSet(multSetNum(alpha, multSetNum(gamma, e)), S[t]))); + fuzzyForecast.push_back(multSet(S[t+1], T[t+1])); + } else if (seasonType.compare("Add") == 0) { + S.push_back(plusSet(multSet(S[t], T[t]), multSetNum(alpha, e))); + T.push_back(plusSet(T[t], divSet(multSetNum(alpha, multSetNum(gamma, e)), S[t]))); + I.push_back(plusSet(I[t], multSetNum(delta, e))); + fuzzyForecast.push_back(plusSet(multSet(S[t+1], T[t+1]), I[t])); + } else if (seasonType.compare("Mult") == 0) { + S.push_back(plusSet(multSet(S[t], T[t]), divSet(multSetNum(alpha, e), I[t]))); + T.push_back(plusSet( + T[t], + divSet( + divSet( + multSetNum( + alpha, + multSetNum( + gamma, + e) + ), + S[t]), + I[t]) + ) + ); + I.push_back(plusSet(I[t], divSet(divSet(multSetNum(delta, e), S[t]), T[t]))); + fuzzyForecast.push_back(multSet(S[t+1], multSet(T[t+1], I[t]))); + } + } + } + + +} + +// , +void FuzzyWithSets::createModel() { + fuzzyTs.clear(); + fuzzyForecast.clear(); + defineUniversum(); + // + fuzzyTs.resize(x.size()); + for (int i = 0; i < x.size();i++) { + fuzzyTs[i] = (fuzzyfication(x[i])); + } + init(); + A fuzzyLast = fuzzyTs[fuzzyTs.size()-1]; + A e; + // countPointForecast + for (unsigned int t = 0; t < x.size()-1+this->countPointForecast; t++) { + if (trendType.compare("None") == 0) { + // - , + if (t < fuzzyTs.size()) { + e = minusSet(fuzzyTs[t], fuzzyForecast[t]); + } else { + e = A(0,0); + } + if (seasonType.compare("None") == 0) { + S.push_back(plusSet(S[t],multSetNum(alpha, e))); + fuzzyForecast.push_back(S[t+1]); + } else if (seasonType.compare("Add") == 0) { + S.push_back(plusSet(S[t], multSetNum(alpha, e))); + I.push_back(plusSet(I[t], multSetNum(delta, e))); + fuzzyForecast.push_back(plusSet(S[t+1], I[t+1])); + } else if (seasonType.compare("Mult") == 0) { + S.push_back(plusSet(S[t], divSet(multSetNum(alpha, e), I[t]))); + I.push_back(plusSet(I[t], divSet(multSetNum(delta, e), S[t]))); + fuzzyForecast.push_back(multSet(S[t+1], I[t+1])); + } + } else if (trendType.compare("Add") == 0) { + // - , + if (t < fuzzyTs.size()) { + e = minusSet(fuzzyTs[t], fuzzyForecast[t]); + } else { + e = A(0,0); + } + if (seasonType.compare("None") == 0) { + S.push_back(plusSet(S[t], plusSet(T[t], multSetNum(alpha, e)))); + T.push_back(plusSet(T[t], multSetNum(alpha, multSetNum(gamma, e)))); + fuzzyForecast.push_back(plusSet(S[t+1], T[t+1])); + } else if (seasonType.compare("Add") == 0) { + S.push_back(plusSet(S[t], plusSet(T[t], multSetNum(alpha, e)))); + T.push_back(plusSet(T[t], multSetNum(alpha, multSetNum(gamma, e)))); + I.push_back(plusSet(I[t], multSetNum(delta, e))); + fuzzyForecast.push_back(plusSet(S[t+1], plusSet(T[t+1], I[t+1]))); + } else if (seasonType.compare("Mult") == 0) { + S.push_back(plusSet(S[t], plusSet(T[t], divSet(multSetNum(alpha, e), I[t])))); + 
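				// The two updates below are the interval-arithmetic analogue of the
				// additive-trend / multiplicative-season smoothing step: the trend
				// correction divides the error term by the seasonal index I[t], and the
				// seasonal correction divides it by the de-seasonalised level S[t] + T[t].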
T.push_back(plusSet(T[t], divSet(multSetNum(alpha, multSetNum(gamma, e)), I[t]))); + I.push_back(plusSet(I[t], divSet(multSetNum(delta, e), plusSet(S[t], T[t])))); + fuzzyForecast.push_back(multSet(plusSet(S[t+1], T[t+1]), I[t+1])); + } + } else if (trendType.compare("Mult") == 0) { + // - , + if (t < fuzzyTs.size()) { + e = minusSet(fuzzyTs[t], fuzzyForecast[t]); + } else { + e = A(0,0); + } + if (seasonType.compare("None") == 0) { + S.push_back(plusSet(multSet(S[t], T[t]), multSetNum(alpha, e))); + T.push_back(plusSet(T[t], divSet(multSetNum(alpha, multSetNum(gamma, e)), S[t]))); + fuzzyForecast.push_back(multSet(S[t+1], T[t+1])); + } else if (seasonType.compare("Add") == 0) { + S.push_back(plusSet(multSet(S[t], T[t]), multSetNum(alpha, e))); + T.push_back(plusSet(T[t], divSet(multSetNum(alpha, multSetNum(gamma, e)), S[t]))); + I.push_back(plusSet(I[t], multSetNum(delta, e))); + fuzzyForecast.push_back(plusSet(multSet(S[t+1], T[t+1]), I[t])); + } else if (seasonType.compare("Mult") == 0) { + S.push_back(plusSet(multSet(S[t], T[t]), divSet(multSetNum(alpha, e), I[t]))); + T.push_back(plusSet( + T[t], + divSet( + divSet( + multSetNum( + alpha, + multSetNum( + gamma, + e) + ), + S[t]), + I[t]) + ) + ); + I.push_back(plusSet(I[t], divSet(divSet(multSetNum(delta, e), S[t]), T[t]))); + fuzzyForecast.push_back(multSet(S[t+1], multSet(T[t+1], I[t]))); + } + } + } + +} + +vector FuzzyWithSets::getForecast() { + vector result; +/* result.resize(countPointForecast); + for (unsigned int i = fuzzyForecast.size() - countPointForecast; i < fuzzyForecast.size(); i++) { + if (_finite(defuzzyfication(fuzzyForecast[i])) == 0){ + result[i - (fuzzyForecast.size() - countPointForecast)] = (x[x.size()-1]); + } else { + result[i - (fuzzyForecast.size() - countPointForecast)] = (defuzzyfication(fuzzyForecast[i])); + } + }*/ + result.resize(fuzzyForecast.size()); + for (unsigned int i = 0; i < fuzzyForecast.size(); i++) { + if (_finite(defuzzyfication(fuzzyForecast[i])) == 0){ + result[i] = (x[x.size()-1]); + } else { + result[i] = (defuzzyfication(fuzzyForecast[i])); + } + } + return result; +} + +double FuzzyWithSets::defuzzyfication(A a) { + return a.getValueAtTop(); +} + +vector FuzzyWithSets::defuzzyfication(vector fuz) { + vector result; + for (int i =0; i < fuz.size(); i++) { + result.push_back(defuzzyfication(fuz[i])); + } + return result; +} + +void FuzzyWithSets::setParam(string paramName, double value) { + if (paramName.compare("countFuzzyParts") == 0) { + this->countFuzzyParts = value; + } else if (paramName.compare("p") == 0) { + this->p = value; + } else if (paramName.compare("gamma") == 0) { + this->gamma = value; + } else if (paramName.compare("alpha") == 0) { + this->alpha = value; + } else if (paramName.compare("delta") == 0) { + this->delta = value; + } + + if (paramName.compare("0") == 0) { + this->countFuzzyParts = value * 100; + } else if (paramName.compare("1") == 0) { + this->alpha = value; + } else if (paramName.compare("2") == 0) { + this->gamma = value; + } else if (paramName.compare("3") == 0) { + this->delta = value; + } else if (paramName.compare("4") == 0) { + this->alpha = value; + } + +} + + +// +double FuzzyWithSets::calcEstimation(Aic *aic) { + return aic->getValue(3, this->xEstimation, this->forecast); +} + + + +// +// TODO: +Param* FuzzyWithSets::optimize(Estimation *est) { + Param *optimal = new Param(); + double minSmape = 99999; + for (double al = 0; al < 1; al+= 0.1) { + cout << "FWS " << al << " 1" <<"\n"; + for (double gam = 0; gam < 1; gam+= 0.1) { + for (double del = 
0; del < 1;del+= 0.1) { + for (double cfp = 2; cfp < 50;cfp+= 2) { + this->setParam("alpha", al); + this->setParam("gamma", gam); + this->setParam("delta", del); + this->setParam("countFuzzyParts", cfp); + double smapeValue = 0; + int maxShift = 5; + if (maxShift > this->countPointForecast) { + maxShift = this->countPointForecast-1; + } + this->countPointForecast -= maxShift; + for (int shift=0; shift <= maxShift; shift++) { + this->partition(); + this->createModelForEstimation(); + smapeValue += est->getValue(x, getForecast()); + this->countPointForecast++; + } + this->countPointForecast--; + smapeValue = smapeValue / maxShift; + if (minSmape > smapeValue) { + minSmape = smapeValue; + optimal->alpha = al; + optimal->gamma = gam; + optimal->delta = del; + optimal->estim = smapeValue; + optimal->countFuzzyParts = cfp; + } + } + } + } + } + return optimal; +} + + + +int FuzzyWithSets::getNamberParam() { + return 5; +} diff --git a/project_template/FuzzyWithSets.h b/project_template/FuzzyWithSets.h new file mode 100644 index 0000000..091d7da --- /dev/null +++ b/project_template/FuzzyWithSets.h @@ -0,0 +1,55 @@ +#ifndef FUZZYWITHSETS_H +#define FUZZYWITHSETS_H +#include +#include "A.h" +#include "Method.h" +#include "Param.h" + +using namespace std; + +class FuzzyWithSets : public Method { +private: + vector a; // + int countFuzzyParts;// + double universumMin; + double universumMax; + vector fuzzyTs; // + vector fuzzyForecast; // + double w; + string trendType; + string seasonType; + double alpha; // + double gamma; // + double delta; // + int p; // + double phi; // + vector S; // + vector I; + vector T; + + void defineUniversum(); // + A fuzzyfication(double); + double defuzzyfication(A); + vector defuzzyfication(vector); + A minusSet(A, A); + A plusSet(A, A); + A divSet(A, A); + A divSet(A, double); + A multSet(A, A); + A multSetNum(double, A); + + +public: + FuzzyWithSets(string, string, vector, int); + ~FuzzyWithSets(void); + vector getForecast(); + void init(); + void createModel(); + void createModelForEstimation(); + void setParam(string, double); + double calcEstimation(Aic *); + Param* optimize(Estimation *); // + int getNamberParam(); +}; + +#endif \ No newline at end of file diff --git a/project_template/Mape.cpp b/project_template/Mape.cpp new file mode 100644 index 0000000..2d59298 --- /dev/null +++ b/project_template/Mape.cpp @@ -0,0 +1,29 @@ +#include "StdAfx.h" +#include +#include "Mape.h" +#include "Math.h" +#include + +// Mape. Estimation. +// " " + +Mape::Mape() { +} + +Mape::~Mape() { +} + +double Mape::getValue(int paramCount, vector original, vector model) { +return 0; +} +double Mape::getValue(vector original, vector model) { + double mape = 0; + for (int i = 0; i < original.size(); i++) { + mape += fabs((fabs(original[i] - model[i])) / original[i]); + } + mape = mape / original.size(); + mape = mape * 100; + + return mape; + +} diff --git a/project_template/Mape.h b/project_template/Mape.h new file mode 100644 index 0000000..abfbd2d --- /dev/null +++ b/project_template/Mape.h @@ -0,0 +1,15 @@ +#ifndef MAPE_H +#define MAPE_H +#include "Estimation.h" + +using namespace std; +// Mape. Estimation. 
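// getValue(original, model) implements
//   MAPE = (100 / n) * sum_i |original[i] - model[i]| / |original[i]|
// computed over the original series; the three-argument overload exists only to
// satisfy the Estimation interface and always returns 0.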
+// " " +class Mape : public Estimation { +public: + Mape(); + ~Mape(); + double getValue(vector, vector); + double getValue(int, vector, vector); +}; +#endif \ No newline at end of file diff --git a/project_template/Median.cpp b/project_template/Median.cpp new file mode 100644 index 0000000..7262c44 --- /dev/null +++ b/project_template/Median.cpp @@ -0,0 +1,65 @@ +#include "StdAfx.h" +#include "Median.h" +#include +#include + + +Median::Median() { + +} + +Median::~Median() { + +} + +double Median::getValue(int, vector, vector){ + return 0; +} +double Median::getValue(vector, vector){ + return 0; +} + + +double Median::getValue(vector ts){ + + double median; + + int size = ts.size(); + sort(ts.begin(), ts.end()); + + if (size % 2 == 0) + { + median = (ts[size % 2 - 1] + ts[size % 2 + 1]) / 2; + } + else + { + median = ts[size / 2]; + } + + return median; +} + +vector Median::getValue(vector ts, int period){ + + int size = ts.size(); + int amtPeriod = size / period; + int shift = size - amtPeriod * period; + + vector median(amtPeriod); + vector temp(period); + + int begin; + int end; + + for (int i = 0; i < amtPeriod; i++) + { + begin = shift + i * period; + end = begin + period; + + std::copy(ts.begin() + begin, ts.begin() + end, temp.begin()); + + median[i] = getValue(temp); + } + + return median; +} \ No newline at end of file diff --git a/project_template/Median.h b/project_template/Median.h new file mode 100644 index 0000000..ad819c4 --- /dev/null +++ b/project_template/Median.h @@ -0,0 +1,20 @@ +#ifndef MEDIAN_H +#define MEDIAN_H +#include "Estimation.h" + +using namespace std; +// +// +class Median :public Estimation { +public: + + Median(); + ~Median(); + + double getValue(vector); + vector getValue(vector, int); + + double getValue(int, vector, vector); + double getValue(vector, vector); +}; +#endif \ No newline at end of file diff --git a/project_template/Method.cpp b/project_template/Method.cpp new file mode 100644 index 0000000..632f925 --- /dev/null +++ b/project_template/Method.cpp @@ -0,0 +1,34 @@ +// +// : +// , +// +#include "StdAfx.h" +#include +#include "Method.h" + +void Method::partition() { + this->xLearning.clear(); + this->xEstimation.clear(); + this->xLearning.resize(x.size() - countPointForecast); + this->xEstimation.resize(this->countPointForecast); + // + for (unsigned int i = 0; i < x.size() - countPointForecast; i++) { + this->xLearning[i] = x[i]; + } + // + int j=0; + for (unsigned int i = x.size() - countPointForecast; i < x.size(); i++) { + this->xEstimation[j++] = x[i]; + } +} + +vector Method::getXEstimation() { + return this->xEstimation; +} + +int Method::getNamberParam() { + return 1; +} + + + diff --git a/project_template/Method.h b/project_template/Method.h new file mode 100644 index 0000000..87a5911 --- /dev/null +++ b/project_template/Method.h @@ -0,0 +1,45 @@ +#ifndef METHOD_H +#define METHOD_H +#include "Estimation.h" +#include "Aic.h" +#include "Param.h" +#include + +using namespace std; + +// +// , , , +// +class Method { +public: + vector x; // + vector xLearning; // + vector xEstimation; // + vector forecast; // + unsigned int countPointForecast; // c + + //Vovks variables + char * name; + //double weight = 0.5; //start weight + //double value = 0; //last forecast + double weight; //start weight + double value; //last forecast + + //bool isActive = true; + bool isActive; + //Vovks variables + virtual vector getForecast() = 0; // + virtual void createModel() = 0; // + virtual void createModelForEstimation() = 0; // + virtual double 
calcEstimation(Aic *) = 0; // + //virtual double calcEstimation(Aicc *) = 0; // + //virtual double calcEstimation(Bic *) = 0; // + virtual Param* optimize(Estimation *) = 0; // + virtual void setParam(string, double) = 0; // + void partition(); // + void partition(int); // + vector getXLearning(); // + vector getXEstimation(); // + virtual int getNamberParam(); +}; +#endif \ No newline at end of file diff --git a/project_template/MultTrendAddSeasonality.cpp b/project_template/MultTrendAddSeasonality.cpp new file mode 100644 index 0000000..7f08ac7 --- /dev/null +++ b/project_template/MultTrendAddSeasonality.cpp @@ -0,0 +1,146 @@ +// +// : +// , +// +#include "StdAfx.h" +#include +#include "MultTrendAddSeasonality.h" +#include +#include "Param.h" + +// +MultTrendAddSeasonality::MultTrendAddSeasonality(vector timeSeries, int countPointForecast) { + this->x = timeSeries; + this->countPointForecast = countPointForecast; + this->partition(); +} + +MultTrendAddSeasonality::~MultTrendAddSeasonality() { + // + std::vector ().swap(S); + std::vector ().swap(x); + std::vector ().swap(T); + std::vector ().swap(I); + std::vector ().swap(forecast); +} + +// , +void MultTrendAddSeasonality::init() { + S.clear(); + T.clear(); + I.clear(); + forecast.clear(); + + double sumS = 0; + double sumT = 0; + for (unsigned int t = 0; t < p; t++) { + sumS += x[t]; + sumT += x[t+p]; + } + + S.push_back(sumS / p); + T.push_back((sumT/ p - S[0]) / p); + + for (unsigned int t = 0; t < p; t++) { + I.push_back(x[t] - S[0]); + } + + forecast.push_back(S[0] * T[0] + I[0]); +} + +// +void MultTrendAddSeasonality::setParam(string paramName, double value) { + if (paramName.compare("alpha") == 0) { + this->alpha = value; + } else if (paramName.compare("gamma") == 0) { + this->gamma = value; + } else if (paramName.compare("p") == 0) { + this->p = value; + } else if (paramName.compare("delta") == 0) { + this->delta = value; + } +} + +// +void MultTrendAddSeasonality::createModel() { + this->init(); // + double e = 0; + + // countPointForecast + for (unsigned int t = 0; t < x.size()-1 + this->countPointForecast; t++) { + // - , + if (t < x.size()) { + e = x[t]-forecast[t]; + } else { + e = 0; + } + + S.push_back(S[t] * T[t] + alpha * e); // + T.push_back(T[t] + alpha * gamma * e / S[t]); // + I.push_back(I[t] + delta * e); // + forecast.push_back(S[t+1] * T[t+1] + I[t+1]); // + } +} + +// +void MultTrendAddSeasonality::createModelForEstimation() { + this->init(); // + double e = 0; + + // countPointForecast + for (unsigned int t = 0; t < xLearning.size()-1 + this->countPointForecast; t++) { + // - , + if (t < xLearning.size()) { + e = xLearning[t]-forecast[t]; + } else { + e = 0; + } + + S.push_back(S[t] * T[t] + alpha * e); // + T.push_back(T[t] + alpha * gamma * e / S[t]); // + I.push_back(I[t] + delta * e); // + forecast.push_back(S[t+1] * T[t+1] + I[t+1]); // + } +} + +// +vector MultTrendAddSeasonality::getForecast() { + vector result; + for (unsigned int i = forecast.size() - countPointForecast; i < forecast.size(); i++) { + result.push_back(forecast[i]); + } + return result; +} + + +// +double MultTrendAddSeasonality::calcEstimation(Aic *aic) { + return aic->getValue(3, this->xEstimation, this->forecast); +} + + +// +// TODO: +Param* MultTrendAddSeasonality::optimize(Estimation *est) { + Param *optimal = new Param(); + double minSmape = 99999; + for (double al = 0.1; al < 1; al+= 0.01) { + for (double gam = 0.1; gam < 1; gam+= 0.01) { + for (double del = 0.1; del < 1;del+= 0.01) { + this->setParam("alpha", al); + 
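				// Brute-force grid search: alpha, gamma and delta each sweep
				// 0.1 .. 0.99 in 0.01 steps, so roughly 90^3 = 729,000 candidate
				// models are fitted and scored on the held-out tail.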
this->setParam("gamma", gam); + this->setParam("delta", del); + this->createModelForEstimation(); + double smapeValue = est->getValue(getXEstimation(), getForecast()); + if (minSmape > smapeValue) { + minSmape = smapeValue; + optimal->alpha = al; + optimal->gamma = gam; + optimal->delta = del; + } + } + } + } + return optimal; +} + diff --git a/project_template/MultTrendAddSeasonality.h b/project_template/MultTrendAddSeasonality.h new file mode 100644 index 0000000..4713a11 --- /dev/null +++ b/project_template/MultTrendAddSeasonality.h @@ -0,0 +1,34 @@ +#ifndef MULTTRENDADDSEASONALITY_H +#define MULTTRENDADDSEASONALITY_H + +#include "Method.h" +#include "Aic.h" +#include "Param.h" + +using namespace std; + +// Method +// , +class MultTrendAddSeasonality : public Method { +public: + double alpha; // + vector S; // + double gamma; // + double delta; // + vector T; // + vector I; // + int p; // + + MultTrendAddSeasonality(vector, int); + ~MultTrendAddSeasonality(); + + vector getForecast(); + void init(); + void createModel(); + void createModelForEstimation(); + void setParam(string, double); + double calcEstimation(Aic *); + Param* optimize(Estimation *); // +}; + +#endif \ No newline at end of file diff --git a/project_template/MultTrendMultSeasonality.cpp b/project_template/MultTrendMultSeasonality.cpp new file mode 100644 index 0000000..1d5a168 --- /dev/null +++ b/project_template/MultTrendMultSeasonality.cpp @@ -0,0 +1,145 @@ +// +// : +// , +// +#include "StdAfx.h" +#include +#include "MultTrendMultSeasonality.h" +#include +#include "Param.h" + +// +MultTrendMultSeasonality::MultTrendMultSeasonality(vector timeSeries, int countPointForecast) { + this->x = timeSeries; + this->countPointForecast = countPointForecast; + this->partition(); +} + +MultTrendMultSeasonality::~MultTrendMultSeasonality() { + // + std::vector ().swap(S); + std::vector ().swap(x); + std::vector ().swap(T); + std::vector ().swap(I); + std::vector ().swap(forecast); +} + +// , +void MultTrendMultSeasonality::init() { + S.clear(); + T.clear(); + I.clear(); + forecast.clear(); + + double sumS = 0; + double sumT = 0; + for (unsigned int t = 0; t < p; t++) { + sumS += x[t]; + sumT += x[t+p]; + } + + S.push_back(sumS / p); + T.push_back((sumT/ p - S[0]) / p); + + for (unsigned int t = 0; t < p; t++) { + I.push_back(x[t] / S[0]); + } + + forecast.push_back(S[0] * T[0] * I[0]); +} + +// +void MultTrendMultSeasonality::setParam(string paramName, double value) { + if (paramName.compare("alpha") == 0) { + this->alpha = value; + } else if (paramName.compare("gamma") == 0) { + this->gamma = value; + } else if (paramName.compare("p") == 0) { + this->p = value; + } else if (paramName.compare("delta") == 0) { + this->delta = value; + } +} + +// +void MultTrendMultSeasonality::createModel() { + this->init(); // + double e = 0; + + // countPointForecast + for (unsigned int t = 0; t < x.size()-1 + this->countPointForecast; t++) { + // - , + if (t < x.size()) { + e = x[t]-forecast[t]; + } else { + e = 0; + } + + S.push_back(S[t] * T[t] + alpha * e / I[t]); // + T.push_back(T[t] + alpha * gamma * e / (I[t] * S[t])); // + I.push_back(I[t] + delta * e / (S[t] * T[t])); // + forecast.push_back(S[t+1] * T[t+1] * I[t+1]); // + } +} + +// . 
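// Editorial sketch: the Estimation object passed to optimize() is not shown in this
// part of the diff; judging by the variable names (smapeValue, minSmape) it is
// assumed to be a SMAPE-style metric. A minimal stand-alone version of such a metric
// could look like this (function name hypothetical; assumes <vector> and <cmath>
// are available through the existing includes).
static double smapeSketch(const std::vector<double>& actual, const std::vector<double>& predicted) {
    // SMAPE = (100 / n) * sum_t 2 * |p_t - a_t| / (|a_t| + |p_t|)
    std::size_t n = actual.size() < predicted.size() ? actual.size() : predicted.size();
    if (n == 0) return 0.0;
    double sum = 0.0;
    for (std::size_t t = 0; t < n; ++t) {
        double denom = std::fabs(actual[t]) + std::fabs(predicted[t]);
        if (denom > 0.0) sum += 2.0 * std::fabs(predicted[t] - actual[t]) / denom;
    }
    return 100.0 * sum / n;
}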
+void MultTrendMultSeasonality::createModelForEstimation() { + this->init(); // + double e = 0; + + // countPointForecast + for (unsigned int t = 0; t < xLearning.size()-1 + this->countPointForecast; t++) { + // - , + if (t < xLearning.size()) { + e = xLearning[t]-forecast[t]; + } else { + e = 0; + } + + S.push_back(S[t] * T[t] + alpha * e / I[t]); // + T.push_back(T[t] + alpha * gamma * e / (I[t] * S[t])); // + I.push_back(I[t] + delta * e / (S[t] * T[t])); // + forecast.push_back(S[t+1] * T[t+1] * I[t+1]); // + } +} + + +// +vector MultTrendMultSeasonality::getForecast() { + vector result; + for (unsigned int i = forecast.size() - countPointForecast; i < forecast.size(); i++) { + result.push_back(forecast[i]); + } + return result; +} + +// +double MultTrendMultSeasonality::calcEstimation(Aic *aic) { + return aic->getValue(3, this->xEstimation, this->forecast); +} + +// +// TODO: +Param* MultTrendMultSeasonality::optimize(Estimation *est) { + Param *optimal = new Param(); + double minSmape = 99999; + for (double al = 0.1; al < 1; al+= 0.01) { + for (double gam = 0.1; gam < 1; gam+= 0.01) { + for (double del = 0.1; del < 1;del+= 0.01) { + this->setParam("alpha", al); + this->setParam("gamma", gam); + this->setParam("delta", del); + this->createModelForEstimation(); + double smapeValue = est->getValue(getXEstimation(), getForecast()); + if (minSmape > smapeValue) { + minSmape = smapeValue; + optimal->alpha = al; + optimal->gamma = gam; + optimal->delta = del; + } + } + } + } + return optimal; +} + diff --git a/project_template/MultTrendMultSeasonality.h b/project_template/MultTrendMultSeasonality.h new file mode 100644 index 0000000..acd4a83 --- /dev/null +++ b/project_template/MultTrendMultSeasonality.h @@ -0,0 +1,34 @@ +#ifndef MULTTRENDMULTSEASONALITY_H +#define MULTTRENDMULTSEASONALITY_H + +#include "Method.h" +#include "Aic.h" +#include "Param.h" + +using namespace std; + +// Method +// , +class MultTrendMultSeasonality : public Method { +public: + double alpha; // + vector S; // + double gamma; // + vector T; // + double delta; // + vector I; // + int p; // + + MultTrendMultSeasonality(vector, int); + ~MultTrendMultSeasonality(); + + vector getForecast(); + void init(); + void createModel(); + void createModelForEstimation(); + void setParam(string, double); + double calcEstimation(Aic *); + Param* optimize(Estimation *); // +}; + +#endif \ No newline at end of file diff --git a/project_template/MultTrendNoSeasonality.cpp b/project_template/MultTrendNoSeasonality.cpp new file mode 100644 index 0000000..2c320d8 --- /dev/null +++ b/project_template/MultTrendNoSeasonality.cpp @@ -0,0 +1,121 @@ +// +// : +// , +// +#include "StdAfx.h" +#include +#include "MultTrendNoSeasonality.h" +#include "Param.h" + + +// +MultTrendNoSeasonality::MultTrendNoSeasonality(vector timeSeries, int countPointForecast) { + this->x = timeSeries; + this->countPointForecast = countPointForecast; + this->partition(); +} + +MultTrendNoSeasonality::~MultTrendNoSeasonality() { + // + std::vector ().swap(S); + std::vector ().swap(x); + std::vector ().swap(T); + std::vector ().swap(forecast); +} + +// , +void MultTrendNoSeasonality::init() { + S.clear(); + T.clear(); + forecast.clear(); + + T.push_back(x[1] / x[0]); + S.push_back(x[0]); + forecast.push_back(S[0] * T[0]); +} + +// +void MultTrendNoSeasonality::setParam(string paramName, double value) { + if (paramName.compare("alpha") == 0) { + this->alpha = value; + } else if (paramName.compare("gamma") == 0) { + this->gamma = value; + } +} + +// +void 
MultTrendNoSeasonality::createModel() { + this->init(); // + double e = 0; + + // countPointForecast + for (unsigned int t = 0; t < x.size()-1 + this->countPointForecast; t++) { + // - , + if (t < x.size()) { + e = x[t]-forecast[t]; + } else { + e = 0; + } + + S.push_back(S[t] * T[t] + alpha * e); // + T.push_back(T[t] + alpha * gamma * e / S[t]); // + forecast.push_back(S[t+1] * T[t+1]); // + + } +} + +// +void MultTrendNoSeasonality::createModelForEstimation() { + this->init(); // + double e = 0; + + // countPointForecast + for (unsigned int t = 0; t < xLearning.size()-1 + this->countPointForecast; t++) { + // - , + if (t < xLearning.size()) { + e = xLearning[t]-forecast[t]; + } else { + e = 0; + } + + S.push_back(S[t] * T[t] + alpha * e); // + T.push_back(T[t] + alpha * gamma * e / S[t]); // + forecast.push_back(S[t+1] * T[t+1]); // + } +} + +// +vector MultTrendNoSeasonality::getForecast() { + vector result; + for (unsigned int i = forecast.size() - countPointForecast; i < forecast.size(); i++) { + result.push_back(forecast[i]); + } + return result; +} + +// +double MultTrendNoSeasonality::calcEstimation(Aic *aic) { + return aic->getValue(2, this->xEstimation, this->forecast); +} + +// +// TODO: +Param* MultTrendNoSeasonality::optimize(Estimation *est) { + Param *optimal = new Param(); + double minSmape = 99999; + for (double al = 0.1; al < 1; al+= 0.01) { + for (double gam = 0.1; gam < 1; gam+= 0.01) { + this->setParam("alpha", al); + this->setParam("gamma", gam); + this->createModelForEstimation(); + double smapeValue = est->getValue(getXEstimation(), getForecast()); + if (minSmape > smapeValue) { + minSmape = smapeValue; + optimal->alpha = al; + optimal->gamma = gam; + } + } + } + return optimal; +} + diff --git a/project_template/MultTrendNoSeasonality.h b/project_template/MultTrendNoSeasonality.h new file mode 100644 index 0000000..8c90d3c --- /dev/null +++ b/project_template/MultTrendNoSeasonality.h @@ -0,0 +1,32 @@ +#ifndef MULTTRENDNOSEASONALITY_H +#define MULTTRENDNOSEASONALITY_H + +#include "Method.h" +#include "Aic.h" +#include "Param.h" + + +using namespace std; + +// Method +// , +class MultTrendNoSeasonality : public Method { +public: + double alpha; // + vector S; // + double gamma; // + vector T; // + + MultTrendNoSeasonality(vector, int); + ~MultTrendNoSeasonality(); + + vector getForecast(); + void init(); + void createModel(); + void createModelForEstimation(); + void setParam(string, double); + double calcEstimation(Aic *); + Param* optimize(Estimation *); // +}; + +#endif \ No newline at end of file diff --git a/project_template/NoTrendAddSeasonality.cpp b/project_template/NoTrendAddSeasonality.cpp new file mode 100644 index 0000000..9d8908b --- /dev/null +++ b/project_template/NoTrendAddSeasonality.cpp @@ -0,0 +1,130 @@ +// +// : +// , +// +#include "StdAfx.h" +#include +#include "NoTrendAddSeasonality.h" + +// +NoTrendAddSeasonality::NoTrendAddSeasonality(vector timeSeries, int countPointForecast) { + this->x = timeSeries; + this->countPointForecast = countPointForecast; + this->partition(); +} + +NoTrendAddSeasonality::~NoTrendAddSeasonality() { + // + std::vector ().swap(S); + std::vector ().swap(I); + std::vector ().swap(x); + std::vector ().swap(forecast); +} + +// , +void NoTrendAddSeasonality::init() { + S.clear(); + I.clear(); + forecast.clear(); + + double sumS = 0; + for (unsigned int t = 0; t < p; t++) { + sumS += x[t]; + } + + S.push_back(sumS / p); + + for (unsigned int t = 0; t < p; t++) { + I.push_back(x[t] - S[0]); + } + + forecast.push_back(S[0] + 
I[0]); +} + +// +void NoTrendAddSeasonality::createModel() { + this->init(); // + double e = 0; + + // countPointForecast + for (unsigned int t = 0; t < x.size()-1 + this->countPointForecast; t++) { + // - , + if (t < x.size()) { + e = x[t]-forecast[t]; + } else { + e = 0; + } + + S.push_back(S[t] + alpha * e); // + I.push_back(I[t] + delta * e); // + forecast.push_back(S[t+1] + I[t+1]); // + } +} + +// +void NoTrendAddSeasonality::createModelForEstimation() { + this->init(); // + double e = 0; + + // countPointForecast + for (unsigned int t = 0; t < xLearning.size()-1 + this->countPointForecast; t++) { + // - , + if (t < xLearning.size()) { + e = xLearning[t]-forecast[t]; + } else { + e = 0; + } + + S.push_back(S[t] + alpha * e); // + I.push_back(I[t] + delta * e); // + forecast.push_back(S[t+1] + I[t+1]); // + } +} + +// +void NoTrendAddSeasonality::setParam(string paramName, double value) { + if (paramName.compare("alpha") == 0) { + this->alpha = value; + } else if (paramName.compare("delta") == 0) { + this->delta = value; + } else if (paramName.compare("p") == 0) { + this->p = (int) value; + } +} + +// +vector NoTrendAddSeasonality::getForecast() { + vector result; + for (unsigned int i = forecast.size() - countPointForecast; i < forecast.size(); i++) { + result.push_back(forecast[i]); + } + return result; +} + +// +double NoTrendAddSeasonality::calcEstimation(Aic *aic) { + return aic->getValue(2, this->xEstimation, this->forecast); +} + + +// +// TODO: +Param* NoTrendAddSeasonality::optimize(Estimation *est) { + Param *optimal = new Param(); + double minSmape = 99999; + for (double al = 0.1; al < 1; al+= 0.01) { + for (double del = 0.1; del < 1; del+= 0.01) { + this->setParam("alpha", al); + this->setParam("delta", del); + this->createModelForEstimation(); + double smapeValue = est->getValue(getXEstimation(), getForecast()); + if (minSmape > smapeValue) { + minSmape = smapeValue; + optimal->alpha = al; + optimal->delta = del; + } + } + } + return optimal; +} + diff --git a/project_template/NoTrendAddSeasonality.h b/project_template/NoTrendAddSeasonality.h new file mode 100644 index 0000000..5725da6 --- /dev/null +++ b/project_template/NoTrendAddSeasonality.h @@ -0,0 +1,33 @@ +#ifndef NOTRENDADDSEASONALITY_H +#define NOTRENDADDSEASONALITY_H + +#include "Method.h" +#include "Aic.h" +#include "Param.h" + +using namespace std; + +// Method +// c +class NoTrendAddSeasonality : public Method { +public: + double alpha; // + vector S; // + double delta; // + vector I; // + int p; // + + NoTrendAddSeasonality(vector, int); + ~NoTrendAddSeasonality(); + + vector getForecast(); + void init(); + void createModel(); + void createModelForEstimation(); + void setParam(string, double); + double calcEstimation(Aic *); + Param* optimize(Estimation *); // + vector getS(); +}; + +#endif \ No newline at end of file diff --git a/project_template/NoTrendMultSeasonality.cpp b/project_template/NoTrendMultSeasonality.cpp new file mode 100644 index 0000000..6d1bfb8 --- /dev/null +++ b/project_template/NoTrendMultSeasonality.cpp @@ -0,0 +1,132 @@ +// +// : +// , +// +#include "StdAfx.h" +#include +#include "NoTrendMultSeasonality.h" + +// +NoTrendMultSeasonality::NoTrendMultSeasonality(vector timeSeries, int countPointForecast) { + this->x = timeSeries; + this->countPointForecast = countPointForecast; + this->partition(); +} + +NoTrendMultSeasonality::~NoTrendMultSeasonality() { + // + std::vector ().swap(S); + std::vector ().swap(I); + std::vector ().swap(x); + std::vector ().swap(forecast); +} + +// +void 
NoTrendMultSeasonality::setParam(string paramName, double value) { + if (paramName.compare("alpha") == 0) { + this->alpha = value; + } else if (paramName.compare("delta") == 0) { + this->delta = value; + } else if (paramName.compare("p") == 0) { + this->p = (int) value; + } +} + + +// , +void NoTrendMultSeasonality::init() { + S.clear(); + I.clear(); + forecast.clear(); + + double sumS = 0; + for (int t = 0; t < p; t++) { + sumS += x[t]; + } + + S.push_back(sumS / p); + + for (unsigned int t = 0; t < p; t++) { + I.push_back(x[t] / S[0]); + } + + forecast.push_back(S[0] * I[0]); +} + +// +void NoTrendMultSeasonality::createModel() { + this->init(); // + double e = 0; + + // countPointForecast + for (unsigned int t = 0; t < x.size()-1 + this->countPointForecast; t++) { + // - , + if (t < x.size()) { + e = x[t]-forecast[t]; + } else { + e = 0; + } + + S.push_back(S[t] + alpha * e / I[t]); // + I.push_back(I[t] + delta * e / S[t]); // + forecast.push_back(S[t+1] * I[t+1]); // + } +} + +// +void NoTrendMultSeasonality::createModelForEstimation() { + this->init(); // + double e = 0; + + // countPointForecast + for (unsigned int t = 0; t < xLearning.size()-1 + this->countPointForecast; t++) { + // - , + if (t < xLearning.size()) { + e = xLearning[t]-forecast[t]; + } else { + e = 0; + } + + S.push_back(S[t] + alpha * e / I[t]); // + I.push_back(I[t] + delta * e / S[t]); // + forecast.push_back(S[t+1] * I[t+1]); // + } +} + + +// +vector NoTrendMultSeasonality::getForecast() { + vector result; + for (unsigned int i = forecast.size() - countPointForecast; i < forecast.size(); i++) { + result.push_back(forecast[i]); + } + return result; +} + + +// +double NoTrendMultSeasonality::calcEstimation(Aic *aic) { + return aic->getValue(3, this->xEstimation, this->getForecast()); +} + +// +// TODO: +Param* NoTrendMultSeasonality::optimize(Estimation *est) { + Param *optimal = new Param(); + double minSmape = 99999; + for (double al = 0.1; al < 1; al+= 0.01) { + for (double del = 0.1; del < 1; del+= 0.01) { + this->setParam("alpha", al); + this->setParam("delta", del); + this->createModelForEstimation(); + double smapeValue = est->getValue(getXEstimation(), getForecast()); + if (minSmape > smapeValue) { + minSmape = smapeValue; + optimal->alpha = al; + optimal->delta = del; + } + } + } + return optimal; +} + diff --git a/project_template/NoTrendMultSeasonality.h b/project_template/NoTrendMultSeasonality.h new file mode 100644 index 0000000..839132b --- /dev/null +++ b/project_template/NoTrendMultSeasonality.h @@ -0,0 +1,30 @@ +#ifndef NOTRENDMULTSEASONALITY_H +#define NOTRENDMULTSEASONALITY_H + +#include "Method.h" + +using namespace std; + +// Method +// c +class NoTrendMultSeasonality : public Method { +public: + double alpha; // + vector S; // + double delta; // + vector I; // + int p; // + + NoTrendMultSeasonality(vector, int); + ~NoTrendMultSeasonality(); + + vector getForecast(); + void init(); + void createModel(); + void createModelForEstimation(); + void setParam(string, double); + double calcEstimation(Aic *); // + Param* optimize(Estimation *); // +}; + +#endif \ No newline at end of file diff --git a/project_template/NoTrendNoSeasonality.cpp b/project_template/NoTrendNoSeasonality.cpp new file mode 100644 index 0000000..e4e606f --- /dev/null +++ b/project_template/NoTrendNoSeasonality.cpp @@ -0,0 +1,109 @@ +// +// : +// , +// +#include "StdAfx.h" +#include +#include "NoTrendNoSeasonality.h" +#include "Aic.h" +#include "Param.h" + +// +NoTrendNoSeasonality::NoTrendNoSeasonality(vector timeSeries, 
int countPointForecast) { + this->x = timeSeries; + this->countPointForecast = countPointForecast; + this->partition(); +} + +NoTrendNoSeasonality::~NoTrendNoSeasonality() { + // + std::vector ().swap(S); + std::vector ().swap(x); + std::vector ().swap(forecast); +} + +// , +void NoTrendNoSeasonality::init() { + S.clear(); + forecast.clear(); + + S.push_back(x[0]); + forecast.push_back(S[0]); +} + +// +void NoTrendNoSeasonality::setParam(string paramName, double value) { + if (paramName.compare("alpha") == 0) { + this->alpha = value; + } +} + +// +void NoTrendNoSeasonality::createModel() { + this->init(); // + double e = 0; + + // countPointForecast + for (unsigned int t = 0; t < x.size()-1 + this->countPointForecast; t++) { + // - , + if (t < x.size()) { + e = x[t]-forecast[t]; + } else { + e = 0; + } + S.push_back(S[t]+alpha * e); // + forecast.push_back(S[t+1]); // + } + +} + +// +void NoTrendNoSeasonality::createModelForEstimation() { + this->init(); // + double e = 0; + + // countPointForecast + for (unsigned int t = 0; t < xLearning.size()-1 + this->countPointForecast; t++) { + // - , + if (t < xLearning.size()) { + e = xLearning[t]-forecast[t]; + } else { + e = 0; + } + + S.push_back(S[t]+alpha * e); // + forecast.push_back(S[t+1]); // + } +} + +// +vector NoTrendNoSeasonality::getForecast() { + vector result; + for (unsigned int i = forecast.size() - countPointForecast; i < forecast.size(); i++) { + result.push_back(forecast[i]); + } + return result; +} + +// +double NoTrendNoSeasonality::calcEstimation(Aic *aic) { + return aic->getValue(2, this->xEstimation, this->getForecast()); +} + +// +// TODO: +Param* NoTrendNoSeasonality::optimize(Estimation *est) { + Param *optimal = new Param(); + double minSmape = 99999; + for (double al = 0.1; al < 1; al+= 0.01) { + this->setParam("alpha", al); + this->createModelForEstimation(); + double smapeValue = est->getValue(getXEstimation(), getForecast()); + if (minSmape > smapeValue) { + minSmape = smapeValue; + optimal->alpha = al; + } + } + return optimal; +} + diff --git a/project_template/NoTrendNoSeasonality.h b/project_template/NoTrendNoSeasonality.h new file mode 100644 index 0000000..675de06 --- /dev/null +++ b/project_template/NoTrendNoSeasonality.h @@ -0,0 +1,30 @@ +#ifndef NOTRENDNOSEASONALITY_H +#define NOTRENDNOSEASONALITY_H + +#include "Method.h" +#include "Aic.h" +#include "Param.h" + +using namespace std; + +// Method +// +class NoTrendNoSeasonality : public Method { +public: + double alpha; // + vector S; // + + NoTrendNoSeasonality(vector, int); + ~NoTrendNoSeasonality(); + + vector getForecast(); + void init(); + void createModel(); + void createModelForEstimation(); + void setParam(string, double); // + double calcEstimation(Aic *); // + Param* optimize(Estimation *); // + vector getS(); +}; + +#endif \ No newline at end of file diff --git a/project_template/Normalization.cpp b/project_template/Normalization.cpp new file mode 100644 index 0000000..fefa24f --- /dev/null +++ b/project_template/Normalization.cpp @@ -0,0 +1,57 @@ +#include "stdafx.h" +#include "Math.h" +#include "Normalization.h" +using namespace std; + +Normalization::Normalization() { + timeSeriaShiftValue = 0; + maxTimePoint = 0; +} + +vector Normalization::shift(vector timeSeria){ + double timeSeriaShiftValue = timeSeria[0]; + for (int i = 0; i < timeSeria.size(); i++){ + if (timeSeria[i] < timeSeriaShiftValue && timeSeria[i] < 0){ + timeSeriaShiftValue = timeSeria[i]; + } + } + + if (timeSeriaShiftValue < 0){ + timeSeriaShiftValue = 
fabs(timeSeriaShiftValue); + for (int i = 0; i < timeSeria.size(); i++){ + timeSeria[i] += timeSeriaShiftValue; + } + } + return timeSeria; +} +vector Normalization::deShift(vector timeSeria){ + + if (timeSeriaShiftValue < 0){ + for (int i = 0; i < timeSeria.size(); i++){ + timeSeria[i] -= timeSeriaShiftValue; + } + } + return timeSeria; +} + +vector Normalization::normalizeTimeSeria(vector timeSeria){ + shift(timeSeria); + maxTimePoint = timeSeria[0]; + for (int i = 0; i < timeSeria.size(); i++){ + if (timeSeria[i] > maxTimePoint) + maxTimePoint = timeSeria[i]; + } + for (int i = 0; i < timeSeria.size(); i++){ + timeSeria[i] /= maxTimePoint; + } + return timeSeria; +} + +vector Normalization::deNormalizeTimeSeria(vector timeSeria){ + //shift(timeSeria); + for (int i = 0; i < timeSeria.size(); i++){ + timeSeria[i] *= maxTimePoint; + } + timeSeria = deShift(timeSeria); + return timeSeria; +} \ No newline at end of file diff --git a/project_template/Normalization.h b/project_template/Normalization.h new file mode 100644 index 0000000..46f4f5b --- /dev/null +++ b/project_template/Normalization.h @@ -0,0 +1,17 @@ +#ifndef NORMALIZATION_H +#define NORMALIZATION_H +#include "Method.h" +#include +using namespace std; +class Normalization { +private: + double timeSeriaShiftValue; + double maxTimePoint; + vector shift(vector); // + vector deShift(vector); // +public: + Normalization(); + vector normalizeTimeSeria(vector); // + vector deNormalizeTimeSeria(vector); // +}; +#endif \ No newline at end of file diff --git a/project_template/Param.cpp b/project_template/Param.cpp new file mode 100644 index 0000000..1f04f05 --- /dev/null +++ b/project_template/Param.cpp @@ -0,0 +1,36 @@ +#include "StdAfx.h" +#include "Param.h" +#include "string.h" +#include + + +Param::Param() { + alpha =0; + delta =0; + gamma =0; + phi =0; + countRulesIn =0; + countFuzzyParts =0; + sizeLabels =1; + lb1 = 0; + rb1 = 0; + lb2 = 0; + rb2 = 0; +} + +string Param::toString() { + stringstream ss1; + ss1 << "alpha: " < + +using namespace std; + +class Param { +public: + double alpha; + double delta; + double gamma; + double phi; + int countRulesIn; + int countFuzzyParts; + double sizeLabels; + double lb1; + double rb1; + double lb2; + double rb2; + double estim; + Param(); + string toString(void); +}; +#endif \ No newline at end of file diff --git a/project_template/Preparator.cpp b/project_template/Preparator.cpp new file mode 100644 index 0000000..1ba58ef --- /dev/null +++ b/project_template/Preparator.cpp @@ -0,0 +1,181 @@ +#include "stdafx.h" +#include "Preparator.h" +#include +#include +using namespace std; + +int Preparator::sgn(double val){ + if (val > 0) + return 1; + if (val == 0) + return 0; + return -1; +} +void Preparator::setLog(bool log){ + IsLogarithm = log; +} +void Preparator::setBoxCox(bool boxcox){ + IsBoxCox = boxcox; +} +void Preparator::setBoxCoxGamma(double gamma){ + if (gamma > 0 && gamma < 1){ + boxCoxGamma = gamma; + } +} + +vector Preparator::boxcox(vector timeSeria){ + for (int i = 0; i < timeSeria.size(); i++){ + timeSeria[i] = (pow(timeSeria[i], boxCoxGamma) - 1) / boxCoxGamma; + } + return timeSeria; +} + +vector Preparator::deboxcox(vector timeSeria){ + for (int i = 0; i < timeSeria.size(); i++){ + timeSeria[i] = sgn(boxCoxGamma * timeSeria[i] + 1) * + pow((abs(boxCoxGamma * timeSeria[i] + 1)), 1 / boxCoxGamma); + } + return timeSeria; +} + +Preparator::Preparator(bool _IsLogarithm, bool _IsBoxCox){ + IsBoxCox = _IsBoxCox; + IsLogarithm = _IsLogarithm; +} + + +vector Preparator::logarithm(vector 
timeSeria){ + for (int i = 0; i < timeSeria.size(); i++){ + timeSeria[i] = log(timeSeria[i]); + } + return timeSeria; +} + +vector Preparator::delogarithm(vector timeSeria){ + for (int i = 0; i < timeSeria.size(); i++){ + timeSeria[i] = exp(timeSeria[i]); + } + return timeSeria; +} + +vector Preparator::shift(vector timeSeria){ + double timeSeriaShiftValue = timeSeria[0]; + for (int i = 0; i < timeSeria.size(); i++){ + if (timeSeria[i] < timeSeriaShiftValue && timeSeria[i] < 0){ + timeSeriaShiftValue = timeSeria[i]; + } + } + + if (timeSeriaShiftValue < 0){ + timeSeriaShiftValue = fabs(timeSeriaShiftValue); + for (int i = 0; i < timeSeria.size(); i++){ + timeSeria[i] += timeSeriaShiftValue; + } + } + return timeSeria; +} +vector Preparator::deShift(vector timeSeria){ + + if (timeSeriaShiftValue < 0){ + for (int i = 0; i < timeSeria.size(); i++){ + timeSeria[i] -= timeSeriaShiftValue; + } + } + return timeSeria; +} + +vector Preparator::normalizeTimeSeria(vector timeSeria){ + timeSeria = shift(timeSeria); + if (IsLogarithm) + timeSeria = logarithm(timeSeria); + if (IsBoxCox) + timeSeria = boxcox(timeSeria); + maxTimePoint = timeSeria[0]; + for (int i = 0; i < timeSeria.size(); i++){ + if (timeSeria[i] > maxTimePoint) + maxTimePoint = timeSeria[i]; + } + for (int i = 0; i < timeSeria.size(); i++){ + timeSeria[i] /= maxTimePoint; + } + return timeSeria; +} + +vector Preparator::deNormalizeTimeSeria(vector timeSeria){ + //shift(timeSeria); + for (int i = 0; i < timeSeria.size(); i++){ + timeSeria[i] *= maxTimePoint; + } + if (IsLogarithm) + timeSeria = delogarithm(timeSeria); + if (IsBoxCox) + timeSeria = deboxcox(timeSeria); + deShift(timeSeria); + return timeSeria; +} +//bool Preparator::TestCommon(vector timeSeria, vector(*direct)(vector), +// vector(*inverse)(vector)){ +// vector initTimeSeria = timeSeria; +// timeSeria = direct(inverse(timeSeria)); +// double res = 0; +// for (int i = 0; i < timeSeria.size(); i++){ +// res += initTimeSeria[i] - timeSeria[i]; +// } +// return res == 0; +//} +double Preparator::TestPreparator(vector timeSeria){ + double resLog = TestLogarithm(timeSeria); + cout << "log func works with " << resLog << " error" << endl; + double resBoxCox = TestBoxCox(timeSeria); + cout << "BoxCox func works with " << resBoxCox << " error" << endl; + double resShift = TestShift(timeSeria); + cout << "Shift func works with " << resShift << " error" << endl; + double resNormalize = TestNormalize(timeSeria); + cout << "Normalize func works with " << resShift << " error" << endl; + return resNormalize + resLog + resBoxCox + resShift; +} + +double Preparator::TestLogarithm(vector timeSeria){ + vector initTimeSeria = timeSeria; + timeSeria = delogarithm(logarithm(timeSeria)); + double res = 0; + for (int i = 0; i < timeSeria.size(); i++){ + res += initTimeSeria[i] - timeSeria[i]; + } + return res; +} + +double Preparator::TestBoxCox(vector timeSeria){ + vector initTimeSeria = timeSeria; + timeSeria = deboxcox(boxcox(timeSeria)); + double res = 0; + for (int i = 0; i < timeSeria.size(); i++){ + res += initTimeSeria[i] - timeSeria[i]; + } + return res; +} + +double Preparator::TestShift(vector timeSeria){ + vector initTimeSeria = timeSeria; + timeSeria = deboxcox(boxcox(timeSeria)); + double res = 0; + for (int i = 0; i < timeSeria.size(); i++){ + res += initTimeSeria[i] - timeSeria[i]; + } + return res; +} + +double Preparator::TestNormalize(vector timeSeria){ + timeSeriaShiftValue = 0; + maxTimePoint = 0; + IsLogarithm = false; + IsBoxCox = false; + boxCoxGamma = 0.5; + vector 
initTimeSeria = timeSeria; + timeSeria = deboxcox(boxcox(timeSeria)); + double res = 0; + for (int i = 0; i < timeSeria.size(); i++){ + res += initTimeSeria[i] - timeSeria[i]; + } + return res; +} \ No newline at end of file diff --git a/project_template/Preparator.h b/project_template/Preparator.h new file mode 100644 index 0000000..e8c666a --- /dev/null +++ b/project_template/Preparator.h @@ -0,0 +1,35 @@ +#ifndef PREPARATOR_H +#define PREPARATOR_H +#include "Method.h" +#include +using namespace std; +class Preparator { +private: + /*bool TestCommon(vector timeSeria, vector(*)(vector), + vector(*)(vector)){}*/ + double timeSeriaShiftValue; + double maxTimePoint; + bool IsLogarithm; + bool IsBoxCox; + double boxCoxGamma; + int sgn(double); + vector logarithm(vector); // + vector delogarithm(vector); + vector boxcox(vector); // - + vector deboxcox(vector); + vector shift(vector); // + vector deShift(vector); // +public: + double TestPreparator(vector timeSeria); + double TestLogarithm(vector timeSeria); + double TestBoxCox(vector timeSeria); + double TestShift(vector timeSeria); + double TestNormalize(vector); + Preparator(bool IsLogarithm, bool IsBoxCox); + void setBoxCoxGamma(double gamma); + void setLog(bool log); + void setBoxCox(bool boxcox); + vector normalizeTimeSeria(vector timeSeria); // + vector deNormalizeTimeSeria(vector timeSeria); // +}; +#endif \ No newline at end of file diff --git a/project_template/RMSE.cpp b/project_template/RMSE.cpp new file mode 100644 index 0000000..d80388e --- /dev/null +++ b/project_template/RMSE.cpp @@ -0,0 +1,29 @@ +#include "StdAfx.h" +#include "RMSE.h" +#include +#include + + +Rmse::Rmse() { +} + +Rmse::~Rmse() { +} + +double Rmse::getValue(int paramCount, vector original, vector model) { + return 0; +} +double Rmse::getValue(vector original, vector model) { + + double valueRMSE = 0; + int sampleSize = original.size(); + + for (int i = 0; i < sampleSize; i++) { + valueRMSE += pow(original[i] - model[i], 2); + } + + valueRMSE = sqrt(valueRMSE / sampleSize); + + return valueRMSE; + +} \ No newline at end of file diff --git a/project_template/RMSE.h b/project_template/RMSE.h new file mode 100644 index 0000000..eff914f --- /dev/null +++ b/project_template/RMSE.h @@ -0,0 +1,14 @@ +#ifndef RMSE_H +#define RMSE_H +#include "Estimation.h" + +using namespace std; + +class Rmse : public Estimation { +public: + Rmse(); + ~Rmse(); + double getValue(vector, vector); + double getValue(int, vector, vector); +}; +#endif \ No newline at end of file diff --git a/project_template/ReadMe.txt b/project_template/ReadMe.txt new file mode 100644 index 0000000..61fe774 --- /dev/null +++ b/project_template/ReadMe.txt @@ -0,0 +1,33 @@ +======================================================================== + CONSOLE APPLICATION : project_template Project Overview +======================================================================== + +AppWizard has created this project_template application for you. + +This file contains a summary of what you will find in each of the files that +make up your project_template application. + + +project_template.vcproj + This is the main project file for VC++ projects generated using an Application Wizard. + It contains information about the version of Visual C++ that generated the file, and + information about the platforms, configurations, and project features selected with the + Application Wizard. + +project_template.cpp + This is the main application source file. 
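+
+    Besides the wizard-generated files, this project adds a family of
+    exponential-smoothing forecasting classes (Method subclasses such as
+    NoTrendNoSeasonality, AddTrendAddSeasonality, ...), error measures
+    (SMape, Rmse, Aic) and the Vovk aggregating forecaster. A minimal,
+    illustrative usage sketch, assuming the interface declared in the
+    headers (the element type of the vectors, stripped in this listing,
+    is taken to be double; the actual wiring in project_template.cpp may
+    differ, and loadSeries() is a hypothetical helper):
+
+        #include <vector>
+        #include "NoTrendAddSeasonality.h"
+        #include "SMape.h"
+
+        // 12-point seasonality, forecast 10 points ahead (illustrative values)
+        std::vector<double> ts = loadSeries();          // hypothetical loader
+        NoTrendAddSeasonality model(ts, 10);
+        model.setParam("p", 12);
+        Param *best = model.optimize(new SMape());      // grid search on alpha, delta
+        model.setParam("alpha", best->alpha);
+        model.setParam("delta", best->delta);
+        model.createModel();
+        std::vector<double> forecast = model.getForecast();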
+ +///////////////////////////////////////////////////////////////////////////// +Other standard files: + +StdAfx.h, StdAfx.cpp + These files are used to build a precompiled header (PCH) file + named project_template.pch and a precompiled types file named StdAfx.obj. + +///////////////////////////////////////////////////////////////////////////// +Other notes: + +AppWizard uses "TODO:" comments to indicate parts of the source code you +should add to or customize. + +///////////////////////////////////////////////////////////////////////////// diff --git a/project_template/SMape.cpp b/project_template/SMape.cpp new file mode 100644 index 0000000..aed97da --- /dev/null +++ b/project_template/SMape.cpp @@ -0,0 +1,35 @@ +#include "StdAfx.h" +#include +#include "SMape.h" +#include "Math.h" +#include + +// SMape. Estimation. +// " " + +SMape::SMape() { + +} + +SMape::~SMape() { +} + +double SMape::getValue(int countParams, vector original, vector model) { + return 0; +} + +double SMape::getValue(vector original, vector model) { + double smape = 0; + for (int i = 0; i < original.size(); i++) { + /*double a = original[i] - model[i]; + double b = (original[i] + model[i]) / 2; + double c = fabs(a); + double d = fabs(b);*/ + smape += (fabs((double)original[i] - model[i])) / + (((fabs((double)(original[i])) + fabs((double)model[i]))) / 2); + } + smape = smape / original.size(); + smape = smape * 100; + + return smape; +} diff --git a/project_template/SMape.h b/project_template/SMape.h new file mode 100644 index 0000000..95fc419 --- /dev/null +++ b/project_template/SMape.h @@ -0,0 +1,15 @@ +#ifndef SMAPE_H +#define SMAPE_H +#include "Estimation.h" + +using namespace std; +// SMape. Estimation. +// " " +class SMape :public Estimation { +public: + SMape(); + ~SMape(); + double getValue(int, vector, vector); + double getValue(vector, vector); +}; +#endif \ No newline at end of file diff --git a/project_template/TrackingControlSignal.h b/project_template/TrackingControlSignal.h new file mode 100644 index 0000000..0e8c251 --- /dev/null +++ b/project_template/TrackingControlSignal.h @@ -0,0 +1,88 @@ +#ifndef TCS_H +#define TCS_H + +#include "math.h" +#include +#include + +using namespace std; + +class TrackingControlSignal { + +private: + map Quant; + + double AlphaQuant(double alpha){ + return alpha; + } + + double Eps; + double Eps1; + double Eps2; + double alpha; // 0~1 + double gamma; //0.05 ~ 0.1 + bool estimateTCS(double TCS){ + return ((-1.2 * AlphaQuant(alpha / 2) * sqrt(gamma / (2 - gamma)) < TCS) && + (1.2 * AlphaQuant(1 - alpha / 2) * sqrt(gamma / (2 - gamma)) > TCS)); + } + +public: + TrackingControlSignal::TrackingControlSignal(){ + + } + bool TrackingControlSignal::calcTCS(double realData, double forecastedData){ + //calcEps + Eps = realData - forecastedData; + //eps^ n eps~ + Eps1 = gamma * Eps + (1 - gamma) * Eps1; + Eps2 = gamma * fabs(Eps) + (1 - gamma) * Eps2; + return estimateTCS(Eps1 / Eps2); + } + vector TrackingControlSignal::HistoricalExpertControl(Method** values, double tsdiffval, int expertsCount){ + //init + double diff = 0.0; + vector res; + vector diffs; + int activeExperts = expertsCount; + for (int i = 0; i < expertsCount; i++){ + res.push_back(true); + diffs.push_back(0.0); + } + int indMaxDiff = 0; + bool isExcluded = true; + //main algo + while (isExcluded){ + //set exit flag + isExcluded = false; + //calc diffs between experts + for (int i = 0; i < expertsCount; i++){ + for (int j = 0; j < expertsCount; j++){ + if ((i != j) && res[i] && res[j]){ + diffs[j] += 
fabs(values[j]->value - values[i]->value); + if (diffs[j] > diffs[indMaxDiff]){ + indMaxDiff = j; + } + } + } + } + //check max diff on trashhold + if ((diffs[indMaxDiff] / activeExperts - 1) > tsdiffval){ + res[indMaxDiff] = false; + isExcluded = true; + activeExperts--; + for (int i = 0; i < expertsCount; i++){ + diffs[i] = 0.0; + } + } + if (activeExperts - 1 == 0){ + cout << "Something went wrong, all experts exept one was filltered." << endl; + cout << "Hight probability that last expert wasn't correct!" << endl; + cout << "See logs" << endl; + return res; + } + } + return res; + } + +}; +#endif \ No newline at end of file diff --git a/project_template/Transformation.cpp b/project_template/Transformation.cpp new file mode 100644 index 0000000..6c8bcd3 --- /dev/null +++ b/project_template/Transformation.cpp @@ -0,0 +1,559 @@ +#include "stdafx.h" +#include "Transformation.h" +#include "Median.h" +#include +#include +#include +#include + +Transformation::Transformation(vector ts) +{ + setTimeSeries(ts); + INFINITY = std::numeric_limits::infinity(); +} + +Transformation::~Transformation() +{ +} + +void Transformation::setTimeSeries(vector ts){ + + TimeSeries = ts; + tsSizeValue = ts.size(); + + tsShiftValue = 0; + GeometricAverage = 1; + valueMean = 0; + + double min = 0; + + for (int i = 0; i < tsSizeValue; i++) + { + if ((TimeSeries[i] < 0) && (TimeSeries[i] < min)) + { + min = TimeSeries[i]; + } + + GeometricAverage = GeometricAverage * TimeSeries[i]; + + valueMean += TimeSeries[i]; + + } + + if (min != 0) + { + tsShiftValue = abs(min) + 1; + } + + if (tsSizeValue != 0) + { + GeometricAverage = pow(GeometricAverage, 1 / tsSizeValue); + valueMean /= tsSizeValue; + } + +} + +int Transformation::sign(double value) +{ + if (value >= 0) + return 1; + else + return -1; +} + + +double Transformation::lambdaEstimationGuerrero(double param){ + + double result; + + int begin; // iterator + int end; // iterator + double sumRow; // sum row matrix + double sumSd; // sum for estimation standard deviation + double meanRat; + int lengthPeriod = 2; + + if (seasonalPeriod > lengthPeriod) + { + lengthPeriod = seasonalPeriod; + } + + int amtlengthPeriod = floor((double)tsSizeValue / lengthPeriod); + int shift = tsSizeValue - floor((double)amtlengthPeriod * lengthPeriod); + + vector mean(amtlengthPeriod); // mean row matrix + vector sd(amtlengthPeriod); // standard deviation + vector rat(amtlengthPeriod); + + for (int i = 0; i < amtlengthPeriod; i++) + { + begin = shift + i * lengthPeriod; + end = begin + lengthPeriod; + + sumRow = 0; + + for (int row = begin; row < end; row++) + { + sumRow += TimeSeries[row]; + } + + mean[i] = sumRow / lengthPeriod; + + sumSd = 0; + + for (int row = begin; row < end; row++) + { + sumSd += pow(TimeSeries[row] - mean[i], 2); + } + + sd[i] = sqrt(sumSd / (lengthPeriod - 1)); + } + + for (int i = 0; i < amtlengthPeriod; i++) + { + rat[i] = sd[i] / (pow(mean[i], 1 - param)); + } + + meanRat = 0; + + for (int i = 0; i < amtlengthPeriod; i++) + { + meanRat += rat[i]; + } + + meanRat /= amtlengthPeriod; + + sumSd = 0; + + for (int i = 0; i < amtlengthPeriod; i++) + { + sumSd += pow(rat[i] - meanRat, 2); + } + + result = sqrt(sumSd / (amtlengthPeriod - 1)) / meanRat; + + return result; +} + +void Transformation::lambdaEstimation(){ + + double cv = INFINITY; //cv - coefficient of variation + + for (double i = -1; i <= 2; i += 0.01) + { + //double i = 0.7; + double result = lambdaEstimationGuerrero(i); + if (result < cv) + { + cv = result; + lambda = i; + } + } + +} + + +vector 
Transformation::BoxCox(){ + + vector tsTransformation(tsSizeValue); + + if (lambda == 0) + { + for (int i = 0; i < tsSizeValue; i++) + { + tsTransformation[i] = log(TimeSeries[i] + tsShiftValue); + } + } + else + { + for (int i = 0; i < TimeSeries.size(); i++) + { + tsTransformation[i] = (pow(TimeSeries[i] + tsShiftValue, lambda) - 1) / lambda; + } + } + + return tsTransformation; +} + +vector Transformation::invBoxCox(){ + + vector tsTransformation(tsSizeValue); + + if (lambda == 0) + { + for (int i = 0; i < tsSizeValue; i++) + { + tsTransformation[i] = exp(TimeSeries[i]) - tsShiftValue; + } + + } + else + { + for (int i = 0; i < TimeSeries.size(); i++) + { + tsTransformation[i] = pow((lambda * TimeSeries[i] + 1), 1 / lambda) - tsShiftValue; + } + } + + return tsTransformation; +} + + +vector Transformation::ExponentialTransformation(){ + + vector tsTransformation(tsSizeValue); + + if (lambda != 0) + { + for (int i = 0; i < tsSizeValue; i++) + { + tsTransformation[i] = (exp(TimeSeries[i] * lambda) - 1) / lambda; + } + } + + return tsTransformation; +} + +vector Transformation::invExponentialTransformation(){ + + vector tsTransformation(tsSizeValue); + + if (lambda != 0) + { + for (int i = 0; i < tsSizeValue; i++) + { + tsTransformation[i] = log(1 + TimeSeries[i] * lambda) / lambda; + } + } + + return tsTransformation; +} + + +vector Transformation::ModulusTransformation(){ + + vector tsTransformation(tsSizeValue); + + if (lambda == 0) + { + for (int i = 0; i < tsSizeValue; i++) + { + tsTransformation[i] = sign(TimeSeries[i]) * log(abs(TimeSeries[i]) + 1); + } + } + else + { + for (int i = 0; i < tsSizeValue; i++) + { + tsTransformation[i] = sign(TimeSeries[i]) * (pow(abs(TimeSeries[i]) + 1, lambda) - 1) / lambda; + } + } + + return tsTransformation; +} + +vector Transformation::invModulusTransformation(){ + + vector tsTransformation(tsSizeValue); + + if (lambda == 0) + { + for (int i = 0; i < tsSizeValue; i++) + { + tsTransformation[i] = sign(TimeSeries[i]) * (exp(abs(TimeSeries[i])) - 1); + } + } + else + { + for (int i = 0; i < tsSizeValue; i++) + { + tsTransformation[i] = pow(TimeSeries[i] * lambda / sign(TimeSeries[i]) + 1, 1 / lambda) - 1; + } + } + + return tsTransformation; +} + + +vector Transformation::AsymptoticBoxCox(){ + + vector tsTransformation(tsSizeValue); + + if (lambda > 0) + { + for (int i = 0; i < tsSizeValue; i++) + { + tsTransformation[i] = (sign(TimeSeries[i]) * pow(abs(TimeSeries[i]), lambda) - 1) / lambda; + } + } + + return tsTransformation; +} + +vector Transformation::invAsymptoticBoxCox(){ + + vector tsTransformation(tsSizeValue); + + if (lambda > 0) + { + for (int i = 0; i < tsSizeValue; i++) + { + tsTransformation[i] = pow((TimeSeries[i] * lambda - 1) / sign(TimeSeries[i]), 1 / lambda); + } + } + + return tsTransformation; +} + + +vector Transformation::ModifiedBoxCox(){ + + vector tsTransformation(tsSizeValue); + + for (int i = 0; i < tsSizeValue; i++) + { + if ((TimeSeries[i] >= 0) && (lambda != 0)) + { + tsTransformation[i] = (pow(TimeSeries[i] + 1, lambda) - 1) / lambda; + } + else if ((TimeSeries[i] >= 0) && (lambda == 0)) + { + tsTransformation[i] = log(TimeSeries[i] + 1); + } + else if ((TimeSeries[i] < 0) && (lambda != 0)) + { + tsTransformation[i] = -(pow(-TimeSeries[i] + 1, 2 - lambda) - 1) / (2 - lambda); + } + else if ((TimeSeries[i] < 0) && (lambda == 0)) + { + tsTransformation[i] = -log(-TimeSeries[i] + 1); + } + } + + return tsTransformation; +} + +vector Transformation::invModifiedBoxCox(){ + + vector tsTransformation(tsSizeValue); + + for 
(int i = 0; i < tsSizeValue; i++) + { + if ((TimeSeries[i] >= 0) && (lambda != 0)) + { + tsTransformation[i] = pow(TimeSeries[i] * lambda + 1, 1 / lambda) - 1; + } + else if ((TimeSeries[i] >= 0) && (lambda == 0)) + { + tsTransformation[i] = exp(TimeSeries[i]) - 1; + } + else if ((TimeSeries[i] < 0) && (lambda != 2)) + { + tsTransformation[i] = -(pow(-(TimeSeries[i] * (2 - lambda) - 1), 1 / (2 - lambda)) - 1); + } + else if ((TimeSeries[i] < 0) && (lambda == 2)) + { + tsTransformation[i] = -exp(-TimeSeries[i]) + 1; + } + } + + return tsTransformation; +} + + +vector Transformation::NormalizedBoxCox(){ + + vector tsTransformation(tsSizeValue); + + if (lambda == 0) + { + for (int i = 0; i < tsSizeValue; i++) + { + tsTransformation[i] = GeometricAverage * log(TimeSeries[i]); + } + } + else + { + for (int i = 0; i < tsSizeValue; i++) + { + tsTransformation[i] = (pow(TimeSeries[i], lambda) - 1) / (lambda * pow(GeometricAverage, lambda - 1)); + } + } + + return tsTransformation; +} + +vector Transformation::invNormalizedBoxCox(){ + + vector tsTransformation(tsSizeValue); + + if (lambda == 0) + { + for (int i = 0; i < tsSizeValue; i++) + { + tsTransformation[i] = exp(TimeSeries[i] / GeometricAverage); + } + } + else + { + for (int i = 0; i < tsSizeValue; i++) + { + tsTransformation[i] = pow(TimeSeries[i] * lambda * pow(GeometricAverage, lambda - 1) + 1, 1 / lambda); + } + } + + return tsTransformation; + +} + + +vector Transformation::Deseasonalization(int seasonalPeriod){ + + vector tsDeseasonalization(tsSizeValue); + SeasonalIndex.resize(seasonalPeriod - 1); + + int tsShift = 0; + int amtSeasonalPeriod = tsSizeValue / seasonalPeriod; + + if (tsSizeValue % seasonalPeriod != 0) + { + tsShift = tsSizeValue - amtSeasonalPeriod * seasonalPeriod; + } + + // считаем средние сезоанальные индексы + Median *est = new Median(); + SeasonalIndex = est->getValue(TimeSeries, seasonalPeriod); + + // получаем итог + for (int i = 0; i < amtSeasonalPeriod; i++) + { + int begin = tsShift + i * seasonalPeriod; + int end = begin + seasonalPeriod; + + for (int ii = begin; ii < end; ii++) + { + tsDeseasonalization[ii] = TimeSeries[ii] / SeasonalIndex[i]; + } + } + + return tsDeseasonalization; + +} + +vector Transformation::invDeseasonalization(){ + + vector tsDeseasonalization(tsSizeValue); + + int tsShift = 0; + int amtSeasonalPeriod = tsSizeValue / seasonalPeriod; + + if (tsSizeValue % seasonalPeriod != 0) + { + tsShift = tsSizeValue - amtSeasonalPeriod * seasonalPeriod - 1; + } + + for (int i = 0; i < amtSeasonalPeriod; i++) + { + int begin = tsShift + i * seasonalPeriod; + int end = begin + seasonalPeriod; + + for (int ii = begin; ii < end; ii++) + { + tsDeseasonalization[ii] = TimeSeries[ii] * SeasonalIndex[i]; + } + } + + return tsDeseasonalization; +} + +vector Transformation::getSeasonalIndex(){ + + return SeasonalIndex; +} + + +vector Transformation::Aggregation(int seasonalPeriod){ + + int tsShift = 0; + int amtSeasonalPeriod = tsSizeValue / seasonalPeriod; + + vector tsAggregation(amtSeasonalPeriod); + + if (tsSizeValue % seasonalPeriod != 0) + { + tsShift = tsSizeValue - amtSeasonalPeriod * seasonalPeriod; + } + + // получаем итог + for (int i = 0; i < amtSeasonalPeriod; i++) + { + int begin = tsShift + i * seasonalPeriod; + int end = begin + seasonalPeriod; + + for (int ii = begin; ii < end; ii++) + { + tsAggregation[i] += TimeSeries[ii]; + } + } + + return tsAggregation; + +} + +vector Transformation::invAggregation(){ + + int sumSeasonalPeriod = tsSizeValue * seasonalPeriod; + + vector 
tsDeaggregation(sumSeasonalPeriod); + + // получаем итог + for (int i = 0; i < tsSizeValue; i++) + { + int begin = i * seasonalPeriod; + int end = begin + seasonalPeriod; + + for (int ii = begin; ii < end; ii++) + { + tsDeaggregation[ii] = TimeSeries[i] / seasonalPeriod; + } + } + + return tsDeaggregation; +} + + +vector Transformation::Normalization(){ + + vector tsNormalization(tsSizeValue); + + valueSd = 0; + + for (int i = 0; i < tsSizeValue; i++) + { + valueSd += pow(TimeSeries[i] - valueMean, 2); + } + + valueSd = sqrt(valueSd / (tsSizeValue - 1)); + + for (int i = 0; i < tsSizeValue; i++) + { + tsNormalization[i] = (TimeSeries[i] - valueMean) / valueSd; + } + + return tsNormalization; + +} + +vector Transformation::invNormalization() +{ + + vector tsNormalization(tsSizeValue); + + for (int i = 0; i < tsSizeValue; i++) + { + tsNormalization[i] = TimeSeries[i] * valueSd + valueMean; + } + + return tsNormalization; + +} \ No newline at end of file diff --git a/project_template/Transformation.h b/project_template/Transformation.h new file mode 100644 index 0000000..c51bd99 --- /dev/null +++ b/project_template/Transformation.h @@ -0,0 +1,96 @@ +#ifndef TRANSFORMATION_H +#define TRANSFORMATION_H +#include + +using namespace std; + +class Transformation +{ +private: + vector SeasonalIndex; + double GeometricAverage; + double valueMean; + double valueSd; + int tsSizeValue; + +public: + vector TimeSeries; + double INFINITY; + Transformation(vector); + ~Transformation(); + + // период сезонности для использования в Deseasonalization + int seasonalPeriod; + + // параметр трансформации + double lambda; + // параметр сдвига значений вектора при наличии в нем отрицательных значений + double tsShiftValue; + + // обрабатываемый временной ряд + void setTimeSeries(vector); + + int sign(double); + // auto lambda estimation + void lambdaEstimation(); + + //Guerrero(1993) + //Time-series analysis supported by Power Transformations + double lambdaEstimationGuerrero(double); + + //Box-Cox(1964) + vector BoxCox(); + vector invBoxCox(); + + //Manly(1976) + // -- Negative y’s could be allowed + // -- The transformation was reported to be successful in transform + // unimodal skewed distribution into normal distribution, but is + // not quite useful for bimodal or U - shaped distribution + vector ExponentialTransformation(); + vector invExponentialTransformation(); + + //John and Draper(1980) + // -- Negative y’s could be allowed. + // -- It works best at those distribution that is somewhat symmetric. + // A power transformation on a symmetric distribution is likel + // y going to introduce some degree of skewness. + vector ModulusTransformation(); + vector invModulusTransformation(); + + //Bickel and Doksum(1981) + // --Negative y’s could be allowed. + vector AsymptoticBoxCox(); + vector invAsymptoticBoxCox(); + + //Yeo and Johnson (2000) + // --Negative y’s could be allowed. + // --When estimating the transformation parameter, found the value of + // λ that minimizes the Kullback - Leibler distance between + // the normal distribution and the transformed distribution. + vector ModifiedBoxCox(); + vector invModifiedBoxCox(); + + // Atkinson(1973) + // --Negative y’s could be allowed. 
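+	// --As implemented in Transformation.cpp this appears to be the
+	//   geometric-mean-normalized Box-Cox transform:
+	//     z_i = (y_i^lambda - 1) / (lambda * G^(lambda - 1))   for lambda != 0
+	//     z_i = G * ln(y_i)                                    for lambda == 0
+	//   where G is the geometric average of the series.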
+ vector NormalizedBoxCox(); + vector invNormalizedBoxCox(); + + // очистка вектора TimeSeries от сезонности + vector Deseasonalization(int); + vector invDeseasonalization(); + + // получение средних сезоональных индексов + vector getSeasonalIndex(); + + // Aggregation time series + vector Aggregation(int); + vector invAggregation(); + + // Normalization time series: mean = 0, Sd (Standard deviation) = 1 + vector Normalization(); + vector invNormalization(); + +}; + +#endif \ No newline at end of file diff --git a/project_template/Tsaur.cpp b/project_template/Tsaur.cpp new file mode 100644 index 0000000..b53ca4d --- /dev/null +++ b/project_template/Tsaur.cpp @@ -0,0 +1,271 @@ +#include "StdAfx.h" +#include "Tsaur.h" +#include "Param.h" +#include "float.h" +#include + + +Tsaur::Tsaur(vector timeSeries, int countPointForecast){ + this->countPointForecast = countPointForecast; + this->trendType = trendType; + this->seasonType = seasonType; + this->x = timeSeries; + this->partition(); +} + +void Tsaur::init() { + S.clear(); + T.clear(); + T.push_back(fuzzyfication(x[1]-x[0])); + S.push_back(fuzzyfication(x[0])); + fuzzyForecast.push_back(plusSet(S[0], T[0])); +} + +// +// , , +void Tsaur::defineUniversum() { + this->universumMax = x[0]; + this->universumMin = x[0]; + for (int i = 1; i < x.size(); i++) { + if (universumMax < x[i]) { + universumMax = x[i]; + } + if (universumMin > x[i]) { + universumMin = x[i]; + } + } + + // + // , + // , + double baseSize = (universumMax - universumMin) / countFuzzyParts; + a.resize(countFuzzyParts + 1); + for (int i=0; i < countFuzzyParts + 1;i++){ + a[i] = (A(universumMin + (i-1) * baseSize, universumMin + (i+1)*baseSize)); + } +} + +// +A Tsaur::minusSet(A a, A b) { + //if ((a.getLeft()- b.getLeft()) < (a.getRight() - b.getRight())) { + return A(a.getLeft()- b.getLeft(), a.getRight() - b.getRight()); + //} else { + // return A(a.getRight() - b.getRight(), a.getLeft()- b.getLeft()); + //} +} + +// +A Tsaur::numMinusSet(double num, A a) { + //if ((num - a.getLeft()) < (num - a.getRight())) { + return A(num - a.getLeft(), num - a.getRight()); + //} else { + // return A(num - a.getRight(), num - a.getLeft()); + //} +} + +// +A Tsaur::divSet(A a, A b) { + return A(a.getLeft()/b.getLeft(), a.getRight()/b.getRight()); +} + +// +A Tsaur::divSet(A a, double b) { + return A(a.getLeft()/b, a.getRight()/b); +} + +// +A Tsaur::multSetNum(double num, A a) { + //if ((a.getLeft() * num) < (a.getRight() * num)) { + return A(a.getLeft() * num, a.getRight() * num); + //} else { + // return A(a.getRight() * num, a.getLeft() * num); + //} +} + +// +A Tsaur::plusSet(A a, A b) { + return A(a.getLeft()+ b.getLeft(), a.getRight() + b.getRight()); +} + +// +A Tsaur::multSet(A a, A b) { + //if ((a.getLeft()* b.getLeft()) < (a.getRight() * b.getRight())) { + return A(a.getLeft()* b.getLeft(), a.getRight() * b.getRight()); + //} else { + // return A(a.getLeft()* b.getLeft(), a.getRight() * b.getRight()); + //} +} + +// +// +A Tsaur::fuzzyfication(double value) { + // , + // , + A aMax = a[0]; + for (int j = 0; j < a.size(); j++) { + if (a[j].getValue(value) > aMax.getValue(value)) { + aMax = a[j]; + } + } + return aMax; +} + + +void Tsaur::createModelForEstimation() { + defineUniversum(); + A fuzzyBetta0 = fuzzyfication(xLearning[1]- xLearning[0]); + A fuzzyBetta1 = A(lb1,rb1); + A fuzzyBetta2 = A(lb2,rb2); + A fuzzyAlpha = A(0, 1); + + fuzzyTs.clear(); + fuzzyForecast.clear(); + // + fuzzyTs.resize(xLearning.size()); + + init(); + A e; + // countPointForecast + for (unsigned int t = 
0; t < xLearning.size()-1+this->countPointForecast; t++) { + if (t < xLearning.size()) { + e = minusSet(fuzzyfication(xLearning[t]), fuzzyForecast[t]); + } else { + e = A(0,0); + } + S.push_back(plusSet(S[t], multSet(e, fuzzyAlpha))); + T.push_back(plusSet(plusSet(fuzzyBetta0, multSetNum(t, fuzzyBetta1)), multSetNum(t*t, fuzzyBetta2))); + fuzzyForecast.push_back(plusSet(multSetNum(alpha, S[t+1]), multSetNum((1-alpha), T[t+1]))); + //fuzzyForecast.push_back(S[t+1]); + } + + forecast.clear(); + forecast.resize(fuzzyForecast.size()); + for (unsigned int i = 0; i < fuzzyForecast.size(); i++) { + if (_finite(defuzzyfication(fuzzyForecast[i])) == 0){ + forecast[i] = (x[x.size()-1]); + } else { + forecast[i] = (defuzzyfication(fuzzyForecast[i])); + } + } +} + +void Tsaur::createModel() { + defineUniversum(); + A fuzzyBetta0 = fuzzyfication(x[1]- x[0]); + A fuzzyBetta1 = A(lb1,rb1); + A fuzzyBetta2 = A(lb2,rb2); + A fuzzyAlpha = A(0, 1); + + fuzzyTs.clear(); + fuzzyForecast.clear(); + // + fuzzyTs.resize(x.size()); + + init(); + A e; + // countPointForecast + for (unsigned int t = 0; t < x.size()-1+this->countPointForecast; t++) { + if (t < x.size()) { + e = minusSet(fuzzyfication(x[t]), fuzzyForecast[t]); + } else { + e = A(0,0); + } + S.push_back(plusSet(S[t], multSet(e, fuzzyAlpha))); + T.push_back(plusSet(plusSet(fuzzyBetta0, multSetNum(t, fuzzyBetta1)), multSetNum(t*t, fuzzyBetta2))); + fuzzyForecast.push_back(plusSet(multSetNum(alpha, S[t+1]), multSetNum((1-alpha), T[t+1]))); + //fuzzyForecast.push_back(S[t+1]); + } + + forecast.clear(); + forecast.resize(fuzzyForecast.size()); + for (unsigned int i = 0; i < fuzzyForecast.size(); i++) { + if (_finite(defuzzyfication(fuzzyForecast[i])) == 0){ + forecast[i] = (x[x.size()-1]); + } else { + forecast[i] = (defuzzyfication(fuzzyForecast[i])); + } + } +} + +// +double Tsaur::calcEstimation(Aic *aic) { + return aic->getValue(3, this->xEstimation, this->forecast); +} +Tsaur::~Tsaur(void) { +} + +double Tsaur::defuzzyfication(A a) { + return a.getValueAtTop(); +} + +vector Tsaur::defuzzyfication(vector fuz) { + vector result; + for (int i =0; i < fuz.size(); i++) { + result.push_back(defuzzyfication(fuz[i])); + } + return result; +} + +vector Tsaur::getForecast() { + vector result; + for (unsigned int i = forecast.size() - countPointForecast; i < forecast.size(); i++) { + result.push_back(forecast[i]); + } + return result; +} + +void Tsaur::setParam(string paramName, double value) { + if (paramName.compare("countFuzzyParts") == 0) { + this->countFuzzyParts = value; + } else if (paramName.compare("alpha") == 0) { + this->alpha = value; + } else if (paramName.compare("lb1") == 0) { + this->lb1 = value; + } else if (paramName.compare("rb1") == 0) { + this->rb1 = value; + } else if (paramName.compare("lb2") == 0) { + this->lb2 = value; + } else if (paramName.compare("rb2") == 0) { + this->rb2 = value; + } +} + + +// +// TODO: +Param* Tsaur::optimize(Estimation *est) { + Param *optimal = new Param(); + double minSmape = 99999; + for (double a = 0; a < 1; a+= 0.1) { + cout << "TSAUR " << a << " 1" <<"\n";; + for (double _lb1 = -10; _lb1 < 1; _lb1 += 0.2) { + for (double _rb1 = _lb1; _rb1 < 1; _rb1 += 0.2) { + for (double _lb2 = -1; _lb2 < 1; _lb2 += 0.2) { + for (double _rb2 = _lb2; _rb2 < 1; _rb2 += 0.2) { + + for (double cfp = 2; cfp < 20;cfp+= 1) { + this->setParam("countFuzzyParts", cfp); + this->setParam("alpha", a); + this->setParam("lb1", _lb1); + this->setParam("rb1", _rb1); + this->setParam("lb2", _lb2); + this->setParam("rb2", _rb2); + 
this->createModelForEstimation(); + double smapeValue = est->getValue(getXEstimation(), getForecast()); + if (minSmape > smapeValue) { + minSmape = smapeValue; + optimal->countFuzzyParts = cfp; + optimal->alpha = a; + optimal->lb1 = _lb1; + optimal->lb2 = _lb2; + optimal->rb1 = _rb1; + optimal->rb2 = _rb2; + } + } + } + } + } + } + } + return optimal; +} \ No newline at end of file diff --git a/project_template/Tsaur.h b/project_template/Tsaur.h new file mode 100644 index 0000000..3c7baa8 --- /dev/null +++ b/project_template/Tsaur.h @@ -0,0 +1,59 @@ +#ifndef TSAUR_H +#define TSAUR_H +#include +#include "Method.h" +#include "A.h" + +using namespace std; + +class Tsaur : public Method{ +private: + vector a; // + int countFuzzyParts;// + double universumMin; + double universumMax; + vector fuzzyTs; // + vector fuzzyForecast; // + double w; + string trendType; + string seasonType; + double alpha; // + double gamma; // + double delta; // + int p; // + double phi; // + vector S; // + vector I; + vector T; + double lb1; + double rb1; + double lb2; + double rb2; + + + + void defineUniversum(); // + A fuzzyfication(double); + double defuzzyfication(A); + vector defuzzyfication(vector); + A minusSet(A, A); + A numMinusSet(double, A); + A plusSet(A, A); + A divSet(A, A); + A divSet(A, double); + A multSet(A, A); + A multSetNum(double, A); + +public: + Tsaur(vector, int); + ~Tsaur(void); + vector getForecast(); + void init(); + void createModel(); + void createModelForEstimation(); + void setParam(string, double); + double calcEstimation(Aic *); + Param* optimize(Estimation *); // +}; + +#endif \ No newline at end of file diff --git a/project_template/Vovk.cpp b/project_template/Vovk.cpp new file mode 100644 index 0000000..f20d805 --- /dev/null +++ b/project_template/Vovk.cpp @@ -0,0 +1,216 @@ +#include "StdAfx.h" +#include "Vovk.h" +#include "Dsa.h" +#include "Fuzzy.h" +#include "FuzzyWithSets.h" +#include "NoTrendNoSeasonality.h" +#include "NoTrendNoSeasonality.h" +#include "NoTrendAddSeasonality.h" +#include "NoTrendMultSeasonality.h" +#include "AddTrendNoSeasonality.h" +#include "AddTrendAddSeasonality.h" +#include "AddTrendMultSeasonality.h" +#include "MultTrendNoSeasonality.h" +#include "MultTrendAddSeasonality.h" +#include "MultTrendMultSeasonality.h" +#include "DATrendNoSeasonality.h" +#include "DATrendAddSeasonality.h" +#include "DATrendMultSeasonality.h" +#include "DMTrendNoSeasonality.h" +#include "DMTrendAddSeasonality.h" +#include "DMTrendMultSeasonality.h" +#include "TrackingControlSignal.h" +#include +//Set ts at constructor, get first forecast +//add value(update) ts +//recalculate weights +Vovk::Vovk(vector _timeSeria, int _forecastHorizont, Method **m) { + prep = new Preparator(true, true); + useSingleTrashhold = false; + forecastHorizont = 1; + singleTrashhold = 0.35; + useModelTrashhold = false; + modelTrashhold = 0; + activeExpertsCount = 0; + beta = 0.5; + step = 0; + forecast = 0; + addWeightFromSleepEx = 0.0; + timeSeria = _timeSeria; + calculateTimeSeriaDiff(_timeSeria); + int n = 29; // , + expertsCount = n; + forecastHorizont = _forecastHorizont; + /*int p = 1; + double alpha = 0.4; + double delta = 0.9; + double gamma = 0.4; + double phi = 0.1;*/ + int countRulesIn = 1; + int countFuzzyParts = 40; + models = m; + //models[0] = new NoTrendNoSeasonality(_timeSeria, _forecastHorizont); + //models[1] = new NoTrendAddSeasonality(_timeSeria, _forecastHorizont); + //models[2] = new NoTrendMultSeasonality(_timeSeria, _forecastHorizont); + + //models[3] = new 
AddTrendNoSeasonality(_timeSeria, _forecastHorizont); + //models[4] = new MultTrendNoSeasonality(_timeSeria, _forecastHorizont); + //models[5] = new DATrendNoSeasonality(_timeSeria, _forecastHorizont); + //models[6] = new DMTrendNoSeasonality(_timeSeria, _forecastHorizont); + + //models[7] = new AddTrendAddSeasonality(_timeSeria, _forecastHorizont); + //models[8] = new MultTrendAddSeasonality(_timeSeria, _forecastHorizont); + //models[9] = new DATrendAddSeasonality(_timeSeria, _forecastHorizont); + //models[10] = new DMTrendAddSeasonality(_timeSeria, _forecastHorizont); + + //models[11] = new AddTrendMultSeasonality(_timeSeria, _forecastHorizont); + //models[12] = new MultTrendMultSeasonality(_timeSeria, _forecastHorizont); + //models[13] = new DATrendMultSeasonality(_timeSeria, _forecastHorizont); + //models[14] = new DMTrendMultSeasonality(_timeSeria, _forecastHorizont); + //models[15] = new Dsa(_timeSeria, _forecastHorizont); + //models[16] = new Fuzzy("None", "None", _timeSeria, _forecastHorizont); + //models[17] = new Fuzzy("Add", "None", _timeSeria, _forecastHorizont); + //models[18] = new Fuzzy("None", "Add", _timeSeria, _forecastHorizont); + //models[19] = new Fuzzy("Add", "Add", _timeSeria, _forecastHorizont); + //models[20] = new FuzzyWithSets("None", "None", _timeSeria, _forecastHorizont); + //models[21] = new FuzzyWithSets("None", "Add", _timeSeria, _forecastHorizont); + //models[22] = new FuzzyWithSets("None", "Mult", _timeSeria, _forecastHorizont); + //models[23] = new FuzzyWithSets("Add", "None", _timeSeria, _forecastHorizont); + //models[24] = new FuzzyWithSets("Add", "Add", _timeSeria, _forecastHorizont); + //models[25] = new FuzzyWithSets("Add", "Mult", _timeSeria, _forecastHorizont); + //models[26] = new FuzzyWithSets("Mult", "None", _timeSeria, _forecastHorizont); + //models[27] = new FuzzyWithSets("Mult", "Add", _timeSeria, _forecastHorizont); + //models[28] = new FuzzyWithSets("Mult", "Mult", _timeSeria, _forecastHorizont); + + ////add models + //for (int i = 0; i < 29; i++) { + + // // + // // + // models[i]->setParam("alpha", alpha); + // models[i]->setParam("gamma", gamma); + // models[i]->setParam("delta", delta); + // models[i]->setParam("p", p); + // models[i]->setParam("phi", phi); + + // models[i]->setParam("countRulesIn", countRulesIn); + // models[i]->setParam("countFuzzyParts", countFuzzyParts); + // models[i]->setParam("w", 0.5); + // // , + // // + // // : = 20, c = 20 + // // + // models[i]->setParam("c", 20); + // // + // models[i]->createModelForEstimation(); + //} + //push models to Vovks object + double check = 0; + + //set weights at first time + for (int i = 0; i < expertsCount; i++) + { + models[i]->weight = (1.0 / expertsCount); + check += models[i]->weight; + models[i]->value = models[i]->getForecast()[step]; + //cout << models[i]->value << " " << models[i]->weight << " " << forecast << endl; + } + //check experts representation + checkExperts(); + //calculate first forecast + for (int i = 0; i < expertsCount; i++) + { + if (models[i]->isActive) + forecast += models[i]->value * (models[i]->weight + addWeightFromSleepEx/activeExpertsCount); + } + getNewExpertsValues(); + //step++; +} +void Vovk::setBeta(double _beta){ + beta = _beta; +} +Vovk::~Vovk() { + +} + +void Vovk::calculateTimeSeriaDiff(vector timeSeria){ + double diff = 0.0; + for (int i = 0; i < timeSeria.size() - 1; i++){ + diff += fabs(timeSeria[i] - timeSeria[i + 1]); + } + timeSeriaDiff = (diff / (timeSeria.size() - 1)) * (1 + singleTrashhold); +} + +void Vovk::checkExperts(){ + 
TrackingControlSignal *tcs = new TrackingControlSignal(); + activeExpertsCount = expertsCount; + addWeightFromSleepEx = 0.0; + vector expertsActivity = tcs->HistoricalExpertControl(models, timeSeriaDiff, expertsCount); + for (int i = 0; i < expertsCount; i++) + { + models[i]->isActive = expertsActivity[i]; + if (!expertsActivity[i]){ + addWeightFromSleepEx += models[i]->weight; + activeExpertsCount--; + } + } +} + +void Vovk::getNewExpertsValues(){ + for (int i = 0; i < expertsCount; i++){ + //for 1 scale forecast + models[i]->value = models[i]->getForecast()[step]; + } + step++; +} + +//ret gt(x) +double Vovk::recalculateWeights(double nextRealValue){ + double weightsSum = 0; + for (int i = 0; i < expertsCount; i++){ + models[i]->weight = models[i]->weight * pow(beta, fabs(models[i]->value - nextRealValue)); + weightsSum += models[i]->weight; + } + + //normalize weights + double check = 0.0; + for (int i = 0; i < expertsCount; i++){ + models[i]->weight = models[i]->weight / weightsSum; + check += models[i]->weight; + } + return log(weightsSum) / log(beta);//beta, weightsSum); +} + +void Vovk::updateTS(double nextRealValue){ + //change models weights with new real value + double gtx = recalculateWeights(nextRealValue); + forecast = 0.0; + //create new forecast from experts + getNewExpertsValues(); + checkExperts(); + //create aggregated forecast + double check = 0.0; + for (int i = 0; i < expertsCount; i++) + { + /*cout << i << ") "; + if (models[i]->isActive){ + cout << "true "; + } + else{ + cout << "false "; + } + cout << " " << models[i]->value << " " << models[i]->weight << endl;*/ + if (models[i]->isActive){ + check += models[i]->weight; + forecast += models[i]->value * (models[i]->weight + addWeightFromSleepEx / activeExpertsCount); + } + } +} + +double Vovk::getForecast(){ + return forecast; +} + +//void Vovk::addMethod(AggModel model){ +// models.push_back(model); +//} \ No newline at end of file diff --git a/project_template/Vovk.h b/project_template/Vovk.h new file mode 100644 index 0000000..d01ac5c --- /dev/null +++ b/project_template/Vovk.h @@ -0,0 +1,53 @@ +#ifndef VOVK_H +#define VOVK_H +#include "Method.h" +#include "Preparator.h" +struct AggModel { + + char * name; + double weight; + double value; + Method* expert; + +}; + +using namespace std; +class Vovk { + +private: + /*int expertsCount = 0; + int activeExpertsCount = 0; + double beta = 0.5; + int step = 0; + double forecast = 0; + double addWeightFromSleepEx = 0.0;*/ + int expertsCount; + int activeExpertsCount; + double beta; + int step; + double forecast; + double addWeightFromSleepEx; + vector timeSeria; + Method** models; + void getNewExpertsValues(); + void checkExperts(); + //ret - log(beta, sum(1..n) weight * pow(beta, L)) + double recalculateWeights(double nextRealValue); + //double timeSeriaDiff = 0; + double timeSeriaDiff; + Preparator *prep; + void calculateTimeSeriaDiff(vector timeSeria); +public: + void setBeta(double beta); + bool useSingleTrashhold ; + double forecastHorizont; + double singleTrashhold; + bool useModelTrashhold; + double modelTrashhold; + Vovk(vector, int, Method**); + ~Vovk(); + double getForecast(); + void updateTS(double nextRealValue); + void addMethod(AggModel); +}; +#endif diff --git a/project_template/libnlopt-0.dll b/project_template/libnlopt-0.dll new file mode 100644 index 0000000..1d63e4b Binary files /dev/null and b/project_template/libnlopt-0.dll differ diff --git a/project_template/libnlopt-0.lib b/project_template/libnlopt-0.lib new file mode 100644 index 0000000..8caa00f 
Binary files /dev/null and b/project_template/libnlopt-0.lib differ diff --git a/project_template/nlopt.h b/project_template/nlopt.h new file mode 100644 index 0000000..4f4150a --- /dev/null +++ b/project_template/nlopt.h @@ -0,0 +1,381 @@ +/* Copyright (c) 2007-2014 Massachusetts Institute of Technology + * + * Permission is hereby granted, free of charge, to any person obtaining + * a copy of this software and associated documentation files (the + * "Software"), to deal in the Software without restriction, including + * without limitation the rights to use, copy, modify, merge, publish, + * distribute, sublicense, and/or sell copies of the Software, and to + * permit persons to whom the Software is furnished to do so, subject to + * the following conditions: + * + * The above copyright notice and this permission notice shall be + * included in all copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, + * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF + * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND + * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE + * LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION + * OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION + * WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + */ + +#ifndef NLOPT_H +#define NLOPT_H + +#include /* for ptrdiff_t and size_t */ + +/* Change 0 to 1 to use stdcall convention under Win32 */ +#if 0 && (defined(_WIN32) || defined(__WIN32__)) +# if defined(__GNUC__) +# define NLOPT_STDCALL __attribute__((stdcall)) +# elif defined(_MSC_VER) || defined(_ICC) || defined(_STDCALL_SUPPORTED) +# define NLOPT_STDCALL __stdcall +# else +# define NLOPT_STDCALL +# endif +#else +# define NLOPT_STDCALL +#endif + +/* for Windows compilers, you should add a line +*/ +#define NLOPT_DLL +/* + when using NLopt from a DLL, in order to do the proper + Windows importing nonsense. */ +#if defined(NLOPT_DLL) && (defined(_WIN32) || defined(__WIN32__)) && !defined(__LCC__) +/* annoying Windows syntax for calling functions in a DLL */ +# if defined(NLOPT_DLL_EXPORT) +# define NLOPT_EXTERN(T) extern __declspec(dllexport) T NLOPT_STDCALL +# else +# define NLOPT_EXTERN(T) extern __declspec(dllimport) T NLOPT_STDCALL +# endif +#else +# define NLOPT_EXTERN(T) extern T NLOPT_STDCALL +#endif + +#ifdef __cplusplus +extern "C" +{ +#endif /* __cplusplus */ + +typedef double (*nlopt_func)(unsigned n, const double *x, + double *gradient, /* NULL if not needed */ + void *func_data); + +typedef void (*nlopt_mfunc)(unsigned m, double *result, + unsigned n, const double *x, + double *gradient, /* NULL if not needed */ + void *func_data); + +/* A preconditioner, which preconditions v at x to return vpre. + (The meaning of "preconditioning" is algorithm-dependent.) */ +typedef void (*nlopt_precond)(unsigned n, const double *x, const double *v, + double *vpre, void *data); + +typedef enum { + /* Naming conventions: + + NLOPT_{G/L}{D/N}_* + = global/local derivative/no-derivative optimization, + respectively + + *_RAND algorithms involve some randomization. + + *_NOSCAL algorithms are *not* scaled to a unit hypercube + (i.e. 
they are sensitive to the units of x) + */ + + NLOPT_GN_DIRECT = 0, + NLOPT_GN_DIRECT_L, + NLOPT_GN_DIRECT_L_RAND, + NLOPT_GN_DIRECT_NOSCAL, + NLOPT_GN_DIRECT_L_NOSCAL, + NLOPT_GN_DIRECT_L_RAND_NOSCAL, + + NLOPT_GN_ORIG_DIRECT, + NLOPT_GN_ORIG_DIRECT_L, + + NLOPT_GD_STOGO, + NLOPT_GD_STOGO_RAND, + + NLOPT_LD_LBFGS_NOCEDAL, + + NLOPT_LD_LBFGS, + + NLOPT_LN_PRAXIS, + + NLOPT_LD_VAR1, + NLOPT_LD_VAR2, + + NLOPT_LD_TNEWTON, + NLOPT_LD_TNEWTON_RESTART, + NLOPT_LD_TNEWTON_PRECOND, + NLOPT_LD_TNEWTON_PRECOND_RESTART, + + NLOPT_GN_CRS2_LM, + + NLOPT_GN_MLSL, + NLOPT_GD_MLSL, + NLOPT_GN_MLSL_LDS, + NLOPT_GD_MLSL_LDS, + + NLOPT_LD_MMA, + + NLOPT_LN_COBYLA, + + NLOPT_LN_NEWUOA, + NLOPT_LN_NEWUOA_BOUND, + + NLOPT_LN_NELDERMEAD, + NLOPT_LN_SBPLX, + + NLOPT_LN_AUGLAG, + NLOPT_LD_AUGLAG, + NLOPT_LN_AUGLAG_EQ, + NLOPT_LD_AUGLAG_EQ, + + NLOPT_LN_BOBYQA, + + NLOPT_GN_ISRES, + + /* new variants that require local_optimizer to be set, + not with older constants for backwards compatibility */ + NLOPT_AUGLAG, + NLOPT_AUGLAG_EQ, + NLOPT_G_MLSL, + NLOPT_G_MLSL_LDS, + + NLOPT_LD_SLSQP, + + NLOPT_LD_CCSAQ, + + NLOPT_GN_ESCH, + + NLOPT_NUM_ALGORITHMS /* not an algorithm, just the number of them */ +} nlopt_algorithm; + +NLOPT_EXTERN(const char *) nlopt_algorithm_name(nlopt_algorithm a); + +typedef enum { + NLOPT_FAILURE = -1, /* generic failure code */ + NLOPT_INVALID_ARGS = -2, + NLOPT_OUT_OF_MEMORY = -3, + NLOPT_ROUNDOFF_LIMITED = -4, + NLOPT_FORCED_STOP = -5, + NLOPT_SUCCESS = 1, /* generic success code */ + NLOPT_STOPVAL_REACHED = 2, + NLOPT_FTOL_REACHED = 3, + NLOPT_XTOL_REACHED = 4, + NLOPT_MAXEVAL_REACHED = 5, + NLOPT_MAXTIME_REACHED = 6 +} nlopt_result; + +#define NLOPT_MINF_MAX_REACHED NLOPT_STOPVAL_REACHED + +NLOPT_EXTERN(void) nlopt_srand(unsigned long seed); +NLOPT_EXTERN(void) nlopt_srand_time(void); + +NLOPT_EXTERN(void) nlopt_version(int *major, int *minor, int *bugfix); + +/*************************** OBJECT-ORIENTED API **************************/ +/* The style here is that we create an nlopt_opt "object" (an opaque pointer), + then set various optimization parameters, and then execute the + algorithm. 
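Editorial sketch: the object-oriented C API described in the comment above is used by creating an opaque nlopt_opt for a given algorithm and dimension, setting bounds, objective and stopping criteria on it, and then calling nlopt_optimize. A minimal example against the declarations in this header follows; the objective sphere and the 2-dimensional setup are illustrative, not from the repository, and the program is meant to be built separately and linked against libnlopt-0.

#include <math.h>
#include <stdio.h>
#include "nlopt.h"

/* nlopt_func: return f(x) and, if requested, its gradient. */
static double sphere(unsigned n, const double *x, double *grad, void *data) {
    (void) data;
    if (grad)
        for (unsigned i = 0; i < n; i++) grad[i] = 2.0 * x[i];
    double s = 0.0;
    for (unsigned i = 0; i < n; i++) s += x[i] * x[i];
    return s;
}

int main(void) {
    nlopt_opt opt = nlopt_create(NLOPT_LD_LBFGS, 2);   /* algorithm + dimension */
    nlopt_set_min_objective(opt, sphere, NULL);
    nlopt_set_xtol_rel(opt, 1e-6);                     /* stopping criterion */
    double x[2] = { 1.0, -1.0 };                       /* initial guess, overwritten with the minimizer */
    double minf;
    nlopt_result res = nlopt_optimize(opt, x, &minf);
    printf("result %d, min %g at (%g, %g)\n", (int) res, minf, x[0], x[1]);
    nlopt_destroy(opt);
    return 0;
}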
In this way, we can add more and more optimization parameters + (including algorithm-specific ones) without breaking backwards + compatibility, having functions with zillions of parameters, or + relying non-reentrantly on global variables.*/ + +struct nlopt_opt_s; /* opaque structure, defined internally */ +typedef struct nlopt_opt_s *nlopt_opt; + +/* the only immutable parameters of an optimization are the algorithm and + the dimension n of the problem, since changing either of these could + have side-effects on lots of other parameters */ +NLOPT_EXTERN(nlopt_opt) nlopt_create(nlopt_algorithm algorithm, unsigned n); +NLOPT_EXTERN(void) nlopt_destroy(nlopt_opt opt); +NLOPT_EXTERN(nlopt_opt) nlopt_copy(const nlopt_opt opt); + +NLOPT_EXTERN(nlopt_result) nlopt_optimize(nlopt_opt opt, double *x, + double *opt_f); + +NLOPT_EXTERN(nlopt_result) nlopt_set_min_objective(nlopt_opt opt, nlopt_func f, + void *f_data); +NLOPT_EXTERN(nlopt_result) nlopt_set_max_objective(nlopt_opt opt, nlopt_func f, + void *f_data); + +NLOPT_EXTERN(nlopt_result) nlopt_set_precond_min_objective(nlopt_opt opt, nlopt_func f, nlopt_precond pre, void *f_data); +NLOPT_EXTERN(nlopt_result) nlopt_set_precond_max_objective(nlopt_opt opt, nlopt_func f, nlopt_precond pre, void *f_data); + +NLOPT_EXTERN(nlopt_algorithm) nlopt_get_algorithm(const nlopt_opt opt); +NLOPT_EXTERN(unsigned) nlopt_get_dimension(const nlopt_opt opt); + +/* constraints: */ + +NLOPT_EXTERN(nlopt_result) nlopt_set_lower_bounds(nlopt_opt opt, + const double *lb); +NLOPT_EXTERN(nlopt_result) nlopt_set_lower_bounds1(nlopt_opt opt, double lb); +NLOPT_EXTERN(nlopt_result) nlopt_get_lower_bounds(const nlopt_opt opt, + double *lb); +NLOPT_EXTERN(nlopt_result) nlopt_set_upper_bounds(nlopt_opt opt, + const double *ub); +NLOPT_EXTERN(nlopt_result) nlopt_set_upper_bounds1(nlopt_opt opt, double ub); +NLOPT_EXTERN(nlopt_result) nlopt_get_upper_bounds(const nlopt_opt opt, + double *ub); + +NLOPT_EXTERN(nlopt_result) nlopt_remove_inequality_constraints(nlopt_opt opt); +NLOPT_EXTERN(nlopt_result) nlopt_add_inequality_constraint(nlopt_opt opt, + nlopt_func fc, + void *fc_data, + double tol); +NLOPT_EXTERN(nlopt_result) nlopt_add_precond_inequality_constraint( + nlopt_opt opt, nlopt_func fc, nlopt_precond pre, void *fc_data, + double tol); +NLOPT_EXTERN(nlopt_result) nlopt_add_inequality_mconstraint(nlopt_opt opt, + unsigned m, + nlopt_mfunc fc, + void *fc_data, + const double *tol); + +NLOPT_EXTERN(nlopt_result) nlopt_remove_equality_constraints(nlopt_opt opt); +NLOPT_EXTERN(nlopt_result) nlopt_add_equality_constraint(nlopt_opt opt, + nlopt_func h, + void *h_data, + double tol); +NLOPT_EXTERN(nlopt_result) nlopt_add_precond_equality_constraint( + nlopt_opt opt, nlopt_func h, nlopt_precond pre, void *h_data, + double tol); +NLOPT_EXTERN(nlopt_result) nlopt_add_equality_mconstraint(nlopt_opt opt, + unsigned m, + nlopt_mfunc h, + void *h_data, + const double *tol); + +/* stopping criteria: */ + +NLOPT_EXTERN(nlopt_result) nlopt_set_stopval(nlopt_opt opt, double stopval); +NLOPT_EXTERN(double) nlopt_get_stopval(const nlopt_opt opt); + +NLOPT_EXTERN(nlopt_result) nlopt_set_ftol_rel(nlopt_opt opt, double tol); +NLOPT_EXTERN(double) nlopt_get_ftol_rel(const nlopt_opt opt); +NLOPT_EXTERN(nlopt_result) nlopt_set_ftol_abs(nlopt_opt opt, double tol); +NLOPT_EXTERN(double) nlopt_get_ftol_abs(const nlopt_opt opt); + +NLOPT_EXTERN(nlopt_result) nlopt_set_xtol_rel(nlopt_opt opt, double tol); +NLOPT_EXTERN(double) nlopt_get_xtol_rel(const nlopt_opt opt); +NLOPT_EXTERN(nlopt_result) 
nlopt_set_xtol_abs1(nlopt_opt opt, double tol); +NLOPT_EXTERN(nlopt_result) nlopt_set_xtol_abs(nlopt_opt opt, const double *tol); +NLOPT_EXTERN(nlopt_result) nlopt_get_xtol_abs(const nlopt_opt opt, + double *tol); + +NLOPT_EXTERN(nlopt_result) nlopt_set_maxeval(nlopt_opt opt, int maxeval); +NLOPT_EXTERN(int) nlopt_get_maxeval(const nlopt_opt opt); + +NLOPT_EXTERN(nlopt_result) nlopt_set_maxtime(nlopt_opt opt, double maxtime); +NLOPT_EXTERN(double) nlopt_get_maxtime(const nlopt_opt opt); + +NLOPT_EXTERN(nlopt_result) nlopt_force_stop(nlopt_opt opt); +NLOPT_EXTERN(nlopt_result) nlopt_set_force_stop(nlopt_opt opt, int val); +NLOPT_EXTERN(int) nlopt_get_force_stop(const nlopt_opt opt); + +/* more algorithm-specific parameters */ + +NLOPT_EXTERN(nlopt_result) nlopt_set_local_optimizer(nlopt_opt opt, + const nlopt_opt local_opt); + +NLOPT_EXTERN(nlopt_result) nlopt_set_population(nlopt_opt opt, unsigned pop); +NLOPT_EXTERN(unsigned) nlopt_get_population(const nlopt_opt opt); + +NLOPT_EXTERN(nlopt_result) nlopt_set_vector_storage(nlopt_opt opt, unsigned dim); +NLOPT_EXTERN(unsigned) nlopt_get_vector_storage(const nlopt_opt opt); + +NLOPT_EXTERN(nlopt_result) nlopt_set_default_initial_step(nlopt_opt opt, + const double *x); +NLOPT_EXTERN(nlopt_result) nlopt_set_initial_step(nlopt_opt opt, + const double *dx); +NLOPT_EXTERN(nlopt_result) nlopt_set_initial_step1(nlopt_opt opt, double dx); +NLOPT_EXTERN(nlopt_result) nlopt_get_initial_step(const nlopt_opt opt, + const double *x, double *dx); + +/* the following are functions mainly designed to be used internally + by the Fortran and SWIG wrappers, allow us to tel nlopt_destroy and + nlopt_copy to do something to the f_data pointers (e.g. free or + duplicate them, respectively) */ +typedef void* (*nlopt_munge)(void *p); +NLOPT_EXTERN(void) nlopt_set_munge(nlopt_opt opt, + nlopt_munge munge_on_destroy, + nlopt_munge munge_on_copy); +typedef void* (*nlopt_munge2)(void *p, void *data); +NLOPT_EXTERN(void) nlopt_munge_data(nlopt_opt opt, + nlopt_munge2 munge, void *data); + +/*************************** DEPRECATED API **************************/ +/* The new "object-oriented" API is preferred, since it allows us to + gracefully add new features and algorithm-specific options in a + re-entrant way, and we can automatically assume reasonable defaults + for unspecified parameters. */ + +/* Where possible (e.g. 
for gcc >= 3.1), enable a compiler warning + for code that uses a deprecated function */ +#if defined(__GNUC__) && (__GNUC__ > 3 || (__GNUC__==3 && __GNUC_MINOR__ > 0)) +# define NLOPT_DEPRECATED __attribute__((deprecated)) +#else +# define NLOPT_DEPRECATED +#endif + +typedef double (*nlopt_func_old)(int n, const double *x, + double *gradient, /* NULL if not needed */ + void *func_data); + +NLOPT_EXTERN(nlopt_result) nlopt_minimize( + nlopt_algorithm algorithm, + int n, nlopt_func_old f, void *f_data, + const double *lb, const double *ub, /* bounds */ + double *x, /* in: initial guess, out: minimizer */ + double *minf, /* out: minimum */ + double minf_max, double ftol_rel, double ftol_abs, + double xtol_rel, const double *xtol_abs, + int maxeval, double maxtime) NLOPT_DEPRECATED; + +NLOPT_EXTERN(nlopt_result) nlopt_minimize_constrained( + nlopt_algorithm algorithm, + int n, nlopt_func_old f, void *f_data, + int m, nlopt_func_old fc, void *fc_data, ptrdiff_t fc_datum_size, + const double *lb, const double *ub, /* bounds */ + double *x, /* in: initial guess, out: minimizer */ + double *minf, /* out: minimum */ + double minf_max, double ftol_rel, double ftol_abs, + double xtol_rel, const double *xtol_abs, + int maxeval, double maxtime) NLOPT_DEPRECATED; + +NLOPT_EXTERN(nlopt_result) nlopt_minimize_econstrained( + nlopt_algorithm algorithm, + int n, nlopt_func_old f, void *f_data, + int m, nlopt_func_old fc, void *fc_data, ptrdiff_t fc_datum_size, + int p, nlopt_func_old h, void *h_data, ptrdiff_t h_datum_size, + const double *lb, const double *ub, /* bounds */ + double *x, /* in: initial guess, out: minimizer */ + double *minf, /* out: minimum */ + double minf_max, double ftol_rel, double ftol_abs, + double xtol_rel, const double *xtol_abs, + double htol_rel, double htol_abs, + int maxeval, double maxtime) NLOPT_DEPRECATED; + +NLOPT_EXTERN(void) nlopt_get_local_search_algorithm(nlopt_algorithm *deriv, + nlopt_algorithm *nonderiv, + int *maxeval) NLOPT_DEPRECATED; +NLOPT_EXTERN(void) nlopt_set_local_search_algorithm(nlopt_algorithm deriv, + nlopt_algorithm nonderiv, + int maxeval) NLOPT_DEPRECATED; + +NLOPT_EXTERN(int) nlopt_get_stochastic_population(void) NLOPT_DEPRECATED; +NLOPT_EXTERN(void) nlopt_set_stochastic_population(int pop) NLOPT_DEPRECATED; + +/*********************************************************************/ + +#ifdef __cplusplus +} /* extern "C" */ +#endif /* __cplusplus */ + +#endif diff --git a/project_template/nlopt.hpp b/project_template/nlopt.hpp new file mode 100644 index 0000000..e7accba --- /dev/null +++ b/project_template/nlopt.hpp @@ -0,0 +1,596 @@ +/* Copyright (c) 2007-2011 Massachusetts Institute of Technology + * + * Permission is hereby granted, free of charge, to any person obtaining + * a copy of this software and associated documentation files (the + * "Software"), to deal in the Software without restriction, including + * without limitation the rights to use, copy, modify, merge, publish, + * distribute, sublicense, and/or sell copies of the Software, and to + * permit persons to whom the Software is furnished to do so, subject to + * the following conditions: + * + * The above copyright notice and this permission notice shall be + * included in all copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, + * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF + * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND + * NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE + * LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION + * OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION + * WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + */ + +// C++ style wrapper around NLopt API +// nlopt.hpp is AUTOMATICALLY GENERATED from nlopt-in.hpp - edit the latter! + +#ifndef NLOPT_HPP +#define NLOPT_HPP + +#include "nlopt.h" + +#include +#include +#include +#include +#include +#include + +// convenience overloading for below (not in nlopt:: since has nlopt_ prefix) +inline nlopt_result nlopt_get_initial_step(const nlopt_opt opt, double *dx) { + return nlopt_get_initial_step(opt, (const double *) NULL, dx); +} + +namespace nlopt { + + ////////////////////////////////////////////////////////////////////// + // nlopt::* namespace versions of the C enumerated types + // AUTOMATICALLY GENERATED, DO NOT EDIT + // GEN_ENUMS_HERE + enum algorithm { + GN_DIRECT = 0, + GN_DIRECT_L, + GN_DIRECT_L_RAND, + GN_DIRECT_NOSCAL, + GN_DIRECT_L_NOSCAL, + GN_DIRECT_L_RAND_NOSCAL, + GN_ORIG_DIRECT, + GN_ORIG_DIRECT_L, + GD_STOGO, + GD_STOGO_RAND, + LD_LBFGS_NOCEDAL, + LD_LBFGS, + LN_PRAXIS, + LD_VAR1, + LD_VAR2, + LD_TNEWTON, + LD_TNEWTON_RESTART, + LD_TNEWTON_PRECOND, + LD_TNEWTON_PRECOND_RESTART, + GN_CRS2_LM, + GN_MLSL, + GD_MLSL, + GN_MLSL_LDS, + GD_MLSL_LDS, + LD_MMA, + LN_COBYLA, + LN_NEWUOA, + LN_NEWUOA_BOUND, + LN_NELDERMEAD, + LN_SBPLX, + LN_AUGLAG, + LD_AUGLAG, + LN_AUGLAG_EQ, + LD_AUGLAG_EQ, + LN_BOBYQA, + GN_ISRES, + AUGLAG, + AUGLAG_EQ, + G_MLSL, + G_MLSL_LDS, + LD_SLSQP, + LD_CCSAQ, + GN_ESCH, + NUM_ALGORITHMS /* not an algorithm, just the number of them */ + }; + enum result { + FAILURE = -1, /* generic failure code */ + INVALID_ARGS = -2, + OUT_OF_MEMORY = -3, + ROUNDOFF_LIMITED = -4, + FORCED_STOP = -5, + SUCCESS = 1, /* generic success code */ + STOPVAL_REACHED = 2, + FTOL_REACHED = 3, + XTOL_REACHED = 4, + MAXEVAL_REACHED = 5, + MAXTIME_REACHED = 6 + }; + // GEN_ENUMS_HERE + ////////////////////////////////////////////////////////////////////// + + typedef nlopt_func func; // nlopt::func synoynm + typedef nlopt_mfunc mfunc; // nlopt::mfunc synoynm + + // alternative to nlopt_func that takes std::vector + // ... 
unfortunately requires a data copy + typedef double (*vfunc)(const std::vector &x, + std::vector &grad, void *data); + + ////////////////////////////////////////////////////////////////////// + + // NLopt-specific exceptions (corresponding to error codes): + class roundoff_limited : public std::runtime_error { + public: + roundoff_limited() : std::runtime_error("nlopt roundoff-limited") {} + }; + + class forced_stop : public std::runtime_error { + public: + forced_stop() : std::runtime_error("nlopt forced stop") {} + }; + + ////////////////////////////////////////////////////////////////////// + + class opt { + private: + nlopt_opt o; + + void mythrow(nlopt_result ret) const { + switch (ret) { + case NLOPT_FAILURE: throw std::runtime_error("nlopt failure"); + case NLOPT_OUT_OF_MEMORY: throw std::bad_alloc(); + case NLOPT_INVALID_ARGS: throw std::invalid_argument("nlopt invalid argument"); + case NLOPT_ROUNDOFF_LIMITED: throw roundoff_limited(); + case NLOPT_FORCED_STOP: throw forced_stop(); + default: break; + } + } + + typedef struct { + opt *o; + mfunc mf; func f; void *f_data; + vfunc vf; + nlopt_munge munge_destroy, munge_copy; // non-NULL for SWIG wrappers + } myfunc_data; + + // free/destroy f_data in nlopt_destroy and nlopt_copy, respectively + static void *free_myfunc_data(void *p) { + myfunc_data *d = (myfunc_data *) p; + if (d) { + if (d->f_data && d->munge_destroy) d->munge_destroy(d->f_data); + delete d; + } + return NULL; + } + static void *dup_myfunc_data(void *p) { + myfunc_data *d = (myfunc_data *) p; + if (d) { + void *f_data; + if (d->f_data && d->munge_copy) { + f_data = d->munge_copy(d->f_data); + if (!f_data) return NULL; + } + else + f_data = d->f_data; + myfunc_data *dnew = new myfunc_data; + if (dnew) { + *dnew = *d; + dnew->f_data = f_data; + } + return (void*) dnew; + } + else return NULL; + } + + // nlopt_func wrapper that catches exceptions + static double myfunc(unsigned n, const double *x, double *grad, void *d_) { + myfunc_data *d = reinterpret_cast(d_); + try { + return d->f(n, x, grad, d->f_data); + } + catch (std::bad_alloc&) + { d->o->forced_stop_reason = NLOPT_OUT_OF_MEMORY; } + catch (std::invalid_argument&) + { d->o->forced_stop_reason = NLOPT_INVALID_ARGS; } + catch (roundoff_limited&) + { d->o->forced_stop_reason = NLOPT_ROUNDOFF_LIMITED; } + catch (forced_stop&) + { d->o->forced_stop_reason = NLOPT_FORCED_STOP; } + catch (...) + { d->o->forced_stop_reason = NLOPT_FAILURE; } + d->o->force_stop(); // stop gracefully, opt::optimize will re-throw + return HUGE_VAL; + } + + // nlopt_mfunc wrapper that catches exceptions + static void mymfunc(unsigned m, double *result, + unsigned n, const double *x, double *grad, void *d_) { + myfunc_data *d = reinterpret_cast(d_); + try { + d->mf(m, result, n, x, grad, d->f_data); + return; + } + catch (std::bad_alloc&) + { d->o->forced_stop_reason = NLOPT_OUT_OF_MEMORY; } + catch (std::invalid_argument&) + { d->o->forced_stop_reason = NLOPT_INVALID_ARGS; } + catch (roundoff_limited&) + { d->o->forced_stop_reason = NLOPT_ROUNDOFF_LIMITED; } + catch (forced_stop&) + { d->o->forced_stop_reason = NLOPT_FORCED_STOP; } + catch (...) 
+ { d->o->forced_stop_reason = NLOPT_FAILURE; } + d->o->force_stop(); // stop gracefully, opt::optimize will re-throw + for (unsigned i = 0; i < m; ++i) result[i] = HUGE_VAL; + } + + std::vector xtmp, gradtmp, gradtmp0; // scratch for myvfunc + + // nlopt_func wrapper, using std::vector + static double myvfunc(unsigned n, const double *x, double *grad, void *d_){ + myfunc_data *d = reinterpret_cast(d_); + try { + std::vector &xv = d->o->xtmp; + if (n) std::memcpy(&xv[0], x, n * sizeof(double)); + double val=d->vf(xv, grad ? d->o->gradtmp : d->o->gradtmp0, d->f_data); + if (grad && n) { + std::vector &gradv = d->o->gradtmp; + std::memcpy(grad, &gradv[0], n * sizeof(double)); + } + return val; + } + catch (std::bad_alloc&) + { d->o->forced_stop_reason = NLOPT_OUT_OF_MEMORY; } + catch (std::invalid_argument&) + { d->o->forced_stop_reason = NLOPT_INVALID_ARGS; } + catch (roundoff_limited&) + { d->o->forced_stop_reason = NLOPT_ROUNDOFF_LIMITED; } + catch (forced_stop&) + { d->o->forced_stop_reason = NLOPT_FORCED_STOP; } + catch (...) + { d->o->forced_stop_reason = NLOPT_FAILURE; } + d->o->force_stop(); // stop gracefully, opt::optimize will re-throw + return HUGE_VAL; + } + + void alloc_tmp() { + if (xtmp.size() != nlopt_get_dimension(o)) { + xtmp = std::vector(nlopt_get_dimension(o)); + gradtmp = std::vector(nlopt_get_dimension(o)); + } + } + + result last_result; + double last_optf; + nlopt_result forced_stop_reason; + + public: + // Constructors etc. + opt() : o(NULL), xtmp(0), gradtmp(0), gradtmp0(0), + last_result(nlopt::FAILURE), last_optf(HUGE_VAL), + forced_stop_reason(NLOPT_FORCED_STOP) {} + ~opt() { nlopt_destroy(o); } + opt(algorithm a, unsigned n) : + o(nlopt_create(nlopt_algorithm(a), n)), + xtmp(0), gradtmp(0), gradtmp0(0), + last_result(nlopt::FAILURE), last_optf(HUGE_VAL), + forced_stop_reason(NLOPT_FORCED_STOP) { + if (!o) throw std::bad_alloc(); + nlopt_set_munge(o, free_myfunc_data, dup_myfunc_data); + } + opt(const opt& f) : o(nlopt_copy(f.o)), + xtmp(f.xtmp), gradtmp(f.gradtmp), gradtmp0(0), + last_result(f.last_result), last_optf(f.last_optf), + forced_stop_reason(f.forced_stop_reason) { + if (f.o && !o) throw std::bad_alloc(); + } + opt& operator=(opt const& f) { + if (this == &f) return *this; // self-assignment + nlopt_destroy(o); + o = nlopt_copy(f.o); + if (f.o && !o) throw std::bad_alloc(); + xtmp = f.xtmp; gradtmp = f.gradtmp; + last_result = f.last_result; last_optf = f.last_optf; + forced_stop_reason = f.forced_stop_reason; + return *this; + } + + // Do the optimization: + result optimize(std::vector &x, double &opt_f) { + if (o && nlopt_get_dimension(o) != x.size()) + throw std::invalid_argument("dimension mismatch"); + forced_stop_reason = NLOPT_FORCED_STOP; + nlopt_result ret = nlopt_optimize(o, x.empty() ? 
NULL : &x[0], &opt_f); + last_result = result(ret); + last_optf = opt_f; + if (ret == NLOPT_FORCED_STOP) + mythrow(forced_stop_reason); + mythrow(ret); + return last_result; + } + + // variant mainly useful for SWIG wrappers: + std::vector optimize(const std::vector &x0) { + std::vector x(x0); + last_result = optimize(x, last_optf); + return x; + } + result last_optimize_result() const { return last_result; } + double last_optimum_value() const { return last_optf; } + + // accessors: + algorithm get_algorithm() const { + if (!o) throw std::runtime_error("uninitialized nlopt::opt"); + return algorithm(nlopt_get_algorithm(o)); + } + const char *get_algorithm_name() const { + if (!o) throw std::runtime_error("uninitialized nlopt::opt"); + return nlopt_algorithm_name(nlopt_get_algorithm(o)); + } + unsigned get_dimension() const { + if (!o) throw std::runtime_error("uninitialized nlopt::opt"); + return nlopt_get_dimension(o); + } + + // Set the objective function + void set_min_objective(func f, void *f_data) { + myfunc_data *d = new myfunc_data; + if (!d) throw std::bad_alloc(); + d->o = this; d->f = f; d->f_data = f_data; d->mf = NULL; d->vf = NULL; + d->munge_destroy = d->munge_copy = NULL; + mythrow(nlopt_set_min_objective(o, myfunc, d)); // d freed via o + } + void set_min_objective(vfunc vf, void *f_data) { + myfunc_data *d = new myfunc_data; + if (!d) throw std::bad_alloc(); + d->o = this; d->f = NULL; d->f_data = f_data; d->mf = NULL; d->vf = vf; + d->munge_destroy = d->munge_copy = NULL; + mythrow(nlopt_set_min_objective(o, myvfunc, d)); // d freed via o + alloc_tmp(); + } + void set_max_objective(func f, void *f_data) { + myfunc_data *d = new myfunc_data; + if (!d) throw std::bad_alloc(); + d->o = this; d->f = f; d->f_data = f_data; d->mf = NULL; d->vf = NULL; + d->munge_destroy = d->munge_copy = NULL; + mythrow(nlopt_set_max_objective(o, myfunc, d)); // d freed via o + } + void set_max_objective(vfunc vf, void *f_data) { + myfunc_data *d = new myfunc_data; + if (!d) throw std::bad_alloc(); + d->o = this; d->f = NULL; d->f_data = f_data; d->mf = NULL; d->vf = vf; + d->munge_destroy = d->munge_copy = NULL; + mythrow(nlopt_set_max_objective(o, myvfunc, d)); // d freed via o + alloc_tmp(); + } + + // for internal use in SWIG wrappers -- variant that + // takes ownership of f_data, with munging for destroy/copy + void set_min_objective(func f, void *f_data, + nlopt_munge md, nlopt_munge mc) { + myfunc_data *d = new myfunc_data; + if (!d) throw std::bad_alloc(); + d->o = this; d->f = f; d->f_data = f_data; d->mf = NULL; d->vf = NULL; + d->munge_destroy = md; d->munge_copy = mc; + mythrow(nlopt_set_min_objective(o, myfunc, d)); // d freed via o + } + void set_max_objective(func f, void *f_data, + nlopt_munge md, nlopt_munge mc) { + myfunc_data *d = new myfunc_data; + if (!d) throw std::bad_alloc(); + d->o = this; d->f = f; d->f_data = f_data; d->mf = NULL; d->vf = NULL; + d->munge_destroy = md; d->munge_copy = mc; + mythrow(nlopt_set_max_objective(o, myfunc, d)); // d freed via o + } + + // Nonlinear constraints: + + void remove_inequality_constraints() { + nlopt_result ret = nlopt_remove_inequality_constraints(o); + mythrow(ret); + } + void add_inequality_constraint(func f, void *f_data, double tol=0) { + myfunc_data *d = new myfunc_data; + if (!d) throw std::bad_alloc(); + d->o = this; d->f = f; d->f_data = f_data; d->mf = NULL; d->vf = NULL; + d->munge_destroy = d->munge_copy = NULL; + mythrow(nlopt_add_inequality_constraint(o, myfunc, d, tol)); + } + void add_inequality_constraint(vfunc 
vf, void *f_data, double tol=0) { + myfunc_data *d = new myfunc_data; + if (!d) throw std::bad_alloc(); + d->o = this; d->f = NULL; d->f_data = f_data; d->mf = NULL; d->vf = vf; + d->munge_destroy = d->munge_copy = NULL; + mythrow(nlopt_add_inequality_constraint(o, myvfunc, d, tol)); + alloc_tmp(); + } + void add_inequality_mconstraint(mfunc mf, void *f_data, + const std::vector &tol) { + myfunc_data *d = new myfunc_data; + if (!d) throw std::bad_alloc(); + d->o = this; d->mf = mf; d->f_data = f_data; d->f = NULL; d->vf = NULL; + d->munge_destroy = d->munge_copy = NULL; + mythrow(nlopt_add_inequality_mconstraint(o, tol.size(), mymfunc, d, + tol.empty() ? NULL : &tol[0])); + } + + void remove_equality_constraints() { + nlopt_result ret = nlopt_remove_equality_constraints(o); + mythrow(ret); + } + void add_equality_constraint(func f, void *f_data, double tol=0) { + myfunc_data *d = new myfunc_data; + if (!d) throw std::bad_alloc(); + d->o = this; d->f = f; d->f_data = f_data; d->mf = NULL; d->vf = NULL; + d->munge_destroy = d->munge_copy = NULL; + mythrow(nlopt_add_equality_constraint(o, myfunc, d, tol)); + } + void add_equality_constraint(vfunc vf, void *f_data, double tol=0) { + myfunc_data *d = new myfunc_data; + if (!d) throw std::bad_alloc(); + d->o = this; d->f = NULL; d->f_data = f_data; d->mf = NULL; d->vf = vf; + d->munge_destroy = d->munge_copy = NULL; + mythrow(nlopt_add_equality_constraint(o, myvfunc, d, tol)); + alloc_tmp(); + } + void add_equality_mconstraint(mfunc mf, void *f_data, + const std::vector &tol) { + myfunc_data *d = new myfunc_data; + if (!d) throw std::bad_alloc(); + d->o = this; d->mf = mf; d->f_data = f_data; d->f = NULL; d->vf = NULL; + d->munge_destroy = d->munge_copy = NULL; + mythrow(nlopt_add_equality_mconstraint(o, tol.size(), mymfunc, d, + tol.empty() ? NULL : &tol[0])); + } + + // For internal use in SWIG wrappers (see also above) + void add_inequality_constraint(func f, void *f_data, + nlopt_munge md, nlopt_munge mc, + double tol=0) { + myfunc_data *d = new myfunc_data; + if (!d) throw std::bad_alloc(); + d->o = this; d->f = f; d->f_data = f_data; d->mf = NULL; d->vf = NULL; + d->munge_destroy = md; d->munge_copy = mc; + mythrow(nlopt_add_inequality_constraint(o, myfunc, d, tol)); + } + void add_equality_constraint(func f, void *f_data, + nlopt_munge md, nlopt_munge mc, + double tol=0) { + myfunc_data *d = new myfunc_data; + if (!d) throw std::bad_alloc(); + d->o = this; d->f = f; d->f_data = f_data; d->mf = NULL; d->vf = NULL; + d->munge_destroy = md; d->munge_copy = mc; + mythrow(nlopt_add_equality_constraint(o, myfunc, d, tol)); + } + void add_inequality_mconstraint(mfunc mf, void *f_data, + nlopt_munge md, nlopt_munge mc, + const std::vector &tol) { + myfunc_data *d = new myfunc_data; + if (!d) throw std::bad_alloc(); + d->o = this; d->mf = mf; d->f_data = f_data; d->f = NULL; d->vf = NULL; + d->munge_destroy = md; d->munge_copy = mc; + mythrow(nlopt_add_inequality_mconstraint(o, tol.size(), mymfunc, d, + tol.empty() ? NULL : &tol[0])); + } + void add_equality_mconstraint(mfunc mf, void *f_data, + nlopt_munge md, nlopt_munge mc, + const std::vector &tol) { + myfunc_data *d = new myfunc_data; + if (!d) throw std::bad_alloc(); + d->o = this; d->mf = mf; d->f_data = f_data; d->f = NULL; d->vf = NULL; + d->munge_destroy = md; d->munge_copy = mc; + mythrow(nlopt_add_equality_mconstraint(o, tol.size(), mymfunc, d, + tol.empty() ? 
NULL : &tol[0])); + } + +#define NLOPT_GETSET_VEC(name) \ + void set_##name(double val) { \ + mythrow(nlopt_set_##name##1(o, val)); \ + } \ + void get_##name(std::vector &v) const { \ + if (o && nlopt_get_dimension(o) != v.size()) \ + throw std::invalid_argument("dimension mismatch"); \ + mythrow(nlopt_get_##name(o, v.empty() ? NULL : &v[0])); \ + } \ + std::vector get_##name() const { \ + if (!o) throw std::runtime_error("uninitialized nlopt::opt"); \ + std::vector v(nlopt_get_dimension(o)); \ + get_##name(v); \ + return v; \ + } \ + void set_##name(const std::vector &v) { \ + if (o && nlopt_get_dimension(o) != v.size()) \ + throw std::invalid_argument("dimension mismatch"); \ + mythrow(nlopt_set_##name(o, v.empty() ? NULL : &v[0])); \ + } + + NLOPT_GETSET_VEC(lower_bounds) + NLOPT_GETSET_VEC(upper_bounds) + + // stopping criteria: + +#define NLOPT_GETSET(T, name) \ + T get_##name() const { \ + if (!o) throw std::runtime_error("uninitialized nlopt::opt"); \ + return nlopt_get_##name(o); \ + } \ + void set_##name(T name) { \ + mythrow(nlopt_set_##name(o, name)); \ + } + NLOPT_GETSET(double, stopval) + NLOPT_GETSET(double, ftol_rel) + NLOPT_GETSET(double, ftol_abs) + NLOPT_GETSET(double, xtol_rel) + NLOPT_GETSET_VEC(xtol_abs) + NLOPT_GETSET(int, maxeval) + NLOPT_GETSET(double, maxtime) + + NLOPT_GETSET(int, force_stop) + void force_stop() { set_force_stop(1); } + + // algorithm-specific parameters: + + void set_local_optimizer(const opt &lo) { + nlopt_result ret = nlopt_set_local_optimizer(o, lo.o); + mythrow(ret); + } + + NLOPT_GETSET(unsigned, population) + NLOPT_GETSET(unsigned, vector_storage) + NLOPT_GETSET_VEC(initial_step) + + void set_default_initial_step(const std::vector &x) { + nlopt_result ret + = nlopt_set_default_initial_step(o, x.empty() ? NULL : &x[0]); + mythrow(ret); + } + void get_initial_step(const std::vector &x, std::vector &dx) const { + if (o && (nlopt_get_dimension(o) != x.size() + || nlopt_get_dimension(o) != dx.size())) + throw std::invalid_argument("dimension mismatch"); + nlopt_result ret = nlopt_get_initial_step(o, x.empty() ? NULL : &x[0], + dx.empty() ? 
NULL : &dx[0]); + mythrow(ret); + } + std::vector get_initial_step_(const std::vector &x) const { + if (!o) throw std::runtime_error("uninitialized nlopt::opt"); + std::vector v(nlopt_get_dimension(o)); + get_initial_step(x, v); + return v; + } + }; + +#undef NLOPT_GETSET +#undef NLOPT_GETSET_VEC + + ////////////////////////////////////////////////////////////////////// + + inline void srand(unsigned long seed) { nlopt_srand(seed); } + inline void srand_time() { nlopt_srand_time(); } + inline void version(int &major, int &minor, int &bugfix) { + nlopt_version(&major, &minor, &bugfix); + } + inline int version_major() { + int major, minor, bugfix; + nlopt_version(&major, &minor, &bugfix); + return major; + } + inline int version_minor() { + int major, minor, bugfix; + nlopt_version(&major, &minor, &bugfix); + return minor; + } + inline int version_bugfix() { + int major, minor, bugfix; + nlopt_version(&major, &minor, &bugfix); + return bugfix; + } + inline const char *algorithm_name(algorithm a) { + return nlopt_algorithm_name(nlopt_algorithm(a)); + } + + ////////////////////////////////////////////////////////////////////// + +} // namespace nlopt + +#endif /* NLOPT_HPP */ diff --git a/project_template/project_template.cpp b/project_template/project_template.cpp new file mode 100644 index 0000000..f0b690a --- /dev/null +++ b/project_template/project_template.cpp @@ -0,0 +1,416 @@ +#include "stdafx.h" +#include +#include "Method.h" +#include "Dsa.h" +#include "Aic.h" +#include "AicWeights.h" +#include "Normalization.h" +#include "Mape.h" +#include "SMape.h" +#include "Vovk.h" +#include "NoTrendNoSeasonality.h" +#include "NoTrendAddSeasonality.h" +#include "NoTrendMultSeasonality.h" +#include "AddTrendNoSeasonality.h" +#include "AddTrendAddSeasonality.h" +#include "AddTrendMultSeasonality.h" +#include "MultTrendNoSeasonality.h" +#include "MultTrendAddSeasonality.h" +#include "MultTrendMultSeasonality.h" +#include "DATrendNoSeasonality.h" +#include "DATrendAddSeasonality.h" +#include "DATrendMultSeasonality.h" +#include "DMTrendNoSeasonality.h" +#include "DMTrendAddSeasonality.h" +#include "DMTrendMultSeasonality.h" +#include "Tsaur.h" +#include "A.h" +#include "Fuzzy.h" +#include "FuzzyWithSets.h" +#include "File.h" +#include +#include "Param.h" +#include +#include +#include +#include +#include +#include "Transformation.h" +#include "nlopt.hpp" +#include "nlopt.h" +#include "BIC.h" + + +int iMethod; // +const int n = 29; // , +int numberParam; // +Method** m = new Method*[n]; +Estimation* est1 = new SMape(); +int ii; // + +double round (double value) { + return floor(value + 0.5); +} + +double myRound(double num) { + double n = ((double)round(num * 1000)) / 1000; + return n; +} + + +void printTs(vector ts) { + for (unsigned int i = 0; i < ts.size(); i++) { + cout << ts[i]; + cout << " "; + } +} + +double getEstimation(const std::vector &x, std::vector &grad, void *f_data) +{ + ii++; // + + int radix = 10; // + char sbuffer[10]; // + + for (int iiParam = 0; iiParam < numberParam; iiParam++) + { + + m[iMethod]->setParam(itoa(iiParam, sbuffer, radix), myRound(x[iiParam])); + } + + m[iMethod]->createModelForEstimation(); + + double smapeValue = myRound(est1->getValue(m[iMethod]->x, m[iMethod]->getForecast())); + return smapeValue; +} + + +int main(int argc, char* argv[]) +{ + + Param* bestParams = new Param(); + double bestSmape = DBL_MAX; + double bestAic = DBL_MAX; + int bestMethodIndex = 0; + + setlocale(LC_ALL, "Russian"); + string path; + string fileName; + string s; + if (argc < 1) { + 
cout << "set ts file like 'project_template.exe 1.csv count_forecast period!!!"; + return 0; + } else { + stringstream ss; + ss << argv[1]; + ss >> s; + int found = 0; + int offset =0; + do { + found = s.find("\\", found +1); + if (found > 0) { + offset = found; + cout << found << "\n"; + } + } while (found!=std::string::npos); + if (offset ==0) { + fileName = s.substr(offset, s.length()); + } else { + fileName = s.substr(offset+1, s.length()); + } + path = s.substr(0, offset); + } + // : + vector timeSeries = File::readFile(argv[1]); + Transformation *transform = new Transformation(timeSeries); + transform->seasonalPeriod = atof(argv[3]); +// timeSeries = transform->Normalization(); + //transform->setTimeSeries(tsTransform); + transform->lambdaEstimation(); + timeSeries = transform->ModifiedBoxCox(); + + Normalization * norm = new Normalization(); + //timeSeries = norm->normalizeTimeSeria(timeSeries); + + // + int pointForecast = 18; + if (strcmp(argv[2], "") != 0) { + pointForecast = atof(argv[2]); + } + int p = 12; + if (strcmp(argv[3], "") != 0) { + p = atof(argv[3]); + } + double alpha = 0.1; + double delta = 0.2; + double gamma = 0.4; + double phi = 0.1; + int countRulesIn = 1; + int countFuzzyParts = 2; + // , + //... + // ( ) + // + + m[0] = new NoTrendNoSeasonality(timeSeries, pointForecast); + m[1] = new NoTrendAddSeasonality(timeSeries, pointForecast); + m[2] = new NoTrendMultSeasonality(timeSeries, pointForecast); + + m[3] = new AddTrendNoSeasonality(timeSeries, pointForecast); + m[4] = new MultTrendNoSeasonality(timeSeries, pointForecast); + m[5] = new DATrendNoSeasonality(timeSeries, pointForecast); + m[6] = new DMTrendNoSeasonality(timeSeries, pointForecast); + + m[7] = new AddTrendAddSeasonality(timeSeries, pointForecast); + m[8] = new MultTrendAddSeasonality(timeSeries, pointForecast); + m[9] = new DATrendAddSeasonality(timeSeries, pointForecast); + m[10] = new DMTrendAddSeasonality(timeSeries, pointForecast); + + m[11] = new AddTrendMultSeasonality(timeSeries, pointForecast); + m[12] = new MultTrendMultSeasonality(timeSeries, pointForecast); + m[13] = new DATrendMultSeasonality(timeSeries, pointForecast); + m[14] = new DMTrendMultSeasonality(timeSeries, pointForecast); + m[15] = new Dsa(timeSeries, pointForecast); + m[16] = new Fuzzy("None", "None", timeSeries, pointForecast); + m[17] = new Fuzzy("Add", "None", timeSeries, pointForecast); + m[18] = new Fuzzy("None", "Add", timeSeries, pointForecast); + m[19] = new Fuzzy("Add", "Add", timeSeries, pointForecast); + m[20] = new FuzzyWithSets("None", "None", timeSeries, pointForecast); + m[21] = new FuzzyWithSets("None", "Add", timeSeries, pointForecast); + m[22] = new FuzzyWithSets("None", "Mult", timeSeries, pointForecast); + m[23] = new FuzzyWithSets("Add", "None", timeSeries, pointForecast); + m[24] = new FuzzyWithSets("Add", "Add", timeSeries, pointForecast); + m[25] = new FuzzyWithSets("Add", "Mult", timeSeries, pointForecast); + m[26] = new FuzzyWithSets("Mult", "None", timeSeries, pointForecast); + m[27] = new FuzzyWithSets("Mult", "Add", timeSeries, pointForecast); + m[28] = new FuzzyWithSets("Mult", "Mult", timeSeries, pointForecast); + //m[29] = new Tsaur(timeSeries, pointForecast); + + AicWeights *aicWeights = new AicWeights(); + vector aicValues; + vector> modelsResults; + vector optimal; + string smapeResults = ""; + + for (int i = 15; i < n; i++) { + iMethod = i; + m[i]->setParam("p", p); + // + //Estimation *est1 = new SMape(); + Estimation *est1 = new Bic(algoritm::RMSE, m[i]->getNamberParam()); + 
//optimal.push_back(m[i]->optimize(est1)); + + numberParam = m[i]->getNamberParam(); + + if (numberParam == 1) // + { + optimal.push_back(m[i]->optimize(est1)); + m[i]->setParam("alpha", optimal[optimal.size()-1]->alpha); + } + else + { + // , + std::vector x(numberParam); + std::vector lb(numberParam); + std::vector ub(numberParam); + + // + for (int iParam = 0; iParam < numberParam; iParam++) + { + lb[iParam] = 0.01; // + ub[iParam] = 0.99; // + x[iParam] = 0.3; // + } + + // + nlopt::opt opt(nlopt::AUGLAG, numberParam); + + // + nlopt::opt subOpt(nlopt::GN_ORIG_DIRECT_L, numberParam); + subOpt.set_lower_bounds(lb); + subOpt.set_upper_bounds(ub); + + // + opt.set_lower_bounds(lb); + opt.set_upper_bounds(ub); + opt.set_local_optimizer(subOpt); + + // , + if (numberParam == 2) + { + subOpt.set_maxeval(10000); + opt.set_maxeval(10000); + } + else if (numberParam == 3) + { + subOpt.set_maxeval(50000); + opt.set_maxeval(50000); + } + else + { + subOpt.set_maxeval(100000); + opt.set_maxeval(100000); + } + + // + // getEstimation - , + double minf; + opt.set_min_objective(getEstimation, NULL); + nlopt::result subResult = opt.optimize(x, minf); + + cout << minf << endl; + + // + nlopt::opt ssubOpt(nlopt::LN_NELDERMEAD, numberParam); + ssubOpt.set_lower_bounds(lb); + ssubOpt.set_upper_bounds(ub); + ssubOpt.set_maxeval(500); + + ssubOpt.set_min_objective(getEstimation, NULL); + nlopt::result Result = ssubOpt.optimize(x, minf); + + cout << minf << endl; + if (m[i]->getNamberParam() > 1) { + optimal.push_back(new Param()); + } + optimal[optimal.size()-1]->countFuzzyParts = x[0] * 100; + optimal[optimal.size()-1]->countRulesIn = x[1] * 5; + if (i == 15) { + optimal[optimal.size()-1]->sizeLabels = x[2] * 100; + } else { + optimal[optimal.size()-1]->gamma = x[2]; + } + if (m[i]->getNamberParam() > 3) { + optimal[optimal.size()-1]->delta = x[3]; + if (m[i]->getNamberParam() > 4) { + optimal[optimal.size()-1]->alpha = x[4]; + } + } + + // x , min - smape + + } + + // + // + m[i]->setParam("alpha", optimal[optimal.size()-1]->alpha); + m[i]->setParam("gamma", optimal[optimal.size()-1]->gamma); + m[i]->setParam("delta", optimal[optimal.size()-1]->delta); + m[i]->setParam("phi", optimal[optimal.size()-1]->phi); + m[i]->setParam("countRulesIn", optimal[optimal.size()-1]->countRulesIn); + m[i]->setParam("countFuzzyParts", optimal[optimal.size()-1]->countFuzzyParts); + m[i]->setParam("lb1", optimal[optimal.size()-1]->lb1); + m[i]->setParam("rb1", optimal[optimal.size()-1]->rb1); + m[i]->setParam("lb2", optimal[optimal.size()-1]->lb2); + m[i]->setParam("rb2", optimal[optimal.size()-1]->rb2); + m[i]->setParam("sizeLabels", optimal[optimal.size()-1]->sizeLabels); + // + m[i]->createModelForEstimation(); + // + //aicValues.push_back(aic->getValue(4, m[i]->getXEstimation(), m[i]->getForecast())); + //aicValues.push_back(aic->getAiccValue(4, m[i]->getXEstimation(), m[i]->getForecast())); + //aicValues.push_back(aic->getBicValue(4, m[i]->getXEstimation(), m[i]->getForecast())); + //modelsResults.push_back(m[i]->getForecast()); + SMape* smape = new SMape(); + stringstream ss1; + ss1 << i; + string str1 = ss1.str(); + stringstream ss2; + double sm = smape->getValue(m[i]->x, m[i]->getForecast()); + double ai = optimal[optimal.size()-1]->estim; + ss2 << sm; + + // , + if (ai < bestAic) { + bestAic = ai; + bestParams = optimal[optimal.size()-1]; + bestMethodIndex = i; + } + + string str2 = ss2.str(); + smapeResults = smapeResults + "Smape for " + str1 + " method: " + str2 + " "; + smapeResults = smapeResults + 
optimal[optimal.size()-1]->toString() + "\n"; + // + // + string target = path +"\\"+ fileName+"result"; + _mkdir(target.c_str()); + string fileNameOut = target + "\\"; + char suff[3]; + itoa(i, suff, 10); + fileNameOut.append(suff); + fileNameOut.append("out"); + m[i]->createModel(); + //File::writeFile(fileNameOut, norm->deNormalizeTimeSeria(m[i]->getForecast())); + //transform->setTimeSeries(m[i]->getForecast()); + //transform->setTimeSeries(transform->invModifiedBoxCox()); + transform->setTimeSeries(m[i]->getForecast()); + File::writeFile(fileNameOut, transform->invModifiedBoxCox()); + + } + Method* bestMethod = m[bestMethodIndex]; + // , + // + bestMethod->setParam("alpha", bestParams->alpha); + bestMethod->setParam("gamma", bestParams->gamma); + bestMethod->setParam("delta", bestParams->delta); + + bestMethod->setParam("phi", bestParams->phi); + + bestMethod->setParam("countRulesIn", bestParams->countRulesIn); + bestMethod->setParam("countFuzzyParts", bestParams->countFuzzyParts); + bestMethod->setParam("lb1", bestParams->lb1); + bestMethod->setParam("rb1", bestParams->rb1); + bestMethod->setParam("lb2", bestParams->lb2); + bestMethod->setParam("rb2", bestParams->rb2); + bestMethod->setParam("sizeLabels", bestParams->sizeLabels); + bestMethod->createModel(); + string target = path +"\\"+ fileName+"result"; + string fileNameOut = target + "\\"; + fileNameOut.append("best"); + char _s[3]; + itoa(bestMethodIndex, _s, 10); + fileNameOut.append(_s); + //File::writeFile(fileNameOut, norm->deNormalizeTimeSeria(bestMethod->getForecast())); + transform->setTimeSeries(bestMethod->getForecast()); + File::writeFile(fileNameOut, transform->invModifiedBoxCox()); + + + string smapeFileName = path +"\\"+ fileName+"result\\smape"; + FILE * file = fopen(smapeFileName.c_str(), "w"); + if (file) // , + { + bool result = fputs(smapeResults.c_str(), file); // + if (!result) // + cout << " !"; // + fclose(file); + } + return 0; + + + + + + + Vovk *vovk = new Vovk(timeSeries, pointForecast, m); + vector res; + res.push_back(vovk->getForecast()); + for (int i = 0; i < pointForecast - 1; i++){ + //cout << i + 1 << " " << vovk->getForecast() << endl; + vovk->updateTS(res[res.size() - 1]); + res.push_back(vovk->getForecast()); + } + res = norm->deNormalizeTimeSeria(res); + for (int i = 0; i < pointForecast - 1; i++){ + cout << i + 1 << " " << res[i] << endl; + } + SMape *smape = new SMape(); + cout << "Smape for Vovk " << " method: " << smape->getValue(norm->deNormalizeTimeSeria(m[0]->getXEstimation()), res) << "\n"; + vector weights = aicWeights->calculateWeights(aicValues); + vector fuzzyAdoptiveWeights = aicWeights->calculateFuzzyAdaptiveWeights(timeSeries, modelsResults); + cout << "Smape for etalon forecast " << " method: " << smape->getValue(m[21]->getXEstimation(), m[21]->getXEstimation()) << "\n"; + + delete(m); + return 0; +} + + diff --git a/project_template/project_template.vcproj b/project_template/project_template.vcproj new file mode 100644 index 0000000..be4ee46 --- /dev/null +++ b/project_template/project_template.vcproj @@ -0,0 +1,551 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + diff --git a/project_template/project_template.vcproj.ADMIN-4A0D49B0D.orion.user b/project_template/project_template.vcproj.ADMIN-4A0D49B0D.orion.user new file mode 100644 index 0000000..1520c9c --- /dev/null +++ b/project_template/project_template.vcproj.ADMIN-4A0D49B0D.orion.user @@ -0,0 +1,65 @@ + + + + + + + + + + + diff --git a/project_template/project_template.vcproj.orion-ПК.orion.user b/project_template/project_template.vcproj.orion-ПК.orion.user new file mode 100644 index 0000000..6d7a489 --- /dev/null +++ b/project_template/project_template.vcproj.orion-ПК.orion.user @@ -0,0 +1,65 @@ + + + + + + + + + + + diff --git a/project_template/project_template.vcxproj b/project_template/project_template.vcxproj new file mode 100644 index 0000000..3b2101b --- /dev/null +++ b/project_template/project_template.vcxproj @@ -0,0 +1,162 @@ + + + + + Debug + Win32 + + + Release + Win32 + + + + {D68B91E2-1169-4096-AC00-5992226D29DF} + project_template + Win32Proj + + + + Application + v120 + Unicode + true + + + Application + v120 + Unicode + + + + + + + + + + + + + <_ProjectFileVersion>12.0.21005.1 + + + $(SolutionDir)$(Configuration)\ + $(Configuration)\ + true + + + $(SolutionDir)$(Configuration)\ + $(Configuration)\ + false + + + + Disabled + WIN32;_DEBUG;_CONSOLE;%(PreprocessorDefinitions) + true + EnableFastChecks + MultiThreadedDebugDLL + Use + Level3 + EditAndContinue + + + true + Console + MachineX86 + + + + + MaxSpeed + true + WIN32;NDEBUG;_CONSOLE;%(PreprocessorDefinitions) + MultiThreadedDLL + true + Use + Level3 + ProgramDatabase + + + true + Console + true + true + MachineX86 + + + + + + + + + + + + + + + + + + + + + + + + + + + + Create + Create + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/project_template/project_template.vcxproj.filters b/project_template/project_template.vcxproj.filters new file mode 100644 index 0000000..324a16d --- /dev/null +++ b/project_template/project_template.vcxproj.filters @@ -0,0 +1,222 @@ + + + + + {4FC737F1-C7A5-4376-A066-2A32D752A2FF} + cpp;c;cc;cxx;def;odl;idl;hpj;bat;asm;asmx + + + {5d90801d-4ed6-4356-8b18-d8eb47265efa} + + + {abb28eca-6911-465e-b9bd-9fa5690d4db4} + + + {93995380-89BD-4b04-88EB-625FBE52EBFB} + h;hpp;hxx;hm;inl;inc;xsd + + + {07a8211b-11b0-40e4-b1a3-0f4f3999ecee} + + + {55e5d4a8-6f19-439d-b569-2d0161448794} + + + {67DA6AB6-F800-4c08-8B7A-83BB121AAD01} + rc;ico;cur;bmp;dlg;rc2;rct;bin;rgs;gif;jpg;jpeg;jpe;resx;tiff;tif;png;wav + + + {f03145ee-d3fb-41c6-b262-5a21194ffff4} + + + {c9ac4971-b0dd-420b-b475-de086bf4f99b} + + + + + Source Files + + + Source Files + + + Source Files\Methods + + + Source Files\Methods + + + Source Files\Estimations + + + Source Files\Estimations + + + Source Files + + + Source Files + + + Source Files + + + Source Files + + + Source Files + + + Source Files + + + Source Files + + + Source Files + + + Source Files + + + Source Files + + + Source Files + + + Source Files + + + Source Files + + + Source Files + + + Source Files + + + Source Files + + + Source Files + + + Source Files + + + Source Files + + + Source Files\Methods + + + Source Files\AggregationMethods + + + Source Files + + + Source Files + + + + + Header Files + + + Header Files + + + Header Files + + + Header Files + + + Header Files\Methods + + + Header Files\Methods + + + Header Files\Estimations + + + Header Files\Estimations + + + Header Files + + + Header Files + + + Header Files + + + Header Files + + + Header Files + + + Header Files + + + 
Header Files + + + Header Files + + + Header Files + + + Header Files + + + Header Files + + + Header Files + + + Header Files + + + Header Files + + + Header Files + + + Header Files + + + Header Files + + + Header Files + + + Header Files\Methods + + + Header Files\AggregationMethods + + + Header Files + + + Header Files + + + Header Files + + + + + + \ No newline at end of file diff --git a/project_template/project_template.vcxproj.user b/project_template/project_template.vcxproj.user new file mode 100644 index 0000000..96ba5a5 --- /dev/null +++ b/project_template/project_template.vcxproj.user @@ -0,0 +1,6 @@ + + + + true + + \ No newline at end of file diff --git a/project_template/stdafx.cpp b/project_template/stdafx.cpp new file mode 100644 index 0000000..0e08b93 --- /dev/null +++ b/project_template/stdafx.cpp @@ -0,0 +1,8 @@ +// stdafx.cpp : source file that includes just the standard includes +// project_template.pch will be the pre-compiled header +// stdafx.obj will contain the pre-compiled type information + +#include "stdafx.h" + +// TODO: reference any additional headers you need in STDAFX.H +// and not in this file diff --git a/project_template/stdafx.h b/project_template/stdafx.h new file mode 100644 index 0000000..b005a83 --- /dev/null +++ b/project_template/stdafx.h @@ -0,0 +1,15 @@ +// stdafx.h : include file for standard system include files, +// or project specific include files that are used frequently, but +// are changed infrequently +// + +#pragma once + +#include "targetver.h" + +#include +#include + + + +// TODO: reference additional headers your program requires here diff --git a/project_template/targetver.h b/project_template/targetver.h new file mode 100644 index 0000000..6fe8eb7 --- /dev/null +++ b/project_template/targetver.h @@ -0,0 +1,13 @@ +#pragma once + +// The following macros define the minimum required platform. The minimum required platform +// is the earliest version of Windows, Internet Explorer etc. that has the necessary features to run +// your application. The macros work by enabling all features available on platform versions up to and +// including the version specified. + +// Modify the following defines if you have to target a platform prior to the ones specified below. +// Refer to MSDN for the latest info on corresponding values for different platforms. +#ifndef _WIN32_WINNT // Specifies that the minimum required platform is Windows Vista. +#define _WIN32_WINNT 0x0600 // Change this to the appropriate value to target other versions of Windows. 
+#endif + diff --git a/ts/1 b/ts/1 new file mode 100644 index 0000000..9bc3b26 --- /dev/null +++ b/ts/1 @@ -0,0 +1,212 @@ +2332 +2800 +2375 +3447 +4819 +4973 +2705 +2639 +2644 +2611 +3530 +4438 +3467 +2542 +2778 +4488 +5505 +2953 +2534 +2867 +2426 +2229 +3607 +2099 +2325 +3725 +3618 +2247 +2209 +1121 +2160 +2107 +2280 +1300 +1625 +1344 +1417 +1615 +2112 +2209 +1401 +1502 +2260 +2145 +2562 +3318 +3514 +2258 +2341 +2186 +1794 +2643 +4308 +3593 +2025 +1746 +3256 +3579 +4101 +4916 +4490 +3108 +1869 +1625 +2685 +3099 +375 +1689 +2569 +2721 +3289 +3494 +2822 +3620 +3512 +2643 +4783 +5135 +4911 +4053 +3963 +3709 +3846 +4447 +5963 +4817 +4027 +4381 +3925 +4063 +5066 +5759 +4816 +3576 +3743 +3679 +3767 +3716 +4425 +4601 +3839 +3809 +4028 +4913 +4435 +4898 +5284 +4027 +4207 +4171 +4381 +5722 +7957 +9368 +4581 +4336 +4267 +3799 +3973 +4292 +3889 +4269 +4047 +4328 +9163 +3895 +3954 +4344 +4171 +3610 +3529 +3604 +3834 +4427 +3828 +3964 +8300 +4979 +3810 +3918 +4407 +4393 +3931 +4039 +3695 +3753 +3790 +5224 +5322 +3822 +3667 +4113 +3605 +4082 +5671 +2135 +3952 +4712 +3231 +2905 +2668 +8266 +3047 +4438 +15733 +4203 +1638 +2557 +2363 +2207 +4646 +3862 +1877 +1754 +1793 +1617 +4410 +2605 +2636 +2286 +1930 +2866 +3626 +2975 +4324 +3672 +2733 +2182 +1930 +1388 +2795 +4180 +4146 +2739 +2518 +2420 +2445 +3406 +4312 +5179 +2409 +2166 +2315 +2065 +3691 +7717 +6163 +3467 +2847 +3293 +2637 +3458 diff --git a/ts/1result/15out b/ts/1result/15out new file mode 100644 index 0000000..3a3f51d --- /dev/null +++ b/ts/1result/15out @@ -0,0 +1,4 @@ +3137,72 +3138,72 +3139,72 +3140,7 diff --git a/ts/57 b/ts/57 new file mode 100644 index 0000000..806e629 --- /dev/null +++ b/ts/57 @@ -0,0 +1,8 @@ +4071.5 +1070 +1302.5 +1533 +1729 +1853 +1926 +2298.75
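Editorial note: the parameter search in project_template.cpp drives the nlopt.hpp wrapper included above — an augmented-Lagrangian outer optimizer (nlopt::AUGLAG) whose local optimizer is the global derivative-free DIRECT-L search (nlopt::GN_ORIG_DIRECT_L), followed by a short Nelder-Mead polish, all minimizing getEstimation over model parameters boxed into [0.01, 0.99]. The following is a condensed, self-contained sketch of that wiring; the objective boxedQuadratic and the variable names outer, inner and polish are illustrative, while the nlopt:: calls are those declared in nlopt.hpp.

#include <cmath>
#include <iostream>
#include <vector>
#include "nlopt.hpp"

// Illustrative stand-in for getEstimation(): any function of the boxed parameters.
static double boxedQuadratic(const std::vector<double> &x, std::vector<double> &grad, void *) {
    if (!grad.empty())
        for (size_t i = 0; i < x.size(); i++) grad[i] = 2.0 * (x[i] - 0.25);
    double s = 0.0;
    for (size_t i = 0; i < x.size(); i++) s += (x[i] - 0.25) * (x[i] - 0.25);
    return s;
}

int main() {
    const unsigned n = 3;                                 // plays the role of numberParam
    std::vector<double> lb(n, 0.01), ub(n, 0.99), x(n, 0.3);

    nlopt::opt outer(nlopt::AUGLAG, n);                   // outer optimizer, as in main()
    nlopt::opt inner(nlopt::GN_ORIG_DIRECT_L, n);         // global search used as local_optimizer
    inner.set_lower_bounds(lb); inner.set_upper_bounds(ub); inner.set_maxeval(50000);
    outer.set_lower_bounds(lb); outer.set_upper_bounds(ub); outer.set_maxeval(50000);
    outer.set_local_optimizer(inner);
    outer.set_min_objective(boxedQuadratic, NULL);

    double minf;
    outer.optimize(x, minf);                              // coarse global search

    nlopt::opt polish(nlopt::LN_NELDERMEAD, n);           // short derivative-free polish
    polish.set_lower_bounds(lb); polish.set_upper_bounds(ub); polish.set_maxeval(500);
    polish.set_min_objective(boxedQuadratic, NULL);
    polish.optimize(x, minf);

    std::cout << "minf = " << minf << " at x[0] = " << x[0] << std::endl;
    return 0;
}

The two-stage design mirrors the loop in main(): DIRECT-L explores the whole box without gradients, and the bounded Nelder-Mead run refines the returned point with a small evaluation budget before the parameters are written back into the model.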