Cascaded transformations in all FTS models

Petrônio Cândido de Lima e Silva 2017-01-27 08:26:47 -02:00
parent 15b4aa1137
commit 8b3aceed58
15 changed files with 195 additions and 86 deletions

View File

@ -18,7 +18,7 @@ def acf(data, k):
# Root mean squared error (RMSE)
def rmse(targets, forecasts):
return np.sqrt(np.nanmean((forecasts - targets) ** 2))
return np.sqrt(np.nanmean((targets - forecasts) ** 2))
def rmse_interval(targets, forecasts):
@ -28,7 +28,16 @@ def rmse_interval(targets, forecasts):
# Mean absolute percentage error (MAPE)
def mape(targets, forecasts):
return np.mean(abs(forecasts - targets) / forecasts) * 100
return np.mean(np.abs(targets - forecasts) / targets) * 100
def smape(targets, forecasts, type=2):
if type == 1:
return np.mean(np.abs(forecasts - targets) / ((forecasts + targets)/2))
elif type == 2:
return np.mean(np.abs(forecasts - targets) / (abs(forecasts) + abs(targets)) )*100
else:
return sum(np.abs(forecasts - targets)) / sum(forecasts + targets)
def mape_interval(targets, forecasts):
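As a sanity check, here is a minimal sketch of the corrected measures on toy arrays (plain numpy, values illustrative only):

import numpy as np

targets = np.array([10.0, 12.0, 13.0, 12.5])
forecasts = np.array([10.5, 11.5, 13.5, 12.0])

# RMSE is symmetric in its arguments, so the reordering above is cosmetic
print(np.sqrt(np.nanmean((targets - forecasts) ** 2)))

# MAPE now divides by the observed values (targets), not the forecasts
print(np.mean(np.abs(targets - forecasts) / targets) * 100)

# SMAPE (type 2) normalizes by the sum of absolute magnitudes
print(np.mean(np.abs(forecasts - targets) / (np.abs(forecasts) + np.abs(targets))) * 100)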

View File

@ -70,3 +70,31 @@ def plotResiduals(targets, models, tam=[8, 8], save=False, file=None):
Util.showAndSaveImage(fig, file, save)
def plotResiduals2(targets, models, tam=[8, 8], save=False, file=None):
fig, axes = plt.subplots(nrows=len(models), ncols=3, figsize=tam)
for c, mfts in enumerate(models, start=0):
forecasts = mfts.forecast(targets)
res = residuals(targets, forecasts, mfts.order)
mu = np.mean(res)
sig = np.std(res)
if c == 0: axes[c][0].set_title("Residuals", size='large')
axes[c][0].set_ylabel(mfts.shortname, size='large')
axes[c][0].set_xlabel(' ')
axes[c][0].plot(res)
if c == 0: axes[c][1].set_title("Residuals Autocorrelation", size='large')
axes[c][1].set_ylabel('ACS')
axes[c][1].set_xlabel('Lag')
axes[c][1].acorr(res)
if c == 0: axes[c][2].set_title("Residuals Histogram", size='large')
axes[c][2].set_ylabel('Freq')
axes[c][2].set_xlabel('Bins')
axes[c][2].hist(res)
plt.tight_layout()
Util.showAndSaveImage(fig, file, save)
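A hedged usage sketch of the new residual grid, assuming a list of already trained models (objs) and a hold-out series (data_test) as placeholders, and assuming the import path below:

from pyFTS.benchmarks import ResidualAnalysis

# objs: trained FTS models; data_test: hold-out series (both placeholders)
ResidualAnalysis.plotResiduals2(data_test, objs, tam=[10, 10], save=True, file="residuals")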

View File

@ -13,59 +13,74 @@ from pyFTS.partitioners import Grid
from pyFTS.common import Membership, FuzzySet, FLR, Transformations, Util
from pyFTS import fts, chen, yu, ismailefendi, sadaei, hofts, hwang, pfts, ifts
colors = ['grey', 'rosybrown', 'maroon', 'red','orange', 'yellow', 'olive', 'green',
'cyan', 'blue', 'darkblue', 'purple', 'darkviolet']
def allPointForecasters(data_train, data_test, partitions, max_order=3, statistics=True, residuals=True, series=True,
save=False, file=None, tam=[20, 5]):
ncol = len(colors)
styles = ['-','--','-.',':','.']
nsty = len(styles)
def allPointForecasters(data_train, data_test, partitions, max_order=3, statistics=True, residuals=True,
series=True, save=False, file=None, tam=[20, 5], models=None, transformation=None):
if models is None:
models = [naive.Naive, chen.ConventionalFTS, yu.WeightedFTS, ismailefendi.ImprovedWeightedFTS,
sadaei.ExponentialyWeightedFTS, hofts.HighOrderFTS, pfts.ProbabilisticFTS]
objs = []
all_colors = [clr for clr in pltcolors.cnames.keys() ]
if transformation is not None:
data_train_fs = Grid.GridPartitionerTrimf(transformation.apply(data_train),partitions)
else:
data_train_fs = Grid.GridPartitionerTrimf(data_train, partitions)
count = 1
colors = []
lcolors = []
for model in models:
for count, model in enumerate(models, start=0):
#print(model)
mfts = model("")
if not mfts.isHighOrder:
if transformation is not None:
mfts.appendTransformation(transformation)
mfts.train(data_train, data_train_fs)
objs.append(mfts)
colors.append( all_colors[count] )
lcolors.append( colors[count % ncol] )
else:
for order in np.arange(1,max_order+1):
if order >= mfts.minOrder:
mfts = model(" n = " + str(order))
if transformation is not None:
mfts.appendTransformation(transformation)
mfts.train(data_train, data_train_fs, order=order)
objs.append(mfts)
colors.append(all_colors[count])
count += 10
lcolors.append(colors[count % ncol])
if statistics:
print(getPointStatistics(data_test, objs))
if residuals:
print(ResidualAnalysis.compareResiduals(data_test, objs))
ResidualAnalysis.plotResiduals(data_test, objs, save=save, file=file, tam=[tam[0], 5 * tam[1]])
ResidualAnalysis.plotResiduals2(data_test, objs, save=save, file=file, tam=tam)
if series:
plotComparedSeries(data_test, objs, colors, typeonlegend=False, save=save, file=file, tam=tam, intervals=False)
plotComparedSeries(data_test, objs, lcolors, typeonlegend=False, save=save, file=file, tam=tam,
intervals=False)
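A minimal usage sketch of the new transformation hook (data_train and data_test are placeholder series; the benchmarks import path is an assumption):

import numpy as np
from pyFTS.common import Transformations
from pyFTS.benchmarks import benchmarks as bchmk

diff = Transformations.Differential(1)  # first-order differencing
bchmk.allPointForecasters(data_train, data_test, partitions=10, max_order=3,
                          transformation=diff)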
def getPointStatistics(data, models, externalmodels = None, externalforecasts = None):
ret = "Model & Order & RMSE & MAPE & Theil's U & Theil's I \\\\ \n"
ret = "Model & Order & RMSE & MAPE & Theil's U \\\\ \n"
for fts in models:
forecasts = fts.forecast(data)
ret += fts.shortname + " & "
ret += str(fts.order) + " & "
ret += str(round(Measures.rmse(np.array(data[fts.order:]), np.array(forecasts[:-1])), 2)) + " & "
ret += str(round(Measures.mape(np.array(data[fts.order:]), np.array(forecasts[:-1])), 2))+ " & "
ret += str(round(Measures.UStatistic(np.array(data[fts.order:]), np.array(forecasts[:-1])), 2))+ " & "
ret += str(round(Measures.TheilsInequality(np.array(data[fts.order:]), np.array(forecasts[:-1])), 4))
ret += str(round(Measures.smape(np.array(data[fts.order:]), np.array(forecasts[:-1])), 2))+ " & "
ret += str(round(Measures.UStatistic(np.array(data[fts.order:]), np.array(forecasts[:-1])), 2))
#ret += str(round(Measures.TheilsInequality(np.array(data[fts.order:]), np.array(forecasts[:-1])), 4))
ret += " \\\\ \n"
if externalmodels is not None:
l = len(externalmodels)
@ -73,44 +88,48 @@ def getPointStatistics(data, models, externalmodels = None, externalforecasts =
ret += externalmodels[k] + " & "
ret += " 1 & "
ret += str(round(Measures.rmse(data[fts.order:], externalforecasts[k][:-1]), 2)) + " & "
ret += str(round(Measures.mape(data[fts.order:], externalforecasts[k][:-1]), 2))+ " & "
ret += str(round(Measures.smape(data[fts.order:], externalforecasts[k][:-1]), 2))+ " & "
ret += str(round(Measures.UStatistic(np.array(data[fts.order:]), np.array(forecasts[:-1])), 2))
ret += " \\\\ \n"
return ret
def allIntervalForecasters(data_train, data_test, partitions, max_order=3,save=False, file=None, tam=[20, 5]):
def allIntervalForecasters(data_train, data_test, partitions, max_order=3,save=False, file=None, tam=[20, 5],
models=None, transformation=None):
if models is None:
models = [ifts.IntervalFTS, pfts.ProbabilisticFTS]
objs = []
all_colors = [clr for clr in pltcolors.cnames.keys() ]
if transformation is not None:
data_train_fs = Grid.GridPartitionerTrimf(transformation.apply(data_train),partitions)
else:
data_train_fs = Grid.GridPartitionerTrimf(data_train, partitions)
count = 1
lcolors = []
colors = []
for model in models:
#print(model)
for count, model in enumerate(models, start=0):
mfts = model("")
if not mfts.isHighOrder:
if transformation is not None:
mfts.appendTransformation(transformation)
mfts.train(data_train, data_train_fs)
objs.append(mfts)
colors.append( all_colors[count] )
lcolors.append( colors[count % ncol] )
else:
for order in np.arange(1,max_order+1):
if order >= mfts.minOrder:
mfts = model(" n = " + str(order))
if transformation is not None:
mfts.appendTransformation(transformation)
mfts.train(data_train, data_train_fs, order=order)
objs.append(mfts)
colors.append(all_colors[count])
count += 5
lcolors.append(colors[count % ncol])
print(getIntervalStatistics(data_test, objs))
plotComparedSeries(data_test, objs, colors, typeonlegend=False, save=save, file=file, tam=tam, intervals=True)
plotComparedSeries(data_test, objs, lcolors, typeonlegend=False, save=save, file=file, tam=tam, intervals=True)
def getIntervalStatistics(original, models):
@ -142,9 +161,11 @@ def plotComparedSeries(original, models, colors, typeonlegend=False, save=False,
mi = []
ma = []
legends = []
ax.plot(original, color='black', label="Original", linewidth=1.5)
count = 0
for fts in models:
for count, fts in enumerate(models, start=0):
if fts.hasPointForecasting and not intervals:
forecasted = fts.forecast(original)
mi.append(min(forecasted) * 0.95)
@ -170,15 +191,16 @@ def plotComparedSeries(original, models, colors, typeonlegend=False, save=False,
ax.plot(upper, color=colors[count], ls="-")
handles0, labels0 = ax.get_legend_handles_labels()
ax.legend(handles0, labels0, loc=2)
count = count + 1
lgd = ax.legend(handles0, labels0, loc=2, bbox_to_anchor=(1, 1))
legends.append(lgd)
# ax.set_title(fts.name)
ax.set_ylim([min(mi), max(ma)])
ax.set_ylabel('F(T)')
ax.set_xlabel('T')
ax.set_xlim([0, len(original)])
Util.showAndSaveImage(fig, file, save)
Util.showAndSaveImage(fig, file, save, lgd=legends)
def plotComparedIntervalsAhead(original, models, colors, distributions, time_from, time_to,
@ -530,7 +552,7 @@ def compareModelsTable(original, models_fo, models_ho):
return sup + header + body + "\\end{tabular}"
def simpleSearch_RMSE(original, model, partitions, orders, save=False, file=None, tam=[10, 15], plotforecasts=False,
def simpleSearch_RMSE(train, test, model, partitions, orders, save=False, file=None, tam=[10, 15], plotforecasts=False,
elev=30, azim=144):
ret = []
errors = np.array([[0 for k in range(len(partitions))] for kk in range(len(orders))])
@ -539,8 +561,8 @@ def simpleSearch_RMSE(original, model, partitions, orders, save=False, file=None
# fig.suptitle("Model comparison")
if plotforecasts:
ax0 = fig.add_axes([0, 0.4, 0.9, 0.5]) # left, bottom, width, height
ax0.set_xlim([0, len(original)])
ax0.set_ylim([min(original) * 0.9, max(original) * 1.1])
ax0.set_xlim([0, len(train)])
ax0.set_ylim([min(train) * 0.9, max(train) * 1.1])
ax0.set_title('Forecasts')
ax0.set_ylabel('F(T)')
ax0.set_xlabel('T')
@ -550,13 +572,13 @@ def simpleSearch_RMSE(original, model, partitions, orders, save=False, file=None
for p in partitions:
oc = 0
for o in orders:
sets = Grid.GridPartitionerTrimf(original, p)
sets = Grid.GridPartitionerTrimf(train, p)
fts = model("q = " + str(p) + " n = " + str(o))
fts.train(original, sets, o)
forecasted = fts.forecast(original)
error = Measures.rmse(np.array(original[o:]), np.array(forecasted[:-1]))
mape = Measures.mape(np.array(original[o:]), np.array(forecasted[:-1]))
# print(original[o:])
fts.train(train, sets, o)
forecasted = fts.forecast(test)
error = Measures.rmse(np.array(test[o:]), np.array(forecasted[:-1]))
mape = Measures.mape(np.array(test[o:]), np.array(forecasted[:-1]))
# print(train[o:])
# print(forecasted[-1])
for kk in range(o):
forecasted.insert(0, None)
@ -573,7 +595,7 @@ def simpleSearch_RMSE(original, model, partitions, orders, save=False, file=None
if plotforecasts:
# handles0, labels0 = ax0.get_legend_handles_labels()
# ax0.legend(handles0, labels0)
ax0.plot(original, label="Original", linewidth=3.0, color="black")
ax0.plot(test, label="Original", linewidth=3.0, color="black")
ax1 = Axes3D(fig, rect=[0, 1, 0.9, 0.9], elev=elev, azim=azim)
if not plotforecasts: ax1 = Axes3D(fig, rect=[0, 1, 0.9, 0.9], elev=elev, azim=azim)
# ax1 = fig.add_axes([0.6, 0.5, 0.45, 0.45], projection='3d')
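With the signature change, the grid search is now evaluated out of sample; a hedged sketch assuming a high order model and placeholder train/test splits (import path assumed as above):

import numpy as np
from pyFTS import hofts
from pyFTS.benchmarks import benchmarks as bchmk

# train/test are placeholder series; the grid covers partition counts and orders
bchmk.simpleSearch_RMSE(train, test, hofts.HighOrderFTS,
                        partitions=np.arange(10, 50, 5), orders=[1, 2, 3],
                        plotforecasts=True)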

View File

@ -40,13 +40,14 @@ class ConventionalFTS(fts.FTS):
def train(self, data, sets,order=1,parameters=None):
self.sets = sets
tmpdata = FuzzySet.fuzzySeries(data, sets)
ndata = self.doTransformations(data)
tmpdata = FuzzySet.fuzzySeries(ndata, sets)
flrs = FLR.generateNonRecurrentFLRs(tmpdata)
self.flrgs = self.generateFLRG(flrs)
def forecast(self, data):
ndata = np.array(data)
ndata = np.array(self.doTransformations(data))
l = len(ndata)
@ -66,4 +67,6 @@ class ConventionalFTS(fts.FTS):
ret.append(sum(mp) / len(mp))
ret = self.doInverseTransformations(ret, params=[data[self.order - 1:]])
return ret
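Outside the benchmark helpers, the same cascade can be attached to a single model; a sketch assuming a placeholder series named enrollments:

from pyFTS.partitioners import Grid
from pyFTS.common import Transformations
from pyFTS import chen

diff = Transformations.Differential(1)
# partition the transformed series, mirroring what allPointForecasters does above
sets = Grid.GridPartitionerTrimf(diff.apply(enrollments), 10)

model = chen.ConventionalFTS("Chen + diff")
model.appendTransformation(diff)            # applied inside train() and forecast()
model.train(enrollments, sets)
forecasts = model.forecast(enrollments)     # inverse-transformed back to the original scale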

View File

@ -8,6 +8,7 @@ class Transformation(object):
def __init__(self, parameters):
self.isInversible = True
self.parameters = parameters
self.minimalLength = 1
def apply(self,data,param):
pass
@ -24,6 +25,7 @@ class Differential(Transformation):
def __init__(self, parameters):
super(Differential, self).__init__(parameters)
self.lag = parameters
self.minimalLength = 2
def apply(self, data, param=None):
if param is not None:
@ -31,12 +33,12 @@ class Differential(Transformation):
n = len(data)
diff = [data[t - self.lag] - data[t] for t in np.arange(self.lag, n)]
for t in np.arange(0, self.lag): diff.insert(0, 0)
return np.array(diff)
return diff
def inverse(self,data, param):
n = len(data)
inc = [data[t] + param[t] for t in np.arange(1, n)]
return np.array(inc)
return inc
def boxcox(original, plambda):
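A quick round trip through Differential.apply/inverse, passing the original values as param (toy values):

from pyFTS.common import Transformations

diff = Transformations.Differential(1)
data = [10, 12, 15, 14, 16]
d = diff.apply(data)        # [0, -2, -3, 1, -2]: data[t-1] - data[t], zero-padded at the start
r = diff.inverse(d, data)   # [10, 12, 15, 14]: adding the originals back undoes the differencing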

View File

@ -13,8 +13,11 @@ def uniquefilename(name):
return name + str(current_milli_time())
def showAndSaveImage(fig,file,flag):
def showAndSaveImage(fig,file,flag,lgd=None):
if flag:
plt.show()
if lgd is not None:
fig.savefig(uniquefilename(file), additional_artists=lgd,bbox_inches='tight') #bbox_extra_artists=(lgd,), )
else:
fig.savefig(uniquefilename(file))
plt.close(fig)
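The lgd handling follows the usual matplotlib recipe for legends drawn outside the axes; a generic sketch (the standard matplotlib keyword is bbox_extra_artists):

import matplotlib.pyplot as plt

fig, ax = plt.subplots()
ax.plot([1, 2, 3], label="series")
lgd = ax.legend(loc=2, bbox_to_anchor=(1, 1))   # legend placed outside the axes
fig.savefig("example.png", bbox_extra_artists=(lgd,), bbox_inches='tight')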

fts.py
View File

@ -61,23 +61,23 @@ class FTS(object):
def doTransformations(self,data,params=None):
ndata = data
if len(self.transformations) > 0:
if params is None:
params = [ None for k in self.transformations]
c = 0
for t in self.transformations:
for c, t in enumerate(self.transformations, start=0):
ndata = t.apply(ndata,params[c])
c += 1
return ndata
def doInverseTransformations(self, data, params=None):
ndata = data
if len(self.transformations) > 0:
if params is None:
params = [None for k in self.transformations]
c = 0
for t in reversed(self.transformations):
for c, t in enumerate(reversed(self.transformations), start=0):
ndata = t.inverse(ndata, params[c])
c += 1
return ndata
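The cascade is an ordered fold: transformations are applied in the order they were appended and inverted in reverse. A sketch of the ordering, using double differencing only to illustrate the order (not as a recommended model):

from pyFTS.common import Transformations
from pyFTS import chen

d1 = Transformations.Differential(1)
d2 = Transformations.Differential(1)

model = chen.ConventionalFTS("Chen, double diff")
model.appendTransformation(d1)   # doTransformations applies d1 first ...
model.appendTransformation(d2)   # ... then d2
# doInverseTransformations walks the list in reverse: d2 is inverted first, then d1.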

View File

@ -61,6 +61,9 @@ class HighOrderFTS(fts.FTS):
return (flrgs)
def train(self, data, sets, order=1,parameters=None):
data = self.doTransformations(data)
self.order = order
self.sets = sets
for s in self.sets: self.setsDict[s.name] = s
@ -81,8 +84,10 @@ class HighOrderFTS(fts.FTS):
if l <= self.order:
return data
ndata = self.doTransformations(data)
for k in np.arange(self.order, l+1):
tmpdata = FuzzySet.fuzzySeries(data[k - self.order: k], self.sets)
tmpdata = FuzzySet.fuzzySeries(ndata[k - self.order: k], self.sets)
tmpflrg = HighOrderFLRG(self.order)
for s in tmpdata: tmpflrg.appendLHS(s)
@ -95,4 +100,6 @@ class HighOrderFTS(fts.FTS):
ret.append(sum(mp) / len(mp))
ret = self.doInverseTransformations(ret, params=[data[self.order-1:]])
return ret

View File

@ -13,6 +13,9 @@ class HighOrderFTS(fts.FTS):
self.detail = "Hwang"
def forecast(self, data):
ndata = self.doTransformations(data)
cn = np.array([0.0 for k in range(len(self.sets))])
ow = np.array([[0.0 for k in range(len(self.sets))] for z in range(self.order - 1)])
rn = np.array([[0.0 for k in range(len(self.sets))] for z in range(self.order - 1)])
@ -20,12 +23,12 @@ class HighOrderFTS(fts.FTS):
ret = []
for t in np.arange(self.order-1, len(data)):
for t in np.arange(self.order-1, len(ndata)):
for s in range(len(self.sets)):
cn[s] = self.sets[s].membership(data[t])
cn[s] = self.sets[s].membership(ndata[t])
for w in range(self.order - 1):
ow[w, s] = self.sets[s].membership(data[t - w])
ow[w, s] = self.sets[s].membership(ndata[t - w])
rn[w, s] = ow[w, s] * cn[s]
ft[s] = max(ft[s], rn[w, s])
mft = max(ft)
@ -37,6 +40,8 @@ class HighOrderFTS(fts.FTS):
count += 1.0
ret.append(out / count)
ret = self.doInverseTransformations(ret, params=[data[self.order - 1:]])
return ret
def train(self, data, sets, order=1, parameters=None):

View File

@ -49,7 +49,9 @@ class IntervalFTS(hofts.HighOrderFTS):
def forecastInterval(self, data):
ndata = np.array(data)
data = np.array(data)
ndata = self.doTransformations(data)
l = len(ndata)
@ -113,6 +115,8 @@ class IntervalFTS(hofts.HighOrderFTS):
# generate the forecast interval
norm = sum(affected_flrgs_memberships)
ret.append([sum(lo) / norm, sum(up) / norm])
lo_ = self.doInverseTransformations(sum(lo) / norm, params=[data[k - (self.order - 1): k + 1]])
up_ = self.doInverseTransformations(sum(up) / norm, params=[data[k - (self.order - 1): k + 1]])
ret.append([lo_, up_])
return ret

View File

@ -51,7 +51,9 @@ class ImprovedWeightedFTS(fts.FTS):
for s in self.sets: self.setsDict[s.name] = s
tmpdata = FuzzySet.fuzzySeries(data, self.sets)
ndata = self.doTransformations(data)
tmpdata = FuzzySet.fuzzySeries(ndata, self.sets)
flrs = FLR.generateRecurrentFLRs(tmpdata)
self.flrgs = self.generateFLRG(flrs)
@ -62,7 +64,8 @@ class ImprovedWeightedFTS(fts.FTS):
def forecast(self, data):
l = 1
ndata = np.array(data)
data = np.array(data)
ndata = self.doTransformations(data)
l = len(ndata)
@ -82,4 +85,6 @@ class ImprovedWeightedFTS(fts.FTS):
ret.append(mp.dot(flrg.weights()))
ret = self.doInverseTransformations(ret, params=[data[self.order - 1:]])
return ret

pfts.py
View File

@ -112,7 +112,7 @@ class ProbabilisticFTS(ifts.IntervalFTS):
def forecast(self, data):
ndata = np.array(data)
ndata = np.array(self.doTransformations(data))
l = len(ndata)
@ -208,11 +208,15 @@ class ProbabilisticFTS(ifts.IntervalFTS):
else:
ret.append(sum(mp) / norm)
ret = self.doInverseTransformations(ret, params=[data[self.order - 1:]])
return ret
def forecastInterval(self, data):
ndata = np.array(data)
data = np.array(data)
ndata = self.doTransformations(data)
l = len(ndata)
@ -308,7 +312,9 @@ class ProbabilisticFTS(ifts.IntervalFTS):
if norm == 0:
ret.append([0, 0])
else:
ret.append([sum(lo) / norm, sum(up) / norm])
lo_ = self.doInverseTransformations(sum(lo) / norm, params=[data[k - (self.order - 1): k + 1]])
up_ = self.doInverseTransformations(sum(up) / norm, params=[data[k - (self.order - 1): k + 1]])
ret.append([lo_, up_])
return ret
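Interval forecasts are now mapped back to the original scale as well; a hedged sketch with placeholder data_train/data_test series:

from pyFTS.partitioners import Grid
from pyFTS.common import Transformations
from pyFTS import pfts

diff = Transformations.Differential(1)
sets = Grid.GridPartitionerTrimf(diff.apply(data_train), 10)

model = pfts.ProbabilisticFTS("PFTS + diff")
model.appendTransformation(diff)
model.train(data_train, sets, order=2)
intervals = model.forecastInterval(data_test)   # list of [lower, upper] pairs in the original scale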

View File

@ -52,14 +52,17 @@ class ExponentialyWeightedFTS(fts.FTS):
def train(self, data, sets,order=1,parameters=2):
self.c = parameters
self.sets = sets
tmpdata = FuzzySet.fuzzySeries(data, sets)
ndata = self.doTransformations(data)
tmpdata = FuzzySet.fuzzySeries(ndata, sets)
flrs = FLR.generateRecurrentFLRs(tmpdata)
self.flrgs = self.generateFLRG(flrs, self.c)
def forecast(self, data):
l = 1
ndata = np.array(data)
data = np.array(data)
ndata = self.doTransformations(data)
l = len(ndata)
@ -79,4 +82,6 @@ class ExponentialyWeightedFTS(fts.FTS):
ret.append(mp.dot(flrg.weights()))
ret = self.doInverseTransformations(ret, params=[data[self.order - 1:]])
return ret

View File

@ -47,13 +47,16 @@ class SeasonalFTS(fts.FTS):
def train(self, data, sets, order=1,parameters=12):
self.sets = sets
self.seasonality = parameters
tmpdata = FuzzySet.fuzzySeries(data, sets)
ndata = self.doTransformations(data)
tmpdata = FuzzySet.fuzzySeries(ndata, sets)
flrs = FLR.generateRecurrentFLRs(tmpdata)
self.flrgs = self.generateFLRG(flrs)
def forecast(self, data):
ndata = np.array(data)
data = np.array(data)
ndata = self.doTransformations(data)
l = len(ndata)
@ -66,4 +69,6 @@ class SeasonalFTS(fts.FTS):
ret.append(sum(mp) / len(mp))
ret = self.doInverseTransformations(ret, params=[data[self.order - 1:]])
return ret

yu.py
View File

@ -48,14 +48,17 @@ class WeightedFTS(fts.FTS):
def train(self, data, sets,order=1,parameters=None):
self.sets = sets
tmpdata = FuzzySet.fuzzySeries(data, sets)
ndata = self.doTransformations(data)
tmpdata = FuzzySet.fuzzySeries(ndata, sets)
flrs = FLR.generateRecurrentFLRs(tmpdata)
self.flrgs = self.generateFLRG(flrs)
def forecast(self, data):
l = 1
ndata = np.array(data)
data = np.array(data)
ndata = self.doTransformations(data)
l = len(ndata)
@ -75,4 +78,6 @@ class WeightedFTS(fts.FTS):
ret.append(mp.dot(flrg.weights()))
ret = self.doInverseTransformations(ret, params=[data[self.order - 1:]])
return ret