Cascaded transformations in all fts models

Petrônio Cândido de Lima e Silva 2017-01-27 08:26:47 -02:00
parent 15b4aa1137
commit 8b3aceed58
15 changed files with 195 additions and 86 deletions

View File

@@ -18,7 +18,7 @@ def acf(data, k):
 # Mean squared error
 def rmse(targets, forecasts):
-    return np.sqrt(np.nanmean((forecasts - targets) ** 2))
+    return np.sqrt(np.nanmean((targets - forecasts) ** 2))

 def rmse_interval(targets, forecasts):

@@ -28,7 +28,16 @@ def rmse_interval(targets, forecasts):
 # Mean percentage error
 def mape(targets, forecasts):
-    return np.mean(abs(forecasts - targets) / forecasts) * 100
+    return np.mean(np.abs(targets - forecasts) / targets) * 100
+
+def smape(targets, forecasts, type=2):
+    if type == 1:
+        return np.mean(np.abs(forecasts - targets) / ((forecasts + targets) / 2))
+    elif type == 2:
+        return np.mean(np.abs(forecasts - targets) / (abs(forecasts) + abs(targets))) * 100
+    else:
+        return sum(np.abs(forecasts - targets)) / sum(forecasts + targets)

 def mape_interval(targets, forecasts):
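
Not part of the diff: a minimal sketch of how the new smape measure compares to mape on made-up numbers (the arrays below are arbitrary).

import numpy as np

# Arbitrary values, just to exercise the two measures.
targets = np.array([10.0, 12.0, 11.0, 13.0])
forecasts = np.array([11.0, 11.5, 10.0, 14.0])

# mape divides by the targets, so it blows up when a target is near zero;
# smape with type=2 (the new default) divides by |forecasts| + |targets|,
# which is symmetric in the two arguments and bounded.
mape = np.mean(np.abs(targets - forecasts) / targets) * 100
smape = np.mean(np.abs(forecasts - targets) / (np.abs(forecasts) + np.abs(targets))) * 100
print(round(mape, 2), round(smape, 2))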

View File

@@ -70,3 +70,31 @@ def plotResiduals(targets, models, tam=[8, 8], save=False, file=None):
     Util.showAndSaveImage(fig, file, save)
+
+def plotResiduals2(targets, models, tam=[8, 8], save=False, file=None):
+    fig, axes = plt.subplots(nrows=len(models), ncols=3, figsize=tam)
+    for c, mfts in enumerate(models, start=0):
+        forecasts = mfts.forecast(targets)
+        res = residuals(targets, forecasts, mfts.order)
+        mu = np.mean(res)
+        sig = np.std(res)
+        if c == 0: axes[c][0].set_title("Residuals", size='large')
+        axes[c][0].set_ylabel(mfts.shortname, size='large')
+        axes[c][0].set_xlabel(' ')
+        axes[c][0].plot(res)
+        if c == 0: axes[c][1].set_title("Residuals Autocorrelation", size='large')
+        axes[c][1].set_ylabel('ACS')
+        axes[c][1].set_xlabel('Lag')
+        axes[c][1].acorr(res)
+        if c == 0: axes[c][2].set_title("Residuals Histogram", size='large')
+        axes[c][2].set_ylabel('Freq')
+        axes[c][2].set_xlabel('Bins')
+        axes[c][2].hist(res)
+    plt.tight_layout()
+    Util.showAndSaveImage(fig, file, save)
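
For context, the grid layout produced by plotResiduals2 can be reproduced with plain Matplotlib; the sketch below uses synthetic residuals in place of model output (the residual series and model names are invented).

import numpy as np
import matplotlib.pyplot as plt

# Two synthetic residual series standing in for (targets - forecasts) of two models.
rng = np.random.default_rng(0)
residual_sets = {"Model A": rng.normal(0, 1, 200), "Model B": rng.normal(0, 2, 200)}

# One row per model: the residuals, their autocorrelation, and their histogram.
fig, axes = plt.subplots(nrows=len(residual_sets), ncols=3, figsize=(8, 8))
for c, (name, res) in enumerate(residual_sets.items()):
    if c == 0: axes[c][0].set_title("Residuals")
    axes[c][0].set_ylabel(name)
    axes[c][0].plot(res)
    if c == 0: axes[c][1].set_title("Residuals Autocorrelation")
    axes[c][1].acorr(res)
    if c == 0: axes[c][2].set_title("Residuals Histogram")
    axes[c][2].hist(res)
plt.tight_layout()
plt.show()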

View File

@@ -13,59 +13,74 @@ from pyFTS.partitioners import Grid
 from pyFTS.common import Membership, FuzzySet, FLR, Transformations, Util
 from pyFTS import fts, chen, yu, ismailefendi, sadaei, hofts, hwang, pfts, ifts

+colors = ['grey', 'rosybrown', 'maroon', 'red', 'orange', 'yellow', 'olive', 'green',
+          'cyan', 'blue', 'darkblue', 'purple', 'darkviolet']
+
+ncol = len(colors)
+
+styles = ['-', '--', '-.', ':', '.']
+
+nsty = len(styles)
+
-def allPointForecasters(data_train, data_test, partitions, max_order=3, statistics=True, residuals=True, series=True,
-                        save=False, file=None, tam=[20, 5]):
-    models = [naive.Naive, chen.ConventionalFTS, yu.WeightedFTS, ismailefendi.ImprovedWeightedFTS,
-              sadaei.ExponentialyWeightedFTS, hofts.HighOrderFTS, pfts.ProbabilisticFTS]
+def allPointForecasters(data_train, data_test, partitions, max_order=3, statistics=True, residuals=True,
+                        series=True, save=False, file=None, tam=[20, 5], models=None, transformation=None):
+    if models is None:
+        models = [naive.Naive, chen.ConventionalFTS, yu.WeightedFTS, ismailefendi.ImprovedWeightedFTS,
+                  sadaei.ExponentialyWeightedFTS, hofts.HighOrderFTS, pfts.ProbabilisticFTS]

     objs = []

-    all_colors = [clr for clr in pltcolors.cnames.keys()]
-
-    data_train_fs = Grid.GridPartitionerTrimf(data_train, partitions)
+    if transformation is not None:
+        data_train_fs = Grid.GridPartitionerTrimf(transformation.apply(data_train), partitions)
+    else:
+        data_train_fs = Grid.GridPartitionerTrimf(data_train, partitions)

     count = 1

-    colors = []
-    for model in models:
+    lcolors = []
+    for count, model in enumerate(models, start=0):
         #print(model)
         mfts = model("")
         if not mfts.isHighOrder:
+            if transformation is not None:
+                mfts.appendTransformation(transformation)
             mfts.train(data_train, data_train_fs)
             objs.append(mfts)
-            colors.append(all_colors[count])
+            lcolors.append(colors[count % ncol])
         else:
             for order in np.arange(1, max_order + 1):
                 if order >= mfts.minOrder:
                     mfts = model(" n = " + str(order))
+                    if transformation is not None:
+                        mfts.appendTransformation(transformation)
                     mfts.train(data_train, data_train_fs, order=order)
                     objs.append(mfts)
-                    colors.append(all_colors[count])
-        count += 10
+                    lcolors.append(colors[count % ncol])

     if statistics:
         print(getPointStatistics(data_test, objs))

     if residuals:
         print(ResidualAnalysis.compareResiduals(data_test, objs))
-        ResidualAnalysis.plotResiduals(data_test, objs, save=save, file=file, tam=[tam[0], 5 * tam[1]])
+        ResidualAnalysis.plotResiduals2(data_test, objs, save=save, file=file, tam=tam)

     if series:
-        plotComparedSeries(data_test, objs, colors, typeonlegend=False, save=save, file=file, tam=tam, intervals=False)
+        plotComparedSeries(data_test, objs, lcolors, typeonlegend=False, save=save, file=file, tam=tam,
+                           intervals=False)

 def getPointStatistics(data, models, externalmodels = None, externalforecasts = None):
-    ret = "Model & Order & RMSE & MAPE & Theil's U & Theil's I \\\\ \n"
+    ret = "Model & Order & RMSE & MAPE & Theil's U \\\\ \n"
     for fts in models:
         forecasts = fts.forecast(data)
         ret += fts.shortname + " & "
         ret += str(fts.order) + " & "
         ret += str(round(Measures.rmse(np.array(data[fts.order:]), np.array(forecasts[:-1])), 2)) + " & "
-        ret += str(round(Measures.mape(np.array(data[fts.order:]), np.array(forecasts[:-1])), 2)) + " & "
-        ret += str(round(Measures.UStatistic(np.array(data[fts.order:]), np.array(forecasts[:-1])), 2)) + " & "
-        ret += str(round(Measures.TheilsInequality(np.array(data[fts.order:]), np.array(forecasts[:-1])), 4))
+        ret += str(round(Measures.smape(np.array(data[fts.order:]), np.array(forecasts[:-1])), 2)) + " & "
+        ret += str(round(Measures.UStatistic(np.array(data[fts.order:]), np.array(forecasts[:-1])), 2))
+        #ret += str(round(Measures.TheilsInequality(np.array(data[fts.order:]), np.array(forecasts[:-1])), 4))
         ret += " \\\\ \n"
     if externalmodels is not None:
         l = len(externalmodels)
@@ -73,44 +88,48 @@ def getPointStatistics(data, models, externalmodels = None, externalforecasts =
             ret += externalmodels[k] + " & "
             ret += " 1 & "
             ret += str(round(Measures.rmse(data[fts.order:], externalforecasts[k][:-1]), 2)) + " & "
-            ret += str(round(Measures.mape(data[fts.order:], externalforecasts[k][:-1]), 2)) + " & "
+            ret += str(round(Measures.smape(data[fts.order:], externalforecasts[k][:-1]), 2)) + " & "
             ret += str(round(Measures.UStatistic(np.array(data[fts.order:]), np.array(forecasts[:-1])), 2))
             ret += " \\\\ \n"
     return ret

-def allIntervalForecasters(data_train, data_test, partitions, max_order=3, save=False, file=None, tam=[20, 5]):
-    models = [ifts.IntervalFTS, pfts.ProbabilisticFTS]
+def allIntervalForecasters(data_train, data_test, partitions, max_order=3, save=False, file=None, tam=[20, 5],
+                           models=None, transformation=None):
+    if models is None:
+        models = [ifts.IntervalFTS, pfts.ProbabilisticFTS]

     objs = []

-    all_colors = [clr for clr in pltcolors.cnames.keys()]
-
-    data_train_fs = Grid.GridPartitionerTrimf(data_train, partitions)
-
-    count = 1
-
-    colors = []
-    for model in models:
-        #print(model)
+    if transformation is not None:
+        data_train_fs = Grid.GridPartitionerTrimf(transformation.apply(data_train), partitions)
+    else:
+        data_train_fs = Grid.GridPartitionerTrimf(data_train, partitions)
+
+    lcolors = []
+    for count, model in enumerate(models, start=0):
         mfts = model("")
         if not mfts.isHighOrder:
+            if transformation is not None:
+                mfts.appendTransformation(transformation)
             mfts.train(data_train, data_train_fs)
             objs.append(mfts)
-            colors.append(all_colors[count])
+            lcolors.append(colors[count % ncol])
         else:
             for order in np.arange(1, max_order + 1):
                 if order >= mfts.minOrder:
                     mfts = model(" n = " + str(order))
+                    if transformation is not None:
+                        mfts.appendTransformation(transformation)
                     mfts.train(data_train, data_train_fs, order=order)
                     objs.append(mfts)
-                    colors.append(all_colors[count])
-        count += 5
+                    lcolors.append(colors[count % ncol])

     print(getIntervalStatistics(data_test, objs))

-    plotComparedSeries(data_test, objs, colors, typeonlegend=False, save=save, file=file, tam=tam, intervals=True)
+    plotComparedSeries(data_test, objs, lcolors, typeonlegend=False, save=save, file=file, tam=tam, intervals=True)

 def getIntervalStatistics(original, models):
@@ -142,9 +161,11 @@ def plotComparedSeries(original, models, colors, typeonlegend=False, save=False,
     mi = []
     ma = []

+    legends = []
+
     ax.plot(original, color='black', label="Original", linewidth=1.5)

-    count = 0
-    for fts in models:
+    for count, fts in enumerate(models, start=0):
         if fts.hasPointForecasting and not intervals:
             forecasted = fts.forecast(original)
             mi.append(min(forecasted) * 0.95)
@@ -170,15 +191,16 @@ def plotComparedSeries(original, models, colors, typeonlegend=False, save=False,
             ax.plot(upper, color=colors[count], ls="-")

         handles0, labels0 = ax.get_legend_handles_labels()
-        ax.legend(handles0, labels0, loc=2)
-        count = count + 1
+        lgd = ax.legend(handles0, labels0, loc=2, bbox_to_anchor=(1, 1))
+        legends.append(lgd)
         # ax.set_title(fts.name)

     ax.set_ylim([min(mi), max(ma)])
     ax.set_ylabel('F(T)')
     ax.set_xlabel('T')
     ax.set_xlim([0, len(original)])

-    Util.showAndSaveImage(fig, file, save)
+    Util.showAndSaveImage(fig, file, save, lgd=legends)
def plotComparedIntervalsAhead(original, models, colors, distributions, time_from, time_to,
@@ -530,7 +552,7 @@ def compareModelsTable(original, models_fo, models_ho):
     return sup + header + body + "\\end{tabular}"

-def simpleSearch_RMSE(original, model, partitions, orders, save=False, file=None, tam=[10, 15], plotforecasts=False,
+def simpleSearch_RMSE(train, test, model, partitions, orders, save=False, file=None, tam=[10, 15], plotforecasts=False,
                       elev=30, azim=144):
     ret = []
     errors = np.array([[0 for k in range(len(partitions))] for kk in range(len(orders))])
@@ -539,8 +561,8 @@ def simpleSearch_RMSE(original, model, partitions, orders, save=False, file=None
     # fig.suptitle("Model comparison")
     if plotforecasts:
         ax0 = fig.add_axes([0, 0.4, 0.9, 0.5])  # left, bottom, width, height
-        ax0.set_xlim([0, len(original)])
-        ax0.set_ylim([min(original) * 0.9, max(original) * 1.1])
+        ax0.set_xlim([0, len(train)])
+        ax0.set_ylim([min(train) * 0.9, max(train) * 1.1])
         ax0.set_title('Forecasts')
         ax0.set_ylabel('F(T)')
         ax0.set_xlabel('T')
@@ -550,13 +572,13 @@ def simpleSearch_RMSE(original, model, partitions, orders, save=False, file=None
     for p in partitions:
         oc = 0
         for o in orders:
-            sets = Grid.GridPartitionerTrimf(original, p)
+            sets = Grid.GridPartitionerTrimf(train, p)
             fts = model("q = " + str(p) + " n = " + str(o))
-            fts.train(original, sets, o)
-            forecasted = fts.forecast(original)
-            error = Measures.rmse(np.array(original[o:]), np.array(forecasted[:-1]))
-            mape = Measures.mape(np.array(original[o:]), np.array(forecasted[:-1]))
-            # print(original[o:])
+            fts.train(train, sets, o)
+            forecasted = fts.forecast(test)
+            error = Measures.rmse(np.array(test[o:]), np.array(forecasted[:-1]))
+            mape = Measures.mape(np.array(test[o:]), np.array(forecasted[:-1]))
+            # print(train[o:])
             # print(forecasted[-1])
             for kk in range(o):
                 forecasted.insert(0, None)
@@ -573,7 +595,7 @@ def simpleSearch_RMSE(original, model, partitions, orders, save=False, file=None
     if plotforecasts:
         # handles0, labels0 = ax0.get_legend_handles_labels()
         # ax0.legend(handles0, labels0)
-        ax0.plot(original, label="Original", linewidth=3.0, color="black")
+        ax0.plot(test, label="Original", linewidth=3.0, color="black")
         ax1 = Axes3D(fig, rect=[0, 1, 0.9, 0.9], elev=elev, azim=azim)
     if not plotforecasts: ax1 = Axes3D(fig, rect=[0, 1, 0.9, 0.9], elev=elev, azim=azim)
     # ax1 = fig.add_axes([0.6, 0.5, 0.45, 0.45], projection='3d')
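
How the new models/transformation parameters might be called: a sketch, not from the repository. It assumes the benchmarks module is importable as pyFTS.benchmarks.benchmarks, and the random-walk data stands in for a real train/test split.

import numpy as np
from pyFTS.common import Transformations
from pyFTS.benchmarks import benchmarks as bchmk

# Placeholder series; in practice this would be a real dataset.
data = 100 + np.cumsum(np.random.normal(0, 1, 200))
train, test = data[:150], data[150:]

diff = Transformations.Differential(1)  # lag-1 differencing

# With transformation=diff, the partitioner sees the differenced series and every
# model gets the transformation appended, so its forecasts are mapped back to the
# original scale through doInverseTransformations.
bchmk.allPointForecasters(train, test, partitions=10, max_order=3, transformation=diff)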

View File

@@ -40,13 +40,14 @@ class ConventionalFTS(fts.FTS):
     def train(self, data, sets, order=1, parameters=None):
         self.sets = sets
-        tmpdata = FuzzySet.fuzzySeries(data, sets)
+        ndata = self.doTransformations(data)
+        tmpdata = FuzzySet.fuzzySeries(ndata, sets)
         flrs = FLR.generateNonRecurrentFLRs(tmpdata)
         self.flrgs = self.generateFLRG(flrs)

     def forecast(self, data):
-        ndata = np.array(data)
+        ndata = np.array(self.doTransformations(data))
         l = len(ndata)

@@ -66,4 +67,6 @@ class ConventionalFTS(fts.FTS):
             ret.append(sum(mp) / len(mp))

+        ret = self.doInverseTransformations(ret, params=[data[self.order - 1:]])
+
         return ret

View File

@@ -8,6 +8,7 @@ class Transformation(object):
     def __init__(self, parameters):
         self.isInversible = True
         self.parameters = parameters
+        self.minimalLength = 1

     def apply(self, data, param):
         pass

@@ -24,6 +25,7 @@ class Differential(Transformation):
     def __init__(self, parameters):
         super(Differential, self).__init__(parameters)
         self.lag = parameters
+        self.minimalLength = 2

     def apply(self, data, param=None):
         if param is not None:

@@ -31,12 +33,12 @@ class Differential(Transformation):
         n = len(data)
         diff = [data[t - self.lag] - data[t] for t in np.arange(self.lag, n)]
         for t in np.arange(0, self.lag): diff.insert(0, 0)
-        return np.array(diff)
+        return diff

     def inverse(self, data, param):
         n = len(data)
         inc = [data[t] + param[t] for t in np.arange(1, n)]
-        return np.array(inc)
+        return inc

 def boxcox(original, plambda):
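
A standalone restatement of the Differential pair above, just to make its sign and alignment conventions concrete (apply_diff and inverse_diff are local names, not library functions).

import numpy as np

def apply_diff(data, lag=1):
    # mirrors Differential.apply: previous minus current, front-padded with zeros
    n = len(data)
    diff = [data[t - lag] - data[t] for t in np.arange(lag, n)]
    for _ in np.arange(0, lag):
        diff.insert(0, 0)
    return diff

def inverse_diff(data, param):
    # mirrors Differential.inverse: add each difference back onto the aligned original value
    n = len(data)
    return [data[t] + param[t] for t in np.arange(1, n)]

series = [10, 12, 15, 14]
d = apply_diff(series)            # [0, -2, -3, 1]
print(inverse_diff(d, series))    # [10, 12, 15]: each entry recovers the value one step earlier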

View File

@@ -13,8 +13,11 @@ def uniquefilename(name):
     return name + str(current_milli_time())

-def showAndSaveImage(fig, file, flag):
+def showAndSaveImage(fig, file, flag, lgd=None):
     if flag:
         plt.show()
-        fig.savefig(uniquefilename(file))
+        if lgd is not None:
+            fig.savefig(uniquefilename(file), additional_artists=lgd, bbox_inches='tight')  #bbox_extra_artists=(lgd,), )
+        else:
+            fig.savefig(uniquefilename(file))
         plt.close(fig)
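
Why the legend is threaded through to savefig: a legend anchored outside the axes (as plotComparedSeries now does with bbox_to_anchor=(1, 1)) gets clipped unless the saved bounding box is expanded to include it. A minimal Matplotlib sketch using the documented bbox_extra_artists keyword; the filename is arbitrary.

import matplotlib.pyplot as plt

fig, ax = plt.subplots()
ax.plot([1, 2, 3], label="series")
lgd = ax.legend(loc=2, bbox_to_anchor=(1, 1))  # legend sits outside the axes, to the right

# bbox_extra_artists plus bbox_inches='tight' grows the saved area to fit the legend
fig.savefig("legend_outside.png", bbox_extra_artists=(lgd,), bbox_inches="tight")
plt.close(fig)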

fts.py
View File

@@ -61,23 +61,23 @@ class FTS(object):
     def doTransformations(self, data, params=None):
         ndata = data
-        if params is None:
-            params = [None for k in self.transformations]
-        c = 0
-        for t in self.transformations:
-            ndata = t.apply(ndata, params[c])
-            c += 1
+        if len(self.transformations) > 0:
+            if params is None:
+                params = [None for k in self.transformations]
+
+            for c, t in enumerate(self.transformations, start=0):
+                ndata = t.apply(ndata, params[c])

         return ndata

     def doInverseTransformations(self, data, params=None):
         ndata = data
-        if params is None:
-            params = [None for k in self.transformations]
-        c = 0
-        for t in reversed(self.transformations):
-            ndata = t.inverse(ndata, params[c])
-            c += 1
+        if len(self.transformations) > 0:
+            if params is None:
+                params = [None for k in self.transformations]
+
+            for c, t in enumerate(reversed(self.transformations), start=0):
+                ndata = t.inverse(ndata, params[c])

         return ndata
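
The "cascaded" part of the commit title is this pair of loops: transformations are applied in registration order and undone in reverse. A self-contained sketch of that ordering with two invented transformations (Scale and Offset are hypothetical stand-ins, not library classes).

class Scale:
    def apply(self, data, param=None):   return [x * 10 for x in data]
    def inverse(self, data, param=None): return [x / 10 for x in data]

class Offset:
    def apply(self, data, param=None):   return [x + 3 for x in data]
    def inverse(self, data, param=None): return [x - 3 for x in data]

def do_transformations(transformations, data, params=None):
    if len(transformations) > 0:
        if params is None:
            params = [None for _ in transformations]
        for c, t in enumerate(transformations):
            data = t.apply(data, params[c])
    return data

def do_inverse_transformations(transformations, data, params=None):
    if len(transformations) > 0:
        if params is None:
            params = [None for _ in transformations]
        for c, t in enumerate(reversed(transformations)):  # undo in reverse order
            data = t.inverse(data, params[c])
    return data

cascade = [Scale(), Offset()]
x = [1.0, 2.0, 3.0]
y = do_transformations(cascade, x)              # scaled first, then offset
print(do_inverse_transformations(cascade, y))   # [1.0, 2.0, 3.0]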

View File

@@ -61,6 +61,9 @@ class HighOrderFTS(fts.FTS):
         return (flrgs)

     def train(self, data, sets, order=1, parameters=None):
+        data = self.doTransformations(data)
+
         self.order = order
         self.sets = sets
         for s in self.sets: self.setsDict[s.name] = s

@@ -81,8 +84,10 @@ class HighOrderFTS(fts.FTS):
         if l <= self.order:
             return data

+        ndata = self.doTransformations(data)
+
         for k in np.arange(self.order, l+1):
-            tmpdata = FuzzySet.fuzzySeries(data[k - self.order: k], self.sets)
+            tmpdata = FuzzySet.fuzzySeries(ndata[k - self.order: k], self.sets)
             tmpflrg = HighOrderFLRG(self.order)
             for s in tmpdata: tmpflrg.appendLHS(s)

@@ -95,4 +100,6 @@ class HighOrderFTS(fts.FTS):
             ret.append(sum(mp) / len(mp))

+        ret = self.doInverseTransformations(ret, params=[data[self.order-1:]])
+
         return ret

View File

@@ -13,6 +13,9 @@ class HighOrderFTS(fts.FTS):
         self.detail = "Hwang"

     def forecast(self, data):
+        ndata = self.doTransformations(data)
+
         cn = np.array([0.0 for k in range(len(self.sets))])
         ow = np.array([[0.0 for k in range(len(self.sets))] for z in range(self.order - 1)])
         rn = np.array([[0.0 for k in range(len(self.sets))] for z in range(self.order - 1)])

@@ -20,12 +23,12 @@ class HighOrderFTS(fts.FTS):
         ret = []

-        for t in np.arange(self.order-1, len(data)):
+        for t in np.arange(self.order-1, len(ndata)):
             for s in range(len(self.sets)):
-                cn[s] = self.sets[s].membership(data[t])
+                cn[s] = self.sets[s].membership(ndata[t])
                 for w in range(self.order - 1):
-                    ow[w, s] = self.sets[s].membership(data[t - w])
+                    ow[w, s] = self.sets[s].membership(ndata[t - w])
                     rn[w, s] = ow[w, s] * cn[s]
                     ft[s] = max(ft[s], rn[w, s])

             mft = max(ft)

@@ -37,6 +40,8 @@ class HighOrderFTS(fts.FTS):
                 count += 1.0
             ret.append(out / count)

+        ret = self.doInverseTransformations(ret, params=[data[self.order - 1:]])
+
         return ret

     def train(self, data, sets, order=1, parameters=None):

View File

@@ -49,7 +49,9 @@ class IntervalFTS(hofts.HighOrderFTS):
     def forecastInterval(self, data):
-        ndata = np.array(data)
+        data = np.array(data)
+
+        ndata = self.doTransformations(data)

         l = len(ndata)

@@ -113,6 +115,8 @@ class IntervalFTS(hofts.HighOrderFTS):
                 # generate the interval
                 norm = sum(affected_flrgs_memberships)
-                ret.append([sum(lo) / norm, sum(up) / norm])
+                lo_ = self.doInverseTransformations(sum(lo) / norm, params=[data[k - (self.order - 1): k + 1]])
+                up_ = self.doInverseTransformations(sum(up) / norm, params=[data[k - (self.order - 1): k + 1]])
+                ret.append([lo_, up_])

         return ret

View File

@@ -51,7 +51,9 @@ class ImprovedWeightedFTS(fts.FTS):
         for s in self.sets: self.setsDict[s.name] = s

-        tmpdata = FuzzySet.fuzzySeries(data, self.sets)
+        ndata = self.doTransformations(data)
+
+        tmpdata = FuzzySet.fuzzySeries(ndata, self.sets)
         flrs = FLR.generateRecurrentFLRs(tmpdata)
         self.flrgs = self.generateFLRG(flrs)

@@ -62,7 +64,8 @@ class ImprovedWeightedFTS(fts.FTS):
     def forecast(self, data):
         l = 1

-        ndata = np.array(data)
+        data = np.array(data)
+        ndata = self.doTransformations(data)

         l = len(ndata)

@@ -82,4 +85,6 @@ class ImprovedWeightedFTS(fts.FTS):
             ret.append(mp.dot(flrg.weights()))

+        ret = self.doInverseTransformations(ret, params=[data[self.order - 1:]])
+
         return ret

pfts.py
View File

@@ -112,7 +112,7 @@ class ProbabilisticFTS(ifts.IntervalFTS):
     def forecast(self, data):
-        ndata = np.array(data)
+        ndata = np.array(self.doTransformations(data))

         l = len(ndata)

@@ -208,11 +208,15 @@ class ProbabilisticFTS(ifts.IntervalFTS):
             else:
                 ret.append(sum(mp) / norm)

+        ret = self.doInverseTransformations(ret, params=[data[self.order - 1:]])
+
         return ret

     def forecastInterval(self, data):
-        ndata = np.array(data)
+        data = np.array(data)
+
+        ndata = self.doTransformations(data)

         l = len(ndata)

@@ -308,7 +312,9 @@ class ProbabilisticFTS(ifts.IntervalFTS):
             if norm == 0:
                 ret.append([0, 0])
             else:
-                ret.append([sum(lo) / norm, sum(up) / norm])
+                lo_ = self.doInverseTransformations(sum(lo) / norm, params=[data[k - (self.order - 1): k + 1]])
+                up_ = self.doInverseTransformations(sum(up) / norm, params=[data[k - (self.order - 1): k + 1]])
+                ret.append([lo_, up_])

         return ret
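
The interval branches above invert each endpoint separately, passing the same window of original data. A toy endpoint inversion for plain lag-1 differencing (numbers invented; the library routes this through doInverseTransformations rather than adding directly).

# Interval forecast produced in differenced space, mapped back endpoint by endpoint.
original_window = [100.0, 103.0]      # plays the role of data[k - (order - 1): k + 1]
lo_diff, up_diff = -1.0, 2.0          # lower/upper forecast of the next difference

lo_ = original_window[-1] + lo_diff   # 102.0
up_ = original_window[-1] + up_diff   # 105.0
print([lo_, up_])                     # interval expressed on the original scale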

View File

@@ -52,14 +52,17 @@ class ExponentialyWeightedFTS(fts.FTS):
     def train(self, data, sets, order=1, parameters=2):
         self.c = parameters
         self.sets = sets
-        tmpdata = FuzzySet.fuzzySeries(data, sets)
+        ndata = self.doTransformations(data)
+
+        tmpdata = FuzzySet.fuzzySeries(ndata, sets)
         flrs = FLR.generateRecurrentFLRs(tmpdata)
         self.flrgs = self.generateFLRG(flrs, self.c)

     def forecast(self, data):
         l = 1

-        ndata = np.array(data)
+        data = np.array(data)
+        ndata = self.doTransformations(data)

         l = len(ndata)

@@ -79,4 +82,6 @@ class ExponentialyWeightedFTS(fts.FTS):
             ret.append(mp.dot(flrg.weights()))

+        ret = self.doInverseTransformations(ret, params=[data[self.order - 1:]])
+
         return ret

View File

@@ -47,13 +47,16 @@ class SeasonalFTS(fts.FTS):
     def train(self, data, sets, order=1, parameters=12):
         self.sets = sets
         self.seasonality = parameters
-        tmpdata = FuzzySet.fuzzySeries(data, sets)
+        ndata = self.doTransformations(data)
+
+        tmpdata = FuzzySet.fuzzySeries(ndata, sets)
         flrs = FLR.generateRecurrentFLRs(tmpdata)
         self.flrgs = self.generateFLRG(flrs)

     def forecast(self, data):
-        ndata = np.array(data)
+        data = np.array(data)
+        ndata = self.doTransformations(data)

         l = len(ndata)

@@ -66,4 +69,6 @@ class SeasonalFTS(fts.FTS):
             ret.append(sum(mp) / len(mp))

+        ret = self.doInverseTransformations(ret, params=[data[self.order - 1:]])
+
         return ret

yu.py
View File

@@ -48,14 +48,17 @@ class WeightedFTS(fts.FTS):
     def train(self, data, sets, order=1, parameters=None):
         self.sets = sets
-        tmpdata = FuzzySet.fuzzySeries(data, sets)
+        ndata = self.doTransformations(data)
+
+        tmpdata = FuzzySet.fuzzySeries(ndata, sets)
         flrs = FLR.generateRecurrentFLRs(tmpdata)
         self.flrgs = self.generateFLRG(flrs)

     def forecast(self, data):
         l = 1

-        ndata = np.array(data)
+        data = np.array(data)
+        ndata = self.doTransformations(data)

         l = len(ndata)

@@ -75,4 +78,6 @@ class WeightedFTS(fts.FTS):
             ret.append(mp.dot(flrg.weights()))

+        ret = self.doInverseTransformations(ret, params=[data[self.order - 1:]])
+
         return ret