Correction of inverse transformation on interval forecasts

parent 2c0e685e87
commit f8ac95d24e
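In short: the interval forecasters (ARIMA, QuantileRegression, IntervalFTS, ProbabilisticWeightedFTS) no longer invert the data transformation bound by bound inside their forecasting loops. They now accumulate the [lo, up] pairs on the transformed scale and apply doInverseTransformations once over the whole result with a new interval=True flag, which travels through the **kwargs added to FTS.doTransformations / doInverseTransformations and to the Transformation apply/inverse signatures.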
@@ -103,7 +103,7 @@ class ARIMA(fts.FTS):
 
             ret.append(tmp)
 
-        ret = self.doInverseTransformations(ret, params=[data[self.order - 1:]])
+        ret = self.doInverseTransformations(ret, params=[data[self.order - 1:]], interval=True)
 
        return ret
 
@@ -55,8 +55,9 @@ def run_point(mfts, partitioner, train_data, test_data, window_key=None, transfo
     pttr = str(partitioner.__module__).split('.')[-1]
     _key = mfts.shortname + " n = " + str(mfts.order) + " " + pttr + " q = " + str(partitioner.partitions)
     mfts.partitioner = partitioner
+
     if transformation is not None:
         mfts.appendTransformation(transformation)
 
     _start = time.time()
     mfts.train(train_data, partitioner.sets, order=mfts.order)
@@ -234,8 +235,9 @@ def run_interval(mfts, partitioner, train_data, test_data, window_key=None, tran
     pttr = str(partitioner.__module__).split('.')[-1]
     _key = mfts.shortname + " n = " + str(mfts.order) + " " + pttr + " q = " + str(partitioner.partitions)
     mfts.partitioner = partitioner
+
     if transformation is not None:
         mfts.appendTransformation(transformation)
 
     _start = time.time()
     mfts.train(train_data, partitioner.sets, order=mfts.order)
@@ -76,6 +76,6 @@ class QuantileRegression(fts.FTS):
         down = self.linearmodel(sample, self.down_qt)
         ret.append([up, down])
 
-        ret = self.doInverseTransformations(ret, params=[data[self.order - 1:]])
+        ret = self.doInverseTransformations(ret, params=[data[self.order - 1:]], interval=True)
 
         return ret
@@ -13,10 +13,10 @@ class Transformation(object):
         self.parameters = parameters
         self.minimalLength = 1
 
-    def apply(self,data,param):
+    def apply(self,data,param,**kwargs):
         pass
 
-    def inverse(self,data, param):
+    def inverse(self,data, param,**kwargs):
         pass
 
     def __str__(self):
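Adding **kwargs to the abstract apply/inverse keeps every subclass call-compatible with the new flag: transformations that have no special interval handling, such as AdaptiveExpectation below, can simply ignore it.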
@@ -32,7 +32,7 @@ class Differential(Transformation):
         self.lag = parameters
         self.minimalLength = 2
 
-    def apply(self, data, param=None):
+    def apply(self, data, param=None,**kwargs):
         if param is not None:
             self.lag = param
 
@@ -47,7 +47,9 @@ class Differential(Transformation):
         for t in np.arange(0, self.lag): diff.insert(0, 0)
         return diff
 
-    def inverse(self,data, param):
+    def inverse(self,data, param, **kwargs):
 
+        interval = kwargs.get("interval",False)
+
         if isinstance(data, (np.ndarray, np.generic)):
             data = data.tolist()
@@ -57,7 +59,10 @@ class Differential(Transformation):
 
         n = len(data)
 
-        inc = [data[t] + param[t] for t in np.arange(0, n)]
+        if not interval:
+            inc = [data[t] + param[t] for t in np.arange(0, n)]
+        else:
+            inc = [[data[t][0] + param[t], data[t][1] + param[t]] for t in np.arange(0, n)]
 
         if n == 1:
             return inc[0]
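A minimal sketch of what the new interval branch does, assuming the hunk above and that the method's unshown tail returns the full inc list when n > 1 (as it must for the point branch). The variable names and numbers here are illustrative, not from the commit:

# Hypothetical usage sketch (not part of the commit): invert first-order
# differencing on interval forecasts.
from pyFTS.common import Transformations

diff = Transformations.Differential(1)

# Differenced [lo, up] forecasts and the original observations used as
# reconstruction parameters, aligned index by index.
intervals = [[-10.0, 15.0], [-5.0, 8.0]]
originals = [5000.0, 5005.0]

# interval=True shifts both bounds of each pair by the same original value,
# bringing the interval back to the original scale.
restored = diff.inverse(intervals, originals, interval=True)
print(restored)  # [[4990.0, 5015.0], [5000.0, 5013.0]]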
@@ -73,10 +78,10 @@ class AdaptiveExpectation(Transformation):
         super(AdaptiveExpectation, self).__init__(parameters)
         self.h = parameters
 
-    def apply(self, data, param=None):
+    def apply(self, data, param=None,**kwargs):
         return data
 
-    def inverse(self, data, param):
+    def inverse(self, data, param,**kwargs):
         n = len(data)
 
         inc = [param[t] + self.h*(data[t] - param[t]) for t in np.arange(0, n)]
fts.py (4 changes)
@@ -135,7 +135,7 @@ class FTS(object):
         if transformation is not None:
             self.transformations.append(transformation)
 
-    def doTransformations(self,data,params=None,updateUoD=False):
+    def doTransformations(self,data,params=None,updateUoD=False, **kwargs):
         ndata = data
         if updateUoD:
             if min(data) < 0:
@@ -157,7 +157,7 @@ class FTS(object):
 
         return ndata
 
-    def doInverseTransformations(self, data, params=None):
+    def doInverseTransformations(self, data, params=None, **kwargs):
         ndata = data
         if len(self.transformations) > 0:
             if params is None:
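The body of the loop that applies the inverses lies outside these hunks; presumably it simply forwards the new **kwargs to each transformation, along these lines (a sketch under that assumption, not the commit's verbatim code):

def doInverseTransformations(self, data, params=None, **kwargs):
    ndata = data
    if len(self.transformations) > 0:
        if params is None:
            params = [None for k in self.transformations]
        # Undo transformations in reverse application order, passing
        # flags such as interval=True straight through to inverse().
        for c, t in enumerate(reversed(self.transformations)):
            ndata = t.inverse(ndata, params[c], **kwargs)
    return ndata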
ifts.py (6 changes)
@@ -133,8 +133,10 @@ class IntervalFTS(hofts.HighOrderFTS):
 
             # generate the interval
             norm = sum(affected_flrgs_memberships)
-            lo_ = self.doInverseTransformations(sum(lo) / norm, params=[data[k - (self.order - 1): k + 1]])
-            up_ = self.doInverseTransformations(sum(up) / norm, params=[data[k - (self.order - 1): k + 1]])
+            lo_ = sum(lo) / norm
+            up_ = sum(up) / norm
             ret.append([lo_, up_])
 
+        ret = self.doInverseTransformations(ret, params=[data[self.order - 1:]], interval=True)
+
         return ret
pwfts.py (6 changes)
@@ -398,10 +398,12 @@ class ProbabilisticWeightedFTS(ifts.IntervalFTS):
             if norm == 0:
                 ret.append([0, 0])
             else:
-                lo_ = self.doInverseTransformations(sum(lo) / norm, params=[data[k - (self.order - 1): k + 1]])
-                up_ = self.doInverseTransformations(sum(up) / norm, params=[data[k - (self.order - 1): k + 1]])
+                lo_ = sum(lo) / norm
+                up_ = sum(up) / norm
                 ret.append([lo_, up_])
 
+        ret = self.doInverseTransformations(ret, params=[data[self.order - 1:]], interval=True)
+
         return ret
 
     def forecastAhead(self, data, steps, **kwargs):
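Design note: both IntervalFTS and ProbabilisticWeightedFTS now return the raw weighted bounds from the loop and invert the accumulated list in a single pass; params=[data[self.order - 1:]] lines each [lo, up] pair up with the last original observation of its conditioning window, the same pairing the ARIMA and quantile-regression forecasters above already use.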
@@ -29,8 +29,8 @@ DATASETS
 #gauss = random.normal(0,1.0,5000)
 #gauss_teste = random.normal(0,1.0,400)
 
-#taiexpd = pd.read_csv("DataSets/TAIEX.csv", sep=",")
-#taiex = np.array(taiexpd["avg"][:5000])
+taiexpd = pd.read_csv("DataSets/TAIEX.csv", sep=",")
+taiex = np.array(taiexpd["avg"][:5000])
 
 #nasdaqpd = pd.read_csv("DataSets/NASDAQ_IXIC.csv", sep=",")
 #nasdaq = np.array(nasdaqpd["avg"][0:5000])
@@ -39,10 +39,10 @@ DATASETS
 #sp500 = np.array(sp500pd["Avg"][11000:])
 #del(sp500pd)
 
-sondapd = pd.read_csv("DataSets/SONDA_BSB_HOURLY_AVG.csv", sep=";")
-sondapd = sondapd.dropna(axis=0, how='any')
-sonda = np.array(sondapd["ws_10m"])
-del(sondapd)
+#sondapd = pd.read_csv("DataSets/SONDA_BSB_HOURLY_AVG.csv", sep=";")
+#sondapd = sondapd.dropna(axis=0, how='any')
+#sonda = np.array(sondapd["ws_10m"])
+#del(sondapd)
 
 #bestpd = pd.read_csv("DataSets/BEST_TAVG.csv", sep=";")
 #best = np.array(bestpd["Anomaly"])
@@ -75,7 +75,7 @@ from pyFTS.benchmarks import arima, quantreg
 
 from pyFTS import song, chen, yu, cheng
 
-
+"""
 bchmk.point_sliding_window(sonda, 9000, train=0.8, inc=0.4,#models=[yu.WeightedFTS], # #
                     partitioners=[Grid.GridPartitioner], #Entropy.EntropyPartitioner], # FCM.FCMPartitioner, ],
                     partitions= np.arange(10,200,step=10), #transformation=diff,
@@ -90,65 +90,21 @@ bchmk.point_sliding_window(sonda, 9000, train=0.8, inc=0.4, #models=[yu.Weighted
                     dump=True, save=True, file="experiments/sondaws_point_analytic_diff.csv",
                     nodes=['192.168.0.103', '192.168.0.106', '192.168.0.108', '192.168.0.109']) #, depends=[hofts, ifts])
 #"""
-#bchmk.testa(taiex,[10,20],partitioners=[Grid.GridPartitioner], nodes=['192.168.0.109', '192.168.0.101'])
-
-#parallel_util.explore_partitioners(taiex,20)
-
-#nasdaqpd = pd.read_csv("DataSets/NASDAQ_IXIC.csv", sep=",")
-#nasdaq = np.array(nasdaqpd["avg"][:5000])
-
-#taiex = pd.read_csv("DataSets/TAIEX.csv", sep=",")
-#taiex_treino = np.array(taiex["avg"][2500:3900])
-#taiex_teste = np.array(taiex["avg"][3901:4500])
-
-#print(len(taiex))
-
-#from pyFTS.common import Util
-
-#, ,
-
-#diff = Transformations.Differential(1)
-
-#bchmk.external_point_sliding_window([naive.Naive, arima.ARIMA, arima.ARIMA, arima.ARIMA, arima.ARIMA, arima.ARIMA, arima.ARIMA],
-#                                    [None, (1,0,0),(1,1,0),(2,0,0), (2,1,0), (1,1,1), (1,0,1)],
-#                                    gauss,2000,train=0.8, dump=True, save=True, file="experiments/arima_gauss.csv")
-
-
-#bchmk.interval_sliding_window(gauss,2000,train=0.8, #transformation=diff, #models=[pwfts.ProbabilisticWeightedFTS], # #
-#                         partitioners=[Grid.GridPartitioner], #Entropy.EntropyPartitioner], # FCM.FCMPartitioner, ],
-#                         partitions= np.arange(10,200,step=5), #
-#                         dump=True, save=False, file="experiments/nasdaq_interval.csv")
-
-#3bchmk.ahead_sliding_window(taiex,2000,train=0.8, steps=20, resolution=250, #transformation=diff, #models=[pwfts.ProbabilisticWeightedFTS], # #
-#                         partitioners=[Grid.GridPartitioner], #Entropy.EntropyPartitioner], # FCM.FCMPartitioner, ],
-#                         partitions= np.arange(10,200,step=10), #
-#                         dump=True, save=True, file="experiments/taiex_ahead.csv")
-
-
-#bchmk.allPointForecasters(taiex_treino, taiex_treino, 95, #transformation=diff,
-#                          models=[ naive.Naive, pfts.ProbabilisticFTS, pwfts.ProbabilisticWeightedFTS],
-#                          statistics=True, residuals=False, series=False)
-
-#data_train_fs = Grid.GridPartitioner(nasdaq[:1600], 95).sets
-
-#fts1 = pwfts.ProbabilisticWeightedFTS("")
-#fts1.appendTransformation(diff)
-#fts1.train(nasdaq[:1600], data_train_fs, order=1)
-
-#_crps1, _crps2, _t1, _t2 = bchmk.get_distribution_statistics(nasdaq[1600:2000], fts1, steps=20, resolution=200)
-
-#print(_crps1, _crps2, _t1, _t2)
-
-#print(fts1.forecast([5000, 5000]))
-
-#fts2 = pwfts.ProbabilisticWeightedFTS("")
-#fts2.appendTransformation(diff)
-#fts2.train(taiex_treino, data_train_fs, order=1)
-
-#print(fts2.forecast([5000, 5000]))
-
-
-#tmp = Grid.GridPartitioner(taiex_treino,7,transformation=diff)
-
-#for s in tmp.sets: print(s)
+from pyFTS.partitioners import Grid
+from pyFTS import pwfts
+
+diff = Transformations.Differential(1)
+
+fs = Grid.GridPartitioner(taiex[:2000], 10, transformation=diff)
+
+tmp = pwfts.ProbabilisticWeightedFTS("")
+
+tmp.appendTransformation(diff)
+
+tmp.train(taiex[:1600], fs.sets, order=1)
+
+x = tmp.forecastInterval(taiex[1600:1610])
+
+print(taiex[1600:1610])
+print(x)
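If the pieces fit together as reconstructed, this smoke test should print the ten original TAIEX values followed by ten [lo, up] pairs on the same (undifferenced) scale; the exact numbers depend on the data and the grid partitioning, so none are reproduced here.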