- Bugfix on interval forecast of quantreg

Petrônio Cândido de Lima e Silva 2017-05-13 22:32:40 -03:00
parent f8ac95d24e
commit 0b7799a9bb
3 changed files with 26 additions and 10 deletions


@@ -219,7 +219,7 @@ def run_interval(mfts, partitioner, train_data, test_data, window_key=None, tran
     import time
     from pyFTS import hofts,ifts,pwfts
     from pyFTS.partitioners import Grid, Entropy, FCM
-    from pyFTS.benchmarks import Measures
+    from pyFTS.benchmarks import Measures, arima, quantreg
 
     tmp = [hofts.HighOrderFTS, ifts.IntervalFTS, pwfts.ProbabilisticWeightedFTS]
@@ -291,7 +291,7 @@ def interval_sliding_window(data, windowsize, train=0.8, inc=0.1, models=None,
     if benchmark_models_parameters is None:
        benchmark_models_parameters = [(1, 0, 0), (1, 0, 1), (2, 0, 1), (2, 0, 2), 1, 2]

-    cluster = dispy.JobCluster(run_point, nodes=nodes) #, depends=dependencies)
+    cluster = dispy.JobCluster(run_interval, nodes=nodes) #, depends=dependencies)
     http_server = dispy.httpd.DispyHTTPServer(cluster)
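The one-line fix above matters because dispy runs whatever function is handed to JobCluster on every node, so interval_sliding_window was previously dispatching the point-forecast worker instead of run_interval. A minimal sketch of that dispy pattern follows, with a simplified stand-in worker (the worker body and node address are illustrative assumptions, not pyFTS's run_interval):

import dispy, dispy.httpd

def worker(job_id, data):
    # stand-in for run_interval: a dispy worker should be self-contained,
    # importing anything it needs inside the function body
    return job_id, sum(data) / len(data)

if __name__ == '__main__':
    # the function passed to JobCluster is what every node executes
    cluster = dispy.JobCluster(worker, nodes=['127.0.0.1'])
    http_server = dispy.httpd.DispyHTTPServer(cluster)  # web monitor, as in interval_sliding_window
    jobs = [cluster.submit(i, [1.0, 2.0, 3.0]) for i in range(4)]
    for job in jobs:
        print(job())  # blocks until the worker's result is available
    http_server.shutdown()
    cluster.close()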


@@ -38,8 +38,8 @@ class QuantileRegression(fts.FTS):
         self.mean_qt = [k for k in mqt.params]

         if self.alpha is not None:
-            self.upper_qt = [uqt.params[k] for k in uqt.params.keys()]
-            self.lower_qt = [lqt.params[k] for k in lqt.params.keys()]
+            self.upper_qt = [k for k in uqt.params]
+            self.lower_qt = [k for k in lqt.params]

         self.shortname = "QAR(" + str(self.order) + ")"
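This is the core of the bugfix on the interval forecast: the old comprehensions called .keys() on the fitted parameters, which fails when statsmodels hands them back as a plain numpy array, while iterating the params directly (as mean_qt already did) works for both a Series and an ndarray. A minimal sketch of fitting upper and lower quantile regressions with statsmodels and reading the coefficients the same way; the lag-matrix construction and the quantile choices are illustrative assumptions, not pyFTS's training routine:

import numpy as np
import statsmodels.api as sm

# toy autoregressive-looking series
data = np.sin(np.arange(300) * 0.1) + np.random.normal(0, 0.1, 300)
order = 2

# lagged design matrix with an intercept (illustrative, not pyFTS's exact code)
lagged = np.array([data[i - order:i] for i in range(order, len(data))])
exog = sm.add_constant(lagged)
endog = data[order:]

uqt = sm.QuantReg(endog, exog).fit(q=0.95)  # upper-quantile model (illustrative quantile)
lqt = sm.QuantReg(endog, exog).fit(q=0.05)  # lower-quantile model (illustrative quantile)

# params may be a bare ndarray with no .keys(), so iterate it directly,
# mirroring how mean_qt is already built
upper_qt = [k for k in uqt.params]
lower_qt = [k for k in lqt.params]
print(upper_qt, lower_qt)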


@@ -29,11 +29,11 @@ DATASETS
 #gauss = random.normal(0,1.0,5000)
 #gauss_teste = random.normal(0,1.0,400)

-taiexpd = pd.read_csv("DataSets/TAIEX.csv", sep=",")
-taiex = np.array(taiexpd["avg"][:5000])
+#taiexpd = pd.read_csv("DataSets/TAIEX.csv", sep=",")
+#taiex = np.array(taiexpd["avg"][:5000])

-#nasdaqpd = pd.read_csv("DataSets/NASDAQ_IXIC.csv", sep=",")
-#nasdaq = np.array(nasdaqpd["avg"][0:5000])
+nasdaqpd = pd.read_csv("DataSets/NASDAQ_IXIC.csv", sep=",")
+nasdaq = np.array(nasdaqpd["avg"][0:5000])

 #sp500pd = pd.read_csv("DataSets/S&P500.csv", sep=",")
 #sp500 = np.array(sp500pd["Avg"][11000:])
@@ -73,7 +73,7 @@ from pyFTS.benchmarks import arima, quantreg
 #bchmk.teste(taiex,['192.168.0.109', '192.168.0.101'])

-from pyFTS import song, chen, yu, cheng
+diff = Transformations.Differential(1)

 """
 bchmk.point_sliding_window(sonda, 9000, train=0.8, inc=0.4,#models=[yu.WeightedFTS], # #
@@ -82,7 +82,7 @@ bchmk.point_sliding_window(sonda, 9000, train=0.8, inc=0.4,#models=[yu.WeightedF
                      dump=True, save=True, file="experiments/sondaws_point_analytic.csv",
                      nodes=['192.168.0.103', '192.168.0.106', '192.168.0.108', '192.168.0.109']) #, depends=[hofts, ifts])

-diff = Transformations.Differential(1)
+
 bchmk.point_sliding_window(sonda, 9000, train=0.8, inc=0.4, #models=[yu.WeightedFTS], # #
                      partitioners=[Grid.GridPartitioner], #Entropy.EntropyPartitioner], # FCM.FCMPartitioner, ],
@@ -91,6 +91,21 @@ bchmk.point_sliding_window(sonda, 9000, train=0.8, inc=0.4, #models=[yu.Weighted
                      nodes=['192.168.0.103', '192.168.0.106', '192.168.0.108', '192.168.0.109']) #, depends=[hofts, ifts])
 #"""

+bchmk.interval_sliding_window(nasdaq, 2000, train=0.8, inc=0.1,#models=[yu.WeightedFTS], # #
+                     partitioners=[Grid.GridPartitioner], #Entropy.EntropyPartitioner], # FCM.FCMPartitioner, ],
+                     partitions= np.arange(10,200,step=10), #transformation=diff,
+                     dump=True, save=True, file="experiments/nasdaq_interval_analytic.csv",
+                     nodes=['192.168.0.103', '192.168.0.106', '192.168.0.108', '192.168.0.109']) #, depends=[hofts, ifts])
+
+bchmk.interval_sliding_window(nasdaq, 2000, train=0.8, inc=0.1, #models=[yu.WeightedFTS], # #
+                     partitioners=[Grid.GridPartitioner], #Entropy.EntropyPartitioner], # FCM.FCMPartitioner, ],
+                     partitions= np.arange(3,20,step=2), #transformation=diff,
+                     dump=True, save=True, file="experiments/nasdaq_interval_analytic_diff.csv",
+                     nodes=['192.168.0.103', '192.168.0.106', '192.168.0.108', '192.168.0.109']) #, depends=[hofts, ifts])
+
+"""
 from pyFTS.partitioners import Grid
 from pyFTS import pwfts
@@ -108,3 +123,4 @@ x = tmp.forecastInterval(taiex[1600:1610])

 print(taiex[1600:1610])
 print(x)
+"""