#!/usr/bin/python
# -*- coding: utf8 -*-

import os
import numpy as np
import pandas as pd
import matplotlib
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D

from pyFTS.partitioners import Grid, Entropy, FCM, Huarng
from pyFTS.common import FLR, FuzzySet, Membership, Transformations
from pyFTS import fts, hofts, ifts, pwfts, tree, chen
#from pyFTS.benchmarks import benchmarks as bchmk
from pyFTS.benchmarks import naive, arima
from pyFTS.benchmarks import Measures
from numpy import random
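
# all relative DataSets/ and experiments/ paths below resolve against this folder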
os.chdir("/home/petronio/dados/Dropbox/Doutorado/Codigos/")

#enrollments = pd.read_csv("DataSets/Enrollments.csv", sep=";")
#enrollments = np.array(enrollments["Enrollments"])
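
# first-order differencing transformation, reused by several models below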
diff = Transformations.Differential(1)

"""
DATASETS
"""

#gauss = random.normal(0,1.0,5000)
#gauss_teste = random.normal(0,1.0,400)

#taiexpd = pd.read_csv("DataSets/TAIEX.csv", sep=",")
#taiex = np.array(taiexpd["avg"][:5000])
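
# NASDAQ (IXIC) is the only series actually loaded in this run; the other loaders stay commented out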
nasdaqpd = pd.read_csv("DataSets/NASDAQ_IXIC.csv", sep=",")
nasdaq = np.array(nasdaqpd["avg"][0:5000])

#sp500pd = pd.read_csv("DataSets/S&P500.csv", sep=",")
#sp500 = np.array(sp500pd["Avg"][11000:])
#del(sp500pd)

#sondapd = pd.read_csv("DataSets/SONDA_BSB_HOURLY_AVG.csv", sep=";")
#sondapd = sondapd.dropna(axis=0, how='any')
#sonda = np.array(sondapd["ws_10m"])
#del(sondapd)

#bestpd = pd.read_csv("DataSets/BEST_TAVG.csv", sep=";")
#best = np.array(bestpd["Anomaly"])
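
# the serial, parallel and distributed benchmark engines are swapped through the bchmk alias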
#from pyFTS.benchmarks import benchmarks as bchmk
from pyFTS.benchmarks import distributed_benchmarks as bchmk
#from pyFTS.benchmarks import parallel_benchmarks as bchmk
from pyFTS.benchmarks import Util
from pyFTS.benchmarks import arima, quantreg, Measures

#Util.cast_dataframe_to_synthetic_point("experiments/taiex_point_analitic.csv","experiments/taiex_point_sintetic.csv",11)
#Util.plot_dataframe_point("experiments/taiex_point_sintetic.csv","experiments/taiex_point_analitic.csv",11)
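
# ARIMA(2,0,2) benchmark: train on the first 1600 NASDAQ points and
# forecast intervals (alpha=0.25) for the following held-out points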
tmp = arima.ARIMA("", alpha=0.25)
#tmp.appendTransformation(diff)
tmp.train(nasdaq[:1600], None, order=(2,0,2))
teste = tmp.forecastInterval(nasdaq[1600:1605])

"""
tmp = quantreg.QuantileRegression("", alpha=0.25)
tmp.train(taiex[:1600], None, order=1)
teste = tmp.forecastInterval(taiex[1600:1605])
"""

print(nasdaq[1600:1605])
print(teste)
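
# summary statistics of the interval forecasts over the held-out window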
kk = Measures.get_interval_statistics(nasdaq[1600:1605], tmp)

print(kk)

#bchmk.teste(taiex, ['192.168.0.109', '192.168.0.101'])
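
# distributed point-forecast sliding-window experiments on the sonda series, kept disabled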
"""
|
2017-05-14 04:03:49 +04:00
|
|
|
bchmk.point_sliding_window(sonda, 9000, train=0.8, inc=0.4,#models=[yu.WeightedFTS], # #
|
2017-05-08 21:49:45 +04:00
|
|
|
partitioners=[Grid.GridPartitioner], #Entropy.EntropyPartitioner], # FCM.FCMPartitioner, ],
|
2017-05-14 04:03:49 +04:00
|
|
|
partitions= np.arange(10,200,step=10), #transformation=diff,
|
|
|
|
dump=True, save=True, file="experiments/sondaws_point_analytic.csv",
|
|
|
|
nodes=['192.168.0.103', '192.168.0.106', '192.168.0.108', '192.168.0.109']) #, depends=[hofts, ifts])
|
2017-04-06 06:45:11 +04:00
|
|
|
|
2017-05-14 05:32:40 +04:00
|
|
|
|
2017-05-09 17:27:47 +04:00
|
|
|
|
2017-05-14 04:03:49 +04:00
|
|
|
bchmk.point_sliding_window(sonda, 9000, train=0.8, inc=0.4, #models=[yu.WeightedFTS], # #
|
2017-05-09 17:27:47 +04:00
|
|
|
partitioners=[Grid.GridPartitioner], #Entropy.EntropyPartitioner], # FCM.FCMPartitioner, ],
|
2017-05-14 04:03:49 +04:00
|
|
|
partitions= np.arange(3,20,step=2), #transformation=diff,
|
|
|
|
dump=True, save=True, file="experiments/sondaws_point_analytic_diff.csv",
|
|
|
|
nodes=['192.168.0.103', '192.168.0.106', '192.168.0.108', '192.168.0.109']) #, depends=[hofts, ifts])
|
2017-05-14 08:19:49 +04:00
|
|
|
"""
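
# distributed interval-forecast sliding-window experiments on taiex and nasdaq, kept disabled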
"""
|
2017-03-03 15:53:55 +04:00
|
|
|
|
2017-05-14 15:54:41 +04:00
|
|
|
bchmk.interval_sliding_window(taiex, 2000, train=0.8, inc=0.1,#models=[yu.WeightedFTS], # #
|
2017-05-14 05:32:40 +04:00
|
|
|
partitioners=[Grid.GridPartitioner], #Entropy.EntropyPartitioner], # FCM.FCMPartitioner, ],
|
|
|
|
partitions= np.arange(10,200,step=10), #transformation=diff,
|
2017-05-14 15:54:41 +04:00
|
|
|
dump=True, save=True, file="experiments/taiex_interval_analytic.csv",
|
2017-05-14 05:32:40 +04:00
|
|
|
nodes=['192.168.0.103', '192.168.0.106', '192.168.0.108', '192.168.0.109']) #, depends=[hofts, ifts])
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
bchmk.interval_sliding_window(nasdaq, 2000, train=0.8, inc=0.1, #models=[yu.WeightedFTS], # #
|
|
|
|
partitioners=[Grid.GridPartitioner], #Entropy.EntropyPartitioner], # FCM.FCMPartitioner, ],
|
2017-05-14 15:54:41 +04:00
|
|
|
partitions= np.arange(3,20,step=2), transformation=diff,
|
2017-05-14 05:32:40 +04:00
|
|
|
dump=True, save=True, file="experiments/nasdaq_interval_analytic_diff.csv",
|
|
|
|
nodes=['192.168.0.103', '192.168.0.106', '192.168.0.108', '192.168.0.109']) #, depends=[hofts, ifts])
|
|
|
|
|
2017-05-14 15:54:41 +04:00
|
|
|
"""
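
# standalone PWFTS interval-forecast example on taiex; change the opening """ to #""" to enable it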
"""
|
2017-05-14 04:37:10 +04:00
|
|
|
from pyFTS.partitioners import Grid
|
|
|
|
from pyFTS import pwfts
|
2017-03-03 15:53:55 +04:00
|
|
|
|
2017-05-14 04:37:10 +04:00
|
|
|
diff = Transformations.Differential(1)
|
2017-02-27 22:53:29 +04:00
|
|
|
|
2017-05-14 04:37:10 +04:00
|
|
|
fs = Grid.GridPartitioner(taiex[:2000], 10, transformation=diff)
|
2017-02-27 22:53:29 +04:00
|
|
|
|
2017-05-14 04:37:10 +04:00
|
|
|
tmp = pwfts.ProbabilisticWeightedFTS("")
|
2017-02-27 22:53:29 +04:00
|
|
|
|
2017-05-14 04:37:10 +04:00
|
|
|
tmp.appendTransformation(diff)
|
2017-02-27 22:53:29 +04:00
|
|
|
|
2017-05-14 04:37:10 +04:00
|
|
|
tmp.train(taiex[:1600], fs.sets, order=1)
|
2017-02-24 20:29:55 +04:00
|
|
|
|
2017-05-14 04:37:10 +04:00
|
|
|
x = tmp.forecastInterval(taiex[1600:1610])
|
2017-02-24 20:29:55 +04:00
|
|
|
|
2017-05-14 04:37:10 +04:00
|
|
|
print(taiex[1600:1610])
|
|
|
|
print(x)
|
2017-05-14 08:19:49 +04:00
|
|
|
#"""
|