- Several bugfixes

- Issue #2 - PEP 8 compliance
- Issue #3 - Code documentation with PEP 257 compliance
This commit is contained in:
Petrônio Cândido de Lima e Silva 2017-05-02 17:16:49 -03:00
parent a4903fd932
commit 18e795bcd3
15 changed files with 56 additions and 56 deletions

View File

@ -205,12 +205,12 @@ def get_point_statistics(data, model, indexer=None):
else:
ndata = np.array(data[model.order:])
if model.isMultivariate or indexer is None:
if model.is_multivariate or indexer is None:
forecasts = model.forecast(data)
elif not model.isMultivariate and indexer is not None:
elif not model.is_multivariate and indexer is not None:
forecasts = model.forecast(indexer.get_data(data))
if model.hasSeasonality:
if model.has_seasonality:
nforecasts = np.array(forecasts)
else:
nforecasts = np.array(forecasts[:-1])

View File

@ -122,7 +122,7 @@ def point_sliding_window(data, windowsize, train=0.8,models=None,partitioners=[G
_key = mfts.shortname + " " + pttr + " q = " + str(partition)
mfts.partitioner = data_train_fs
if not mfts.isHighOrder:
if not mfts.is_high_order:
if dump: print(ct,_key)
@ -155,7 +155,7 @@ def point_sliding_window(data, windowsize, train=0.8,models=None,partitioners=[G
else:
for order in np.arange(1, max_order + 1):
if order >= mfts.minOrder:
if order >= mfts.min_order:
mfts = model("")
_key = mfts.shortname + " n = " + str(order) + " " + pttr + " q = " + str(partition)
@ -225,7 +225,7 @@ def all_point_forecasters(data_train, data_test, partitions, max_order=3, statis
for count, model in enumerate(models, start=0):
#print(model)
mfts = model("")
if not mfts.isHighOrder:
if not mfts.is_high_order:
if transformation is not None:
mfts.appendTransformation(transformation)
mfts.train(data_train, data_train_fs.sets)
@ -233,7 +233,7 @@ def all_point_forecasters(data_train, data_test, partitions, max_order=3, statis
lcolors.append( colors[count % ncol] )
else:
for order in np.arange(1,max_order+1):
if order >= mfts.minOrder:
if order >= mfts.min_order:
mfts = model(" n = " + str(order))
if transformation is not None:
mfts.appendTransformation(transformation)
@ -387,7 +387,7 @@ def interval_sliding_window(data, windowsize, train=0.8,models=None,partitioners
_key = mfts.shortname + " " + pttr+ " q = " +str(partition)
mfts.partitioner = data_train_fs
if not mfts.isHighOrder:
if not mfts.is_high_order:
if dump: print(ct,_key)
@ -418,7 +418,7 @@ def interval_sliding_window(data, windowsize, train=0.8,models=None,partitioners
else:
for order in np.arange(1, max_order + 1):
if order >= mfts.minOrder:
if order >= mfts.min_order:
mfts = model("")
_key = mfts.shortname + " n = " + str(order) + " " + pttr + " q = " + str(partition)
mfts.partitioner = data_train_fs
@ -467,7 +467,7 @@ def all_interval_forecasters(data_train, data_test, partitions, max_order=3,save
for count, model in Util.enumerate2(models, start=0, step=2):
mfts = model("")
if not mfts.isHighOrder:
if not mfts.is_high_order:
if transformation is not None:
mfts.appendTransformation(transformation)
mfts.train(data_train, data_train_fs)
@ -475,7 +475,7 @@ def all_interval_forecasters(data_train, data_test, partitions, max_order=3,save
lcolors.append( colors[count % ncol] )
else:
for order in np.arange(1,max_order+1):
if order >= mfts.minOrder:
if order >= mfts.min_order:
mfts = model(" n = " + str(order))
if transformation is not None:
mfts.appendTransformation(transformation)
@ -522,7 +522,7 @@ def plot_compared_series(original, models, colors, typeonlegend=False, save=Fals
ax.plot(original, color='black', label="Original", linewidth=linewidth*1.5)
for count, fts in enumerate(models, start=0):
if fts.hasPointForecasting and points:
if fts.has_point_forecasting and points:
forecasted = fts.forecast(original)
mi.append(min(forecasted) * 0.95)
ma.append(max(forecasted) * 1.05)
@ -532,7 +532,7 @@ def plot_compared_series(original, models, colors, typeonlegend=False, save=Fals
if typeonlegend: lbl += " (Point)"
ax.plot(forecasted, color=colors[count], label=lbl, ls="-",linewidth=linewidth)
if fts.hasIntervalForecasting and intervals:
if fts.has_interval_forecasting and intervals:
forecasted = fts.forecastInterval(original)
lower = [kk[0] for kk in forecasted]
upper = [kk[1] for kk in forecasted]
@ -660,7 +660,7 @@ def ahead_sliding_window(data, windowsize, train, steps, models=None, resolution
_key = mfts.shortname + " " + pttr+ " q = " +str(partition)
mfts.partitioner = data_train_fs
if not mfts.isHighOrder:
if not mfts.is_high_order:
if dump: print(ct,_key)
@ -692,7 +692,7 @@ def ahead_sliding_window(data, windowsize, train, steps, models=None, resolution
else:
for order in np.arange(1, max_order + 1):
if order >= mfts.minOrder:
if order >= mfts.min_order:
mfts = model("")
_key = mfts.shortname + " n = " + str(order) + " " + pttr + " q = " + str(partition)
mfts.partitioner = data_train_fs
@ -743,7 +743,7 @@ def all_ahead_forecasters(data_train, data_test, partitions, start, steps, resol
for count, model in Util.enumerate2(models, start=0, step=2):
mfts = model("")
if not mfts.isHighOrder:
if not mfts.is_high_order:
if transformation is not None:
mfts.appendTransformation(transformation)
mfts.train(data_train, data_train_fs)
@ -751,7 +751,7 @@ def all_ahead_forecasters(data_train, data_test, partitions, start, steps, resol
lcolors.append( colors[count % ncol] )
else:
for order in np.arange(1,max_order+1):
if order >= mfts.minOrder:
if order >= mfts.min_order:
mfts = model(" n = " + str(order))
if transformation is not None:
mfts.appendTransformation(transformation)
@ -822,7 +822,7 @@ def plot_compared_intervals_ahead(original, models, colors, distributions, time_
ma = []
for count, fts in enumerate(models, start=0):
if fts.hasDistributionForecasting and distributions[count]:
if fts.has_probability_forecasting and distributions[count]:
density = fts.forecastAheadDistribution(original[time_from - fts.order:time_from], time_to,
resolution=resolution, method=option)
@ -863,7 +863,7 @@ def plot_compared_intervals_ahead(original, models, colors, distributions, time_
cb.set_label('Density')
if fts.hasIntervalForecasting:
if fts.has_interval_forecasting:
forecasts = fts.forecastAheadInterval(original[time_from - fts.order:time_from], time_to)
lower = [kk[0] for kk in forecasts]
upper = [kk[1] for kk in forecasts]
@ -939,7 +939,7 @@ def SelecaoSimples_MenorRMSE(original, parameters, modelo):
min_rmse = 100000.0
best = None
for p in parameters:
sets = Grid.GridPartitionerTrimf(original, p)
sets = Grid.GridPartitioner(original, p).sets
fts = modelo(str(p) + " particoes")
fts.train(original, sets)
# print(original)
@ -1095,7 +1095,7 @@ def simpleSearch_RMSE(train, test, model, partitions, orders, save=False, file=N
fts.train(train, sets, o, parameters=parameters)
if not intervals:
forecasted = fts.forecast(test)
if not fts.hasSeasonality:
if not fts.has_seasonality:
error = Measures.rmse(np.array(test[o:]), np.array(forecasted[:-1]))
else:
error = Measures.rmse(np.array(test[o:]), np.array(forecasted))
@ -1171,7 +1171,7 @@ def sliding_window_simple_search(data, windowsize, model, partitions, orders, sa
fts.train(data, sets, o, parameters=parameters)
if not intervals:
forecasted = fts.forecast(test)
if not fts.hasSeasonality:
if not fts.has_seasonality:
_error.append( Measures.rmse(np.array(test[o:]), np.array(forecasted[:-1])) )
else:
_error.append( Measures.rmse(np.array(test[o:]), np.array(forecasted)) )
@ -1221,7 +1221,7 @@ def sliding_window_simple_search(data, windowsize, model, partitions, orders, sa
def pftsExploreOrderAndPartitions(data,save=False, file=None):
fig, axes = plt.subplots(nrows=4, ncols=1, figsize=[6, 8])
data_fs1 = Grid.GridPartitionerTrimf(data, 10)
data_fs1 = Grid.GridPartitioner(data, 10).sets
mi = []
ma = []
@ -1250,7 +1250,7 @@ def pftsExploreOrderAndPartitions(data,save=False, file=None):
axes[3].set_title('Interval Forecasts by Number of Partitions')
for partitions in np.arange(5, 11):
data_fs = Grid.GridPartitionerTrimf(data, partitions)
data_fs = Grid.GridPartitioner(data, partitions).sets
fts = pwfts.ProbabilisticWeightedFTS("")
fts.shortname = "q = " + str(partitions)
fts.train(data, data_fs, 1)

View File

@ -74,9 +74,9 @@ def point_sliding_window(data, windowsize, train=0.8, models=None, partitioners=
for model in models:
mfts = model("")
if mfts.isHighOrder:
if mfts.is_high_order:
for order in np.arange(1, max_order + 1):
if order >= mfts.minOrder:
if order >= mfts.min_order:
mfts = model("")
mfts.order = order
pool.append(mfts)
@ -193,9 +193,9 @@ def interval_sliding_window(data, windowsize, train=0.8, models=None, partitione
for model in models:
mfts = model("")
if mfts.isHighOrder:
if mfts.is_high_order:
for order in np.arange(1, max_order + 1):
if order >= mfts.minOrder:
if order >= mfts.min_order:
mfts = model("")
mfts.order = order
pool.append(mfts)

View File

@ -25,7 +25,7 @@ class ConventionalFLRG(object):
class ConventionalFTS(fts.FTS):
def __init__(self, order, **kwargs):
def __init__(self, name, **kwargs):
super(ConventionalFTS, self).__init__(1, "CFTS " + name)
self.name = "Conventional FTS"
self.detail = "Chen"

View File

@ -31,7 +31,7 @@ class TrendWeightedFLRG(yu.WeightedFTS):
class TrendWeightedFTS(yu.WeightedFTS):
def __init__(self, order, name, **kwargs):
def __init__(self, name, **kwargs):
super(TrendWeightedFTS, self).__init__(1, "TWFTS " + name)
self.name = "Trend Weighted FTS"
self.detail = "Cheng"

View File

@ -10,7 +10,7 @@ from pyFTS import fts
class EnsembleFTS(fts.FTS):
def __init__(self, order, name, **kwargs):
def __init__(self, name, **kwargs):
super(EnsembleFTS, self).__init__("Ensemble FTS")
self.shortname = "Ensemble FTS " + name
self.name = "Ensemble FTS"

View File

@ -39,7 +39,7 @@ class HighOrderFLRG(object):
class HighOrderFTS(fts.FTS):
def __init__(self, order, name, **kwargs):
def __init__(self, name, **kwargs):
super(HighOrderFTS, self).__init__(1, "HOFTS" + name)
self.name = "High Order FTS"
self.shortname = "HOFTS" + name

View File

@ -4,7 +4,7 @@ from pyFTS import fts
class HighOrderFTS(fts.FTS):
def __init__(self, order, name, **kwargs):
def __init__(self, name, **kwargs):
super(HighOrderFTS, self).__init__(1, name)
self.is_high_order = True
self.min_order = 2

View File

@ -7,7 +7,7 @@ from pyFTS import hofts, fts, tree
class IntervalFTS(hofts.HighOrderFTS):
def __init__(self, order, name, **kwargs):
def __init__(self, name, **kwargs):
super(IntervalFTS, self).__init__(order=1, name="IFTS " + name)
self.shortname = "IFTS " + name
self.name = "Interval FTS"

View File

@ -33,7 +33,7 @@ class ImprovedWeightedFLRG(object):
class ImprovedWeightedFTS(fts.FTS):
def __init__(self, order, name, **kwargs):
def __init__(self, name, **kwargs):
super(ImprovedWeightedFTS, self).__init__(1, "IWFTS " + name)
self.name = "Improved Weighted FTS"
self.detail = "Ismail & Efendi"

View File

@ -37,7 +37,7 @@ class ExponentialyWeightedFLRG(object):
class ExponentialyWeightedFTS(fts.FTS):
def __init__(self, order, **kwargs):
def __init__(self, name, **kwargs):
super(ExponentialyWeightedFTS, self).__init__(1, "EWFTS")
self.name = "Exponentialy Weighted FTS"
self.detail = "Sadaei"

View File

@ -26,7 +26,7 @@ class SeasonalFLRG(FLR.FLR):
class SeasonalFTS(fts.FTS):
def __init__(self, order, **kwargs):
def __init__(self, name, **kwargs):
super(SeasonalFTS, self).__init__(1, "SFTS")
self.name = "Seasonal FTS"
self.detail = "Chen"

View File

@ -12,14 +12,14 @@ import pandas as pd
from pyFTS.partitioners import Grid, Entropy, FCM, Huarng
from pyFTS.common import FLR,FuzzySet,Membership,Transformations
from pyFTS import fts,hofts,ifts,pwfts,tree, chen
from pyFTS.benchmarks import benchmarks as bchmk
#from pyFTS.benchmarks import benchmarks as bchmk
from pyFTS.benchmarks import naive, arima
from pyFTS.benchmarks import Measures
from numpy import random
#print(FCM.FCMPartitionerTrimf.__module__)
#gauss = random.normal(0,1.0,5000)
#gauss = random.normal(0,1.0,1000)
#gauss_teste = random.normal(0,1.0,400)
@ -31,14 +31,14 @@ taiex = np.array(taiexpd["avg"][:5000])
#from statsmodels.tsa.arima_model import ARIMA as stats_arima
from statsmodels.tsa.tsatools import lagmat
tmp = np.arange(10)
#tmp = np.arange(10)
lag, a = lagmat(tmp, maxlag=2, trim="both", original='sep')
#lag, a = lagmat(tmp, maxlag=2, trim="both", original='sep')
print(lag)
print(a)
#print(lag)
#print(a)
#from pyFTS.benchmarks import distributed_benchmarks as bchmk
from pyFTS.benchmarks import distributed_benchmarks as bchmk
#from pyFTS.benchmarks import parallel_benchmarks as bchmk
#from pyFTS.benchmarks import benchmarks as bchmk
#from pyFTS.benchmarks import arima
@ -52,11 +52,11 @@ print(a)
#bchmk.teste(taiex,['192.168.0.109', '192.168.0.101'])
#bchmk.point_sliding_window(taiex,2000,train=0.8, #models=[yu.WeightedFTS], # #
# partitioners=[Grid.GridPartitioner], #Entropy.EntropyPartitioner], # FCM.FCMPartitioner, ],
# partitions= np.arange(10,200,step=5), #transformation=diff,
# dump=False, save=True, file="experiments/nasdaq_point_distributed.csv",
# nodes=['192.168.0.109', '192.168.0.101']) #, depends=[hofts, ifts])
bchmk.point_sliding_window(taiex,2000,train=0.8, #models=[yu.WeightedFTS], # #
partitioners=[Grid.GridPartitioner], #Entropy.EntropyPartitioner], # FCM.FCMPartitioner, ],
partitions= np.arange(10,200,step=5), #transformation=diff,
dump=False, save=False, file="experiments/nasdaq_point_distributed.csv",
nodes=['192.168.1.42']) #, depends=[hofts, ifts])
#bchmk.testa(taiex,[10,20],partitioners=[Grid.GridPartitioner], nodes=['192.168.0.109', '192.168.0.101'])
@ -83,10 +83,10 @@ print(a)
# gauss,2000,train=0.8, dump=True, save=True, file="experiments/arima_gauss.csv")
#bchmk.interval_sliding_window(nasdaq,2000,train=0.8, #transformation=diff, #models=[pwfts.ProbabilisticWeightedFTS], # #
# partitioners=[Grid.GridPartitioner], #Entropy.EntropyPartitioner], # FCM.FCMPartitioner, ],
# partitions= np.arange(10,200,step=5), #
# dump=True, save=True, file="experiments/nasdaq_interval.csv")
bchmk.interval_sliding_window(gauss,2000,train=0.8, #transformation=diff, #models=[pwfts.ProbabilisticWeightedFTS], # #
partitioners=[Grid.GridPartitioner], #Entropy.EntropyPartitioner], # FCM.FCMPartitioner, ],
partitions= np.arange(10,200,step=5), #
dump=True, save=False, file="experiments/nasdaq_interval.csv")
#3bchmk.ahead_sliding_window(taiex,2000,train=0.8, steps=20, resolution=250, #transformation=diff, #models=[pwfts.ProbabilisticWeightedFTS], # #
# partitioners=[Grid.GridPartitioner], #Entropy.EntropyPartitioner], # FCM.FCMPartitioner, ],

View File

@ -53,8 +53,8 @@ pfts3_enrollments.train(enrollments, enrollments_fs1, 3)
bchmk.plot_compared_series(enrollments,[pfts1_enrollments,pfts2_enrollments, pfts3_enrollments],
["red","blue","green"], linewidth=2,
typeonlegend=True,save=True,file="pictures/pwfts_enrollments_interval.png",
tam=[20,7],points=False, intervals=False)
typeonlegend=True,save=False,file="pictures/pwfts_enrollments_interval.png",
tam=[20,7],points=False, intervals=True)

2
yu.py
View File

@ -31,7 +31,7 @@ class WeightedFLRG(fts.FTS):
class WeightedFTS(fts.FTS):
def __init__(self, order, name, **kwargs):
def __init__(self, name, **kwargs):
super(WeightedFTS, self).__init__(1, "WFTS " + name)
self.name = "Weighted FTS"
self.detail = "Yu"