- Several bugfixes:
  - point forecasting statistics are now called through benchmarks.Measures.get_point_statistics
  - plotComparedIntervalsAhead renamed to plot_compared_intervals_ahead
  - HighOrderFTS, IntervalFTS and ProbabilisticWeightedFTS constructors fixed (order/name arguments)
  - EntropyPartitioner trapezoid upper shoulder corrected (partitions[c] + b2 instead of - b2)
  - FCM and Huarng partitioners now honor Gaussian and trapezoidal membership functions
  - ProbabilisticWeightedFTS.forecastAheadDistribution takes resolution and method as keyword arguments

Petrônio Cândido de Lima e Silva 2017-05-01 20:56:47 -03:00
parent e5c2e0dcbd
commit 474a9d87a7
9 changed files with 84 additions and 78 deletions

pyFTS/benchmarks/benchmarks.py

@@ -144,7 +144,7 @@ def point_sliding_window(data, windowsize, train=0.8,models=None,partitioners=[G
times[_key].append(_end - _start)
_start = time.time()
_rmse, _smape, _u = get_point_statistics(test, mfts, indexer)
_rmse, _smape, _u = Measures.get_point_statistics(test, mfts, indexer)
_end = time.time()
rmse[_key].append(_rmse)
smape[_key].append(_smape)
@@ -271,7 +271,7 @@ def all_point_forecasters(data_train, data_test, partitions, max_order=3, statis
def print_point_statistics(data, models, externalmodels = None, externalforecasts = None, indexers=None):
ret = "Model & Order & RMSE & SMAPE & Theil's U \\\\ \n"
for count,model in enumerate(models,start=0):
_rmse, _smape, _u = get_point_statistics(data, model, indexers)
_rmse, _smape, _u = Measures.get_point_statistics(data, model, indexers)
ret += model.shortname + " & "
ret += str(model.order) + " & "
ret += str(_rmse) + " & "
@@ -765,7 +765,7 @@ def all_ahead_forecasters(data_train, data_test, partitions, start, steps, resol
print_distribution_statistics(data_test[start:], objs, steps, resolution)
plotComparedIntervalsAhead(data_test, objs, lcolors, distributions=distributions, time_from=start, time_to=steps,
plot_compared_intervals_ahead(data_test, objs, lcolors, distributions=distributions, time_from=start, time_to=steps,
interpol=False, save=save, file=file, tam=tam, resolution=resolution, option=option)
@@ -806,7 +806,7 @@ def print_distribution_statistics(original, models, steps, resolution):
print(ret)
def plotComparedIntervalsAhead(original, models, colors, distributions, time_from, time_to,
def plot_compared_intervals_ahead(original, models, colors, distributions, time_from, time_to,
interpol=False, save=False, file=None, tam=[20, 5], resolution=None,
cmap='Blues',option=2):
fig = plt.figure(figsize=tam)
@@ -824,7 +824,7 @@ def plotComparedIntervalsAhead(original, models, colors, distributions, time_fro
for count, fts in enumerate(models, start=0):
if fts.hasDistributionForecasting and distributions[count]:
density = fts.forecastAheadDistribution(original[time_from - fts.order:time_from], time_to,
parameters=option)
resolution=resolution, method=option)
Y = []
X = []

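The two renames above (get_point_statistics now lives in the Measures module, plotComparedIntervalsAhead becomes plot_compared_intervals_ahead) change how callers reach the benchmark helpers. A minimal sketch of the post-commit call path; the synthetic data, the 10-set partition, the HOFTS model, the " example" suffix and the None indexer are all assumptions for illustration, not part of the diff:

import numpy as np
from pyFTS.partitioners import Grid
from pyFTS import hofts
from pyFTS.benchmarks import Measures
from pyFTS.benchmarks import benchmarks as bchmk

data = np.random.normal(0, 1.0, 300)            # synthetic series, illustration only
train_data, test_data = data[:250], data[250:]

fs = Grid.GridPartitioner(data, 10).sets        # partitions over the full range, so test values stay inside the universe
model = hofts.HighOrderFTS(2, " example")       # new signature: order and name suffix
model.train(train_data, fs, 2)

# point accuracy metrics are now reached through the Measures module
rmse, smape, u = Measures.get_point_statistics(test_data, model, None)
print(rmse, smape, u)

# print_point_statistics uses the same call internally and prints one LaTeX row per model
bchmk.print_point_statistics(test_data, [model])
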
pyFTS/hofts.py

@@ -39,7 +39,7 @@ class HighOrderFLRG(object):
class HighOrderFTS(fts.FTS):
def __init__(self, order, **kwargs):
def __init__(self, order, name, **kwargs):
super(HighOrderFTS, self).__init__(1, "HOFTS" + name)
self.name = "High Order FTS"
self.shortname = "HOFTS" + name

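The new signature makes both the order and a name suffix mandatory; the constructor still hands order 1 to the base class, so the working order is the one passed to train, as in the other models. A minimal sketch (the suffix is illustrative):

from pyFTS import hofts

model = hofts.HighOrderFTS(3, " example")  # order first, then the name suffix
print(model.shortname)                     # -> "HOFTS example"
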
pyFTS/ifts.py

@@ -7,8 +7,8 @@ from pyFTS import hofts, fts, tree
class IntervalFTS(hofts.HighOrderFTS):
def __init__(self, order, **kwargs):
super(IntervalFTS, self).__init__("IFTS " + name)
def __init__(self, order, name, **kwargs):
super(IntervalFTS, self).__init__(order=1, name="IFTS " + name)
self.shortname = "IFTS " + name
self.name = "Interval FTS"
self.detail = "Silva, P.; Guimarães, F.; Sadaei, H. (2016)"

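IntervalFTS gains the same pair of constructor arguments and forwards order=1 plus a prefixed name to HighOrderFTS. A minimal sketch (the suffix is illustrative):

from pyFTS import ifts

model = ifts.IntervalFTS(1, "example")  # order and name are both required now
print(model.shortname)                  # -> "IFTS example"
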
pyFTS/partitioners/Entropy.py

@@ -98,7 +98,7 @@ class EntropyPartitioner(partitioner.Partitioner):
b2 = (partitions[c + 1] - partitions[c]) / 2
sets.append(FuzzySet.FuzzySet(self.prefix + str(c), Membership.trapmf,
[partitions[c - 1], partitions[c] - b1,
partitions[c] - b2, partitions[c + 1]],
partitions[c] + b2, partitions[c + 1]],
partitions[c]))
return sets

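The sign fix matters because trapezoid parameters must be non-decreasing, [a, b, c, d] with b <= c; the old partitions[c] - b2 could push the third point below the second whenever the partition spacing is uneven. A quick check with made-up partition points 10, 15, 20 (so b1 = b2 = 2.5), assuming Membership.trapmf takes the value and the four parameters:

from pyFTS.common import Membership

params = [10.0, 12.5, 17.5, 20.0]       # [p[c-1], p[c]-b1, p[c]+b2, p[c+1]] for p = 10, 15, 20
print(Membership.trapmf(11.0, params))  # rising edge -> 0.4
print(Membership.trapmf(15.0, params))  # plateau between b and c -> 1.0
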
pyFTS/partitioners/FCM.py

@@ -107,12 +107,22 @@ class FCMPartitioner(partitioner.Partitioner):
def build(self,data):
sets = []
centroides = fuzzy_cmeans(self.partitions, data, 1, 2)
centroides.append(self.max)
centroides.append(self.min)
centroides = list(set(centroides))
centroides.sort()
for c in np.arange(1,len(centroides)-1):
sets.append(FuzzySet.FuzzySet(self.prefix+str(c),Membership.trimf,[round(centroides[c-1],3), round(centroides[c],3), round(centroides[c+1],3)], round(centroides[c],3) ) )
centroids = fuzzy_cmeans(self.partitions, data, 1, 2)
centroids.append(self.max)
centroids.append(self.min)
centroids = list(set(centroids))
centroids.sort()
for c in np.arange(1,len(centroids)-1):
if self.membership_function == Membership.trimf:
sets.append(FuzzySet.FuzzySet(self.prefix+str(c),Membership.trimf,
[round(centroids[c-1],3), round(centroids[c],3), round(centroids[c+1],3)],
round(centroids[c],3) ) )
elif self.membership_function == Membership.trapmf:
q1 = (round(centroids[c], 3) - round(centroids[c - 1], 3))/2
q2 = (round(centroids[c+1], 3) - round(centroids[c], 3)) / 2
sets.append(FuzzySet.FuzzySet(self.prefix + str(c), Membership.trapmf,
[round(centroids[c - 1], 3), round(centroids[c], 3) - q1,
round(centroids[c], 3) + q2, round(centroids[c + 1], 3)],
round(centroids[c], 3)))
return sets

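The rewritten build branches on self.membership_function, so the FCM partitioner can now emit trapezoidal sets whose shoulders sit halfway between neighbouring centroids. A sketch of driving that branch; it assumes FCMPartitioner is constructed as (data, npart) like GridPartitioner in the test script below, and that membership_function is the attribute the Partitioner base class exposes for this purpose:

import numpy as np
from pyFTS.common import Membership
from pyFTS.partitioners import FCM

data = np.random.normal(0, 1.0, 300)         # synthetic series, illustration only

part = FCM.FCMPartitioner(data, 10)          # default branch, assumed triangular
part.membership_function = Membership.trapmf
trap_sets = part.build(data)                 # rebuild through the new trapezoidal branch
for s in trap_sets:
    print(s)
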
pyFTS/partitioners/Huarng.py

@@ -34,8 +34,17 @@ class HuarngPartitioner(partitioner.Partitioner):
npart = math.ceil(dlen / base)
partition = math.ceil(self.min)
for c in range(npart):
sets.append(
FuzzySet.FuzzySet(self.prefix + str(c), Membership.trimf, [partition - base, partition, partition + base], partition))
if self.membership_function == Membership.trimf:
sets.append( FuzzySet.FuzzySet(self.prefix + str(c), Membership.trimf,
[partition - base, partition, partition + base], partition))
elif self.membership_function == Membership.gaussmf:
sets.append(FuzzySet.FuzzySet(self.prefix + str(c), Membership.gaussmf,
[partition, base/2], partition))
elif self.membership_function == Membership.trapmf:
sets.append(FuzzySet.FuzzySet(self.prefix + str(c), Membership.trapmf,
[partition - base, partition - (base/2),
partition + (base / 2), partition + base], partition))
partition += base
return sets

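The Huarng partitioner gets the same treatment: besides the original triangles it can now build Gaussian sets (mean at each partition point, sigma = base/2) or trapezoids with a plateau one base wide. A sketch under the same assumptions as the FCM example above:

import numpy as np
from pyFTS.common import Membership
from pyFTS.partitioners import Huarng

data = np.random.normal(0, 1.0, 300)

part = Huarng.HuarngPartitioner(data, 10)       # npart argument assumed, mirroring the other partitioners
part.membership_function = Membership.gaussmf
gauss_sets = part.build(data)                   # Gaussian sets parameterized as [partition, base/2]
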
pyFTS/partitioners/Util.py

@@ -7,6 +7,9 @@ from mpl_toolkits.mplot3d import Axes3D
from pyFTS.common import Membership, Util
from pyFTS.partitioners import Grid,Huarng,FCM,Entropy
all_methods = [Grid.GridPartitioner, Entropy.EntropyPartitioner, FCM.FCMPartitioner, Huarng.HuarngPartitioner]
mfs = [Membership.trimf, Membership.gaussmf, Membership.trapmf]
def plot_sets(data, sets, titles, tam=[12, 10], save=False, file=None):
num = len(sets)
@@ -20,7 +23,7 @@ def plot_sets(data, sets, titles, tam=[12, 10], save=False, file=None):
#ax = fig.add_axes([0.05, 1-(k*h), 0.9, h*0.7]) # left, bottom, width, height
ax = axes[k]
ax.set_title(titles[k])
ax.set_ylim([0, 1])
ax.set_ylim([0, 1.1])
ax.set_xlim([minx, maxx])
for s in sets[k]:
if s.mf == Membership.trimf:
@@ -29,7 +32,7 @@
tmpx = [ kk for kk in np.arange(s.lower, s.upper)]
tmpy = [s.membership(kk) for kk in np.arange(s.lower, s.upper)]
ax.plot(tmpx, tmpy)
elif s.mf == Membership.gaussmf:
elif s.mf == Membership.trapmf:
ax.plot(s.parameters, [0, 1, 1, 0])
plt.tight_layout()
@@ -44,9 +47,6 @@ def plot_partitioners(data, objs, tam=[12, 10], save=False, file=None):
def explore_partitioners(data, npart, methods=None, mf=None, tam=[12, 10], save=False, file=None):
all_methods = [Grid.GridPartitioner, Entropy.EntropyPartitioner, FCM.FCMPartitioner, Huarng.HuarngPartitioner]
mfs = [Membership.trimf, Membership.gaussmf, Membership.trapmf]
if methods is None:
methods = all_methods

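With all_methods and mfs promoted to module level, explore_partitioners can fall back on them when no methods or membership functions are given, and the second plotting branch in plot_sets is now keyed on Membership.trapmf instead of gaussmf. A sketch, assuming this module is importable as pyFTS.partitioners.Util:

import numpy as np
from pyFTS.partitioners import Util

data = np.random.normal(0, 1.0, 300)

# defaults: every partitioner in all_methods combined with the functions in mfs
Util.explore_partitioners(data, 10, tam=[12, 10])
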
pyFTS/pwfts.py

@@ -42,8 +42,8 @@ class ProbabilisticWeightedFLRG(hofts.HighOrderFLRG):
class ProbabilisticWeightedFTS(ifts.IntervalFTS):
def __init__(self, order, name, **kwargs):
super(ProbabilisticWeightedFTS, self).__init__("PWFTS")
def __init__(self, name, **kwargs):
super(ProbabilisticWeightedFTS, self).__init__(order=1, name=name)
self.shortname = "PWFTS " + name
self.name = "Probabilistic FTS"
self.detail = "Silva, P.; Guimarães, F.; Sadaei, H."
@@ -53,7 +53,7 @@ class ProbabilisticWeightedFTS(ifts.IntervalFTS):
self.hasIntervalForecasting = True
self.hasDistributionForecasting = True
self.isHighOrder = True
self.auto_update = update
self.auto_update = kwargs.get('update',False)
def train(self, data, sets, order=1,parameters=None):
@@ -468,13 +468,17 @@ class ProbabilisticWeightedFTS(ifts.IntervalFTS):
ret = []
resolution = kwargs.get('resolution',100)
method = kwargs.get('method',2)
intervals = self.forecastAheadInterval(data, steps)
grid = self.getGridClean(resolution)
index = SortedCollection.SortedCollection(iterable=grid.keys())
if parameters == 1:
if method == 1:
grids = []
for k in np.arange(0, steps):
@@ -522,7 +526,7 @@ class ProbabilisticWeightedFTS(ifts.IntervalFTS):
tmp = np.array([grids[k][q] for q in sorted(grids[k])])
ret.append(tmp / sum(tmp))
elif parameters == 2:
elif method == 2:
ret = []

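ProbabilisticWeightedFTS now takes only a name (the order is set in train), reads its update flag from kwargs, and forecastAheadDistribution reads resolution and method from kwargs instead of the old parameters argument. A sketch of the new calling convention; the data, partition count and horizon are placeholders:

import numpy as np
from pyFTS.partitioners import Grid
from pyFTS import pwfts

data = np.random.normal(0, 1.0, 300)            # synthetic series, illustration only
fs = Grid.GridPartitioner(data, 10).sets

model = pwfts.ProbabilisticWeightedFTS("example", update=False)  # order dropped from the constructor
model.train(data[:250], fs, 1)

# resolution and method are keyword arguments now, matching the benchmarks call above
densities = model.forecastAheadDistribution(data[250:260], 5, resolution=100, method=2)
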
pwfts test script

@@ -20,60 +20,43 @@ from numpy import random
#gauss_teste = random.normal(0,1.0,400)
os.chdir("/home/petronio/dados/Dropbox/Doutorado/Disciplinas/AdvancedFuzzyTimeSeriesModels/")
os.chdir("/home/petronio/dados/Dropbox/Doutorado/Codigos/")
#enrollments = pd.read_csv("DataSets/Enrollments.csv", sep=";")
#enrollments = np.array(enrollments["Enrollments"])
enrollments = pd.read_csv("DataSets/Enrollments.csv", sep=";")
enrollments = np.array(enrollments["Enrollments"])
taiex = pd.read_csv("DataSets/TAIEX.csv", sep=",")
taiex_treino = np.array(taiex["avg"][2500:3900])
taiex_teste = np.array(taiex["avg"][3901:4500])
import importlib
import pandas as pd
from pyFTS.partitioners import Grid
from pyFTS.common import FLR, FuzzySet, Membership, SortedCollection
from pyFTS import fts
from pyFTS import hofts
from pyFTS import pwfts
from pyFTS import tree
from pyFTS.benchmarks import benchmarks as bchmk
#nasdaq = pd.read_csv("DataSets/NASDAQ_IXIC.csv", sep=",")
#nasdaq_treino = np.array(nasdaq["avg"][0:1600])
#nasdaq_teste = np.array(nasdaq["avg"][1601:2000])
enrollments_fs1 = Grid.GridPartitioner(enrollments, 6).sets
for s in enrollments_fs1:
print(s)
pfts1_enrollments = pwfts.ProbabilisticWeightedFTS("1")
pfts1_enrollments.train(enrollments, enrollments_fs1, 1)
pfts1_enrollments.shortname = "1st Order"
pfts2_enrollments = pwfts.ProbabilisticWeightedFTS("2")
pfts2_enrollments.dump = False
pfts2_enrollments.shortname = "2nd Order"
pfts2_enrollments.train(enrollments, enrollments_fs1, 2)
pfts3_enrollments = pwfts.ProbabilisticWeightedFTS("3")
pfts3_enrollments.dump = False
pfts3_enrollments.shortname = "3rd Order"
pfts3_enrollments.train(enrollments, enrollments_fs1, 3)
bchmk.plot_compared_series(enrollments,[pfts1_enrollments,pfts2_enrollments, pfts3_enrollments],
["red","blue","green"], linewidth=2,
typeonlegend=True,save=True,file="pictures/pwfts_enrollments_interval.png",
tam=[20,7],points=False, intervals=False)
diff = Transformations.Differential(1)
fs = Grid.GridPartitionerTrimf(taiex_treino,10)
#tmp = chen.ConventionalFTS("")
pfts1 = pwfts.ProbabilisticWeightedFTS("1")
#pfts1.appendTransformation(diff)
pfts1.train(taiex_treino,fs,1)
from pyFTS.benchmarks import ProbabilityDistribution as dist
forecasts = pfts1.forecast(taiex_treino)
pmf1 = dist.ProbabilityDistribution("Original",100,[min(taiex_treino),max(taiex_treino)],data=taiex_treino)
#print(pmf1.entropy())
pmf2 = dist.ProbabilityDistribution("Original",100,[min(taiex_treino),max(taiex_treino)],data=forecasts)
#print(pmf2.entropy())
#print(pmf2.kullbackleiblerdivergence(pmf1))
#print(pmf2.crossentropy(pmf1))
print(pmf1.averageloglikelihood(taiex_treino))
print(pmf2.averageloglikelihood(taiex_treino))
#pfts2 = pfts.ProbabilisticWeightedFTS("n = 2")
#pfts2.appendTransformation(diff)
#pfts2.train(gauss_treino,fs,2)
#pfts3 = pfts.ProbabilisticWeightedFTS("n = 3")
#pfts3.appendTransformation(diff)
#pfts3.train(gauss_treino,fs,3)
#densities1 = pfts1.forecastAheadDistribution(gauss_teste[:50],2,1.50, parameters=2)
#print(bchmk.getDistributionStatistics(gauss_teste[:50], [pfts1,pfts2,pfts3], 20, 1.50))