- Adding gaussmf and trapmf support to partitioners

- Parallel util for partitioners
Petrônio Cândido de Lima e Silva 2017-03-31 20:34:12 -03:00
parent 5a59d91816
commit a95b806a73
8 changed files with 109 additions and 21 deletions

View File

@@ -0,0 +1,9 @@
from copy import deepcopy
from joblib import Parallel, delayed
import multiprocessing

View File

@@ -5,24 +5,24 @@ from pyFTS import *
def trimf(x, parameters):
xx = round(x, 3)
-if (xx < parameters[0]):
+if xx < parameters[0]:
return 0
-elif (xx >= parameters[0] and xx < parameters[1]):
+elif parameters[0] <= xx < parameters[1]:
return (x - parameters[0]) / (parameters[1] - parameters[0])
-elif (xx >= parameters[1] and xx <= parameters[2]):
+elif parameters[1] <= xx <= parameters[2]:
return (parameters[2] - xx) / (parameters[2] - parameters[1])
else:
return 0
def trapmf(x, parameters):
-if (x < parameters[0]):
+if x < parameters[0]:
return 0
-elif (x >= parameters[0] and x < parameters[1]):
+elif parameters[0] <= x < parameters[1]:
return (x - parameters[0]) / (parameters[1] - parameters[0])
-elif (x >= parameters[1] and x <= parameters[2]):
+elif parameters[1] <= x <= parameters[2]:
return 1
-elif (x >= parameters[2] and x <= parameters[3]):
+elif parameters[2] <= x <= parameters[3]:
return (parameters[3] - x) / (parameters[3] - parameters[2])
else:
return 0
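A quick sanity check of the rewritten membership functions (a minimal sketch, assuming the pyFTS.common.Membership module path used elsewhere in this commit):

from pyFTS.common import Membership

# triangular set with support [0, 10] and peak at 5
print(Membership.trimf(5.0, [0, 5, 10]))    # 1.0 at the peak
print(Membership.trimf(2.5, [0, 5, 10]))    # 0.5 halfway up the rising edge

# trapezoidal set with plateau between 4 and 6
print(Membership.trapmf(5.0, [0, 4, 6, 10]))   # 1 on the plateau
print(Membership.trapmf(8.0, [0, 4, 6, 10]))   # 0.5 halfway down the falling edge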

View File

@@ -76,6 +76,7 @@ def bestSplit(data, npart):
else:
return [threshold]
class EntropyPartitioner(partitioner.Partitioner):
def __init__(self, data, npart, func = Membership.trimf, transformation=None):
super(EntropyPartitioner, self).__init__("Entropy", data, npart, func=func, transformation=transformation)
@@ -89,7 +90,15 @@ class EntropyPartitioner(partitioner.Partitioner):
partitions = list(set(partitions))
partitions.sort()
for c in np.arange(1, len(partitions) - 1):
-sets.append(FuzzySet.FuzzySet(self.prefix + str(c), Membership.trimf,
-[partitions[c - 1], partitions[c], partitions[c + 1]],partitions[c]))
+if self.membership_function == Membership.trimf:
+sets.append(FuzzySet.FuzzySet(self.prefix + str(c), Membership.trimf,
+[partitions[c - 1], partitions[c], partitions[c + 1]],partitions[c]))
+elif self.membership_function == Membership.trapmf:
+b1 = (partitions[c] - partitions[c - 1])/2
+b2 = (partitions[c + 1] - partitions[c]) / 2
+sets.append(FuzzySet.FuzzySet(self.prefix + str(c), Membership.trapmf,
+[partitions[c - 1], partitions[c] - b1,
+partitions[c] + b2, partitions[c + 1]],
+partitions[c]))
return sets
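With the trapezoidal option above, every interior partition point becomes the centroid of a trapezoid whose plateau extends halfway toward each neighbouring point; a worked example with hypothetical partition values (not taken from the repository's data):

# hypothetical interior partition point p = 20 with neighbours 10 and 35
left, p, right = 10, 20, 35
b1 = (p - left) / 2      # 5.0  -> left end of the plateau at p - b1 = 15.0
b2 = (right - p) / 2     # 7.5  -> right end of the plateau at p + b2 = 27.5
params = [left, p - b1, p + b2, right]   # [10, 15.0, 27.5, 35], centroid 20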

View File

@@ -24,6 +24,10 @@ class GridPartitioner(partitioner.Partitioner):
elif self.membership_function == Membership.gaussmf:
sets.append(
FuzzySet.FuzzySet(self.prefix + str(count), Membership.gaussmf, [c, partlen / 3], c))
+elif self.membership_function == Membership.trapmf:
+q = partlen / 2
+sets.append(
+FuzzySet.FuzzySet(self.prefix + str(count), Membership.trapmf, [c - partlen, c - q, c + q, c + partlen], c))
count += 1
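A minimal usage sketch of the grid partitioner with each supported shape, assuming GridPartitioner takes the same (data, npart, func) arguments that explore_partitioners passes below:

import numpy as np
from pyFTS.common import Membership
from pyFTS.partitioners import Grid

data = np.random.normal(0, 1.0, 1000)

tri   = Grid.GridPartitioner(data, 10)                       # default: trimf
gauss = Grid.GridPartitioner(data, 10, Membership.gaussmf)   # parameters [c, partlen/3]
trap  = Grid.GridPartitioner(data, 10, Membership.trapmf)    # parameters [c-partlen, c-q, c+q, c+partlen]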

View File

@@ -5,9 +5,10 @@ import matplotlib.colors as pltcolors
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D
from pyFTS.common import Membership, Util
+from pyFTS.partitioners import Grid,Huarng,FCM,Entropy
-def plotSets(data, sets, titles, tam=[12, 10], save=False, file=None):
+def plot_sets(data, sets, titles, tam=[12, 10], save=False, file=None):
num = len(sets)
#fig = plt.figure(figsize=tam)
maxx = max(data)
@@ -23,12 +24,40 @@ def plotSets(data, sets, titles, tam=[12, 10], save=False, file=None):
ax.set_xlim([minx, maxx])
for s in sets[k]:
if s.mf == Membership.trimf:
-ax.plot([s.parameters[0],s.parameters[1],s.parameters[2]],[0,1,0])
+ax.plot(s.parameters,[0,1,0])
elif s.mf == Membership.gaussmf:
tmpx = [ kk for kk in np.arange(s.lower, s.upper)]
tmpy = [s.membership(kk) for kk in np.arange(s.lower, s.upper)]
ax.plot(tmpx, tmpy)
elif s.mf == Membership.trapmf:
ax.plot(s.parameters, [0, 1, 1, 0])
plt.tight_layout()
Util.showAndSaveImage(fig, file, save)
def plot_partitioners(data, objs, tam=[12, 10], save=False, file=None):
sets = [k.sets for k in objs]
titles = [k.name for k in objs]
plot_sets(data,sets,titles,tam,save,file)
def explore_partitioners(data, npart, methods=None, mf=None, tam=[12, 10], save=False, file=None):
all_methods = [Grid.GridPartitioner, Entropy.EntropyPartitioner, FCM.FCMPartitioner, Huarng.HuarngPartitioner]
mfs = [Membership.trimf, Membership.gaussmf, Membership.trapmf]
if methods is None:
methods = all_methods
if mf is None:
mf = mfs
objs = []
for p in methods:
for m in mf:
obj = p(data, npart,m)
objs.append(obj)
plot_partitioners(data, objs, tam, save, file)
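A usage sketch of the sequential helper above, assuming this plotting module is pyFTS.partitioners.Util (the import used by the parallel helper below); the CSV path and column name mirror the test script at the end of this commit:

import numpy as np
import pandas as pd
from pyFTS.partitioners import Util as pUtil

taiexpd = pd.read_csv("DataSets/TAIEX.csv", sep=",")
taiex = np.array(taiexpd["avg"][:5000])

# builds every (partitioner, membership function) combination and plots the resulting sets
pUtil.explore_partitioners(taiex, 20, save=True, file="partitioners.png")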

View File

@@ -0,0 +1,32 @@
from copy import deepcopy
from joblib import Parallel, delayed
import multiprocessing
import numpy as np
from pyFTS.common import Membership, Util
from pyFTS.partitioners import Grid,Huarng,FCM,Entropy
from pyFTS.partitioners import Util
def explore_partitioners(data, npart, methods=None, mf=None, tam=[12, 10], save=False, file=None):
all_methods = [Grid.GridPartitioner, Entropy.EntropyPartitioner, FCM.FCMPartitioner]
mfs = [Membership.trimf, Membership.gaussmf, Membership.trapmf]
if methods is None:
methods = all_methods
if mf is None:
mf = mfs
num_cores = multiprocessing.cpu_count()
objs = []
for method in methods:
print(str(method))
tmp = Parallel(n_jobs=num_cores)(delayed(method)(deepcopy(data), npart, m) for m in mf)
objs.append(tmp)
objs = np.ravel(objs).tolist()
Util.plot_partitioners(data, objs, tam, save, file)
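The joblib pattern above dispatches one partitioner constructor per membership function to a worker process and collects the constructed objects in order; a self-contained sketch of the same pattern with an illustrative stand-in function (not part of pyFTS):

from joblib import Parallel, delayed
import multiprocessing

def build_fuzzy_sets(data, npart, mf):
    # stand-in for a partitioner constructor such as Grid.GridPartitioner
    return "%s with %d partitions over %d points" % (mf, npart, len(data))

num_cores = multiprocessing.cpu_count()
results = Parallel(n_jobs=num_cores)(
    delayed(build_fuzzy_sets)(list(range(100)), 20, mf)
    for mf in ["trimf", "gaussmf", "trapmf"])

In the repository version each delayed call receives deepcopy(data), so every worker operates on its own copy of the series.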

View File

@@ -1,6 +1,7 @@
from pyFTS.common import FuzzySet, Membership
import numpy as np
class Partitioner(object):
def __init__(self,name,data,npart,func = Membership.trimf, names=None, prefix="A", transformation=None):
self.name = name
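Concrete partitioners forward the chosen membership function to this constructor; a minimal sketch modeled on the EntropyPartitioner above (the MyPartitioner class and the module path are illustrative assumptions):

from pyFTS.common import Membership
from pyFTS.partitioners import partitioner

class MyPartitioner(partitioner.Partitioner):
    # hypothetical subclass showing how func reaches the base class
    def __init__(self, data, npart, func=Membership.trimf, transformation=None):
        super(MyPartitioner, self).__init__("My", data, npart, func=func,
                                            transformation=transformation)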

View File

@@ -23,13 +23,17 @@ from numpy import random
#gauss_teste = random.normal(0,1.0,400)
os.chdir("/home/petronio/dados/Dropbox/Doutorado/Disciplinas/AdvancedFuzzyTimeSeriesModels/")
os.chdir("/home/petronio/dados/Dropbox/Doutorado/Codigos/")
-#taiexpd = pd.read_csv("DataSets/TAIEX.csv", sep=",")
-#taiex = np.array(taiexpd["avg"][:5000])
+taiexpd = pd.read_csv("DataSets/TAIEX.csv", sep=",")
+taiex = np.array(taiexpd["avg"][:5000])
-nasdaqpd = pd.read_csv("DataSets/NASDAQ_IXIC.csv", sep=",")
-nasdaq = np.array(nasdaqpd["avg"][:5000])
+from pyFTS.partitioners import parallel_util
+parallel_util.explore_partitioners(taiex,20)
+#nasdaqpd = pd.read_csv("DataSets/NASDAQ_IXIC.csv", sep=",")
+#nasdaq = np.array(nasdaqpd["avg"][:5000])
#taiex = pd.read_csv("DataSets/TAIEX.csv", sep=",")
#taiex_treino = np.array(taiex["avg"][2500:3900])
@@ -49,10 +53,10 @@ diff = Transformations.Differential(1)
# gauss,2000,train=0.8, dump=True, save=True, file="experiments/arima_gauss.csv")
-bchmk.interval_sliding_window(nasdaq,2000,train=0.8, #transformation=diff, #models=[pwfts.ProbabilisticWeightedFTS], # #
-partitioners=[Grid.GridPartitioner], #Entropy.EntropyPartitioner], # FCM.FCMPartitioner, ],
-partitions= np.arange(10,200,step=5), #
-dump=True, save=True, file="experiments/nasdaq_interval.csv")
+#bchmk.interval_sliding_window(nasdaq,2000,train=0.8, #transformation=diff, #models=[pwfts.ProbabilisticWeightedFTS], # #
+# partitioners=[Grid.GridPartitioner], #Entropy.EntropyPartitioner], # FCM.FCMPartitioner, ],
+# partitions= np.arange(10,200,step=5), #
+# dump=True, save=True, file="experiments/nasdaq_interval.csv")
#3bchmk.ahead_sliding_window(taiex,2000,train=0.8, steps=20, resolution=250, #transformation=diff, #models=[pwfts.ProbabilisticWeightedFTS], # #
# partitioners=[Grid.GridPartitioner], #Entropy.EntropyPartitioner], # FCM.FCMPartitioner, ],