bugfixes
parent 2b038968f7
commit 09e5415929
@@ -71,9 +71,9 @@ class ConventionalFTS(fts.FTS):
             if actual.name not in self.flrgs:
                 ret.append(actual.centroid)
             else:
-                flrg = self.flrgs[actual.name]
+                _flrg = self.flrgs[actual.name]
 
-                ret.append(flrg.get_midpoint())
+                ret.append(_flrg.get_midpoint())
 
         ret = self.doInverseTransformations(ret, params=[data[self.order - 1:]])
 
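The branch touched here is the Chen-style defuzzification step: when the fuzzified input has no rule in the rule base, the forecast falls back to the set's own centroid, otherwise the rule's midpoint is used; the change only renames the local to _flrg. A standalone sketch of that decision, with hypothetical rule names rather than the pyFTS classes:

# Hypothetical rule base and centroids, for illustration only
flrg_midpoints = {"A2": 17.5}                  # rule name -> defuzzified consequent
fallback_centroids = {"A1": 10.0, "A2": 15.0}  # set name -> centroid

forecast = []
for name in ["A1", "A2"]:                      # names of the fuzzified inputs
    # no rule for this set -> fall back to its centroid, else use the rule midpoint
    forecast.append(flrg_midpoints.get(name, fallback_centroids[name]))
print(forecast)                                # [10.0, 17.5]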
@@ -20,7 +20,6 @@ class FLRG(object):
         ret = self.LHS.membership(data)
         return ret
 
-
     def get_midpoint(self):
         if self.midpoint is None:
             self.midpoint = sum(self.get_midpoints())/len(self.RHS)
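get_midpoint defuzzifies the rule consequents once and caches the result for later forecasts. A standalone sketch of the same memoization pattern, using hypothetical names rather than the pyFTS FLRG:

class Rule:                                    # hypothetical stand-in for an FLRG
    def __init__(self, rhs_midpoints):
        self.rhs_midpoints = rhs_midpoints
        self.midpoint = None

    def get_midpoint(self):
        if self.midpoint is None:              # computed once, reused on later calls
            self.midpoint = sum(self.rhs_midpoints) / len(self.rhs_midpoints)
        return self.midpoint

print(Rule([1.0, 2.0, 4.0]).get_midpoint())    # 2.333...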
@@ -159,15 +159,17 @@ class FTS(object):
         return ndata
 
     def doInverseTransformations(self, data, params=None, **kwargs):
-        ndata = data
         if len(self.transformations) > 0:
             if params is None:
                 params = [None for k in self.transformations]
 
             for c, t in enumerate(reversed(self.transformations), start=0):
-                ndata = t.inverse(ndata, params[c], **kwargs)
+                print(c)
+                ndata = t.inverse(data, params[c], **kwargs)
 
             return ndata
+        else:
+            return data
 
     def __str__(self):
         tmp = self.name + ":\n"
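This method walks the registered transformations in reverse and maps forecasts back to the original scale; the caller above passes the tail of the input series as params so that a differencing transform can be undone. A self-contained sketch of that idea in plain NumPy (an illustrative stand-in, not the pyFTS Transformations API):

import numpy as np

def difference(data):
    return np.diff(data)                       # forward pass: first-order differences

def inverse_difference(diffs, previous):
    # each value in the differenced domain is added back onto the known
    # previous original value to recover the original scale
    return np.asarray(previous) + np.asarray(diffs)

original = np.array([10.0, 12.0, 15.0, 14.0])
d = difference(original)                       # [2., 3., -1.]
restored = inverse_difference(d, original[:-1])
assert np.allclose(restored, original[1:])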
@@ -27,7 +27,7 @@ class HighOrderFLRG(flrg.FLRG):
         for c in self.LHS:
             if len(self.strlhs) > 0:
                 self.strlhs += ", "
-            self.strlhs = self.strlhs + c.name
+            self.strlhs = self.strlhs + str(c)
         return self.strlhs
 
     def appendLHS(self, c):
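strLHS now formats each antecedent through str(c) instead of reading c.name directly, so it works for any LHS element that defines __str__. A tiny sketch of that assumption, with a hypothetical class rather than the pyFTS FuzzySet:

class Set:                                     # hypothetical antecedent element
    def __init__(self, name):
        self.name = name
    def __str__(self):
        return self.name

lhs = [Set("A1"), Set("A2")]
print(", ".join(str(c) for c in lhs))          # "A1, A2", the string strLHS builds incrementally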
@@ -196,8 +196,8 @@ class HighOrderNonStationaryFTS(hofts.HighOrderFTS):
             if len(affected_flrgs) == 0:
                 tmp.append(common.check_bounds(sample[-1], self.sets, tdisp))
             elif len(affected_flrgs) == 1:
-                if affected_flrgs[0].strLHS() in self.flrgs:
-                    flrg = affected_flrgs[0]
+                flrg = affected_flrgs[0]
+                if flrg.strLHS() in self.flrgs:
                     tmp.append(self.flrgs[flrg.strLHS()].get_midpoint(tdisp))
                 else:
                     tmp.append(flrg.LHS[-1].get_midpoint(tdisp))
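Binding flrg before the membership test matters because the else branch also reads it; in the old order the name was only assigned inside the if, so a missing key would raise NameError. A minimal illustration of the control-flow issue, with hypothetical data rather than the pyFTS classes:

flrgs = {}                                     # hypothetical (empty) rule base
affected_flrgs = ["A1 -> A2"]                  # hypothetical single affected rule

flrg = affected_flrgs[0]                       # bind first, so both branches can use it
if flrg in flrgs:
    value = flrgs[flrg]
else:
    value = "fallback derived from " + flrg    # the old ordering would hit NameError here
print(value)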
@@ -250,13 +250,13 @@ class HighOrderNonStationaryFTS(hofts.HighOrderFTS):
                 lower.append(aset.get_lower(tdisp))
                 upper.append(aset.get_upper(tdisp))
             elif len(affected_flrgs) == 1:
-                if affected_flrgs[0].strLHS() in self.flrgs:
-                    flrg = affected_flrgs[0]
-                    lower.append(self.flrgs[flrg.strLHS()].get_lower(tdisp))
-                    upper.append(self.flrgs[flrg.strLHS()].get_upper(tdisp))
+                _flrg = affected_flrgs[0]
+                if _flrg.strLHS() in self.flrgs:
+                    lower.append(self.flrgs[_flrg.strLHS()].get_lower(tdisp))
+                    upper.append(self.flrgs[_flrg.strLHS()].get_upper(tdisp))
                 else:
-                    lower.append(flrg.LHS[-1].get_lower(tdisp))
-                    upper.append(flrg.LHS[-1].get_upper(tdisp))
+                    lower.append(_flrg.LHS[-1].get_lower(tdisp))
+                    upper.append(_flrg.LHS[-1].get_upper(tdisp))
             else:
                 for ct, aset in enumerate(affected_flrgs):
                     if aset.strLHS() in self.flrgs:
@@ -18,7 +18,7 @@ class GridPartitioner(partitioner.Partitioner):
         partlen = dlen / self.partitions
 
         count = 0
-        for c in np.arange(self.min, self.max, partlen):
+        for c in np.linspace(self.min, self.max, self.partitions):
             if self.membership_function == Membership.trimf:
                 sets.append(
                     FuzzySet.FuzzySet(self.prefix + str(count), Membership.trimf, [c - partlen, c, c + partlen],c))
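np.linspace guarantees exactly self.partitions set centers spread from min to max inclusive, whereas np.arange with a fractional step stops short of max and its count can drift with floating-point rounding. A quick comparison with made-up bounds:

import numpy as np

# Made-up universe of discourse and partition count, for illustration only
vmin, vmax, npart = 0.0, 10.0, 7
partlen = (vmax - vmin) / npart

# arange stops before vmax and its length depends on floating-point rounding
centers_arange = np.arange(vmin, vmax, partlen)
# linspace returns exactly npart centers, from vmin to vmax inclusive
centers_linspace = np.linspace(vmin, vmax, npart)

print(len(centers_arange), centers_arange)      # 7 centers, last one below vmax
print(len(centers_linspace), centers_linspace)  # 7 centers, last one exactly vmax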
@@ -68,7 +68,6 @@ class ConventionalFTS(fts.FTS):
 
             ret.append( sum(mp)/len(mp))
 
-
         ret = self.doInverseTransformations(ret, params=[data[self.order - 1:]])
 
         return ret
@@ -9,17 +9,18 @@ import matplotlib as plt
 #from mpl_toolkits.mplot3d import Axes3D
 
 import pandas as pd
-from pyF import Grid, Entropy, FCM, Huarng
+from pyFTS.partitioners import Grid, Entropy, FCM, Huarng
 from pyFTS.common import FLR,FuzzySet,Membership,Transformations, Util as cUtil
 from pyFTS import fts,hofts,ifts,pwfts,tree, chen
 #from pyFTS.benchmarks import benchmarks as bchmk
 from pyFTS.benchmarks import naive, arima
 from pyFTS.benchmarks import Measures
 from numpy import random
-from pyFTS.models.seasonal import SeasonalIndexer
+from pyFTS.seasonal import SeasonalIndexer
 
 os.chdir("/home/petronio/dados/Dropbox/Doutorado/Codigos/")
 
+bc = Transformations.BoxCox(0)
 diff = Transformations.Differential(1)
 #ix = SeasonalIndexer.LinearSeasonalIndexer([12, 24], [720, 1],[False, False])
 
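BoxCox(0) corresponds to the log transform, which the script now applies to the AirPassengers series. A minimal standalone sketch of the forward and inverse mapping (illustrative only, not the pyFTS Transformations class):

import numpy as np

def boxcox(x, lmbda=0.0):
    x = np.asarray(x, dtype=float)
    return np.log(x) if lmbda == 0 else (x ** lmbda - 1.0) / lmbda

def boxcox_inverse(y, lmbda=0.0):
    y = np.asarray(y, dtype=float)
    return np.exp(y) if lmbda == 0 else (lmbda * y + 1.0) ** (1.0 / lmbda)

series = np.array([112.0, 118.0, 132.0, 129.0])    # first AirPassengers values
assert np.allclose(boxcox_inverse(boxcox(series)), series)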
@@ -30,8 +31,8 @@ DATASETS
 #enrollments = pd.read_csv("DataSets/Enrollments.csv", sep=";")
 #enrollments = np.array(enrollments["Enrollments"])
 
-#passengers = pd.read_csv("DataSets/AirPassengers.csv", sep=",")
-#passengers = np.array(passengers["Passengers"])
+passengers = pd.read_csv("DataSets/AirPassengers.csv", sep=",")
+passengers = np.array(passengers["Passengers"])
 
 #sunspots = pd.read_csv("DataSets/sunspots.csv", sep=",")
 #sunspots = np.array(sunspots["SUNACTIVITY"])
@@ -63,16 +64,42 @@ DATASETS
 #print(lag)
 #print(a)
 #'''
+'''
 sonda = pd.read_csv("DataSets/SONDA_BSB_15MIN_AVG.csv", sep=";")
 
 sonda['data'] = pd.to_datetime(sonda['data'])
 
-#sonda = sonda[:][527041:].dropna()
+sonda = sonda[:][527041:].dropna()
 
 sonda.index = np.arange(0,len(sonda.index))
 
 sonda_treino = sonda[:105313].dropna()
 sonda_teste = sonda[105314:].dropna()
+'''
 
+from pyFTS.partitioners import Grid
+from pyFTS import song, chen, yu, sadaei, ismailefendi, cheng
+
+train = passengers[:100]
+test = passengers[100:]
+
+fs = Grid.GridPartitioner(train, 10, transformation=bc)
+
+methods = [song.ConventionalFTS, chen.ConventionalFTS, yu.WeightedFTS, sadaei.ExponentialyWeightedFTS,
+           ismailefendi.ImprovedWeightedFTS, cheng.TrendWeightedFTS]
+
+#fig, axes = plt.subplots(nrows=1, ncols=1, figsize=[15, 5])
+
+#axes.plot(test, label="Original")
+
+for method in methods:
+    model = method("")
+    model.appendTransformation(bc)
+    model.train(train, sets=fs.sets)
+
+    forecasts = model.forecast(test)
+
+    print(forecasts)
+
 #ix_m15 = SeasonalIndexer.DateTimeSeasonalIndexer('data',[SeasonalIndexer.DateTime.minute],[15],'glo_avg', name='m15')
 
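The new loop only prints the raw forecasts; one plausible next step is to score them against the held-out series. A hedged sketch with plain NumPy (the one-step alignment in the commented line assumes first-order models; test and forecasts are the names from the script above):

import numpy as np

def rmse(targets, forecasts):
    targets = np.asarray(targets, dtype=float)
    forecasts = np.asarray(forecasts, dtype=float)
    return np.sqrt(np.mean((targets - forecasts) ** 2))

# inside the loop above, after forecasting:
# print(rmse(test[1:], forecasts[:-1]))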
@@ -92,11 +119,11 @@ sonda_teste = sonda[105314:].dropna()
 
 #cUtil.persist_obj(obj, "models/sonda_msfts_Entropy40_Mhm15.pkl")
 
-ftse = cUtil.load_obj("models/sonda_ensemble_msfts.pkl")
+#ftse = cUtil.load_obj("models/sonda_ensemble_msfts.pkl")
 
-tmp = ftse.forecastDistribution(sonda_teste[850:860], h=0.5, method="gaussian")
+#tmp = ftse.forecastDistribution(sonda_teste[850:860], h=0.5, method="gaussian")
 
-print(tmp[0])
+#print(tmp[0])
 
 #'''
 
@@ -4,9 +4,22 @@ from pyFTS.common import Membership, Transformations
 from pyFTS.nonstationary import common,perturbation, partitioners, util,nsfts, honsfts, cvfts
 from pyFTS.partitioners import Grid
 import matplotlib.pyplot as plt
+from pyFTS.common import Util as cUtil
 import pandas as pd
 os.chdir("/home/petronio/Dropbox/Doutorado/Codigos/")
 
+data = pd.read_csv("DataSets/synthetic_nonstationary_dataset_A.csv", sep=";")
+data = np.array(data["0"][:])
+
+for ct, train, test in cUtil.sliding_window(data, 300):
+    for partition in np.arange(10,50):
+        print(partition)
+        tmp_fsp = Grid.GridPartitioner(train, partition)
+        print(len(tmp_fsp.sets))
+
+        fsp = partitioners.PolynomialNonStationaryPartitioner(train, tmp_fsp, window_size=35, degree=1)
+
+'''
 diff = Transformations.Differential(1)
 
 def generate_heteroskedastic_linear(mu_ini, sigma_ini, mu_inc, sigma_inc, it=10, num=35):
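The added loop consumes cUtil.sliding_window(data, 300), which yields (counter, train, test) triples over successive slices of the series. A standalone sketch of that kind of generator, illustrative only since the real helper's split rule may differ:

def sliding_window(data, windowsize, train_ratio=0.8):
    # walk the series in non-overlapping windows and split each one
    for ct in range(0, len(data) - windowsize, windowsize):
        window = data[ct:ct + windowsize]
        cut = int(windowsize * train_ratio)
        yield ct, window[:cut], window[cut:]

for ct, train, test in sliding_window(list(range(10)), 5):
    print(ct, train, test)                     # 0 [0, 1, 2, 3] [4]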
@@ -43,6 +56,7 @@ fs = partitioners.ConstantNonStationaryPartitioner(train, tmp_fs,
                                                    width=perturbation.polynomial,
                                                    width_params=[1,0],
                                                    width_roots=0)
+'''
 """
 perturb = [0.5, 0.25]
 for i in [0,1]:
@@ -51,6 +65,7 @@ for i in [0,1]:
 for i in [0,1]:
     print(fs.sets[i].perturbated_parameters[perturb[i]])
 """
+'''
 #nsfts1 = nsfts.NonStationaryFTS("", partitioner=fs)
 
 nsfts1 = cvfts.ConditionalVarianceFTS("", partitioner=fs)
@ -113,4 +128,5 @@ axes.plot(testp, label="Original")
|
|||||||
|
|
||||||
handles0, labels0 = axes.get_legend_handles_labels()
|
handles0, labels0 = axes.get_legend_handles_labels()
|
||||||
lgd = axes.legend(handles0, labels0, loc=2)
|
lgd = axes.legend(handles0, labels0, loc=2)
|
||||||
"""
|
"""
|
||||||
|
'''
|