Bugfixes on nonstationary methods
parent b65af00526
commit 4ba6c16a2f
@@ -205,11 +205,11 @@ def fuzzify(inst, t, fuzzySets):
     return ret


-def fuzzySeries(data, fuzzySets, window_size=1, method='fuzzy', const_t=None):
+def fuzzySeries(data, fuzzySets, ordered_sets, window_size=1, method='fuzzy', const_t=None):
     fts = []
     for t, i in enumerate(data):
         tdisp = window_index(t, window_size) if const_t is None else const_t
-        mv = np.array([fs.membership(i, tdisp) for fs in fuzzySets])
+        mv = np.array([fuzzySets[fs].membership(i, tdisp) for fs in ordered_sets])
         if len(mv) == 0:
             sets = [check_bounds(i, fuzzySets, tdisp)]
         else:
@@ -218,7 +218,7 @@ def fuzzySeries(data, fuzzySets, window_size=1, method='fuzzy', const_t=None):
         elif method == 'maximum':
             mx = max(mv)
             ix = np.ravel(np.argwhere(mv == mx))
-            sets = [fuzzySets[i] for i in ix]
+            sets = [fuzzySets[ordered_sets[i]] for i in ix]
         fts.append(sets)
     return fts

@@ -229,15 +229,15 @@ def window_index(t, window_size):
     return t - (t % window_size)


-def check_bounds(data, sets, t):
-    if data < sets[0].get_lower(t):
-        return sets[0]
-    elif data > sets[-1].get_upper(t):
-        return sets[-1]
+def check_bounds(data, partitioner, t):
+    if data < partitioner.lower_set().get_lower(t):
+        return partitioner.lower_set()
+    elif data > partitioner.upper_set().get_upper(t):
+        return partitioner.upper_set()


-def check_bounds_index(data, sets, t):
-    if data < sets[0].get_lower(t):
+def check_bounds_index(data, partitioner, t):
+    if data < partitioner.lower_set().get_lower(t):
         return 0
-    elif data > sets[-1].get_upper(t):
-        return len(sets) - 1
+    elif data > partitioner.upper_set().get_upper(t):
+        return len(partitioner.sets) - 1
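Note: `fuzzySeries` now receives the partitioner's ordered set names and indexes `fuzzySets` as a dict, and `check_bounds` / `check_bounds_index` take the partitioner itself. A minimal usage sketch, assuming the nonstationary API exercised elsewhere in this commit; the synthetic series and partition sizes are illustrative, not part of the change:

import numpy as np
from pyFTS.partitioners import Grid
from pyFTS.models.nonstationary import partitioners, common

# Illustrative synthetic series (assumption, not from the commit)
data = np.random.normal(5, 0.5, 200)

# Stationary seed partitioner, then the nonstationary wrapper built from it
tmp_fs = Grid.GridPartitioner(data=data[:50], npart=10)
fs = partitioners.PolynomialNonStationaryPartitioner(data, tmp_fs, window_size=35, degree=1)

# New signature: the dict of sets plus the ordered list of set names
fuzzyfied = common.fuzzySeries(data, fs.sets, fs.ordered_sets,
                               window_size=35, method='fuzzy')

# check_bounds now resolves out-of-range values through the partitioner
below = common.check_bounds(min(data) - 10.0, fs, 0)            # expected: fs.lower_set()
above_ix = common.check_bounds_index(max(data) + 10.0, fs, 0)   # expected: len(fs.sets) - 1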
@@ -23,13 +23,10 @@ class ConditionalVarianceFTS(chen.ConventionalFTS):
         self.max_stack = [0,0,0]

     def train(self, ndata, **kwargs):
-        if kwargs.get('sets', None) is not None:
-            self.sets = kwargs.get('sets', None)
-
         self.min_tx = min(ndata)
         self.max_tx = max(ndata)

-        tmpdata = common.fuzzySeries(ndata, self.sets, method='fuzzy', const_t=0)
+        tmpdata = common.fuzzySeries(ndata, self.sets, self.partitioner.ordered_sets, method='fuzzy', const_t=0)
         flrs = FLR.generate_non_recurrent_flrs(tmpdata)
         self.generate_flrg(flrs)

@@ -69,14 +66,14 @@ class ConditionalVarianceFTS(chen.ConventionalFTS):

     def _affected_sets(self, sample, perturb):

-        affected_sets = [[ct, set.membership(sample, perturb[ct])]
-                         for ct, set in enumerate(self.sets)
-                         if set.membership(sample, perturb[ct]) > 0.0]
+        affected_sets = [[ct, self.sets[key].membership(sample, perturb[ct])]
+                         for ct, key in enumerate(self.partitioner.ordered_sets)
+                         if self.sets[key].membership(sample, perturb[ct]) > 0.0]

         if len(affected_sets) == 0:
-            if sample < self.sets[0].get_lower(perturb[0]):
+            if sample < self.partitioner.lower_set().get_lower(perturb[0]):
                 affected_sets.append([0, 1])
-            elif sample < self.sets[-1].get_lower(perturb[-1]):
+            elif sample > self.partitioner.upper_set().get_upper(perturb[-1]):
                 affected_sets.append([len(self.sets) - 1, 1])

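Note: `ConditionalVarianceFTS.train` no longer accepts a `sets` kwarg; the fuzzy sets and their ordering now come from the attached partitioner. A hedged sketch of the intended call pattern, continuing the illustrative `fs` and `data` from the sketch above (the fit/predict wrappers are the ones the updated test script uses):

from pyFTS.models.nonstationary import cvfts

model = cvfts.ConditionalVarianceFTS("", partitioner=fs)
model.fit(data[:150])                  # no 'sets' kwarg anymore; sets come from fs
forecasts = model.predict(data[150:])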
@@ -11,6 +11,9 @@ class NonStationaryFLRG(flrg.FLRG):
         self.RHS = set()

     def get_key(self):
+        if isinstance(self.LHS, list):
+            return str([k.name for k in self.LHS])
+        else:
             return self.LHS.name

     def get_membership(self, data, t, window_size=1):
@@ -46,25 +46,27 @@ class HighOrderNonStationaryFTS(hofts.HighOrderFTS):

             disp = common.window_index(k, window_size)

-            rhs = [set for set in self.sets if set.membership(data[k], disp) > 0.0]
+            rhs = [self.sets[key] for key in self.partitioner.ordered_sets
+                   if self.sets[key].membership(data[k], disp) > 0.0]

             if len(rhs) == 0:
-                rhs = [common.check_bounds(data[k], self.sets, disp)]
+                rhs = [common.check_bounds(data[k], self.partitioner, disp)]

             lags = {}

             for o in np.arange(0, self.order):
                 tdisp = common.window_index(k - (self.order - o), window_size)
-                lhs = [set for set in self.sets if set.membership(sample[o], tdisp) > 0.0]
+                lhs = [self.sets[key] for key in self.partitioner.ordered_sets
+                       if self.sets[key].membership(sample[o], tdisp) > 0.0]

                 if len(lhs) == 0:
-                    lhs = [common.check_bounds(sample[o], self.sets, tdisp)]
+                    lhs = [common.check_bounds(sample[o], self.partitioner, tdisp)]

                 lags[o] = lhs

             root = tree.FLRGTreeNode(None)

-            self.build_tree_without_order(root, lags, 0)
+            tree.build_tree_without_order(root, lags, 0)

             # Trace the possible paths
             for p in root.paths():
@@ -103,10 +105,12 @@ class HighOrderNonStationaryFTS(hofts.HighOrderFTS):

             for ct, dat in enumerate(sample):
                 tdisp = common.window_index((k + time_displacement) - (self.order - ct), window_size)
-                sel = [ct for ct, set in enumerate(self.sets) if set.membership(dat, tdisp) > 0.0]
+                sel = [ct for ct, key in enumerate(self.partitioner.ordered_sets)
+                       if self.sets[key].membership(dat, tdisp) > 0.0]

                 if len(sel) == 0:
-                    sel.append(common.check_bounds_index(dat, self.sets, tdisp))
+                    sel.append(common.check_bounds_index(dat, self.partitioner, tdisp))

                 lags[ct] = sel

@@ -114,7 +118,7 @@ class HighOrderNonStationaryFTS(hofts.HighOrderFTS):

             root = tree.FLRGTreeNode(None)

-            self.build_tree(root, lags, 0)
+            tree.build_tree_without_order(root, lags, 0)

             # Trace the possible paths and build the PFLRG's

@@ -123,7 +127,7 @@ class HighOrderNonStationaryFTS(hofts.HighOrderFTS):
                 flrg = HighOrderNonStationaryFLRG(self.order)

                 for kk in path:
-                    flrg.append_lhs(self.sets[kk])
+                    flrg.append_lhs(self.sets[self.partitioner.ordered_sets[kk]])

                 affected_flrgs.append(flrg)
                 # affected_flrgs_memberships.append_rhs(flrg.get_membership(sample, disp))
@@ -135,16 +139,8 @@ class HighOrderNonStationaryFTS(hofts.HighOrderFTS):
                 for ct, dat in enumerate(sample):
                     td = common.window_index((k + time_displacement) - (self.order - ct), window_size)
                     tmp = flrg.LHS[ct].membership(dat, td)
-                    # print('td',td)
-                    # print('dat',dat)
-                    # print(flrg.LHS[ct].name, flrg.LHS[ct].perturbated_parameters[td])
-                    # print(tmp)

-                    if (tmp == 0.0 and flrg.LHS[ct].name == self.sets[0].name and dat < self.sets[0].get_lower(td)) \
-                            or (tmp == 0.0 and flrg.LHS[ct].name == self.sets[-1].name and dat > self.sets[-1].get_upper(
-                        td)):
-                        mv.append(1.0)
-                    else:
                     mv.append(tmp)
                 # print(mv)

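Note: throughout `HighOrderNonStationaryFTS`, positional indices into a list of sets are replaced by lookups through `partitioner.ordered_sets` (set names in order) into the `sets` dict. A small sketch of that mapping, continuing the illustrative setup from the first sketch above (`data`, `fs`, and the chosen index and time step are arbitrary):

idx = 3                                     # a positional index, e.g. produced by the FLRG tree paths
name = fs.ordered_sets[idx]                 # set name at that position
fuzzy_set = fs.sets[name]                   # the nonstationary fuzzy set itself
tdisp = common.window_index(40, 35)         # perturbation window for time index 40, window size 35
mu = fuzzy_set.membership(data[40], tdisp)  # membership under the perturbation at tdisp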
@@ -34,7 +34,6 @@ class NonStationaryFTS(fts.FTS):
         self.name = "Non Stationary FTS"
         self.detail = ""
         self.flrgs = {}
-        self.method = kwargs.get("method", 'fuzzy')

     def generate_flrg(self, flrs, **kwargs):
         for flr in flrs:
@@ -46,11 +45,9 @@ class NonStationaryFTS(fts.FTS):

     def train(self, data, **kwargs):

-        if kwargs.get('sets', None) is not None:
-            self.sets = kwargs.get('sets', None)
-
         window_size = kwargs.get('parameters', 1)
-        tmpdata = common.fuzzySeries(data, self.sets, window_size, method=self.method)
+        tmpdata = common.fuzzySeries(data, self.sets, self.partitioner.ordered_sets,
+                                     window_size, method='fuzzy')
         flrs = FLR.generate_recurrent_flrs(tmpdata)
         self.generate_flrg(flrs)

@@ -68,23 +65,16 @@ class NonStationaryFTS(fts.FTS):

             tdisp = common.window_index(k + time_displacement, window_size)

-            if self.method == 'fuzzy':
-                affected_sets = [[set, set.membership(ndata[k], tdisp)]
-                                 for set in self.sets if set.membership(ndata[k], tdisp) > 0.0]
-            elif self.method == 'maximum':
-                mv = [set.membership(ndata[k], tdisp) for set in self.sets]
-                ix = np.ravel(np.argwhere(mv == max(mv)))
-                affected_sets = [self.sets[x] for x in ix]
+            affected_sets = [[self.sets[key], self.sets[key].membership(ndata[k], tdisp)]
+                             for key in self.partitioner.ordered_sets
+                             if self.sets[key].membership(ndata[k], tdisp) > 0.0]

             if len(affected_sets) == 0:
-                if self.method == 'fuzzy':
-                    affected_sets.append([common.check_bounds(ndata[k], self.sets, tdisp), 1.0])
-                else:
-                    affected_sets.append(common.check_bounds(ndata[k], self.sets, tdisp))
+                affected_sets.append([common.check_bounds(ndata[k], self.partitioner, tdisp), 1.0])

             tmp = []

-            if len(affected_sets) == 1 and self.method == 'fuzzy':
+            if len(affected_sets) == 1:
                 aset = affected_sets[0][0]
                 if aset.name in self.flrgs:
                     tmp.append(self.flrgs[aset.name].get_midpoint(tdisp))
@@ -92,16 +82,10 @@ class NonStationaryFTS(fts.FTS):
                     tmp.append(aset.get_midpoint(tdisp))
             else:
                 for aset in affected_sets:
-                    if self.method == 'fuzzy':
                     if aset[0].name in self.flrgs:
                         tmp.append(self.flrgs[aset[0].name].get_midpoint(tdisp) * aset[1])
                     else:
                         tmp.append(aset[0].get_midpoint(tdisp) * aset[1])
-                    elif self.method == 'maximum':
-                        if aset.name in self.flrgs:
-                            tmp.append(self.flrgs[aset.name].get_midpoint(tdisp))
-                        else:
-                            tmp.append(aset.get_midpoint(tdisp))

             pto = sum(tmp)

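Note: with the `maximum` branch removed, `NonStationaryFTS` forecasts are always the sum of FLRG (or set) midpoints weighted by the membership of the input in each affected set. A tiny worked sketch of that defuzzification step, with hypothetical midpoints and memberships (not the class's actual internals):

# two affected sets: midpoints 10.0 and 12.0, memberships 0.7 and 0.3
affected = [(10.0, 0.7), (12.0, 0.3)]
forecast = sum(midpoint * mu for midpoint, mu in affected)   # 7.0 + 3.6 = 10.6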
@@ -1,6 +1,7 @@
 import numpy as np
 from pyFTS.partitioners import partitioner
 from pyFTS.models.nonstationary import common, perturbation
+from pyFTS.common import FuzzySet as stationary_fs


 class PolynomialNonStationaryPartitioner(partitioner.Partitioner):
@@ -13,13 +14,18 @@ class PolynomialNonStationaryPartitioner(partitioner.Partitioner):
         super(PolynomialNonStationaryPartitioner, self).__init__(name=part.name, data=data, npart=part.partitions,
                                                                  func=part.membership_function, names=part.setnames,
                                                                  prefix=part.prefix, transformation=part.transformation,
-                                                                 indexer=part.indexer)
+                                                                 indexer=part.indexer, preprocess=False)

         self.sets = {}

         loc_params, wid_params = self.get_polynomial_perturbations(data, **kwargs)

-        for ct, key in enumerate(part.sets.keys()):
+        if self.ordered_sets is None and self.setnames is not None:
+            self.ordered_sets = part.setnames
+        else:
+            self.ordered_sets = stationary_fs.set_ordered(part.sets)
+
+        for ct, key in enumerate(self.ordered_sets):
             set = part.sets[key]
             loc_roots = np.roots(loc_params[ct])[0]
             wid_roots = np.roots(wid_params[ct])[0]
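Note: the partitioner now records `ordered_sets` explicitly, taken from the seed partitioner's `setnames` when given, otherwise derived by ordering the stationary sets with `FuzzySet.set_ordered`. A brief sketch of what that yields, continuing the illustrative `tmp_fs`/`fs` from the first sketch above (the 'A0' … 'A9' naming is the library's usual prefix and is an assumption here):

from pyFTS.common import FuzzySet as stationary_fs

names = stationary_fs.set_ordered(tmp_fs.sets)   # set names sorted by their centers, e.g. 'A0' ... 'A9'
print(names)
print(fs.ordered_sets)                           # the nonstationary partitioner records the same ordering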
@@ -1,133 +1,32 @@
 import os
 import numpy as np
 from pyFTS.common import Membership, Transformations
-from pyFTS.nonstationary import common, perturbation, partitioners, util, honsfts, cvfts
+from pyFTS.models.nonstationary import common, perturbation, partitioners, util, honsfts, cvfts
 from pyFTS.models.nonstationary import nsfts
 from pyFTS.partitioners import Grid
 import matplotlib.pyplot as plt
 from pyFTS.common import Util as cUtil
 import pandas as pd
-os.chdir("/home/petronio/Dropbox/Doutorado/Codigos/")

-data = pd.read_csv("DataSets/synthetic_nonstationary_dataset_A.csv", sep=";")
-data = np.array(data["0"][:])
+from pyFTS.data import artificial

-for ct, train, test in cUtil.sliding_window(data, 300):
-    for partition in np.arange(10,50):
-        print(partition)
-        tmp_fsp = Grid.GridPartitioner(train, partition)
-        print(len(tmp_fsp.sets))
-
-        fsp = partitioners.PolynomialNonStationaryPartitioner(train, tmp_fsp, window_size=35, degree=1)
+lmv1 = artificial.generate_gaussian_linear(1,0.2,0.2,0.05)

-'''
-diff = Transformations.Differential(1)
-
-def generate_heteroskedastic_linear(mu_ini, sigma_ini, mu_inc, sigma_inc, it=10, num=35):
-    mu = mu_ini
-    sigma = sigma_ini
-    ret = []
-    for k in np.arange(0,it):
-        ret.extend(np.random.normal(mu, sigma, num))
-        mu += mu_inc
-        sigma += sigma_inc
-    return ret
-
-#lmv1 = generate_heteroskedastic_linear(1,0.1,1,0.3)
-lmv1 = generate_heteroskedastic_linear(5,0.1,0,0.2)
-#lmv1 = generate_heteroskedastic_linear(1,0.3,1,0)
-
-lmv1 = diff.apply(lmv1)
-
-ns = 10 #number of fuzzy sets
 ts=200
-train = lmv1[:ts]
-test = lmv1[ts:]
-w = 25
-deg = 4
+ws=35
+train1 = lmv1[:ts]
+test1 = lmv1[ts:]

-tmp_fs = Grid.GridPartitioner(train, 10)
+tmp_fs1 = Grid.GridPartitioner(data=train1[:50], npart=10)

-#fs = partitioners.PolynomialNonStationaryPartitioner(train, tmp_fs, window_size=35, degree=1)
-fs = partitioners.ConstantNonStationaryPartitioner(train, tmp_fs,
-                                                   location=perturbation.polynomial,
-                                                   location_params=[1,0],
-                                                   location_roots=0,
-                                                   width=perturbation.polynomial,
-                                                   width_params=[1,0],
-                                                   width_roots=0)
-'''
-"""
-perturb = [0.5, 0.25]
-for i in [0,1]:
-    print(fs.sets[i].parameters)
-    fs.sets[i].perturbate_parameters(perturb[i])
-for i in [0,1]:
-    print(fs.sets[i].perturbated_parameters[perturb[i]])
-"""
-'''
-#nsfts1 = nsfts.NonStationaryFTS("", partitioner=fs)
+fs1 = partitioners.PolynomialNonStationaryPartitioner(train1, tmp_fs1, window_size=ws, degree=1)

-nsfts1 = cvfts.ConditionalVarianceFTS("", partitioner=fs)
+nsfts1 = honsfts.HighOrderNonStationaryFTS("", partitioner=fs1)

-nsfts1.train(train)
+nsfts1.fit(train1, order=2, parameters=ws)

-#print(fs)
+print(fs1)

-#print(nsfts1)
+print(nsfts1.predict(test1))

-#tmp = nsfts1.forecast(test[50:60])
+print(nsfts1)

-#print(tmp)
-#print(test[50:60])
-
-util.plot_sets_conditional(nsfts1, test, end=150, step=1, tam=[10, 5])
-print('')
-"""
-passengers = pd.read_csv("DataSets/AirPassengers.csv", sep=",")
-passengers = np.array(passengers["Passengers"])
-
-ts = 100
-ws=12
-
-trainp = passengers[:ts]
-testp = passengers[ts:]
-
-tmp_fsp = Grid.GridPartitioner(trainp[:50], 10)
-
-fsp = common.PolynomialNonStationaryPartitioner(trainp, tmp_fsp, window_size=ws, degree=1)
-
-nsftsp = honsfts.HighOrderNonStationaryFTS("", partitioner=fsp)
-#nsftsp = nsfts.NonStationaryFTS("", partitioner=fsp, method='fuzzy')
-
-nsftsp.train(trainp, order=2, parameters=ws)
-
-#print(fsp)
-
-#print(nsftsp)
-
-tmpp = nsftsp.forecast(passengers[101:104], time_displacement=101, window_size=ws)
-tmpi = nsftsp.forecast_interval(passengers[101:104], time_displacement=101, window_size=ws)
-
-#print(passengers[101:104])
-print([k[0] for k in tmpi])
-print(tmpp)
-print([k[1] for k in tmpi])
-
-#util.plot_sets(fsp.sets, tam=[10, 5], start=0, end=100, step=2, data=passengers[:100],
-#               window_size=ws, only_lines=False)
-
-#fig, axes = plt.subplots(nrows=1, ncols=1, figsize=[15,5])
-"""
-
-"""
-axes.plot(testp, label="Original")
-#axes.plot(tmpp, label="NSFTS")
-
-handles0, labels0 = axes.get_legend_handles_labels()
-lgd = axes.legend(handles0, labels0, loc=2)
-"""
-'''