- Improvements and refactorings on HOFTS, IFTS and PWFTS;
- Complete version of NSFTS
parent ce71dc20cb
commit 6f455f3215
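The refactored non-stationary models are exercised end to end in the test script at the bottom of this diff. A minimal usage sketch, assuming the AirPassengers dataset path and the illustrative split point ts=100 and window size ws=12 used there (these values are examples, not part of the API):

    import numpy as np
    import pandas as pd
    from pyFTS.nonstationary import common, nsfts
    from pyFTS.partitioners import Grid

    # Assumed data path, mirroring the test script further down in this diff
    passengers = np.array(pd.read_csv("DataSets/AirPassengers.csv", sep=",")["Passengers"])
    ts, ws = 100, 12                        # train/test split and perturbation window (assumed)
    trainp, testp = passengers[:ts], passengers[ts:]

    tmp_fsp = Grid.GridPartitioner(trainp[:ws], 15)
    fsp = common.PolynomialNonStationaryPartitioner(trainp, tmp_fsp, window_size=ws, degree=1)

    model = nsfts.NonStationaryFTS("", partitioner=fsp, method='fuzzy')
    model.train(trainp, order=1, parameters=ws)    # 'parameters' carries the window size into train()
    forecasts = model.forecast(testp, time_displacement=ts, window_size=ws)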
@@ -35,6 +35,10 @@ def rmse(targets, forecasts):
     :param forecasts:
     :return:
     """
+    if isinstance(targets, list):
+        targets = np.array(targets)
+    if isinstance(forecasts, list):
+        forecasts = np.array(forecasts)
     return np.sqrt(np.nanmean((targets - forecasts) ** 2))
 
 
@@ -56,6 +60,10 @@ def mape(targets, forecasts):
     :param forecasts:
     :return:
     """
+    if isinstance(targets, list):
+        targets = np.array(targets)
+    if isinstance(forecasts, list):
+        forecasts = np.array(forecasts)
     return np.mean(np.abs(targets - forecasts) / targets) * 100
 
 
@@ -67,6 +75,10 @@ def smape(targets, forecasts, type=2):
     :param type:
     :return:
     """
+    if isinstance(targets, list):
+        targets = np.array(targets)
+    if isinstance(forecasts, list):
+        forecasts = np.array(forecasts)
     if type == 1:
         return np.mean(np.abs(forecasts - targets) / ((forecasts + targets)/2))
     elif type == 2:
@@ -88,6 +100,11 @@ def UStatistic(targets, forecasts):
     :return:
     """
     l = len(targets)
+    if isinstance(targets, list):
+        targets = np.array(targets)
+    if isinstance(forecasts, list):
+        forecasts = np.array(forecasts)
+
     naive = []
     y = []
     for k in np.arange(0,l-1):
@@ -128,3 +128,29 @@ def plot_residuals(targets, models, tam=[8, 8], save=False, file=None):
     plt.tight_layout()
 
     Util.showAndSaveImage(fig, file, save)
+
+
+def single_plot_residuals(targets, forecasts, order, tam=[8, 8], save=False, file=None):
+    fig, axes = plt.subplots(nrows=1, ncols=3, figsize=tam)
+
+    ax = axes
+    res = residuals(targets, forecasts, order)
+
+    ax[0].set_title("Residuals", size='large')
+    ax[0].set_ylabel("Model", size='large')
+    ax[0].set_xlabel(' ')
+    ax[0].plot(res)
+
+    ax[1].set_title("Residuals Autocorrelation", size='large')
+    ax[1].set_ylabel('ACS')
+    ax[1].set_xlabel('Lag')
+    ax[1].acorr(res)
+
+    ax[2].set_title("Residuals Histogram", size='large')
+    ax[2].set_ylabel('Freq')
+    ax[2].set_xlabel('Bins')
+    ax[2].hist(res)
+
+    plt.tight_layout()
+
+    Util.showAndSaveImage(fig, file, save)
@@ -35,6 +35,25 @@ class IndexedFLR(FLR):
     def __str__(self):
         return str(self.index) + ": "+ self.LHS.name + " -> " + self.RHS.name
 
+def generate_high_order_recurrent_flr(fuzzyData):
+    """
+    Create a ordered FLR set from a list of fuzzy sets with recurrence
+    :param fuzzyData: ordered list of fuzzy sets
+    :return: ordered list of FLR
+    """
+    flrs = []
+    for i in np.arange(1,len(fuzzyData)):
+        lhs = fuzzyData[i - 1]
+        rhs = fuzzyData[i]
+        if isinstance(lhs, list) and isinstance(rhs, list):
+            for l in lhs:
+                for r in rhs:
+                    tmp = FLR(l, r)
+                    flrs.append(tmp)
+        else:
+            tmp = FLR(lhs,rhs)
+            flrs.append(tmp)
+    return flrs
+
 def generateRecurrentFLRs(fuzzyData):
     """
@@ -27,6 +27,7 @@ class FuzzySet(object):
         elif self.mf == Membership.gaussmf:
             self.lower = parameters[0] - parameters[1]*3
             self.upper = parameters[0] + parameters[1]*3
+        self.metadata = {}
 
     def membership(self, x):
         """
@@ -89,6 +90,7 @@ def getMaxMembershipFuzzySet(inst, fuzzySets):
     mv = fuzzyInstance(inst, fuzzySets)
     return fuzzySets[np.argwhere(mv == max(mv))[0, 0]]
 
+
 def getMaxMembershipFuzzySetIndex(inst, fuzzySets):
     """
     Fuzzify a data point, returning the fuzzy set with maximum membership value
@@ -11,6 +11,16 @@ class FLRG(object):
         self.lower = None
         self.upper = None
 
+    def get_membership(self, data):
+        ret = 0.0
+        if isinstance(self.LHS, (list, set)):
+            assert len(self.LHS) == len(data)
+            ret = min([self.LHS[ct].membership(dat) for ct, dat in enumerate(data)])
+        else:
+            ret = self.LHS.membership(data)
+        return ret
+
+
     def get_midpoint(self):
         if self.midpoint is None:
             self.midpoint = sum(self.get_midpoints())/len(self.RHS)
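The new FLRG.get_membership aggregates the lag-wise memberships with a minimum (t-norm) when the LHS holds one fuzzy set per lag. A small hypothetical illustration, assuming the FuzzySet(name, mf, parameters, centroid) constructor from pyFTS.common:

    from pyFTS.common import FuzzySet, Membership

    # Two hypothetical triangular sets, one per lag of a second-order FLRG
    A1 = FuzzySet.FuzzySet("A1", Membership.trimf, [0, 5, 10], 5)
    A2 = FuzzySet.FuzzySet("A2", Membership.trimf, [5, 10, 15], 10)

    sample = [4.0, 9.0]
    # get_membership([4.0, 9.0]) reduces to the weakest lag:
    mu = min(A1.membership(sample[0]), A2.membership(sample[1]))   # min(0.8, 0.8) == 0.8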
@@ -7,7 +7,7 @@ Fuzzy Sets Syst., vol. 81, no. 3, pp. 311–319, 1996.
 
 import numpy as np
 from pyFTS.common import FuzzySet,FLR
-from pyFTS import fts, flrg
+from pyFTS import fts, flrg, tree
 
 
 class HighOrderFLRG(flrg.FLRG):
@@ -57,6 +57,27 @@ class HighOrderFTS(fts.FTS):
         self.setsDict = {}
         self.is_high_order = True
 
+    def build_tree(self, node, lags, level):
+        if level >= self.order:
+            return
+
+        for s in lags[level]:
+            node.appendChild(tree.FLRGTreeNode(s))
+
+        for child in node.getChildren():
+            self.build_tree(child, lags, level + 1)
+
+    def build_tree_without_order(self, node, lags, level):
+
+        if level not in lags:
+            return
+
+        for s in lags[level]:
+            node.appendChild(tree.FLRGTreeNode(s))
+
+        for child in node.getChildren():
+            self.build_tree_without_order(child, lags, level + 1)
+
     def generateFLRG(self, flrs):
         flrgs = {}
         l = len(flrs)
@@ -73,6 +94,43 @@ class HighOrderFTS(fts.FTS):
                 flrgs[flrg.strLHS()].appendRHS(flrs[k].RHS)
         return (flrgs)
 
+    def generate_flrg(self, data):
+        flrgs = {}
+        l = len(data)
+        for k in np.arange(self.order, l):
+            if self.dump: print("FLR: " + str(k))
+
+            sample = data[k - self.order: k]
+
+            rhs = [set for set in self.sets if set.membership(data[k]) > 0.0]
+
+            lags = {}
+
+            for o in np.arange(0, self.order):
+                lhs = [set for set in self.sets if set.membership(sample[o]) > 0.0]
+
+                lags[o] = lhs
+
+            root = tree.FLRGTreeNode(None)
+
+            self.build_tree_without_order(root, lags, 0)
+
+            # Trace the possible paths
+            for p in root.paths():
+                flrg = HighOrderFLRG(self.order)
+                path = list(reversed(list(filter(None.__ne__, p))))
+
+                for lhs in enumerate(path, start=0):
+                    flrg.appendLHS(lhs)
+
+                if flrg.strLHS() not in flrgs:
+                    flrgs[flrg.strLHS()] = flrg;
+
+                for st in rhs:
+                    flrgs[flrg.strLHS()].appendRHS(st)
+
+        return flrgs
+
     def train(self, data, sets, order=1,parameters=None):
 
         data = self.doTransformations(data, updateUoD=True)
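generate_flrg fuzzifies every lag of each sample, stores the candidate fuzzy sets per lag in lags, and walks every root-to-leaf path of the tree, so each combination of lagged fuzzy sets becomes one FLRG. A rough equivalent of that path enumeration (illustration only, with hypothetical set names, not the library code):

    from itertools import product

    lags = {0: ["A1", "A2"], 1: ["A2"]}                  # candidate sets found for each lag
    paths = list(product(*(lags[o] for o in sorted(lags))))
    # [('A1', 'A2'), ('A2', 'A2')]  -- one HighOrderFLRG per path; every set whose
    # membership of data[k] is positive is then appended to that FLRG's RHS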
@@ -80,9 +138,7 @@ class HighOrderFTS(fts.FTS):
         self.order = order
         self.sets = sets
         for s in self.sets: self.setsDict[s.name] = s
-        tmpdata = FuzzySet.fuzzySeries(data, sets)
-        flrs = FLR.generateRecurrentFLRs(tmpdata)
-        self.flrgs = self.generateFLRG(flrs)
+        self.flrgs = self.generate_flrg(data)
 
     def forecast(self, data, **kwargs):
 
@@ -43,16 +43,6 @@ class IntervalFTS(hofts.HighOrderFTS):
         mb = [fuzzySets[k].membership(data[k]) for k in np.arange(0, len(data))]
         return mb
 
-    def build_tree(self, node, lags, level):
-        if level >= self.order:
-            return
-
-        for s in lags[level]:
-            node.appendChild(tree.FLRGTreeNode(s))
-
-        for child in node.getChildren():
-            self.build_tree(child, lags, level + 1)
-
     def forecastInterval(self, data, **kwargs):
 
         ndata = np.array(self.doTransformations(data))
@@ -172,14 +172,15 @@ class FuzzySet(FS.FuzzySet):
     def __str__(self):
         tmp = ""
         if self.location is not None:
-            tmp += "Loc. Pert.: "
+            tmp += "Location: "
             for ct, f in enumerate(self.location):
-                tmp += str(f.__name__) + "(" + str(self.location_params[ct]) + ") "
+                tmp += str(f.__name__) + "(" + str(["{0:.2f}".format(p) for p in self.location_params[ct]]) + ") "
         if self.width is not None:
-            tmp += "Wid. Pert.: "
+            tmp += "Width: "
             for ct, f in enumerate(self.width):
-                tmp += str(f.__name__) + "(" + str(self.width_params[ct]) + ") "
-        return self.name + ": " + str(self.mf.__name__) + "(" + str(self.parameters) + ") " + tmp
+                tmp += str(f.__name__) + "(" + str(["{0:.2f}".format(p) for p in self.width_params[ct]]) + ") "
+        tmp = "(" + str(["{0:.2f}".format(p) for p in self.parameters]) + ") " + tmp
+        return self.name + ": " + str(self.mf.__name__) + tmp
 
 
 class PolynomialNonStationaryPartitioner(partitioner.Partitioner):
@@ -218,6 +219,14 @@ class PolynomialNonStationaryPartitioner(partitioner.Partitioner):
         tmp = np.polyfit(rng, diff, deg=deg)
         return tmp
 
+    def scale_up(self,x,pct):
+        if x > 0: return x*(1+pct)
+        else: return x*pct
+
+    def scale_down(self,x,pct):
+        if x > 0: return x*pct
+        else: return x*(1+pct)
+
     def get_polynomial_perturbations(self, data, **kwargs):
         w = kwargs.get("window_size", int(len(data) / 5))
         deg = kwargs.get("degree", 2)
@@ -225,8 +234,7 @@ class PolynomialNonStationaryPartitioner(partitioner.Partitioner):
         tmax = [0]
         xmin = [data[0]]
         tmin = [0]
-        lengs = [0]
-        tlengs = [0]
+
         l = len(data)
 
         for i in np.arange(0, l, w):
@@ -237,15 +245,9 @@ class PolynomialNonStationaryPartitioner(partitioner.Partitioner):
             tn = min(sample)
             xmin.append(tn)
             tmin.append(np.ravel(np.argwhere(data == tn)).tolist()[0])
-            lengs.append((tx - tn)/self.partitions)
-            tlengs.append(i)
-
 
         cmax = np.polyfit(tmax, xmax, deg=deg)
-        #cmax = cmax.tolist()
         cmin = np.polyfit(tmin, xmin, deg=deg)
-        #cmin = cmin.tolist()
-
 
         cmed = []
 
@@ -297,12 +299,37 @@ def fuzzify(inst, t, fuzzySets):
     return ret
 
 
-def fuzzySeries(data, fuzzySets):
+def fuzzySeries(data, fuzzySets, window_size=1, method='fuzzy'):
     fts = []
     for t, i in enumerate(data):
-        mv = np.array([fs.membership(i, t) for fs in fuzzySets])
-        ix = np.ravel(np.argwhere(mv > 0.0))
-        sets = [fuzzySets[i] for i in ix]
+        tdisp = window_index(t, window_size)
+        mv = np.array([fs.membership(i, tdisp) for fs in fuzzySets])
+        if len(mv) == 0:
+            sets = [check_bounds(i, fuzzySets, tdisp)]
+        else:
+            if method == 'fuzzy':
+                ix = np.ravel(np.argwhere(mv > 0.0))
+            elif method == 'maximum':
+                mx = max(mv)
+                ix = np.ravel(np.argwhere(mv == mx))
+            sets = [fuzzySets[i] for i in ix]
         fts.append(sets)
     return fts
 
+
+def window_index(t, window_size):
+    return t - (t % window_size)
+
+
+def check_bounds(data, sets, t):
+    if data < sets[0].get_lower(t):
+        return sets[0]
+    elif data > sets[-1].get_upper(t):
+        return sets[-1]
+
+
+def check_bounds_index(data, sets, t):
+    if data < sets[0].get_lower(t):
+        return 0
+    elif data > sets[-1].get_upper(t):
+        return len(sets) -1
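window_index quantizes a time index to the start of its perturbation window, so every point inside the same window reuses the same perturbated parameters, while check_bounds/check_bounds_index clamp out-of-range values to the first or last fuzzy set. A quick illustration of the arithmetic, assuming the pyFTS.nonstationary.common module path used elsewhere in this diff:

    from pyFTS.nonstationary import common

    assert common.window_index(5, 12) == 0     # 5 - (5 % 12)
    assert common.window_index(17, 12) == 12   # 17 - (17 % 12)
    assert common.window_index(24, 12) == 24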
@@ -1,5 +1,6 @@
 
 from pyFTS import flrg
+from pyFTS.nonstationary import common
 
 
 class NonStationaryFLRG(flrg.FLRG):
@@ -9,18 +10,41 @@ class NonStationaryFLRG(flrg.FLRG):
         self.LHS = LHS
         self.RHS = set()
 
-    def get_midpoint(self, t):
-        if self.midpoint is None:
-            tmp = [r.get_midpoint(t) for r in self.RHS]
-            self.midpoint = sum(tmp) / len(tmp)
-        return self.midpoint
+    def get_membership(self, data, t, window_size=1):
+        ret = 0.0
+        if isinstance(self.LHS, (list, set)):
+            assert len(self.LHS) == len(data)
+            ret = min([self.LHS[ct].membership(dat, common.window_index(t - (self.order - ct), window_size))
+                       for ct, dat in enumerate(data)])
+        else:
+            ret = self.LHS.membership(data, common.window_index(t, window_size))
+        return ret
+
+    def get_midpoint(self, t, window_size=1):
+        if len(self.RHS) > 0:
+            if isinstance(self.RHS, (list,set)):
+                tmp = [r.get_midpoint(common.window_index(t, window_size)) for r in self.RHS]
+            elif isinstance(self.RHS, dict):
+                tmp = [self.RHS[r].get_midpoint(common.window_index(t, window_size)) for r in self.RHS.keys()]
+            return sum(tmp) / len(tmp)
+        else:
+            return self.LHS[-1].get_midpoint(common.window_index(t, window_size))
 
-    def get_lower(self, t):
+    def get_lower(self, t, window_size=1):
         if self.lower is None:
-            self.lower = min([r.get_lower(t) for r in self.RHS])
+            if len(self.RHS) > 0:
+                self.lower = min([r.get_lower(common.window_index(t, window_size)) for r in self.RHS])
+            else:
+                self.lower = self.LHS[-1].get_lower(common.window_index(t, window_size))
+
         return self.lower
 
-    def get_upper(self, t):
+    def get_upper(self, t, window_size=1):
         if self.upper is None:
-            self.upper = min([r.get_upper(t) for r in self.RHS])
+            if len(self.RHS) > 0:
+                self.upper = min([r.get_upper(common.window_index(t, window_size)) for r in self.RHS])
+            else:
+                self.upper = self.LHS[-1].get_upper(common.window_index(t, window_size))
         return self.upper
@@ -2,6 +2,7 @@ import numpy as np
 from pyFTS.common import FuzzySet, FLR
 from pyFTS import fts, hofts
 from pyFTS.nonstationary import common, flrg
+from pyFTS import tree
 
 
 class HighOrderNonStationaryFLRG(flrg.NonStationaryFLRG):
@@ -37,31 +38,64 @@ class HighOrderNonStationaryFLRG(flrg.NonStationaryFLRG):
         return self.strLHS() + " -> " + tmp
 
 
-class HighOrderNonStationaryFTS(hofts.HighOrderFLRG):
+class HighOrderNonStationaryFTS(hofts.HighOrderFTS):
     """NonStationaryFTS Fuzzy Time Series"""
     def __init__(self, name, **kwargs):
-        super(HighOrderNonStationaryFTS, self).__init__(1, "HONSFTS " + name, **kwargs)
+        super(HighOrderNonStationaryFTS, self).__init__("HONSFTS " + name, **kwargs)
         self.name = "High Order Non Stationary FTS"
         self.detail = ""
         self.flrgs = {}
 
-    def generateFLRG(self, flrs):
+    def generate_flrg(self, data, **kwargs):
         flrgs = {}
-        l = len(flrs)
-        for k in np.arange(self.order + 1, l):
-            flrg = HighOrderNonStationaryFLRG(self.order)
-
-            for kk in np.arange(k - self.order, k):
-                flrg.appendLHS(flrs[kk].LHS)
-
-            if flrg.strLHS() in flrgs:
-                flrgs[flrg.strLHS()].appendRHS(flrs[k].RHS)
-            else:
-                flrgs[flrg.strLHS()] = flrg;
-                flrgs[flrg.strLHS()].appendRHS(flrs[k].RHS)
-        return (flrgs)
-
-    def train(self, data, sets=None,order=1,parameters=None):
+        l = len(data)
+        window_size = kwargs.get("window_size", 1)
+        for k in np.arange(self.order, l):
+            if self.dump: print("FLR: " + str(k))
+
+            sample = data[k - self.order: k]
+
+            disp = common.window_index(k, window_size)
+
+            rhs = [set for set in self.sets if set.membership(data[k], disp) > 0.0]
+
+            if len(rhs) == 0:
+                rhs = [common.check_bounds(data[k], self.sets, disp)]
+
+            lags = {}
+
+            for o in np.arange(0, self.order):
+                tdisp = common.window_index(k - (self.order - o), window_size)
+                lhs = [set for set in self.sets if set.membership(sample[o], tdisp) > 0.0]
+
+                if len(lhs) == 0:
+                    lhs = [common.check_bounds(sample[o], self.sets, tdisp)]
+
+                lags[o] = lhs
+
+            root = tree.FLRGTreeNode(None)
+
+            self.build_tree_without_order(root, lags, 0)
+
+            # Trace the possible paths
+            for p in root.paths():
+                flrg = HighOrderNonStationaryFLRG(self.order)
+                path = list(reversed(list(filter(None.__ne__, p))))
+
+                for c, e in enumerate(path, start=0):
+                    flrg.appendLHS(e)
+
+                if flrg.strLHS() not in flrgs:
+                    flrgs[flrg.strLHS()] = flrg;
+
+                for st in rhs:
+                    flrgs[flrg.strLHS()].appendRHS(st)
+
+        return flrgs
+
+    def train(self, data, sets=None, order=2, parameters=None):
+
+        self.order = order
 
         if sets is not None:
             self.sets = sets
@@ -69,43 +103,72 @@ class HighOrderNonStationaryFTS(hofts.HighOrderFLRG):
             self.sets = self.partitioner.sets
 
         ndata = self.doTransformations(data)
-        tmpdata = common.fuzzySeries(ndata, self.sets)
-        flrs = FLR.generateNonRecurrentFLRs(tmpdata)
-        self.flrgs = self.generateFLRG(flrs)
+        #tmpdata = common.fuzzySeries(ndata, self.sets)
+        #flrs = FLR.generateRecurrentFLRs(ndata)
+        window_size = parameters if parameters is not None else 1
+        self.flrgs = self.generate_flrg(ndata, window_size=window_size)
 
     def forecast(self, data, **kwargs):
 
         time_displacement = kwargs.get("time_displacement",0)
 
+        window_size = kwargs.get("window_size", 1)
+
         ndata = np.array(self.doTransformations(data))
 
         l = len(ndata)
 
         ret = []
 
-        for k in np.arange(0, l):
+        for k in np.arange(self.order, l+1):
 
             #print("input: " + str(ndata[k]))
 
-            tdisp = k + time_displacement
+            disp = common.window_index(k + time_displacement, window_size)
 
-            affected_sets = [ [set, set.membership(ndata[k], tdisp)]
-                              for set in self.sets if set.membership(ndata[k], tdisp) > 0.0]
-
-            if len(affected_sets) == 0:
-                if self.sets[0].get_lower(tdisp) > ndata[k]:
-                    affected_sets.append([self.sets[0], 1.0])
-                elif self.sets[-1].get_upper(tdisp) < ndata[k]:
-                    affected_sets.append([self.sets[-1], 1.0])
+            affected_flrgs = []
+            affected_flrgs_memberships = []
+
+            lags = {}
+
+            sample = ndata[k - self.order: k]
+
+            for ct, dat in enumerate(sample):
+                tdisp = common.window_index((k + time_displacement) - (self.order - ct), window_size)
+                sel = [ct for ct, set in enumerate(self.sets) if set.membership(dat, tdisp) > 0.0]
+
+                if len(sel) == 0:
+                    sel.append(common.check_bounds_index(dat, self.sets, tdisp))
+
+                lags[ct] = sel
+
+            # Build the tree with all possible paths
+
+            root = tree.FLRGTreeNode(None)
+
+            self.build_tree(root, lags, 0)
+
+            # Trace the possible paths and build the PFLRG's
+
+            for p in root.paths():
+                path = list(reversed(list(filter(None.__ne__, p))))
+                flrg = HighOrderNonStationaryFLRG(self.order)
+
+                for kk in path:
+                    flrg.appendLHS(self.sets[kk])
+
+                affected_flrgs.append(flrg)
+                affected_flrgs_memberships.append(flrg.get_membership(ndata[k - self.order: k], disp))
 
             #print(affected_sets)
 
             tmp = []
-            for aset in affected_sets:
-                if aset[0] in self.flrgs:
-                    tmp.append(self.flrgs[aset[0].name].get_midpoint(tdisp) * aset[1])
+            for ct, aset in enumerate(affected_flrgs):
+                if aset.strLHS() in self.flrgs:
+                    tmp.append(self.flrgs[aset.strLHS()].get_midpoint(tdisp) *
+                               affected_flrgs_memberships[ct])
                 else:
-                    tmp.append(aset[0].get_midpoint(tdisp) * aset[1])
+                    tmp.append(aset.LHS[-1].get_midpoint(tdisp))
 
             pto = sum(tmp)
 
@@ -32,8 +32,9 @@ class NonStationaryFTS(fts.FTS):
         self.name = "Non Stationary FTS"
         self.detail = ""
         self.flrgs = {}
+        self.method = kwargs.get("method",'fuzzy')
 
-    def generateFLRG(self, flrs):
+    def generate_flrg(self, flrs, **kwargs):
         flrgs = {}
         for flr in flrs:
             if flr.LHS.name in flrgs:
@@ -41,9 +42,9 @@ class NonStationaryFTS(fts.FTS):
             else:
                 flrgs[flr.LHS.name] = ConventionalNonStationaryFLRG(flr.LHS)
                 flrgs[flr.LHS.name].append(flr.RHS)
-        return (flrgs)
+        return flrgs
 
-    def train(self, data, sets=None,order=1,parameters=None):
+    def train(self, data, sets=None, order=1, parameters=None):
 
         if sets is not None:
             self.sets = sets
@@ -51,14 +52,19 @@ class NonStationaryFTS(fts.FTS):
             self.sets = self.partitioner.sets
 
         ndata = self.doTransformations(data)
-        tmpdata = common.fuzzySeries(ndata, self.sets)
-        flrs = FLR.generateNonRecurrentFLRs(tmpdata)
-        self.flrgs = self.generateFLRG(flrs)
+        window_size = parameters if parameters is not None else 1
+        tmpdata = common.fuzzySeries(ndata, self.sets, window_size, method=self.method)
+        #print([k[0].name for k in tmpdata])
+        flrs = FLR.generateRecurrentFLRs(tmpdata)
+        #print([str(k) for k in flrs])
+        self.flrgs = self.generate_flrg(flrs)
 
     def forecast(self, data, **kwargs):
 
         time_displacement = kwargs.get("time_displacement",0)
 
+        window_size = kwargs.get("window_size", 1)
+
         ndata = np.array(self.doTransformations(data))
 
         l = len(ndata)
@@ -69,25 +75,38 @@ class NonStationaryFTS(fts.FTS):
 
             #print("input: " + str(ndata[k]))
 
-            tdisp = k + time_displacement
+            tdisp = common.window_index(k + time_displacement, window_size)
 
-            affected_sets = [ [set, set.membership(ndata[k], tdisp)]
-                              for set in self.sets if set.membership(ndata[k], tdisp) > 0.0]
+            if self.method == 'fuzzy':
+                affected_sets = [ [set, set.membership(ndata[k], tdisp)]
+                                  for set in self.sets if set.membership(ndata[k], tdisp) > 0.0]
+            elif self.method == 'maximum':
+                mv = [set.membership(ndata[k], tdisp) for set in self.sets]
+                ix = np.ravel(np.argwhere(mv == max(mv)))
+                affected_sets = [self.sets[x] for x in ix]
 
             if len(affected_sets) == 0:
-                if self.sets[0].get_lower(tdisp) > ndata[k]:
-                    affected_sets.append([self.sets[0], 1.0])
-                elif self.sets[-1].get_upper(tdisp) < ndata[k]:
-                    affected_sets.append([self.sets[-1], 1.0])
+                if self.method == 'fuzzy':
+                    affected_sets.append([common.check_bounds(ndata[k], self.sets, tdisp), 1.0])
+                else:
+                    affected_sets.append(common.check_bounds(ndata[k], self.sets, tdisp))
 
             #print(affected_sets)
 
             tmp = []
-            for aset in affected_sets:
-                if aset[0] in self.flrgs:
-                    tmp.append(self.flrgs[aset[0].name].get_midpoint(tdisp) * aset[1])
-                else:
-                    tmp.append(aset[0].get_midpoint(tdisp) * aset[1])
+            if len(affected_sets) == 1 and self.method == 'fuzzy':
+                tmp.append(affected_sets[0][0].get_midpoint(tdisp))
+            else:
+                for aset in affected_sets:
+                    if self.method == 'fuzzy':
+                        if aset[0].name in self.flrgs:
+                            tmp.append(self.flrgs[aset[0].name].get_midpoint(tdisp) * aset[1])
+                    elif self.method == 'maximum':
+                        if aset.name in self.flrgs:
+                            tmp.append(self.flrgs[aset.name].get_midpoint(tdisp))
+                        else:
+                            tmp.append(aset.get_midpoint(tdisp))
 
             pto = sum(tmp)
 
@@ -103,6 +122,8 @@ class NonStationaryFTS(fts.FTS):
 
         time_displacement = kwargs.get("time_displacement",0)
 
+        window_size = kwargs.get("window_size", 1)
+
         ndata = np.array(self.doTransformations(data))
 
         l = len(ndata)
@@ -111,7 +132,7 @@ class NonStationaryFTS(fts.FTS):
 
         for k in np.arange(0, l):
 
-            tdisp = k + time_displacement
+            tdisp = common.window_index(k + time_displacement, window_size)
 
             affected_sets = [ [set.name, set.membership(ndata[k], tdisp)]
                               for set in self.sets if set.membership(ndata[k], tdisp) > 0.0]
@@ -6,29 +6,48 @@ import matplotlib.pyplot as plt
 from pyFTS.common import Membership, Util
 
 
-def plot_sets(uod, sets, start=0, end=10, tam=[5, 5], colors=None, save=False, file=None):
+def plot_sets(sets, start=0, end=10, step=1, tam=[5, 5], colors=None,
+              save=False, file=None, axes=None, data=None, window_size = 1, only_lines=False):
+
+    range = np.arange(start,end,step)
     ticks = []
-    fig, axes = plt.subplots(nrows=1, ncols=1, figsize=tam)
-    for t in np.arange(start,end,1):
-        for ct, set in enumerate(sets):
-            set.membership(0, t)
-            param = set.perturbated_parameters[t]
-
-            if set.mf == Membership.trimf:
-                if t == start:
-                    axes.plot([t, t+1, t], param, label=set.name)
-                else:
-                    axes.plot([t, t + 1, t], param)
-
-            ticks.extend(["t+"+str(t),""])
+    if axes is None:
+        fig, axes = plt.subplots(nrows=1, ncols=1, figsize=tam)
+
+    for ct, set in enumerate(sets):
+        if not only_lines:
+            for t in range:
+                tdisp = t - (t % window_size)
+                set.membership(0, tdisp)
+                param = set.perturbated_parameters[tdisp]
+
+                if set.mf == Membership.trimf:
+                    if t == start:
+                        line = axes.plot([t, t+1, t], param, label=set.name)
+                        set.metadata['color'] = line[0].get_color()
+                    else:
+                        axes.plot([t, t + 1, t], param,c=set.metadata['color'])
+
+                ticks.extend(["t+"+str(t),""])
+        else:
+            tmp = []
+            for t in range:
+                tdisp = t - (t % window_size)
+                set.membership(0, tdisp)
+                param = set.perturbated_parameters[tdisp]
+                tmp.append(np.polyval(param, tdisp))
+            axes.plot(range, tmp, ls="--", c="blue")
 
     axes.set_ylabel("Universe of Discourse")
     axes.set_xlabel("Time")
-    plt.xticks([k for k in np.arange(0,end,1)], ticks, rotation='vertical')
+    plt.xticks([k for k in range], ticks, rotation='vertical')
 
     handles0, labels0 = axes.get_legend_handles_labels()
     lgd = axes.legend(handles0, labels0, loc=2, bbox_to_anchor=(1, 1))
 
+    if data is not None:
+        axes.plot(np.arange(start, start + len(data), 1), data,c="black")
+
     plt.tight_layout()
 
     Util.showAndSaveImage(fig, file, save)
@@ -79,4 +79,7 @@ class Partitioner(object):
             ax.plot(tmpx, tmpy)
 
     def __str__(self):
-        return self.name + ":\n ".join([str(a) + "\n" for a in self.sets])
+        tmp = self.name + ":\n"
+        for a in self.sets:
+            tmp += str(a)+ "\n"
+        return tmp
@@ -127,9 +127,9 @@ class ProbabilisticWeightedFTS(ifts.IntervalFTS):
             flrs = FLR.generateRecurrentFLRs(tmpdata)
             self.flrgs = self.generateFLRG(flrs)
         else:
-            self.flrgs = self.generateFLRGfuzzy(data)
+            self.flrgs = self.generate_flrg(data)
 
-    def generateFLRGfuzzy(self, data):
+    def generate_flrg(self, data):
         flrgs = {}
         l = len(data)
         for k in np.arange(self.order, l):
@@ -175,7 +175,7 @@ class ProbabilisticWeightedFTS(ifts.IntervalFTS):
 
         self.global_frequency_count = self.global_frequency_count + tmp_fq
 
-        return (flrgs)
+        return flrgs
 
     def generateFLRG(self, flrs):
         flrgs = {}
@@ -274,18 +274,6 @@ class ProbabilisticWeightedFTS(ifts.IntervalFTS):
         ret = sum(np.array([pi * s.lower for s in flrg.LHS]))
         return ret
 
-    def build_tree_without_order(self, node, lags, level):
-
-        if level not in lags:
-            return
-
-        for s in lags[level]:
-            node.appendChild(tree.FLRGTreeNode(s))
-
-        for child in node.getChildren():
-            self.build_tree_without_order(child, lags, level + 1)
-
-
     def forecast(self, data, **kwargs):
 
         ndata = np.array(self.doTransformations(data))
@@ -299,19 +287,17 @@ class ProbabilisticWeightedFTS(ifts.IntervalFTS):
             # print(k)
 
             affected_flrgs = []
-            affected_rhs = []
             affected_flrgs_memberships = []
             norms = []
 
             mp = []
 
             # Find the sets which membership > 0 for each lag
-            count = 0
             lags = {}
             if self.order > 1:
                 subset = ndata[k - (self.order - 1): k + 1]
 
-                for instance in subset:
+                for count, instance in enumerate(subset):
                     mb = FuzzySet.fuzzyInstance(instance, self.sets)
                     tmp = np.argwhere(mb)
                     idx = np.ravel(tmp)  # flatten the array
@@ -325,7 +311,6 @@ class ProbabilisticWeightedFTS(ifts.IntervalFTS):
                         raise Exception(instance)
 
                     lags[count] = idx
-                    count = count + 1
 
                 # Build the tree with all possible paths
 
@@ -346,7 +331,7 @@ class ProbabilisticWeightedFTS(ifts.IntervalFTS):
                     affected_flrgs.append(flrg)
 
                     # Find the general membership of FLRG
-                    affected_flrgs_memberships.append(min(self.get_sequence_membership(subset, flrg.LHS)))
+                    affected_flrgs_memberships.append(flrg.get_membership())
 
             else:
 
@@ -1,7 +1,7 @@
 import os
 import numpy as np
 from pyFTS.common import Membership
-from pyFTS.nonstationary import common,perturbation,util,nsfts
+from pyFTS.nonstationary import common,perturbation,util,nsfts, honsfts
 from pyFTS.partitioners import Grid
 import matplotlib.pyplot as plt
 import pandas as pd
@@ -50,27 +50,33 @@ print(tmp)
 passengers = pd.read_csv("DataSets/AirPassengers.csv", sep=",")
 passengers = np.array(passengers["Passengers"])
 
-ts = 80
+ts = 100
+ws=12
 
 trainp = passengers[:ts]
 testp = passengers[ts:]
 
-tmp_fsp = Grid.GridPartitioner(trainp[:50], 10)
+tmp_fsp = Grid.GridPartitioner(trainp[:ws], 15)
 
-fsp = common.PolynomialNonStationaryPartitioner(trainp, tmp_fsp, window_size=20, degree=1)
+fsp = common.PolynomialNonStationaryPartitioner(trainp, tmp_fsp, window_size=ws, degree=1)
 
-nsftsp = nsfts.NonStationaryFTS("", partitioner=fsp)
-
-nsftsp.train(trainp[:50])
+#nsftsp = honsfts.HighOrderNonStationaryFTS("", partitioner=fsp)
+nsftsp = nsfts.NonStationaryFTS("", partitioner=fsp, method='fuzzy')
+
+#nsftsp.train(trainp, order=1, parameters=ws)
 
 print(fsp)
 
-print(nsftsp)
+#print(nsftsp)
 
-tmpp = nsftsp.forecast(testp, time_displacement=ts)
+#tmpp = nsftsp.forecast(passengers[55:65], time_displacement=55, window_size=ws)
 
-print(testp)
-print(tmpp)
+#print(passengers[100:120])
+#print(tmpp)
+
+#util.plot_sets(fsp.sets,tam=[10, 5], start=0, end=100, step=2, data=passengers[:100],
+#                window_size=ws, only_lines=False)
 
 #fig, axes = plt.subplots(nrows=1, ncols=1, figsize=[15,5])
 