Bugfix on forecast_ahead

Petrônio Cândido 2019-02-19 14:21:42 -03:00
parent b73b369ed4
commit fc1f295150
5 changed files with 49 additions and 22 deletions


@@ -223,8 +223,6 @@ class FTS(object):
         :return: a list with the forecasted values
         """

         if isinstance(data, np.ndarray):
             data = data.tolist()
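
For context, forecast_ahead implements the usual recursive multi-step scheme: forecast one point from the last max_lag observations, append it to the history, and repeat, which is why an np.ndarray input is first converted to a mutable list. A rough sketch of that loop, not the library's exact code, with a made-up helper name:

import numpy as np

def forecast_ahead_sketch(model, data, steps, **kwargs):
    # Recursive multi-step forecasting: each new forecast is fed back as input.
    data = list(data)                           # same reason as the tolist() above
    ret = []
    for _ in range(steps):
        tmp = model.forecast(data[-model.max_lag:], **kwargs)
        point = tmp[-1] if isinstance(tmp, (list, np.ndarray)) else tmp
        ret.append(point)
        data.append(point)                      # extend the history with the forecast
    return ret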


@@ -135,7 +135,7 @@ class EnsembleFTS(fts.FTS):
         ret = []

         for k in np.arange(self.order, l+1):
-            sample = data[k - self.order : k]
+            sample = data[k - self.max_lag : k]
             tmp = self.get_models_forecasts(sample)
             point = self.get_point(tmp)
             ret.append(point)
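
The sample window handed to get_models_forecasts has to span max_lag past values, the largest lag required by any member model, not just the ensemble's own order; otherwise higher-order members receive too short a sample. A toy illustration with made-up numbers:

data = list(range(10))           # toy series
order, max_lag = 1, 3            # hypothetical ensemble whose members need up to 3 lags

k = 5
print(data[k - order: k])        # [4]        -- too short for a member that needs 3 lags
print(data[k - max_lag: k])      # [2, 3, 4]  -- enough history for every member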


@@ -195,6 +195,8 @@ class HighOrderFTS(fts.FTS):
         if l < self.max_lag:
             return ndata
+        elif l == self.max_lag:
+            l += 1

         for k in np.arange(self.max_lag, l):
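
The two added lines handle the boundary case where the input length equals max_lag: without the bump, np.arange(self.max_lag, l) is empty and the method returns no forecast even though there is exactly enough history for one step. A quick check of that edge case with made-up values:

import numpy as np

max_lag = 2
l = 2                                  # len(data) == max_lag
print(list(np.arange(max_lag, l)))     # []  -> loop body never runs, nothing is forecasted
if l == max_lag:                       # the fix from the hunk above
    l += 1
print(list(np.arange(max_lag, l)))     # [2] -> one iteration, one forecasted value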


@@ -1,17 +1,19 @@
 '''
-Incremental Ensemble of FTS methods
+Time Variant/Incremental Ensemble of FTS methods
 '''

 import numpy as np
 import pandas as pd

 from pyFTS.common import FuzzySet, FLR, fts, flrg
+from pyFTS.partitioners import Grid
+from pyFTS.models import hofts
 from pyFTS.models.ensemble import ensemble


 class IncrementalEnsembleFTS(ensemble.EnsembleFTS):
     """
-    Ensemble FTS
+    Time Variant/Incremental Ensemble of FTS methods
     """
     def __init__(self, **kwargs):
         super(IncrementalEnsembleFTS, self).__init__(**kwargs)
@@ -20,16 +22,12 @@ class IncrementalEnsembleFTS(ensemble.EnsembleFTS):
-        self.order = kwargs.get('order',1)
+        self.order = kwargs.get('order', 1)

         self.partitioner_method = kwargs.get('partitioner_method', Grid.GridPartitioner)
         """The partitioner method to be called when a new model is build"""
         self.partitioner_params = kwargs.get('partitioner_params', {'npart': 10})
         """The partitioner method parameters"""
-        self.partitioner = None
-        """The most recent trained partitioner"""

-        self.fts_method = kwargs.get('fts_method', None)
+        self.fts_method = kwargs.get('fts_method', hofts.WeightedHighOrderFTS)
         """The FTS method to be called when a new model is build"""
         self.fts_params = kwargs.get('fts_params', {})
         """The FTS method specific parameters"""
@@ -39,19 +37,51 @@ class IncrementalEnsembleFTS(ensemble.EnsembleFTS):
         self.batch_size = kwargs.get('batch_size', 10)
         """The batch interval between each retraining"""

         self.is_high_order = True
         self.uod_clip = False

-        self.max_lag = self.window_length + self.max_lag
+        #self.max_lag = self.window_length + self.max_lag

     def train(self, data, **kwargs):

-        self.partitioner = self.partitioner_method(data=data, **self.partitioner_params)
-        self.model = self.fts_method(partitioner=self.partitioner, **self.fts_params)
+        partitioner = self.partitioner_method(data=data, **self.partitioner_params)
+        model = self.fts_method(partitioner=partitioner, **self.fts_params)
         if self.model.is_high_order:
-            self.model.order = self.model = self.fts_method(partitioner=self.partitioner,
-                                                            order=self.order, **self.fts_params)
-        self.model.fit(data, **kwargs)
-        self.shortname = self.model.shortname
+            self.model = self.fts_method(partitioner=partitioner, order=self.order, **self.fts_params)
+        model.fit(data, **kwargs)
+
+        self.models.pop(0)
+        self.models.append(model)
+
+    def _point_smoothing(self, forecasts):
+        l = len(self.models)
+
+        ret = np.nansum([np.exp(-(l-k)) * forecasts[k] for k in range(l)])
+
+        return ret
+
+    def forecast(self, data, **kwargs):
+        l = len(data)
+
+        data_window = []
+        ret = []
+
+        for k in np.arange(self.max_lag, l):
+
+            data_window.append(data[k - self.max_lag])
+
+            if k >= self.window_length:
+                data_window.pop(0)
+
+            if k % self.batch_size == 0 and k >= self.window_length:
+                self.train(data_window, **kwargs)
+
+            sample = data[k - self.max_lag: k]
+            tmp = self.get_models_forecasts(sample)
+            point = self._point_smoothing(tmp)
+            ret.append(point)
+
+        return ret
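
_point_smoothing pools the member forecasts with unnormalized exponential weights exp(-(l-k)), so the most recently trained model (the last one appended to self.models) contributes the most to the returned point. A small numeric check with hypothetical member forecasts:

import numpy as np

forecasts = [10.0, 11.0, 12.0]                   # hypothetical member outputs, oldest model first
l = len(forecasts)

weights = [np.exp(-(l - k)) for k in range(l)]   # newest model (k = l-1) gets the largest weight
print([round(w, 3) for w in weights])            # [0.05, 0.135, 0.368]  (not normalized)
print(round(np.nansum([w * f for w, f in zip(weights, forecasts)]), 2))   # 6.4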


@@ -31,7 +31,4 @@ y = [np.sin(k) for k in x]
 part = Grid.GridPartitioner(data=y, npart=35)
 model = hofts.HighOrderFTS(order=2, partitioner=part)
 model.fit(y)
-forecasts = model.predict(y)
-
-print([round(k,2) for k in y[2:]])
-print([round(k,2) for k in forecasts[:-1]])
+forecasts = model.predict(y, steps_ahead=10)
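
With the multi-step bug fixed, the test now exercises predict with steps_ahead directly. A minimal standalone sketch of the updated script, assuming pyFTS is installed; the x range is an assumption, since the line defining it sits outside this hunk:

import numpy as np
from pyFTS.partitioners import Grid
from pyFTS.models import hofts

x = np.arange(0, 10, 0.1)               # assumed toy domain, not shown in the diff
y = [np.sin(k) for k in x]

part = Grid.GridPartitioner(data=y, npart=35)
model = hofts.HighOrderFTS(order=2, partitioner=part)
model.fit(y)

forecasts = model.predict(y, steps_ahead=10)
print(len(forecasts))                   # expected: 10 point forecasts, one per step ahead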