Bugfix on forecast_ahead

This commit is contained in:
Petrônio Cândido 2019-02-19 14:21:42 -03:00
parent b73b369ed4
commit fc1f295150
5 changed files with 49 additions and 22 deletions

View File

@ -223,8 +223,6 @@ class FTS(object):
:return: a list with the forecasted values
"""
if isinstance(data, np.ndarray):
data = data.tolist()

View File

@ -135,7 +135,7 @@ class EnsembleFTS(fts.FTS):
ret = []
for k in np.arange(self.order, l+1):
sample = data[k - self.order : k]
sample = data[k - self.max_lag : k]
tmp = self.get_models_forecasts(sample)
point = self.get_point(tmp)
ret.append(point)

View File

@ -195,6 +195,8 @@ class HighOrderFTS(fts.FTS):
if l < self.max_lag:
return ndata
elif l == self.max_lag:
l += 1
for k in np.arange(self.max_lag, l):

View File

@ -1,17 +1,19 @@
'''
Incremental Ensemble of FTS methods
Time Variant/Incremental Ensemble of FTS methods
'''
import numpy as np
import pandas as pd
from pyFTS.common import FuzzySet, FLR, fts, flrg
from pyFTS.partitioners import Grid
from pyFTS.models import hofts
from pyFTS.models.ensemble import ensemble
class IncrementalEnsembleFTS(ensemble.EnsembleFTS):
"""
Ensemble FTS
Time Variant/Incremental Ensemble of FTS methods
"""
def __init__(self, **kwargs):
super(IncrementalEnsembleFTS, self).__init__(**kwargs)
@ -20,16 +22,12 @@ class IncrementalEnsembleFTS(ensemble.EnsembleFTS):
self.order = kwargs.get('order',1)
self.order = kwargs.get('order', 1)
self.partitioner_method = kwargs.get('partitioner_method', Grid.GridPartitioner)
"""The partitioner method to be called when a new model is build"""
self.partitioner_params = kwargs.get('partitioner_params', {'npart': 10})
"""The partitioner method parameters"""
self.partitioner = None
"""The most recent trained partitioner"""
self.fts_method = kwargs.get('fts_method', None)
self.fts_method = kwargs.get('fts_method', hofts.WeightedHighOrderFTS)
"""The FTS method to be called when a new model is build"""
self.fts_params = kwargs.get('fts_params', {})
"""The FTS method specific parameters"""
@ -39,19 +37,51 @@ class IncrementalEnsembleFTS(ensemble.EnsembleFTS):
self.batch_size = kwargs.get('batch_size', 10)
"""The batch interval between each retraining"""
self.is_high_order = True
self.uod_clip = False
self.max_lag = self.window_length + self.max_lag
#self.max_lag = self.window_length + self.max_lag
def train(self, data, **kwargs):
self.partitioner = self.partitioner_method(data=data, **self.partitioner_params)
self.model = self.fts_method(partitioner=self.partitioner, **self.fts_params)
partitioner = self.partitioner_method(data=data, **self.partitioner_params)
model = self.fts_method(partitioner=partitioner, **self.fts_params)
if self.model.is_high_order:
self.model.order = self.model = self.fts_method(partitioner=self.partitioner,
order=self.order, **self.fts_params)
self.model.fit(data, **kwargs)
self.shortname = self.model.shortname
self.model = self.fts_method(partitioner=partitioner, order=self.order, **self.fts_params)
model.fit(data, **kwargs)
self.models.pop(0)
self.models.append(model)
def _point_smoothing(self, forecasts):
l = len(self.models)
ret = np.nansum([np.exp(-(l-k)) * forecasts[k] for k in range(l)])
return ret
def forecast(self, data, **kwargs):
    """
    Rolling point forecast that incrementally retrains the ensemble while
    walking through ``data``.

    For each position k (from ``max_lag`` to len(data)-1) the method keeps a
    sliding training window, retrains every ``batch_size`` steps once the
    window is full, and emits the exponentially smoothed combination of the
    current models' forecasts (see ``_point_smoothing``).

    :param data: time series values; must contain at least ``max_lag`` samples
    :param kwargs: forwarded to ``self.train`` on each retraining
    :return: list of point forecasts, one per position in ``[max_lag, len(data))``
    """
    l = len(data)
    data_window = []
    ret = []
    for k in np.arange(self.max_lag, l):
        # NOTE(review): the window is fed data[k - max_lag], i.e. it trails the
        # forecasting position by max_lag samples — confirm this lag is intended.
        data_window.append(data[k - self.max_lag])
        # Bound the training window: drop the oldest sample once k reaches
        # window_length. NOTE(review): because appends start at k == max_lag,
        # the steady-state window holds window_length - max_lag + 1 samples,
        # not window_length — verify against the class's training contract.
        if k >= self.window_length:
            data_window.pop(0)
        # Periodic retraining on the current window; self.train (defined
        # elsewhere in this class) rotates self.models.
        if k % self.batch_size == 0 and k >= self.window_length:
            self.train(data_window, **kwargs)
        # Forecast from the last max_lag observations preceding position k.
        sample = data[k - self.max_lag: k]
        tmp = self.get_models_forecasts(sample)
        point = self._point_smoothing(tmp)
        ret.append(point)
    return ret

View File

@ -31,7 +31,4 @@ y = [np.sin(k) for k in x]
part = Grid.GridPartitioner(data=y, npart=35)
model = hofts.HighOrderFTS(order=2, partitioner=part)
model.fit(y)
forecasts = model.predict(y)
print([round(k,2) for k in y[2:]])
print([round(k,2) for k in forecasts[:-1]])
forecasts = model.predict(y, steps_ahead=10)