diff --git a/pyFTS/common/fts.py b/pyFTS/common/fts.py index 5168038..62e269a 100644 --- a/pyFTS/common/fts.py +++ b/pyFTS/common/fts.py @@ -223,13 +223,11 @@ class FTS(object): :return: a list with the forecasted values """ - - if isinstance(data, np.ndarray): data = data.tolist() ret = [] - for k in np.arange(0,steps): + for k in np.arange(0, steps): tmp = self.forecast(data[-self.max_lag:], **kwargs) if isinstance(tmp,(list, np.ndarray)): diff --git a/pyFTS/models/ensemble/ensemble.py b/pyFTS/models/ensemble/ensemble.py index af65b31..07cd555 100644 --- a/pyFTS/models/ensemble/ensemble.py +++ b/pyFTS/models/ensemble/ensemble.py @@ -135,7 +135,7 @@ class EnsembleFTS(fts.FTS): ret = [] - for k in np.arange(self.order, l+1): - sample = data[k - self.order : k] + for k in np.arange(self.max_lag, l+1): + sample = data[k - self.max_lag : k] tmp = self.get_models_forecasts(sample) point = self.get_point(tmp) ret.append(point) diff --git a/pyFTS/models/hofts.py b/pyFTS/models/hofts.py index 1c203a5..e89f754 100644 --- a/pyFTS/models/hofts.py +++ b/pyFTS/models/hofts.py @@ -195,6 +195,8 @@ class HighOrderFTS(fts.FTS): if l < self.max_lag: return ndata + elif l == self.max_lag: + l += 1 for k in np.arange(self.max_lag, l): diff --git a/pyFTS/models/incremental/IncrementalEnsemble.py b/pyFTS/models/incremental/IncrementalEnsemble.py index 7d8278e..2ff5041 100644 --- a/pyFTS/models/incremental/IncrementalEnsemble.py +++ b/pyFTS/models/incremental/IncrementalEnsemble.py @@ -1,17 +1,19 @@ ''' -Incremental Ensemble of FTS methods +Time Variant/Incremental Ensemble of FTS methods ''' import numpy as np import pandas as pd from pyFTS.common import FuzzySet, FLR, fts, flrg +from pyFTS.partitioners import Grid +from pyFTS.models import hofts from pyFTS.models.ensemble import ensemble class IncrementalEnsembleFTS(ensemble.EnsembleFTS): """ - Ensemble FTS + Time Variant/Incremental Ensemble of FTS methods """ def __init__(self, **kwargs): super(IncrementalEnsembleFTS, self).__init__(**kwargs) @@ -20,16 +22,12 @@ class
IncrementalEnsembleFTS(ensemble.EnsembleFTS): self.order = kwargs.get('order',1) - self.order = kwargs.get('order', 1) - self.partitioner_method = kwargs.get('partitioner_method', Grid.GridPartitioner) """The partitioner method to be called when a new model is build""" self.partitioner_params = kwargs.get('partitioner_params', {'npart': 10}) """The partitioner method parameters""" - self.partitioner = None - """The most recent trained partitioner""" - self.fts_method = kwargs.get('fts_method', None) + self.fts_method = kwargs.get('fts_method', hofts.WeightedHighOrderFTS) """The FTS method to be called when a new model is build""" self.fts_params = kwargs.get('fts_params', {}) """The FTS method specific parameters""" @@ -39,19 +37,52 @@ class IncrementalEnsembleFTS(ensemble.EnsembleFTS): self.batch_size = kwargs.get('batch_size', 10) """The batch interval between each retraining""" + self.is_high_order = True self.uod_clip = False - self.max_lag = self.window_length + self.max_lag + #self.max_lag = self.window_length + self.max_lag def train(self, data, **kwargs): - self.partitioner = self.partitioner_method(data=data, **self.partitioner_params) - self.model = self.fts_method(partitioner=self.partitioner, **self.fts_params) + partitioner = self.partitioner_method(data=data, **self.partitioner_params) + model = self.fts_method(partitioner=partitioner, **self.fts_params) - if self.model.is_high_order: - self.model.order = self.model = self.fts_method(partitioner=self.partitioner, - order=self.order, **self.fts_params) - self.model.fit(data, **kwargs) - self.shortname = self.model.shortname + if model.is_high_order: + model = self.fts_method(partitioner=partitioner, order=self.order, **self.fts_params) + model.fit(data, **kwargs) + if len(self.models) > 0: + self.models.pop(0) + self.models.append(model) + + def _point_smoothing(self, forecasts): + l = len(self.models) + + ret = np.nansum([np.exp(-(l-k)) * forecasts[k] for k in range(l)]) + + return ret + + def forecast(self, data, **kwargs): + l = len(data) + +
data_window = [] + + ret = [] + + for k in np.arange(self.max_lag, l): + + data_window.append(data[k - self.max_lag]) + + if k >= self.window_length: + data_window.pop(0) + + if k % self.batch_size == 0 and k >= self.window_length: + self.train(data_window, **kwargs) + + sample = data[k - self.max_lag: k] + tmp = self.get_models_forecasts(sample) + point = self._point_smoothing(tmp) + ret.append(point) + + return ret diff --git a/pyFTS/tests/general.py b/pyFTS/tests/general.py index d529f2c..c076458 100644 --- a/pyFTS/tests/general.py +++ b/pyFTS/tests/general.py @@ -31,7 +31,4 @@ y = [np.sin(k) for k in x] part = Grid.GridPartitioner(data=y, npart=35) model = hofts.HighOrderFTS(order=2, partitioner=part) model.fit(y) -forecasts = model.predict(y) - -print([round(k,2) for k in y[2:]]) -print([round(k,2) for k in forecasts[:-1]]) \ No newline at end of file +forecasts = model.predict(y, steps_ahead=10)