Refactoring to centralize apply_transformations and apply_inverse_transformations inside the fts.FTS predict and fit methods
parent 581f404b18
commit 50c2b501b1
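The diff below moves every call to apply_transformations / apply_inverse_transformations out of the concrete models and into the FTS base class, so raw data is transformed once on the way into fit()/predict() and inverse-transformed once on the way out. A caller-side sketch of the intended flow follows; the concrete model class, its constructor arguments and the placeholder data are illustrative assumptions, not part of this commit:

    import numpy as np
    from pyFTS.models import chen   # any concrete FTS model; chen.ConventionalFTS is only an example

    train_data = np.random.normal(10, 2, 200)   # placeholder series, for illustration only
    test_data = np.random.normal(10, 2, 50)

    model = chen.ConventionalFTS("Chen")   # constructor signature assumed; fuzzy set / partitioner setup omitted
    model.fit(train_data)                  # fit() now applies the configured transformations, then calls train()

    point = model.predict(test_data, type='point')                  # forecast + centralized inverse transformation
    interval = model.predict(test_data, type='interval')            # same flow for interval forecasts
    ahead = model.predict(test_data, type='point', steps_ahead=5)   # and for multi-step-ahead forecasts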
@@ -70,6 +70,11 @@ class FTS(object):
         :return: a numpy array with the forecasted data
         """
 
+        if self.is_multivariate:
+            ndata = data
+        else:
+            ndata = self.apply_transformations(data)
+
         if 'distributed' in kwargs:
             distributed = kwargs.pop('distributed')
         else:
@@ -85,17 +90,17 @@ class FTS(object):
             steps_ahead = kwargs.get("steps_ahead", None)
 
             if type == 'point' and steps_ahead == None:
-                return self.forecast(data, **kwargs)
+                ret = self.forecast(ndata, **kwargs)
             elif type == 'point' and steps_ahead != None:
-                return self.forecast_ahead(data, steps_ahead, **kwargs)
+                ret = self.forecast_ahead(ndata, steps_ahead, **kwargs)
             elif type == 'interval' and steps_ahead == None:
-                return self.forecast_interval(data, **kwargs)
+                ret = self.forecast_interval(ndata, **kwargs)
             elif type == 'interval' and steps_ahead != None:
-                return self.forecast_ahead_interval(data, steps_ahead, **kwargs)
+                ret = self.forecast_ahead_interval(ndata, steps_ahead, **kwargs)
             elif type == 'distribution' and steps_ahead == None:
-                return self.forecast_distribution(data, **kwargs)
+                ret = self.forecast_distribution(ndata, **kwargs)
             elif type == 'distribution' and steps_ahead != None:
-                return self.forecast_ahead_distribution(data, steps_ahead, **kwargs)
+                ret = self.forecast_ahead_distribution(ndata, steps_ahead, **kwargs)
             else:
                 raise ValueError('The argument \'type\' has an unknown value.')
 
@@ -104,7 +109,13 @@ class FTS(object):
             nodes = kwargs.get("nodes", ['127.0.0.1'])
             num_batches = kwargs.get('num_batches', 10)
 
-            return Util.distributed_predict(self, kwargs, nodes, data, num_batches)
+            ret = Util.distributed_predict(self, kwargs, nodes, ndata, num_batches)
+
+        if type != 'distribution' and not self.is_multivariate:
+            interval = True if type == 'interval' else False
+            ret = self.apply_inverse_transformations(ret, params=[data[self.order - 1:]], interval=interval)
+
+        return ret
 
 
     def forecast(self, data, **kwargs):
@@ -185,7 +196,7 @@ class FTS(object):
         """
         pass
 
-    def fit(self, data, **kwargs):
+    def fit(self, ndata, **kwargs):
         """
 
         :param data: the training time series
@@ -204,6 +215,11 @@ class FTS(object):
 
         import datetime
 
+        if self.is_multivariate:
+            data = ndata
+        else:
+            data = self.apply_transformations(ndata)
+
         dump = kwargs.get('dump', None)
 
         num_batches = kwargs.get('num_batches', None)
@@ -245,11 +261,11 @@ class FTS(object):
                 if dump == 'time':
                     print("[{0: %H:%M:%S}] Starting batch ".format(datetime.datetime.now()) + str(bcount))
                 if self.is_multivariate:
-                    ndata = data.iloc[ct - self.order:ct + batch_size]
+                    mdata = data.iloc[ct - self.order:ct + batch_size]
                 else:
-                    ndata = data[ct - self.order : ct + batch_size]
+                    mdata = data[ct - self.order : ct + batch_size]
 
-                self.train(ndata, **kwargs)
+                self.train(mdata, **kwargs)
 
                 if batch_save:
                     Util.persist_obj(self,file_path)
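With the transformation handling centralized in predict() and fit(), the remaining hunks all follow one pattern: each model's train() and forecast() now receive data that the base class has already transformed, and the trailing apply_inverse_transformations calls disappear because predict() performs the inverse step once. A schematic subclass skeleton illustrating that contract (SomeFTS is hypothetical, not code from this commit; bodies elided):

    from pyFTS.common import fts

    class SomeFTS(fts.FTS):
        def train(self, data, **kwargs):
            # 'data' arrives already transformed by fit(); build the model from it directly
            # and do not call self.apply_transformations() here any more.
            ...

        def forecast(self, ndata, **kwargs):
            # 'ndata' is the transformed series; return forecasts in the transformed space.
            # predict() applies apply_inverse_transformations() to the returned values.
            ret = []
            ...
            return ret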
@@ -15,7 +15,7 @@ class ConventionalFLRG(flrg.FLRG):
         self.LHS = LHS
         self.RHS = set()
 
-    def get_key(self):
+    def get_key(self, sets):
         return sets[self.LHS].name
 
     def append_rhs(self, c, **kwargs):
@@ -50,14 +50,11 @@ class ConventionalFTS(fts.FTS):
     def train(self, data, **kwargs):
         if kwargs.get('sets', None) is not None:
             self.sets = kwargs.get('sets', None)
-        ndata = self.apply_transformations(data)
-        tmpdata = FuzzySet.fuzzyfy_series_old(ndata, self.sets)
+        tmpdata = FuzzySet.fuzzyfy_series_old(data, self.sets)
         flrs = FLR.generate_non_recurrent_flrs(tmpdata)
         self.generate_flrg(flrs)
 
-    def forecast(self, data, **kwargs):
-
-        ndata = np.array(self.apply_transformations(data))
+    def forecast(self, ndata, **kwargs):
 
         l = len(ndata)
 
@@ -76,6 +73,4 @@ class ConventionalFTS(fts.FTS):
 
             ret.append(_flrg.get_midpoint(self.sets))
 
-        ret = self.apply_inverse_transformations(ret, params=[data])
-
         return ret
@@ -93,8 +93,6 @@ class HighOrderFTS(fts.FTS):
 
     def train(self, data, **kwargs):
 
-        data = self.apply_transformations(data, updateUoD=True)
-
         self.order = kwargs.get('order',2)
 
         if kwargs.get('sets', None) is not None:
@@ -102,16 +100,14 @@ class HighOrderFTS(fts.FTS):
 
         self.generate_flrg(data)
 
-    def forecast(self, data, **kwargs):
+    def forecast(self, ndata, **kwargs):
 
         ret = []
 
-        l = len(data)
+        l = len(ndata)
 
         if l <= self.order:
-            return data
-
-        ndata = self.apply_transformations(data)
+            return ndata
 
         for k in np.arange(self.order, l+1):
             flrgs = self.generate_lhs_flrg(ndata[k - self.order: k])
@@ -126,6 +122,4 @@ class HighOrderFTS(fts.FTS):
 
             ret.append(np.nanmean(tmp))
 
-        ret = self.apply_inverse_transformations(ret, params=[data[self.order - 1:]])
-
         return ret
@@ -18,12 +18,10 @@ class HighOrderFTS(fts.FTS):
         self.shortname = "Hwang" + name
         self.detail = "Hwang"
 
-    def forecast(self, data, **kwargs):
+    def forecast(self, ndata, **kwargs):
 
         ordered_sets = FuzzySet.set_ordered(self.sets)
 
-        ndata = self.apply_transformations(data)
-
         l = len(self.sets)
 
         cn = np.array([0.0 for k in range(l)])
@@ -52,8 +50,6 @@ class HighOrderFTS(fts.FTS):
                 count += 1.0
             ret.append(out / count)
 
-        ret = self.apply_inverse_transformations(ret, params=[data[self.order - 1:]])
-
         return ret
 
     def train(self, data, **kwargs):
@@ -44,16 +44,14 @@ class IntervalFTS(hofts.HighOrderFTS):
         return mb
 
 
-    def forecast_interval(self, data, **kwargs):
+    def forecast_interval(self, ndata, **kwargs):
 
         ret = []
 
-        l = len(data)
+        l = len(ndata)
 
         if l <= self.order:
-            return data
-
-        ndata = self.apply_transformations(data)
+            return ndata
 
         for k in np.arange(self.order, l+1):
 
@@ -78,6 +76,4 @@ class IntervalFTS(hofts.HighOrderFTS):
             up_ = sum(up) / norm
             ret.append([lo_, up_])
 
-        ret = self.apply_inverse_transformations(ret, params=[data[self.order - 1:]], interval=True)
-
         return ret
@@ -60,24 +60,20 @@ class ImprovedWeightedFTS(fts.FTS):
                 self.flrgs[flr.LHS] = ImprovedWeightedFLRG(flr.LHS);
             self.flrgs[flr.LHS].append_rhs(flr.RHS)
 
-    def train(self, data, **kwargs):
+    def train(self, ndata, **kwargs):
         if kwargs.get('sets', None) is not None:
             self.sets = kwargs.get('sets', None)
 
-        ndata = self.apply_transformations(data)
-
         tmpdata = FuzzySet.fuzzyfy_series(ndata, self.sets, method="maximum")
         flrs = FLR.generate_recurrent_flrs(tmpdata)
         self.generate_flrg(flrs)
 
-    def forecast(self, data, **kwargs):
+    def forecast(self, ndata, **kwargs):
         l = 1
 
         ordered_sets = FuzzySet.set_ordered(self.sets)
 
-        data = np.array(data)
-        ndata = self.apply_transformations(data)
-
+        ndata = np.array(ndata)
         l = len(ndata)
 
         ret = []
@@ -94,6 +90,4 @@ class ImprovedWeightedFTS(fts.FTS):
 
             ret.append(mp.dot(flrg.weights()))
 
-        ret = self.apply_inverse_transformations(ret, params=[data])
-
         return ret
@@ -22,12 +22,10 @@ class ConditionalVarianceFTS(chen.ConventionalFTS):
         self.min_stack = [0,0,0]
         self.max_stack = [0,0,0]
 
-    def train(self, data, **kwargs):
+    def train(self, ndata, **kwargs):
         if kwargs.get('sets', None) is not None:
             self.sets = kwargs.get('sets', None)
 
-        ndata = self.apply_transformations(data)
-
         self.min_tx = min(ndata)
         self.max_tx = max(ndata)
 
@@ -84,9 +82,7 @@ class ConditionalVarianceFTS(chen.ConventionalFTS):
 
         return affected_sets
 
-    def forecast(self, data, **kwargs):
-        ndata = np.array(self.apply_transformations(data))
-
+    def forecast(self, ndata, **kwargs):
         l = len(ndata)
 
         ret = []
@@ -123,14 +119,10 @@ class ConditionalVarianceFTS(chen.ConventionalFTS):
 
             ret.append(pto)
 
-        ret = self.apply_inverse_transformations(ret, params=[data[self.order - 1:]])
-
         return ret
 
 
-    def forecast_interval(self, data, **kwargs):
-        ndata = np.array(self.apply_transformations(data))
-
+    def forecast_interval(self, ndata, **kwargs):
         l = len(ndata)
 
         ret = []
@@ -171,6 +163,4 @@ class ConditionalVarianceFTS(chen.ConventionalFTS):
 
             ret.append(itvl)
 
-        ret = self.apply_inverse_transformations(ret, params=[data[self.order - 1:]])
-
         return ret
@@ -1,7 +1,7 @@
 import numpy as np
 from pyFTS.common import FuzzySet, FLR, fts, tree
 from pyFTS.models import hofts
-from pyFTS.nonstationary import common, flrg
+from pyFTS.models.nonstationary import common, flrg
 
 
 class HighOrderNonStationaryFLRG(flrg.NonStationaryFLRG):
@@ -90,11 +90,8 @@ class HighOrderNonStationaryFTS(hofts.HighOrderFTS):
         if kwargs.get('sets', None) is not None:
             self.sets = kwargs.get('sets', None)
 
-        ndata = self.apply_transformations(data)
-        #tmpdata = common.fuzzyfy_series_old(ndata, self.sets)
-        #flrs = FLR.generate_recurrent_flrs(ndata)
         window_size = kwargs.get('parameters', 1)
-        self.generate_flrg(ndata, window_size=window_size)
+        self.generate_flrg(data, window_size=window_size)
 
     def _affected_flrgs(self, sample, k, time_displacement, window_size):
         # print("input: " + str(ndata[k]))
@@ -155,14 +152,12 @@ class HighOrderNonStationaryFTS(hofts.HighOrderFTS):
 
         return [affected_flrgs, affected_flrgs_memberships]
 
-    def forecast(self, data, **kwargs):
+    def forecast(self, ndata, **kwargs):
 
         time_displacement = kwargs.get("time_displacement",0)
 
         window_size = kwargs.get("window_size", 1)
 
-        ndata = np.array(self.apply_transformations(data))
-
         l = len(ndata)
 
         ret = []
@@ -201,18 +196,14 @@ class HighOrderNonStationaryFTS(hofts.HighOrderFTS):
 
             ret.append(pto)
 
-        ret = self.apply_inverse_transformations(ret, params=[data[self.order - 1:]])
-
         return ret
 
-    def forecast_interval(self, data, **kwargs):
+    def forecast_interval(self, ndata, **kwargs):
 
         time_displacement = kwargs.get("time_displacement", 0)
 
         window_size = kwargs.get("window_size", 1)
 
-        ndata = np.array(self.apply_transformations(data))
-
         l = len(ndata)
 
         ret = []
@@ -259,6 +250,4 @@ class HighOrderNonStationaryFTS(hofts.HighOrderFTS):
             ret.append([sum(lower), sum(upper)])
 
 
-        ret = self.apply_inverse_transformations(ret, params=[data[self.order - 1:]])
-
         return ret
@@ -49,30 +49,23 @@ class NonStationaryFTS(fts.FTS):
         if kwargs.get('sets', None) is not None:
             self.sets = kwargs.get('sets', None)
 
-        ndata = self.apply_transformations(data)
         window_size = kwargs.get('parameters', 1)
-        tmpdata = common.fuzzySeries(ndata, self.sets, window_size, method=self.method)
-        #print([k[0].name for k in tmpdata])
+        tmpdata = common.fuzzySeries(data, self.sets, window_size, method=self.method)
         flrs = FLR.generate_recurrent_flrs(tmpdata)
-        #print([str(k) for k in flrs])
         self.generate_flrg(flrs)
 
-    def forecast(self, data, **kwargs):
+    def forecast(self, ndata, **kwargs):
 
         time_displacement = kwargs.get("time_displacement",0)
 
         window_size = kwargs.get("window_size", 1)
 
-        ndata = np.array(self.apply_transformations(data))
-
         l = len(ndata)
 
         ret = []
 
         for k in np.arange(0, l):
 
-            #print("input: " + str(ndata[k]))
-
             tdisp = common.window_index(k + time_displacement, window_size)
 
             if self.method == 'fuzzy':
@@ -89,8 +82,6 @@ class NonStationaryFTS(fts.FTS):
             else:
                 affected_sets.append(common.check_bounds(ndata[k], self.sets, tdisp))
 
-            #print(affected_sets)
-
             tmp = []
 
             if len(affected_sets) == 1 and self.method == 'fuzzy':
@@ -118,18 +109,14 @@ class NonStationaryFTS(fts.FTS):
 
             ret.append(pto)
 
-        ret = self.apply_inverse_transformations(ret, params=[data[self.order - 1:]])
-
         return ret
 
-    def forecast_interval(self, data, **kwargs):
+    def forecast_interval(self, ndata, **kwargs):
 
         time_displacement = kwargs.get("time_displacement", 0)
 
         window_size = kwargs.get("window_size", 1)
 
-        ndata = np.array(self.apply_transformations(data))
-
         l = len(ndata)
 
         ret = []
@@ -179,6 +166,4 @@ class NonStationaryFTS(fts.FTS):
 
             ret.append([sum(lower), sum(upper)])
 
-        ret = self.apply_inverse_transformations(ret, params=[data[self.order - 1:]])
-
         return ret
@@ -266,9 +266,7 @@ class ProbabilisticWeightedFTS(ifts.IntervalFTS):
         else:
             raise Exception("Unknown point forecasting method!")
 
-    def point_heuristic(self, data, **kwargs):
-
-        ndata = np.array(self.apply_transformations(data))
+    def point_heuristic(self, ndata, **kwargs):
 
         l = len(ndata)
 
@@ -298,8 +296,6 @@ class ProbabilisticWeightedFTS(ifts.IntervalFTS):
 
             if self.auto_update and k > self.order+1: self.update_model(ndata[k - self.order - 1 : k])
 
-        ret = self.apply_inverse_transformations(ret, params=[data[self.order - 1:]])
-
         return ret
 
     def point_expected_value(self, data, **kwargs):
@@ -314,11 +310,9 @@ class ProbabilisticWeightedFTS(ifts.IntervalFTS):
 
            ret.append(tmp)
 
-        ret = self.apply_inverse_transformations(ret, params=[data[self.order - 1:]])
-
         return ret
 
-    def forecast_interval(self, data, **kwargs):
+    def forecast_interval(self, ndata, **kwargs):
 
         if 'method' in kwargs:
             self.interval_method = kwargs.get('method','heuristic')
@@ -326,8 +320,6 @@ class ProbabilisticWeightedFTS(ifts.IntervalFTS):
         if 'alpha' in kwargs:
             self.alpha = kwargs.get('alpha', 0.05)
 
-        ndata = np.array(self.apply_transformations(data))
-
         l = len(ndata)
 
         ret = []
@@ -339,15 +331,12 @@ class ProbabilisticWeightedFTS(ifts.IntervalFTS):
             else:
                 self.interval_quantile(k, ndata, ret)
 
-        ret = self.apply_inverse_transformations(ret, params=[data[self.order - 1:]], interval=True)
-
         return ret
 
     def interval_quantile(self, k, ndata, ret):
         dist = self.forecast_distribution(ndata)
-        lo_qt = dist[0].quantile(self.alpha)
-        up_qt = dist[0].quantile(1.0 - self.alpha)
-        ret.append([lo_qt, up_qt])
+        itvl = dist[0].quantile([self.alpha, 1.0 - self.alpha])
+        ret.append(itvl)
 
     def interval_heuristic(self, k, ndata, ret):
 
@@ -375,14 +364,10 @@ class ProbabilisticWeightedFTS(ifts.IntervalFTS):
         up_ = sum(up) / norm
         ret.append([lo_, up_])
 
-    def forecast_distribution(self, data, **kwargs):
-
-        if not isinstance(data, (list, set, np.ndarray)):
-            data = [data]
+    def forecast_distribution(self, ndata, **kwargs):
 
         smooth = kwargs.get("smooth", "none")
 
-        ndata = np.array(self.apply_transformations(data))
         l = len(ndata)
         uod = self.get_UoD()
 
@@ -457,14 +442,12 @@ class ProbabilisticWeightedFTS(ifts.IntervalFTS):
 
         return ret
 
-    def forecast_ahead_distribution(self, data, steps, **kwargs):
+    def forecast_ahead_distribution(self, ndata, steps, **kwargs):
 
         ret = []
 
         smooth = kwargs.get("smooth", "none")
 
-        ndata = np.array(self.apply_transformations(data))
-
         uod = self.get_UoD()
 
         if 'bins' in kwargs:
@@ -69,20 +69,17 @@ class ExponentialyWeightedFTS(fts.FTS):
         self.c = kwargs.get('parameters', default_c)
         if kwargs.get('sets', None) is not None:
             self.sets = kwargs.get('sets', None)
-        ndata = self.apply_transformations(data)
-        tmpdata = FuzzySet.fuzzyfy_series(ndata, self.sets, method='maximum')
+        tmpdata = FuzzySet.fuzzyfy_series(data, self.sets, method='maximum')
         flrs = FLR.generate_recurrent_flrs(tmpdata)
         self.generate_flrg(flrs, self.c)
 
-    def forecast(self, data, **kwargs):
+    def forecast(self, ndata, **kwargs):
         l = 1
 
         ordered_sets = FuzzySet.set_ordered(self.sets)
 
-        data = np.array(data)
 
-        ndata = self.apply_transformations(data)
 
         l = len(ndata)
 
         ret = []
@@ -99,6 +96,4 @@ class ExponentialyWeightedFTS(fts.FTS):
 
             ret.append(mp.dot(flrg.weights()))
 
-        ret = self.apply_inverse_transformations(ret, params=[data])
-
         return ret
@@ -85,8 +85,6 @@ class ContextualMultiSeasonalFTS(sfts.SeasonalFTS):
 
             ret.append(sum(mp) / len(mp))
 
-        ret = self.doInverseTransformations(ret, params=[ndata])
-
         return ret
 
     def forecast_ahead(self, data, steps, **kwargs):
@@ -98,6 +96,4 @@ class ContextualMultiSeasonalFTS(sfts.SeasonalFTS):
 
            ret.append(sum(mp) / len(mp))
 
-        ret = self.doInverseTransformations(ret, params=data)
-
         return ret
@@ -52,8 +52,6 @@ class MultiSeasonalFTS(sfts.SeasonalFTS):
 
            ret.append(sum(mp) / len(mp))
 
-        ret = self.apply_inverse_transformations(ret, params=[ndata])
-
         return ret
 
     def forecast_ahead(self, data, steps, **kwargs):
@@ -65,6 +63,4 @@ class MultiSeasonalFTS(sfts.SeasonalFTS):
 
            ret.append(sum(mp) / len(mp))
 
-        ret = self.apply_inverse_transformations(ret, params=data)
-
         return ret
@@ -65,16 +65,13 @@ class SeasonalFTS(fts.FTS):
     def train(self, data, **kwargs):
         if kwargs.get('sets', None) is not None:
             self.sets = kwargs.get('sets', None)
-        ndata = self.apply_transformations(data)
-        tmpdata = FuzzySet.fuzzyfy_series_old(ndata, self.sets)
+        tmpdata = FuzzySet.fuzzyfy_series_old(data, self.sets)
         flrs = FLR.generate_recurrent_flrs(tmpdata)
         self.generate_flrg(flrs)
 
     def forecast(self, data, **kwargs):
 
-        ndata = np.array(self.apply_transformations(data))
-
-        l = len(ndata)
+        l = len(data)
 
         ret = []
 
@@ -88,6 +85,4 @@ class SeasonalFTS(fts.FTS):
 
            ret.append(np.percentile(mp, 50))
 
-        ret = self.apply_inverse_transformations(ret, params=[data[self.order - 1:]])
-
         return ret
@@ -51,17 +51,15 @@ class ConventionalFTS(fts.FTS):
     def train(self, data, **kwargs):
         if kwargs.get('sets', None) is not None:
             self.sets = kwargs.get('sets', None)
-        ndata = self.apply_transformations(data)
-        tmpdata = FuzzySet.fuzzyfy_series(ndata, self.sets, method='maximum')
+
+        tmpdata = FuzzySet.fuzzyfy_series(data, self.sets, method='maximum')
         flrs = FLR.generate_non_recurrent_flrs(tmpdata)
         self.operation_matrix(flrs)
 
-    def forecast(self, data, **kwargs):
+    def forecast(self, ndata, **kwargs):
 
         ordered_set = FuzzySet.set_ordered(self.sets)
 
-        ndata = np.array(self.apply_transformations(data))
-
         l = len(ndata)
         npart = len(self.sets)
 
@@ -81,8 +79,6 @@ class ConventionalFTS(fts.FTS):
 
            ret.append( sum(mp)/len(mp))
 
-        ret = self.apply_inverse_transformations(ret, params=[data])
-
         return ret
 
     def __str__(self):
@@ -57,23 +57,19 @@ class WeightedFTS(fts.FTS):
                 self.flrgs[flr.LHS] = WeightedFLRG(flr.LHS);
             self.flrgs[flr.LHS].append_rhs(flr.RHS)
 
-    def train(self, data, **kwargs):
+    def train(self, ndata, **kwargs):
         if kwargs.get('sets', None) is not None:
             self.sets = kwargs.get('sets', None)
-        ndata = self.apply_transformations(data)
-
         tmpdata = FuzzySet.fuzzyfy_series_old(ndata, self.sets)
         flrs = FLR.generate_recurrent_flrs(tmpdata)
         self.generate_FLRG(flrs)
 
-    def forecast(self, data, **kwargs):
+    def forecast(self, ndata, **kwargs):
 
         ordered_sets = FuzzySet.set_ordered(self.sets)
 
         l = 1
 
-        data = np.array(data)
-
-        ndata = self.apply_transformations(data)
+        ndata = np.array(ndata)
 
         l = len(ndata)
 
@@ -91,6 +87,4 @@ class WeightedFTS(fts.FTS):
 
            ret.append(mp.dot(flrg.weights(self.sets)))
 
-        ret = self.apply_inverse_transformations(ret, params=[data])
-
         return ret