CVFTS improvements on perturbation_factors

Petrônio Cândido 2018-06-18 12:44:46 -03:00
parent bd4c0c432e
commit 74debe42ee
5 changed files with 115 additions and 21 deletions

View File

@@ -58,7 +58,6 @@ class FuzzySet(FS.FuzzySet):
        if self.location is None:
            inc = t
        else:
            l = len(self.location)
            inc = sum([self.location[k](t + self.location_roots[k], self.location_params[k]) for k in np.arange(0, l)])

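For context: the aggregation above folds every registered location perturbation into a single displacement inc that shifts the fuzzy set at time t. A minimal standalone sketch of that pattern, assuming a simple linear displacement function (a hypothetical stand-in, not the pyFTS API):

import numpy as np

def linear_displacement(t, params):
    # hypothetical perturbation function: params = (a, b), displacement = a + b*t
    a, b = params
    return a + b * t

location = [linear_displacement, linear_displacement]   # plays the role of self.location
location_roots = [0.0, 5.0]                              # plays the role of self.location_roots
location_params = [(1.0, 0.1), (0.0, 0.05)]              # plays the role of self.location_params

t = 10.0
l = len(location)
inc = sum(location[k](t + location_roots[k], location_params[k]) for k in np.arange(0, l))
print(inc)  # total displacement applied to the set's parameters at time t
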
View File

@@ -30,6 +30,7 @@ class HighOrderNonstationaryFLRG(hofts.HighOrderFTS):
    def __len__(self):
        return len(self.RHS)


class ConditionalVarianceFTS(hofts.HighOrderFTS):
    def __init__(self, **kwargs):
        super(ConditionalVarianceFTS, self).__init__(**kwargs)
@@ -45,6 +46,12 @@ class ConditionalVarianceFTS(hofts.HighOrderFTS):
        self.uod_clip = False
        self.order = 1
        self.min_order = 1
        self.inputs = []
        self.forecasts = []
        self.residuals = []
        self.variance_residual = 0.
        self.mean_residual = 0.
        self.memory_window = kwargs.get("memory_window",5)

    def train(self, ndata, **kwargs):
@@ -52,6 +59,21 @@ class ConditionalVarianceFTS(hofts.HighOrderFTS):
        flrs = FLR.generate_non_recurrent_flrs(tmpdata)
        self.generate_flrg(flrs)

        self.forecasts = self.forecast(ndata, no_update=True)
        self.residuals = np.array(ndata[1:]) - np.array(self.forecasts[:-1])

        self.variance_residual = np.var(self.residuals)  # np.max(self.residuals)
        self.mean_residual = np.mean(self.residuals)

        self.residuals = self.residuals[-self.memory_window:].tolist()
        self.forecasts = self.forecasts[-self.memory_window:]
        self.inputs = np.array(ndata[-self.memory_window:]).tolist()

        print(self.mean_residual)
        print(self.variance_residual)
        print([self.original_min,self.original_max])

    def generate_flrg(self, flrs, **kwargs):
        for flr in flrs:
            if flr.LHS.name in self.flrgs:
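The train() hunk above fits the rule base, computes the in-sample one-step-ahead residuals (target at t+1 minus forecast made at t), stores their mean and variance, and then keeps only the last memory_window inputs, forecasts and residuals for the online updates. A minimal sketch of that bookkeeping, with synthetic arrays standing in for the model state:

import numpy as np

memory_window = 5
ndata = np.random.normal(0, 1, 100).tolist()                            # synthetic training series
forecasts = (np.array(ndata) + np.random.normal(0, .1, 100)).tolist()   # stand-in for self.forecast(ndata, no_update=True)

# one-step-ahead residuals: target at t+1 minus forecast made at t
residuals = np.array(ndata[1:]) - np.array(forecasts[:-1])

mean_residual = np.mean(residuals)
variance_residual = np.var(residuals)

# keep only the most recent window; forecast() later appends/pops to maintain it
residuals = residuals[-memory_window:].tolist()
forecasts = forecasts[-memory_window:]
inputs = np.array(ndata[-memory_window:]).tolist()
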
@@ -64,7 +86,38 @@ class ConditionalVarianceFTS(hofts.HighOrderFTS):
    def _smooth(self, a):
        return .1 * a[0] + .3 * a[1] + .6 * a[2]

    def perturbation_factors(self, data):
    def perturbation_factors(self, data, **kwargs):
        _max = 0
        _min = 0
        if data < self.original_min:
            _min = data - self.original_min if data < 0 else self.original_min - data
        elif data > self.original_max:
            _max = data - self.original_max if data > 0 else self.original_max - data

        self.min_stack.pop(2)
        self.min_stack.insert(0, _min)
        _min = min(self.min_stack)

        self.max_stack.pop(2)
        self.max_stack.insert(0, _max)
        _max = max(self.max_stack)

        _range = (_max - _min)/2

        translate = np.linspace(_min, _max, self.partitioner.partitions)

        var = np.std(self.residuals)

        var = 0 if var < 1 else var

        loc = (self.mean_residual + np.mean(self.residuals))

        location = [_range + w + loc + k for k in np.linspace(-var,var) for w in translate]

        perturb = [[location[k], var] for k in np.arange(0, self.partitioner.partitions)]

        return perturb

    def perturbation_factors__old(self, data):
        _max = 0
        _min = 0
        if data < self.original_min:
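The new perturbation_factors returns one [location, scale] pair per fuzzy set: a translation spread across the partitions when the sample leaves the original universe of discourse, and a scale taken from the spread of the recent residuals. A rough sketch of that output format, with illustrative numbers standing in for the model state (it mirrors the structure, not the exact formula above):

import numpy as np

partitions = 10

# Neutral factors, as forecast() builds them when no_update=True:
# zero displacement and unit scale for every fuzzy set.
perturb_neutral = [[0, 1] for k in np.arange(0, partitions)]

# Illustrative non-neutral factors after a sample overshoots original_max:
# displacements spread over the overshoot, scale taken from the residual spread.
overshoot = 12.5                          # assumed distance beyond original_max
residuals = [1.5, -2.0, 0.7, 3.1, -1.2]   # assumed recent residual window
scale = np.std(residuals)
translate = np.linspace(0, overshoot, partitions)
perturb_shifted = [[translate[k], scale] for k in np.arange(0, partitions)]
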
@@ -107,39 +160,59 @@ class ConditionalVarianceFTS(hofts.HighOrderFTS):
        ret = []

        no_update = kwargs.get("no_update",False)

        for k in np.arange(0, l):
            sample = ndata[k]

            if not no_update:
                perturb = self.perturbation_factors(sample)
            else:
                perturb = [[0, 1] for k in np.arange(0, self.partitioner.partitions)]

            affected_sets = self._affected_sets(sample, perturb)

            tmp = []
            numerator = []
            denominator = []

            if len(affected_sets) == 1:
                ix = affected_sets[0][0]
                aset = self.partitioner.ordered_sets[ix]
                if aset in self.flrgs:
                    tmp.append(self.flrgs[aset].get_midpoint(perturb[ix]))
                    numerator.append(self.flrgs[aset].get_midpoint(perturb[ix]))
                else:
                    fuzzy_set = self.sets[aset]
                    tmp.append(fuzzy_set.get_midpoint(perturb[ix]))
                    numerator.append(fuzzy_set.get_midpoint(perturb[ix]))
                denominator.append(1)
            else:
                for aset in affected_sets:
                    ix = aset[0]
                    fs = self.partitioner.ordered_sets[ix]
                    tdisp = perturb[ix]
                    if fs in self.flrgs:
                        tmp.append(self.flrgs[fs].get_midpoint(tdisp) * aset[1])
                        numerator.append(self.flrgs[fs].get_midpoint(tdisp) * aset[1])
                    else:
                        fuzzy_set = self.sets[fs]
                        tmp.append(fuzzy_set.get_midpoint(tdisp) * aset[1])
                        numerator.append(fuzzy_set.get_midpoint(tdisp) * aset[1])
                    denominator.append(aset[1])

            pto = sum(tmp)
            if sum(denominator) > 0:
                pto = sum(numerator) /sum(denominator)
            else:
                pto = sum(numerator)

            ret.append(pto)

            if not no_update:
                self.forecasts.append(pto)
                self.residuals.append(self.inputs[-1] - self.forecasts[-1])
                self.inputs.append(sample)

                self.inputs.pop(0)
                self.forecasts.pop(0)
                self.residuals.pop(0)

        return ret

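The forecasting change above replaces the plain sum of midpoints with a membership-weighted average: each affected set contributes its perturbed midpoint times its membership degree, and the sum is normalized by the total membership whenever that total is positive. A minimal sketch of just that defuzzification step, assuming affected_sets holds (index, membership) pairs and midpoints the perturbed midpoints:

affected_sets = [(3, 0.25), (4, 0.75)]   # (set index, membership degree) for the current sample
midpoints = {3: 110.0, 4: 130.0}          # perturbed midpoints of the affected sets

numerator = [midpoints[ix] * mu for ix, mu in affected_sets]
denominator = [mu for ix, mu in affected_sets]

if sum(denominator) > 0:
    pto = sum(numerator) / sum(denominator)   # weighted average of the midpoints
else:
    pto = sum(numerator)

print(pto)   # 125.0 for the values above
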
View File

@@ -62,6 +62,7 @@ def plot_sets_conditional(model, data, step=1, size=[5, 5], colors=None,
    fig, axes = plt.subplots(nrows=1, ncols=1, figsize=size)

    for t in range:
        model.forecast([data[t]])
        perturb = model.perturbation_factors(data[t])

        for ct, key in enumerate(model.partitioner.ordered_sets):

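The change above makes the plotting helper recompute the perturbation factors for every step it draws. A typical call, mirroring the (commented) usage in the test script further below, with model a fitted ConditionalVarianceFTS and test the evaluation series:

from pyFTS.models.nonstationary import util

util.plot_sets_conditional(model, test, step=5, size=[10, 5])
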
View File

@@ -19,7 +19,7 @@ dataset = TAIEX.get_data()
#print(len(dataset))
from pyFTS.partitioners import Grid, Util as pUtil
partitioner = Grid.GridPartitioner(data=dataset[:800], npart=10)#, transformation=tdiff)
partitioner = Grid.GridPartitioner(data=dataset[:800], npart=10, transformation=tdiff)
from pyFTS.common import Util as cUtil
@@ -28,9 +28,9 @@ from pyFTS.benchmarks import benchmarks as bchmk, Util as bUtil, Measures, knn,
from pyFTS.models import pwfts, song, chen, ifts, hofts
from pyFTS.models.ensemble import ensemble
#model = chen.ConventionalFTS(partitioner=partitioner)
model = hofts.HighOrderFTS(partitioner=partitioner,order=2)
#model.append_transformation(tdiff)
model = chen.ConventionalFTS(partitioner=partitioner)
#model = hofts.HighOrderFTS(partitioner=partitioner,order=2)
model.append_transformation(tdiff)
model.fit(dataset[:800])
cUtil.plot_rules(model, size=[20,20], rules_by_axis=5, columns=1)

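In this test the first-order differencing ends up wired in both at the partitioner (transformation=tdiff) and on the model (append_transformation), so fuzzification and forecasting both work on the differenced series. A self-contained sketch of that setup; the definition of tdiff is an assumption (it appears earlier in the script and is presumed to be the usual Differential(1) transformation):

from pyFTS.data import TAIEX
from pyFTS.common import Transformations
from pyFTS.partitioners import Grid
from pyFTS.models import chen

dataset = TAIEX.get_data()
tdiff = Transformations.Differential(1)   # assumed definition of tdiff

# the same transformation is given to the partitioner and appended to the model
partitioner = Grid.GridPartitioner(data=dataset[:800], npart=10, transformation=tdiff)
model = chen.ConventionalFTS(partitioner=partitioner)
model.append_transformation(tdiff)
model.fit(dataset[:800])
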
View File

@@ -3,7 +3,7 @@ import numpy as np
from pyFTS.common import Membership, Transformations
from pyFTS.models.nonstationary import common, perturbation, partitioners, util
from pyFTS.models.nonstationary import nsfts, cvfts
from pyFTS.partitioners import Grid
from pyFTS.partitioners import Grid, Entropy
import matplotlib.pyplot as plt
from pyFTS.common import Util as cUtil
import pandas as pd
@@ -45,23 +45,44 @@ from pyFTS.common import Util
from pyFTS.data import TAIEX
taiex = TAIEX.get_data()
taiex_diff = tdiff.apply(taiex)
#taiex_diff = tdiff.apply(taiex)
train = taiex_diff[:600]
test = taiex_diff[600:1500]
train = taiex[:600]
test = taiex[600:800]
fs_tmp = Grid.GridPartitioner(data=train, npart=20) #, transformation=tdiff)
#fs_tmp = Grid.GridPartitioner(data=train, npart=7, transformation=tdiff)
#fs_tmp = Entropy.EntropyPartitioner(data=train, npart=7, transformation=tdiff)
fs_tmp = Grid.GridPartitioner(data=train, npart=20)
fs = partitioners.SimpleNonStationaryPartitioner(train, fs_tmp)
print(fs)
model = cvfts.ConditionalVarianceFTS(partitioner=fs)
model = cvfts.ConditionalVarianceFTS(partitioner=fs,memory_window=3)
model.fit(train)
print(model)
#tmpp4 = model.predict(test, type='point')
tmp = model.predict(test, type='interval')
#tmp = model.predict(test, type='interval')
#util.plot_sets_conditional(model, test, step=1, tam=[10, 5])
#util.plot_sets_conditional(model, tdiff.apply(test), step=5, size=[10, 5])
#util.plot_sets_conditional(model, test, step=5, size=[10, 5])
fig, axes = plt.subplots(nrows=2, ncols=1, figsize=[10, 5])
axes[0].plot(test[1:], label="Test Data")
forecasts = model.predict(test, type='point')
axes[0].plot(forecasts[:-1], label="CVFTS Forecasts")
handles0, labels0 = axes[0].get_legend_handles_labels()
lgd = axes[0].legend(handles0, labels0, loc=2)
residuals = np.array(test[1:]) - np.array(forecasts[:-1])
axes[1].plot(residuals)
axes[1].set_title("Residuals")
print("fim")