Improvements and bugfixes on pwfts.forecast_interval and pwfts.point_expected_value
parent 24c4fadc08
commit 581f404b18
@@ -16,6 +16,7 @@ import numpy as np
 from mpl_toolkits.mplot3d import Axes3D
 
 from pyFTS.probabilistic import ProbabilityDistribution
+from pyFTS.common import Transformations
 from pyFTS.models import song, chen, yu, ismailefendi, sadaei, hofts, pwfts, ifts, cheng, hwang
 from pyFTS.models.ensemble import ensemble
 from pyFTS.benchmarks import Measures, naive, arima, ResidualAnalysis, quantreg
@@ -833,10 +834,6 @@ def plot_density_rectange(ax, cmap, density, fig, resolution, time_from, time_to
     cb.set_label('Density')
 
 
-
-from pyFTS.common import Transformations
-
-
 def plot_distribution(ax, cmap, probabilitydist, fig, time_from, reference_data=None):
     from matplotlib.patches import Rectangle
     from matplotlib.collections import PatchCollection
@@ -257,6 +257,16 @@ class ProbabilisticWeightedFTS(ifts.IntervalFTS):
         return ret
 
     def forecast(self, data, **kwargs):
+        method = kwargs.get('method','heuristic')
 
+        if method == 'heuristic':
+            return self.point_heuristic(data, **kwargs)
+        elif method == 'expected_value':
+            return self.point_expected_value(data, **kwargs)
+        else:
+            raise Exception("Unknown point forecasting method!")
+
+    def point_heuristic(self, data, **kwargs):
+
         ndata = np.array(self.apply_transformations(data))
 
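Hedged usage sketch (not part of the commit): it exercises the new dispatch in ProbabilisticWeightedFTS.forecast(), which routes on the method kwarg to either the heuristic point forecast or the new expected-value path. The synthetic series, split index, and model name below are illustrative assumptions, not values from this diff.

# Sketch: selecting the point forecasting method introduced in this commit.
import numpy as np
from pyFTS.partitioners import Grid
from pyFTS.models import pwfts

series = np.cumsum(np.random.normal(0, 1, 500)) + 100.0   # stand-in for a real dataset

# Partition over the whole series so the test values stay inside the universe of discourse.
partitioner = Grid.GridPartitioner(data=series, npart=30)

model = pwfts.ProbabilisticWeightedFTS("pwfts", partitioner=partitioner)
model.fit(series[:400])

point_heuristic = model.forecast(series[400:], method='heuristic')        # default path
point_expected  = model.forecast(series[400:], method='expected_value')   # new path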
@@ -292,10 +302,26 @@ class ProbabilisticWeightedFTS(ifts.IntervalFTS):
 
         return ret
 
+    def point_expected_value(self, data, **kwargs):
+        l = len(data)
+
+        ret = []
+
+        for k in np.arange(self.order - 1, l):
+            sample = data[k - (self.order - 1): k + 1]
+
+            tmp = self.forecast_distribution(sample)[0].expected_value()
+
+            ret.append(tmp)
+
+        ret = self.apply_inverse_transformations(ret, params=[data[self.order - 1:]])
+
+        return ret
+
     def forecast_interval(self, data, **kwargs):
 
         if 'method' in kwargs:
-            self.interval_method = kwargs.get('method','quantile')
+            self.interval_method = kwargs.get('method','heuristic')
 
         if 'alpha' in kwargs:
             self.alpha = kwargs.get('alpha', 0.05)
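A hypothetical standalone sketch of the point_expected_value() loop added above: slide a window of length order over the series and use the expected value of the per-window forecast distribution as the point forecast. DummyDist and dummy_forecast_distribution are stand-ins for the pyFTS objects, kept only so the sketch runs on its own; they are not part of the library.

import numpy as np

class DummyDist:
    def __init__(self, value):
        self.value = value
    def expected_value(self):
        return self.value                      # pretend E[X] equals the last observation

def dummy_forecast_distribution(sample):
    return [DummyDist(sample[-1])]             # one distribution per window, as in pyFTS

def point_expected_value_sketch(data, order, forecast_distribution):
    ret = []
    for k in np.arange(order - 1, len(data)):
        sample = data[k - (order - 1): k + 1]  # last `order` observations
        ret.append(forecast_distribution(sample)[0].expected_value())
    return ret

print(point_expected_value_sketch(np.arange(10.0), order=2,
                                  forecast_distribution=dummy_forecast_distribution))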
@@ -308,8 +334,8 @@ class ProbabilisticWeightedFTS(ifts.IntervalFTS):
 
         for k in np.arange(self.order - 1, l):
 
-            if self.interval_method == 'extremum':
-                self.interval_extremum(k, ndata, ret)
+            if self.interval_method == 'heuristic':
+                self.interval_heuristic(k, ndata, ret)
             else:
                 self.interval_quantile(k, ndata, ret)
 
@@ -321,9 +347,9 @@ class ProbabilisticWeightedFTS(ifts.IntervalFTS):
         dist = self.forecast_distribution(ndata)
         lo_qt = dist[0].quantile(self.alpha)
         up_qt = dist[0].quantile(1.0 - self.alpha)
-        ret.append_rhs([lo_qt, up_qt])
+        ret.append([lo_qt, up_qt])
 
-    def interval_extremum(self, k, ndata, ret):
+    def interval_heuristic(self, k, ndata, ret):
 
         sample = ndata[k - (self.order - 1): k + 1]
 
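interval_quantile reads the interval straight off the forecast distribution: the lower bound is the alpha quantile and the upper bound the (1 - alpha) quantile. A small self-contained illustration of that idea with an empirical distribution, using only numpy (the sampled data is invented for the example and is not the pyFTS API):

import numpy as np

alpha = 0.05
samples = np.random.normal(loc=10.0, scale=2.0, size=10_000)  # stand-in forecast distribution

lo_qt = np.quantile(samples, alpha)        # lower bound, like dist[0].quantile(alpha)
up_qt = np.quantile(samples, 1.0 - alpha)  # upper bound, like dist[0].quantile(1 - alpha)

print([lo_qt, up_qt])                      # ~90% coverage interval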
@@ -335,7 +361,7 @@ class ProbabilisticWeightedFTS(ifts.IntervalFTS):
         for flrg in flrgs:
             norm = self.flrg_lhs_conditional_probability(sample, flrg)
             if norm == 0:
-                norm = self.flrg_lhs_unconditional_probability(flrg)  # * 0.001
+                norm = self.flrg_lhs_unconditional_probability(flrg)
             up.append(norm * self.get_upper(flrg))
             lo.append(norm * self.get_lower(flrg))
             norms.append(norm)
@@ -349,7 +375,6 @@ class ProbabilisticWeightedFTS(ifts.IntervalFTS):
         up_ = sum(up) / norm
         ret.append([lo_, up_])
 
-
     def forecast_distribution(self, data, **kwargs):
 
         if not isinstance(data, (list, set, np.ndarray)):
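The heuristic interval, by contrast, weights each activated rule's lower and upper bounds by its probability norm and renormalizes by the sum of the norms, as in interval_heuristic above. A toy numpy sketch of that weighted aggregation (the rule weights and bounds are hypothetical numbers, not pyFTS code):

import numpy as np

# (weight, lower, upper) per activated rule -- illustrative values only
rules = [(0.6, 9.0, 11.0),
         (0.3, 10.0, 12.5),
         (0.1, 8.5, 10.5)]

norms = np.array([w for w, _, _ in rules])
lo = np.array([w * l for w, l, _ in rules])   # weighted lower bounds
up = np.array([w * u for w, _, u in rules])   # weighted upper bounds

norm = norms.sum()
lo_, up_ = lo.sum() / norm, up.sum() / norm   # same normalization as the diff
print([lo_, up_])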
@@ -425,8 +450,8 @@ class ProbabilisticWeightedFTS(ifts.IntervalFTS):
                     1] >= self.original_max):
                 ret.append(ret[-1])
             else:
-                lower = self.forecast_interval([ret[x][0] for x in np.arange(k - self.order, k)])
-                upper = self.forecast_interval([ret[x][1] for x in np.arange(k - self.order, k)])
+                lower = self.forecast_interval([ret[x][0] for x in np.arange(k - self.order, k)], **kwargs)
+                upper = self.forecast_interval([ret[x][1] for x in np.arange(k - self.order, k)], **kwargs)
 
                 ret.append([np.min(lower), np.max(upper)])
 
@@ -95,6 +95,9 @@ class ProbabilityDistribution(object):
 
         return ret
 
+    def expected_value(self):
+        return np.nansum([v * self.distribution[v] for v in self.bins])
+
     def build_cdf_qtl(self):
         ret = 0.0
         self.cdf = {}
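The new expected_value() is the first moment of the discrete distribution: the sum of each bin value times its probability, with nansum guarding against empty bins. A standalone check of the same formula; the dictionary below is an invented stand-in for the pyFTS class attributes:

import numpy as np

bins = [1.0, 2.0, 3.0, 4.0]
distribution = {1.0: 0.1, 2.0: 0.2, 3.0: 0.4, 4.0: 0.3}   # probabilities sum to 1

expected = np.nansum([v * distribution[v] for v in bins])
print(expected)   # 0.1*1 + 0.2*2 + 0.4*3 + 0.3*4 = 2.9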
@@ -137,7 +140,7 @@ class ProbabilityDistribution(object):
             ret.append(self.qtl[str(k)][0])
         else:
             k = self.quantile_index.find_ge(values)
-            ret = self.qtl[str(k)[0]]
+            ret = self.qtl[str(k)]
 
         return ret
 
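The bugfix here matters: self.qtl is keyed by the stringified bin, so str(k)[0] indexed the first character of the key instead of looking up the key itself. A tiny reproduction of the difference; the qtl contents are hypothetical:

k = 10.25
qtl = {'10.25': [10.3]}      # quantile table keyed by stringified bin (illustrative)

print(str(k))                # '10.25' -> valid key
print(str(k)[0])             # '1'     -> would raise KeyError if used as a key
print(qtl[str(k)])           # [10.3]  -> the fixed lookup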
@@ -9,6 +9,8 @@ import matplotlib.pylab as plt
 import pandas as pd
 from pyFTS.common import Transformations
 
+tdiff = Transformations.Differential(1)
+
 from pyFTS.data import TAIEX
 
 dataset = TAIEX.get_data()
@@ -30,17 +32,28 @@ test_length = 200
 
 from pyFTS.partitioners import Grid, Util as pUtil
 partitioner = Grid.GridPartitioner(data=dataset[:train_split], npart=30)
+#partitioner = Grid.GridPartitioner(data=dataset[:train_split], npart=10, transformation=tdiff)
 
 from pyFTS.common import fts,tree
 from pyFTS.models import hofts, pwfts
 
 pfts1_taiex = pwfts.ProbabilisticWeightedFTS("1", partitioner=partitioner)
+#pfts1_taiex.append_transformation(tdiff)
 pfts1_taiex.fit(dataset[:train_split], save_model=True, file_path='pwfts')
 pfts1_taiex.shortname = "1st Order"
 
 print(pfts1_taiex)
 
-tmp = pfts1_taiex.predict(dataset[train_split:train_split+200], type='distribution', steps_ahead=20)
+tmp = pfts1_taiex.predict(dataset[train_split:train_split+200], type='interval',
+                          method='quantile', alpha=.05, steps_ahead=10)
+
+
+
+'''
+tmp = pfts1_taiex.predict(dataset[train_split:train_split+200], type='distribution', steps_ahead=20)
+
+
+
 f, ax = plt.subplots(3, 4, figsize=[20,15])
 tmp[0].plot(ax[0][0], title='t=1')
 tmp[2].plot(ax[0][1], title='t=20')
@@ -53,3 +66,9 @@ tmp[14].plot(ax[1][3], title='t=140')
 tmp[16].plot(ax[2][0], title='t=160')
 tmp[18].plot(ax[2][1], title='t=180')
 tmp[20].plot(ax[2][2], title='t=200')
+
+
+f, ax = plt.subplots(1, 1, figsize=[20,15])
+bchmk.plot_distribution(ax, 'blue', tmp, f, 0, reference_data=dataset[train_split:train_split+200])
+
+'''
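End-to-end, the updated test script drives the new interval path through predict(). A condensed sketch of the same flow follows; the value of train_split is an assumption, since it is defined outside the hunks shown in this diff.

# Condensed version of the test flow above: fit a PWFTS model on TAIEX and
# request quantile intervals through predict(), as the updated script does.
from pyFTS.data import TAIEX
from pyFTS.partitioners import Grid
from pyFTS.models import pwfts

dataset = TAIEX.get_data()
train_split = 2000                      # assumption: not shown in this diff

partitioner = Grid.GridPartitioner(data=dataset[:train_split], npart=30)
model = pwfts.ProbabilisticWeightedFTS("1", partitioner=partitioner)
model.fit(dataset[:train_split])

intervals = model.predict(dataset[train_split:train_split+200], type='interval',
                          method='quantile', alpha=.05, steps_ahead=10)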