Bugfixes at pwfts.forecast_ahead_distribution
parent 326d1b4f40
commit 1312d96246
@@ -2,7 +2,6 @@
 Benchmark utility functions
 """
 
-import numba
 import matplotlib as plt
 import matplotlib.cm as cmx
 import matplotlib.colors as pltcolors
@@ -6,7 +6,6 @@
 
 import datetime
 import time
-import numba
 from copy import deepcopy
 
 import matplotlib as plt
@@ -1,5 +1,4 @@
 import time
-import numba
 import matplotlib.pyplot as plt
 import dill
 import numpy as np
@@ -363,6 +363,7 @@ class ProbabilisticWeightedFTS(ifts.IntervalFTS):
 
         if 'bins' in kwargs:
             _bins = kwargs.pop('bins')
+            nbins = len(_bins)
         else:
             nbins = kwargs.get("num_bins", 100)
             _bins = np.linspace(uod[0], uod[1], nbins)
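Note on the added line: when the caller supplies bins explicitly, nbins is now derived from that grid, mirroring how the else branch derives the grid from nbins; the conditional-probability calls further down in this method take nbins as an argument, so the two must agree. A small illustration with hypothetical values, not pyFTS code:

    import numpy as np

    # Caller-supplied grid: the bin count is taken from the grid itself ...
    _bins = np.linspace(0.0, 10.0, 25)
    nbins = len(_bins)                          # 25
    # ... otherwise a default grid is built over the universe of discourse.
    uod = (0.0, 10.0)
    nbins = 100
    _bins = np.linspace(uod[0], uod[1], nbins)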
@@ -380,11 +381,15 @@ class ProbabilisticWeightedFTS(ifts.IntervalFTS):
                 num = []
                 den = []
                 for s in flrgs:
-                    flrg = self.flrgs[s.get_key()]
-                    pk = flrg.lhs_conditional_probability(sample, self.sets, self.global_frequency_count, uod, nbins)
-                    wi = flrg.rhs_conditional_probability(bin, self.sets, uod, nbins)
-                    num.append(wi * pk)
-                    den.append(pk)
+                    if s.get_key() in self.flrgs:
+                        flrg = self.flrgs[s.get_key()]
+                        pk = flrg.lhs_conditional_probability(sample, self.sets, self.global_frequency_count, uod, nbins)
+                        wi = flrg.rhs_conditional_probability(bin, self.sets, uod, nbins)
+                        num.append(wi * pk)
+                        den.append(pk)
+                    else:
+                        num.append(0.0)
+                        den.append(0.000000001)
                 pf = sum(num) / sum(den)
 
                 dist.set(bin, pf)
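For context, the new guard keeps the per-bin mixture well defined when a left-hand-side pattern generated from the sample never occurred in training; the previous unguarded lookup self.flrgs[s.get_key()] assumed every generated pattern was present. The bin probability stays the normalized mixture pf = sum(wi * pk) / sum(pk), with the tiny denominator term only preventing division by zero when no known pattern matches. A minimal sketch of the same guard, with hypothetical numbers and outside the pyFTS classes:

    # Patterns never seen in training contribute zero mass instead of failing.
    known = {'A,B': (0.6, 0.3)}        # key -> (pk, wi) for trained patterns
    candidates = ['A,B', 'A,C']        # 'A,C' was never observed

    num, den = [], []
    for key in candidates:
        if key in known:
            pk, wi = known[key]
            num.append(wi * pk)
            den.append(pk)
        else:
            num.append(0.0)
            den.append(0.000000001)    # keeps the normalization finite

    pf = sum(num) / sum(den)           # 0.18 / 0.600000001, roughly 0.3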
@@ -452,11 +457,11 @@ class ProbabilisticWeightedFTS(ifts.IntervalFTS):
             tmp.set(dat, 1.0)
             ret.append(tmp)
 
-        dist = self.forecast_distribution(sample, bins=_bins)
+        dist = self.forecast_distribution(sample, bins=_bins)[0]
 
         ret.append(dist)
 
-        for k in np.arange(self.order, steps+self.order):
+        for k in np.arange(self.order+1, steps+self.order+1):
             dist = ProbabilityDistribution.ProbabilityDistribution(smooth, uod=uod, bins=_bins, **kwargs)
 
             lags = {}
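Two things change here: forecast_distribution returns a list of distributions, so only its first element is used to seed the multi-step forecast, and the loop now starts one position later so that the generated steps come after that seeded one-step-ahead distribution. A small illustration of the index shift, with hypothetical order and steps:

    import numpy as np

    # Hypothetical values, only to show the shifted loop range.
    order, steps = 2, 3
    old_range = np.arange(order, steps + order)          # [2, 3, 4]
    new_range = np.arange(order + 1, steps + order + 1)  # [3, 4, 5]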
@@ -469,7 +474,7 @@ class ProbabilisticWeightedFTS(ifts.IntervalFTS):
 
             root = tree.FLRGTreeNode(None)
 
-            self.build_tree_without_order(root, lags, 0)
+            tree.build_tree_without_order(root, lags, 0)
 
             # Trace all possible combinations between the bins of past distributions
 
@@ -487,6 +492,8 @@ class ProbabilisticWeightedFTS(ifts.IntervalFTS):
                 for bin in _bins:
                     dist.set(bin, dist.density(bin) + pk * d.density(bin))
 
             ret.append(dist)
 
+        ret = ret[self.order:]
+
         return ret
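The new ret = ret[self.order:] drops the leading entries of ret, which were seeded from the last known sample (one point-mass distribution per lag, via tmp.set(dat, 1.0) above) before any forecast was appended, so the caller receives only forecast distributions. A minimal illustration with placeholder items instead of ProbabilityDistribution objects:

    # Placeholder strings stand in for ProbabilityDistribution objects.
    order = 2
    ret = ['lag_1_point_mass', 'lag_2_point_mass', 'step_1', 'step_2', 'step_3']
    ret = ret[order:]        # ['step_1', 'step_2', 'step_3']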
@@ -1,4 +1,3 @@
-import numba
 import numpy as np
 import pandas as pd
 import matplotlib as plt
@@ -15,7 +14,6 @@ all_methods = [Grid.GridPartitioner, Entropy.EntropyPartitioner, FCM.FCMPartitio
 mfs = [Membership.trimf, Membership.gaussmf, Membership.trapmf]
 
 
-@numba.jit()
 def sliding_window_simple_search(data, windowsize, model, partitions, orders, **kwargs):
 
     _3d = len(orders) > 1
@@ -3,7 +3,7 @@
 
 import os
 import numpy as np
-#import matplotlib.pyplot as plt
+import matplotlib.pylab as plt
 #from mpl_toolkits.mplot3d import Axes3D
 
 import pandas as pd
@@ -17,9 +17,39 @@ from pyFTS.benchmarks import benchmarks as bchmk
 
 from pyFTS.models import pwfts
 
 
+'''
 bchmk.sliding_window_benchmarks(dataset, 1000, train=0.8, inc=0.2, methods=[pwfts.ProbabilisticWeightedFTS],
                                 benchmark_models=False, orders=[1], partitions=[10], #np.arange(10,100,2),
                                 progress=False, type='distribution',
                                 distributed=False, nodes=['192.168.0.106', '192.168.0.105', '192.168.0.110'],
                                 save=True, file="pwfts_taiex_interval.csv")
+'''
+
+train_split = 2000
+test_length = 200
+
+from pyFTS.partitioners import Grid, Util as pUtil
+partitioner = Grid.GridPartitioner(data=dataset[:train_split], npart=30)
+
+from pyFTS.common import fts,tree
+from pyFTS.models import hofts, pwfts
+
+pfts1_taiex = pwfts.ProbabilisticWeightedFTS("1", partitioner=partitioner)
+pfts1_taiex.fit(dataset[:train_split], save_model=True, file_path='pwfts')
+pfts1_taiex.shortname = "1st Order"
+
+print(pfts1_taiex)
+
+tmp = pfts1_taiex.predict(dataset[train_split:train_split+200], type='distribution', steps_ahead=20)
+f, ax = plt.subplots(3, 4, figsize=[20,15])
+tmp[0].plot(ax[0][0], title='t=1')
+tmp[2].plot(ax[0][1], title='t=20')
+tmp[4].plot(ax[0][2], title='t=40')
+tmp[6].plot(ax[0][3], title='t=60')
+tmp[8].plot(ax[1][0], title='t=80')
+tmp[10].plot(ax[1][1], title='t=100')
+tmp[12].plot(ax[1][2], title='t=120')
+tmp[14].plot(ax[1][3], title='t=140')
+tmp[16].plot(ax[2][0], title='t=160')
+tmp[18].plot(ax[2][1], title='t=180')
+tmp[20].plot(ax[2][2], title='t=200')
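The hand-written plotting block above can also be expressed as a loop over every second returned distribution. This is only a sketch that assumes the tmp list and the 3x4 axes grid from the script, and it labels panels by index into tmp rather than by the t= values used above:

    # Loop variant of the plotting block (assumes `tmp` and `ax` from the script).
    for i, idx in enumerate(range(0, 21, 2)):
        row, col = divmod(i, 4)
        tmp[idx].plot(ax[row][col], title='step {}'.format(idx))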