Refactoring PIFTS to PFTS; adding the data and models packages

Petrônio Cândido de Lima e Silva 2017-01-05 16:42:45 -02:00
parent 2022c2a032
commit dba1919a18
3 changed files with 137 additions and 13 deletions

data/__init__.py    Normal file (0 lines)
models/__init__.py  Normal file (0 lines)

@@ -1,7 +1,7 @@
 import numpy as np
 import pandas as pd
 import math
-from pyFTS.common import FuzzySet,FLR
+from pyFTS.common import FuzzySet, FLR
 import hofts, ifts, tree
@@ -30,11 +30,11 @@ class ProbabilisticFLRG(hofts.HighOrderFLRG):
         return self.strLHS() + " -> " + tmp2


-class ProbabilisticIntervalFTS(ifts.IntervalFTS):
+class ProbabilisticFTS(ifts.IntervalFTS):
     def __init__(self, name):
-        super(ProbabilisticIntervalFTS, self).__init__("PIFTS")
+        super(ProbabilisticFTS, self).__init__("PIFTS")
         self.shortname = "PIFTS " + name
-        self.name = "Probabilistic Interval FTS"
+        self.name = "Probabilistic FTS"
         self.detail = "Silva, P.; Guimarães, F.; Sadaei, H."
         self.flrgs = {}
         self.globalFrequency = 0
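From the caller's point of view, this hunk carries the rename announced in the commit message: code that used to construct ProbabilisticIntervalFTS now constructs ProbabilisticFTS. A minimal caller-side sketch of the migration, assuming the refactored module is importable as pfts (the module path, the "Enrollments" label and the commented training step are assumptions, not shown in this diff):

# Caller-side sketch; only the two class names come from this diff.
import pfts  # assumed module name, consistent with the flat imports (hofts, ifts, tree) above

# before this commit:
# model = pfts.ProbabilisticIntervalFTS("Enrollments")

# after this commit:
model = pfts.ProbabilisticFTS("Enrollments")

# fitting/partitioning happens elsewhere in pyFTS and is not part of this diff;
# a train(...) call from the surrounding API of that period would follow here.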
@@ -65,6 +65,14 @@ class ProbabilisticIntervalFTS(ifts.IntervalFTS):
         else:
             return 1.0 / self.globalFrequency

+    def getMidpoints(self, flrg):
+        if flrg.strLHS() in self.flrgs:
+            tmp = self.flrgs[flrg.strLHS()]
+            ret = sum(np.array([tmp.getProbability(s) * self.setsDict[s].midpoint for s in tmp.RHS]))
+        else:
+            ret = sum(np.array([0.33 * s.midpoint for s in flrg.LHS]))
+        return ret
+
     def getUpper(self, flrg):
         if flrg.strLHS() in self.flrgs:
             tmp = self.flrgs[flrg.strLHS()]
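The added getMidpoints helper returns the expected midpoint of a rule: when the FLRG's left-hand side has been seen during training, each right-hand-side set's midpoint is weighted by its empirical probability (tmp.getProbability(s)); for unseen patterns it falls back to a flat 0.33 weight over the left-hand-side midpoints. A standalone numeric sketch of the weighted-midpoint idea, with made-up set names, probabilities and midpoints:

# Standalone illustration of the probability-weighted midpoint used by getMidpoints();
# the names and numbers below are invented for the example, not taken from the diff.
rhs_probabilities = {"A1": 0.25, "A2": 0.50, "A3": 0.25}   # P(RHS set | LHS pattern)
midpoints = {"A1": 10.0, "A2": 20.0, "A3": 30.0}           # midpoint of each fuzzy set

expected_midpoint = sum(rhs_probabilities[s] * midpoints[s] for s in rhs_probabilities)
print(expected_midpoint)  # 20.0: the probability-weighted centre of the rule's consequents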
@@ -89,6 +97,106 @@ class ProbabilisticIntervalFTS(ifts.IntervalFTS):
         ret = []

+        for k in np.arange(self.order - 1, l):
+
+            # print(k)
+
+            affected_flrgs = []
+            affected_flrgs_memberships = []
+            norms = []
+
+            mp = []
+
+            # Find the sets which membership > 0 for each lag
+            count = 0
+            lags = {}
+            if self.order > 1:
+                subset = ndata[k - (self.order - 1): k + 1]
+
+                for instance in subset:
+                    mb = FuzzySet.fuzzyInstance(instance, self.sets)
+                    tmp = np.argwhere(mb)
+                    idx = np.ravel(tmp)  # flatten the array
+
+                    if idx.size == 0:  # the element is out of the bounds of the Universe of Discourse
+                        if math.ceil(instance) <= self.sets[0].lower:
+                            idx = [0]
+                        elif math.ceil(instance) >= self.sets[-1].upper:
+                            idx = [len(self.sets) - 1]
+                        else:
+                            raise Exception(instance)
+
+                    lags[count] = idx
+                    count = count + 1
+
+                # Build the tree with all possible paths
+                root = tree.FLRGTreeNode(None)
+
+                self.buildTree(root, lags, 0)
+
+                # Trace the possible paths and build the PFLRG's
+                for p in root.paths():
+                    path = list(reversed(list(filter(None.__ne__, p))))
+                    flrg = hofts.HighOrderFLRG(self.order)
+                    for kk in path: flrg.appendLHS(self.sets[kk])
+
+                    assert len(flrg.LHS) == subset.size, str(subset) + " -> " + str([s.name for s in flrg.LHS])
+
+                    ##
+                    affected_flrgs.append(flrg)
+
+                    # Find the general membership of FLRG
+                    affected_flrgs_memberships.append(min(self.getSequenceMembership(subset, flrg.LHS)))
+
+            else:
+
+                mv = FuzzySet.fuzzyInstance(ndata[k], self.sets)  # get all membership values
+                tmp = np.argwhere(mv)  # get the indices of values > 0
+                idx = np.ravel(tmp)  # flatten the array
+
+                if idx.size == 0:  # the element is out of the bounds of the Universe of Discourse
+                    if math.ceil(ndata[k]) <= self.sets[0].lower:
+                        idx = [0]
+                    elif math.ceil(ndata[k]) >= self.sets[-1].upper:
+                        idx = [len(self.sets) - 1]
+                    else:
+                        raise Exception(ndata[k])
+
+                for kk in idx:
+                    flrg = hofts.HighOrderFLRG(self.order)
+                    flrg.appendLHS(self.sets[kk])
+                    affected_flrgs.append(flrg)
+                    affected_flrgs_memberships.append(mv[kk])
+
+            count = 0
+            for flrg in affected_flrgs:
+                # find the bounds of each FLRG, weighted by probability and membership
+                norm = self.getProbability(flrg) * affected_flrgs_memberships[count]
+                if norm == 0:
+                    norm = self.getProbability(flrg)  # * 0.001
+                mp.append(norm * self.getMidpoints(flrg))
+                norms.append(norm)
+                count = count + 1
+
+            # generate the interval
+            norm = sum(norms)
+
+            if norm == 0:
+                ret.append([0, 0])
+            else:
+                ret.append(sum(mp) / norm)
+
+        return ret
+
+    def forecastInterval(self, data):
+
+        ndata = np.array(data)
+
+        l = len(ndata)
+
+        ret = []
+
         for k in np.arange(self.order - 1, l):
             # print(k)
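This hunk splits the old behaviour in two: a new forecast method that produces point forecasts, followed by the added forecastInterval header under which the previous interval-forecasting body (the unchanged context lines below it) now lives. For each window, forecast fuzzifies the lags, enumerates the affected FLRGs, weights each rule's expected midpoint (getMidpoints) by probability times membership, and returns the weighted average normalized by the sum of weights. A standalone sketch of that final aggregation step, with illustrative numbers:

# Standalone sketch of the aggregation at the end of the new forecast() loop;
# the weights and midpoints are invented for the example, not taken from the diff.
flrg_weights   = [0.6, 0.3, 0.1]     # probability * membership of each affected FLRG
flrg_midpoints = [18.0, 22.0, 30.0]  # expected midpoint of each FLRG (see getMidpoints)

weighted = [w * m for w, m in zip(flrg_weights, flrg_midpoints)]
norm = sum(flrg_weights)

point_forecast = sum(weighted) / norm if norm != 0 else 0
print(point_forecast)  # 20.4: normalized weighted average of the candidate midpoints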
@@ -184,6 +292,20 @@ class ProbabilisticIntervalFTS(ifts.IntervalFTS):
         return ret

     def forecastAhead(self, data, steps):
+        ret = [data[k] for k in np.arange(len(data) - self.order, len(data))]
+
+        for k in np.arange(self.order - 1, steps):
+
+            if ret[-1] <= self.sets[0].lower or ret[-1] >= self.sets[-1].upper:
+                ret.append(ret[-1])
+            else:
+                mp = self.forecast([ret[x] for x in np.arange(k - self.order, k)])
+
+                ret.append(mp)
+
+        return ret
+
+    def forecastAheadInterval(self, data, steps):
         ret = [[data[k], data[k]] for k in np.arange(len(data) - self.order, len(data))]

         for k in np.arange(self.order - 1, steps):
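forecastAhead keeps its name but now performs recursive point forecasting: it seeds the output with the last order observations, then repeatedly feeds the most recent values (observed or already forecast) back into forecast, simply repeating the last value once it leaves the bounds of the universe of discourse; the previous interval-ahead body moves under the added forecastAheadInterval. A standalone sketch of the feedback loop, using a dummy one_step function in place of self.forecast and a slightly simplified window indexing:

# Standalone sketch of the recursive multi-step scheme used by forecastAhead();
# one_step() is a stand-in for self.forecast() and is not part of the diff.
def one_step(window):
    return sum(window) / len(window)  # dummy point forecast

order, steps = 2, 5
data = [10.0, 12.0]

ret = list(data[-order:])                 # seed with the last `order` observations
for k in range(order - 1, steps):
    window = ret[k - order + 1 : k + 1]   # most recent `order` values, forecasts included
    ret.append(one_step(window))

print(ret)  # the first `order` entries are observed, the rest are fed-back forecasts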
@@ -191,8 +313,8 @@ class ProbabilisticIntervalFTS(ifts.IntervalFTS):
             if ret[-1][0] <= self.sets[0].lower and ret[-1][1] >= self.sets[-1].upper:
                 ret.append(ret[-1])
             else:
-                lower = self.forecast([ret[x][0] for x in np.arange(k - self.order, k)])
-                upper = self.forecast([ret[x][1] for x in np.arange(k - self.order, k)])
+                lower = self.forecastInterval([ret[x][0] for x in np.arange(k - self.order, k)])
+                upper = self.forecastInterval([ret[x][1] for x in np.arange(k - self.order, k)])

                 ret.append([np.min(lower), np.max(upper)])
@@ -258,11 +380,11 @@ class ProbabilisticIntervalFTS(ifts.IntervalFTS):
         df = pd.DataFrame(ret, columns=sorted(grid))
         return df

-    def forecastDistributionAhead(self, data, steps, resolution):
+    def forecastAheadDistribution(self, data, steps, resolution):
         ret = []

-        intervals = self.forecastAhead(data, steps)
+        intervals = self.forecastAheadInterval(data, steps)

         for k in np.arange(self.order, steps):
@@ -271,13 +393,15 @@ class ProbabilisticIntervalFTS(ifts.IntervalFTS):
             for qt in np.arange(1, 50, 2):
                 # print(qt)
-                qtle_lower = self.forecast([intervals[x][0] + qt * (intervals[x][1] - intervals[x][0]) / 100 for x in
-                                            np.arange(k - self.order, k)])
+                qtle_lower = self.forecastInterval(
+                    [intervals[x][0] + qt * (intervals[x][1] - intervals[x][0]) / 100 for x in
+                     np.arange(k - self.order, k)])
                 grid = self.gridCount(grid, resolution, np.ravel(qtle_lower))
-                qtle_upper = self.forecast([intervals[x][1] - qt * (intervals[x][1] - intervals[x][0]) / 100 for x in
-                                            np.arange(k - self.order, k)])
+                qtle_upper = self.forecastInterval(
+                    [intervals[x][1] - qt * (intervals[x][1] - intervals[x][0]) / 100 for x in
+                     np.arange(k - self.order, k)])
                 grid = self.gridCount(grid, resolution, np.ravel(qtle_upper))
-            qtle_mid = self.forecast(
+            qtle_mid = self.forecastInterval(
                 [intervals[x][0] + (intervals[x][1] - intervals[x][0]) / 2 for x in np.arange(k - self.order, k)])
             grid = self.gridCount(grid, resolution, np.ravel(qtle_mid))
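The final hunks rename forecastDistributionAhead to forecastAheadDistribution and point its internals at the renamed methods: the intervals come from forecastAheadInterval, and each quantile re-forecast now calls forecastInterval. The distribution for a horizon is approximated by sweeping quantile points between the interval bounds, re-forecasting from them, and counting the results on a grid with the requested resolution. A standalone sketch of the quantile sweep, with an illustrative interval and no pyFTS calls:

# Standalone sketch of the quantile sweep used by forecastAheadDistribution();
# the bounds below are invented for the example.
lower, upper = 100.0, 140.0   # one forecast interval for a given horizon

# points walked between the bounds, mirroring the "qt in np.arange(1, 50, 2)" percent offsets
quantile_points = [lower + qt * (upper - lower) / 100 for qt in range(1, 50, 2)]
quantile_points += [upper - qt * (upper - lower) / 100 for qt in range(1, 50, 2)]
quantile_points.append(lower + (upper - lower) / 2)   # the interval midpoint

print(min(quantile_points), max(quantile_points), len(quantile_points))
# each point would be fed back into forecastInterval() and the results binned on the
# resolution grid via gridCount() to approximate the predictive distribution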