Benchmark optimizations and bug fixes in PFTS
parent ba1b4fbae6
commit 53029681d8
@@ -32,8 +32,8 @@ def plotDistribution(dist):
                    vmin=0, vmax=1, edgecolors=None)
 
 
-def plotComparedSeries(original, models, colors):
-    fig = plt.figure(figsize=[15, 5])
+def plotComparedSeries(original, models, colors, typeonlegend=False, save=False, file=None,tam=[20, 5]):
+    fig = plt.figure(figsize=tam)
     ax = fig.add_subplot(111)
 
     mi = []
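Note: the new keyword arguments let a benchmark script control the figure size and persist the plot instead of only displaying it. A minimal usage sketch, assuming the plotting helpers are importable as "benchmarks" and that enrollments, model_a and model_b are a series plus two fitted FTS models (all of these names are placeholders, not part of this commit):

    import benchmarks  # assumed import path for the plotting helpers shown above

    # enrollments: the original series; model_a, model_b: fitted FTS models (placeholders)
    benchmarks.plotComparedSeries(enrollments, [model_a, model_b],
                                  colors=["blue", "green"],
                                  typeonlegend=True,   # appends " (Point)" / " (Interval)" to legend labels
                                  save=True, file="comparison.png",
                                  tam=[20, 5])         # passed straight to plt.figure(figsize=tam)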
@@ -48,7 +48,9 @@ def plotComparedSeries(original, models, colors):
             ma.append(max(forecasted))
             for k in np.arange(0, fts.order):
                 forecasted.insert(0, None)
-            ax.plot(forecasted, color=colors[count], label=fts.shortname, ls="-")
+            lbl = fts.shortname
+            if typeonlegend: lbl += " (Point)"
+            ax.plot(forecasted, color=colors[count], label=lbl, ls="-")
 
         if fts.hasIntervalForecasting:
             forecasted = fts.forecastInterval(original)
@@ -59,7 +61,9 @@ def plotComparedSeries(original, models, colors):
             for k in np.arange(0, fts.order):
                 lower.insert(0, None)
                 upper.insert(0, None)
-            ax.plot(lower, color=colors[count], label=fts.shortname,ls="--")
+            lbl = fts.shortname
+            if typeonlegend: lbl += " (Interval)"
+            ax.plot(lower, color=colors[count], label=lbl,ls="--")
             ax.plot(upper, color=colors[count],ls="--")
 
         handles0, labels0 = ax.get_legend_handles_labels()
@@ -71,32 +75,58 @@ def plotComparedSeries(original, models, colors):
     ax.set_xlabel('T')
     ax.set_xlim([0, len(original)])
 
+    if save:
+        fig.savefig(file)
+        plt.close(fig)
+
 
-def plotComparedIntervalsAhead(original, models, colors, distributions, time_from, time_to):
-    fig = plt.figure(figsize=[25, 10])
+def plotComparedIntervalsAhead(original, models, colors, distributions, time_from, time_to, interpol=False, save=False, file=None,tam=[20, 5]):
+    fig = plt.figure(figsize=tam)
     ax = fig.add_subplot(111)
 
+    percentile = (max(original) - min(original))/100
+
     mi = []
     ma = []
 
     count = 0
     for fts in models:
         if fts.hasDistributionForecasting and distributions[count]:
-            density = fts.forecastDistributionAhead(original[:time_from], time_to, 25)
+            density = fts.forecastAheadDistribution(original[time_from - fts.order:time_from], time_to, percentile)
+
+            y = density.columns
+            t = len(y)
+
+            # interpol between time_from and time_from+1
+            if interpol:
+                diffs = [density[q][0] / 50 for q in density]
+                for p in np.arange(0, 50):
+                    xx = [(time_from - 1) + 0.02 * p for q in np.arange(0, t)]
+                    alpha2 = np.array([diffs[q] * p for q in np.arange(0, t)]) * 100
+                    ax.scatter(xx, y, c=alpha2, marker='s', linewidths=0, cmap='Oranges',
+                               norm=pltcolors.Normalize(vmin=0, vmax=1), vmin=0, vmax=1, edgecolors=None)
+
             for k in density.index:
-                alpha = np.array([density[x][k] for x in density]) * 100
-                x = [time_from + fts.order + k for x in np.arange(0, len(alpha))]
-                y = density.columns
+                alpha = np.array([density[q][k] for q in density]) * 100
+
+                x = [time_from + k for x in np.arange(0, t)]
+
                 ax.scatter(x, y, c=alpha, marker='s', linewidths=0, cmap='Oranges',
                            norm=pltcolors.Normalize(vmin=0, vmax=1), vmin=0, vmax=1, edgecolors=None)
+                if interpol and k < max(density.index):
+                    diffs = [(density[q][k + 1] - density[q][k])/50 for q in density]
+                    for p in np.arange(0,50):
+                        xx = [time_from + k + 0.02*p for q in np.arange(0, t)]
+                        alpha2 = np.array([density[density.columns[q]][k] + diffs[q]*p for q in np.arange(0, t)]) * 100
+                        ax.scatter(xx, y, c=alpha2, marker='s', linewidths=0, cmap='Oranges',
+                                   norm=pltcolors.Normalize(vmin=0, vmax=1), vmin=0, vmax=1, edgecolors=None)
 
         if fts.hasIntervalForecasting:
-            forecasts = fts.forecastAhead(original[:time_from], time_to)
+            forecasts = fts.forecastAheadInterval(original[time_from - fts.order:time_from], time_to)
             lower = [kk[0] for kk in forecasts]
             upper = [kk[1] for kk in forecasts]
             mi.append(min(lower))
             ma.append(max(upper))
-            for k in np.arange(0, time_from):
+            for k in np.arange(0, time_from-fts.order):
                 lower.insert(0, None)
                 upper.insert(0, None)
             ax.plot(lower, color=colors[count], label=fts.shortname)
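Note: the optional interpol pass fills the visual gap between consecutive forecast steps by linearly blending the density values over 50 sub-steps of width 0.02. A self-contained sketch of that arithmetic, with made-up density rows:

    import numpy as np

    # density row at step k and at step k+1, one value per universe-of-discourse column (made up)
    row_k  = np.array([0.10, 0.60, 0.30])
    row_k1 = np.array([0.05, 0.40, 0.55])

    diffs = (row_k1 - row_k) / 50        # per-column increment, as in the diff above
    for p in range(50):
        x_offset = 0.02 * p              # 50 sub-steps of 0.02 span one time unit
        blended = row_k + diffs * p      # linear blend between the two density rows
        # the plot then scatters (time_from + k + x_offset, columns) shaded by blended * 100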
@@ -110,16 +140,20 @@ def plotComparedIntervalsAhead(original, models, colors, distributions, time_fro
                 forecasts.insert(0, None)
             ax.plot(forecasts, color=colors[count], label=fts.shortname)
 
         handles0, labels0 = ax.get_legend_handles_labels()
         ax.legend(handles0, labels0)
         count = count + 1
     ax.plot(original, color='black', label="Original")
     handles0, labels0 = ax.get_legend_handles_labels()
     ax.legend(handles0, labels0, loc=2)
     # ax.set_title(fts.name)
     ax.set_ylim([min(mi), max(ma)])
     ax.set_ylabel('F(T)')
     ax.set_xlabel('T')
     ax.set_xlim([0, len(original)])
 
+    if save:
+        fig.savefig(file)
+        plt.close(fig)
+
 
 def plotCompared(original, forecasts, labels, title):
     fig = plt.figure(figsize=[13, 6])
pfts.py  (40 changed lines)
@@ -12,9 +12,9 @@ class ProbabilisticFLRG(hofts.HighOrderFLRG):
         self.frequencyCount = 0.0
 
     def appendRHS(self, c):
-        self.frequencyCount += 1
+        self.frequencyCount += 1.0
         if c.name in self.RHS:
-            self.RHS[c.name] += 1
+            self.RHS[c.name] += 1.0
         else:
             self.RHS[c.name] = 1.0
 
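Note: the counters are now incremented uniformly as floats, presumably as a guard so that RHS[c] / frequencyCount can never fall into integer floor division (the counters are already created as floats, so the change mainly keeps them consistent). A tiny illustration of why float counts matter for a probability ratio (counts are made up):

    # hypothetical rule frequencies
    rhs_count, total = 2, 5
    print(rhs_count / total)            # Python 3: 0.4; Python 2 with int operands: 0
    print(float(rhs_count) / total)     # 0.4 under either interpreter, which float counters guarantee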
@@ -26,7 +26,7 @@ class ProbabilisticFLRG(hofts.HighOrderFLRG):
         for c in sorted(self.RHS):
             if len(tmp2) > 0:
                 tmp2 = tmp2 + ", "
-            tmp2 = tmp2 + c + "(" + str(round(self.RHS[c] / self.frequencyCount, 3)) + ")"
+            tmp2 = tmp2 + "(" + str(round(self.RHS[c] / self.frequencyCount, 3)) + ")" + c
         return self.strLHS() + " -> " + tmp2
 
 
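Note: the string-formatting change moves the conditional probability in front of the consequent set name. A small sketch of the resulting rule string (set names and frequencies are made up):

    # hypothetical ProbabilisticFLRG contents
    RHS = {"A2": 3.0, "A3": 1.0}
    frequencyCount = 4.0
    rhs_str = ", ".join("(" + str(round(RHS[c] / frequencyCount, 3)) + ")" + c
                        for c in sorted(RHS))
    print("A1, A2 -> " + rhs_str)       # A1, A2 -> (0.75)A2, (0.25)A3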
@@ -60,21 +60,31 @@ class ProbabilisticFTS(ifts.IntervalFTS):
                 flrgs[flrg.strLHS()].appendRHS(flrs[k-1].RHS)
             if self.dump: print("RHS: " + str(flrs[k-1]))
 
-            self.globalFrequency = self.globalFrequency + 1
+            self.globalFrequency += 1
         return (flrgs)
 
+    def addNewPFLGR(self,flrg):
+        if flrg.strLHS() not in self.flrgs:
+            tmp = ProbabilisticFLRG(self.order)
+            for fs in flrg.LHS: tmp.appendLHS(fs)
+            tmp.appendRHS(flrg.LHS[-1])
+            self.flrgs[tmp.strLHS()] = tmp;
+            self.globalFrequency += 1
+
     def getProbability(self, flrg):
         if flrg.strLHS() in self.flrgs:
             return self.flrgs[flrg.strLHS()].frequencyCount / self.globalFrequency
         else:
-            return 1.0 / self.globalFrequency
+            self.addNewPFLGR(flrg)
+            return self.getProbability(flrg)
 
     def getMidpoints(self, flrg):
         if flrg.strLHS() in self.flrgs:
             tmp = self.flrgs[flrg.strLHS()]
             ret = sum(np.array([tmp.getProbability(s) * self.setsDict[s].centroid for s in tmp.RHS]))
         else:
-            ret = sum(np.array([0.33 * s.centroid for s in flrg.LHS]))
+            pi = 1 / len(flrg.LHS)
+            ret = sum(np.array([pi * s.centroid for s in flrg.LHS]))
         return ret
 
     def getUpper(self, flrg):
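Note: getProbability now registers an unseen FLRG through addNewPFLGR instead of returning the ad-hoc 1.0 / self.globalFrequency, and the midpoint/upper/lower fallbacks weight each LHS set by pi = 1 / len(flrg.LHS) rather than the hard-coded 0.33, so they stay consistent for any model order. A sketch of the fallback midpoint with made-up centroids:

    import numpy as np

    lhs_centroids = np.array([10.0, 20.0])   # centroids of the LHS fuzzy sets (order 2, made up)
    pi = 1 / len(lhs_centroids)              # uniform weight, replaces the hard-coded 0.33
    midpoint = np.sum(pi * lhs_centroids)    # 15.0, the mean of the LHS centroids
    print(midpoint)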
@@ -82,7 +92,8 @@ class ProbabilisticFTS(ifts.IntervalFTS):
             tmp = self.flrgs[flrg.strLHS()]
             ret = sum(np.array([tmp.getProbability(s) * self.setsDict[s].upper for s in tmp.RHS]))
         else:
-            ret = sum(np.array([0.33 * s.upper for s in flrg.LHS]))
+            pi = 1 / len(flrg.LHS)
+            ret = sum(np.array([pi * s.upper for s in flrg.LHS]))
         return ret
 
     def getLower(self, flrg):
@@ -90,7 +101,8 @@ class ProbabilisticFTS(ifts.IntervalFTS):
             tmp = self.flrgs[flrg.strLHS()]
             ret = sum(np.array([tmp.getProbability(s) * self.setsDict[s].lower for s in tmp.RHS]))
         else:
-            ret = sum(np.array([0.33 * s.lower for s in flrg.LHS]))
+            pi = 1 / len(flrg.LHS)
+            ret = sum(np.array([pi * s.lower for s in flrg.LHS]))
         return ret
 
     def forecast(self, data):
@@ -224,9 +236,9 @@ class ProbabilisticFTS(ifts.IntervalFTS):
             idx = np.ravel(tmp)  # flatten the array
 
             if idx.size == 0:  # the element is out of the bounds of the Universe of Discourse
-                if math.ceil(instance) <= self.sets[0].lower:
+                if instance <= self.sets[0].lower:
                     idx = [0]
-                elif math.ceil(instance) >= self.sets[-1].upper:
+                elif instance >= self.sets[-1].upper:
                     idx = [len(self.sets) - 1]
                 else:
                     raise Exception(instance)
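Note: dropping math.ceil fixes the clamping of out-of-range values, presumably for universes of discourse with non-integer bounds, where the old test could fail both branches and reach the raise. A minimal illustration with made-up bounds:

    import math

    lower_bound = 9.7                 # hypothetical lower bound of the first fuzzy set
    instance = 9.5                    # below the universe of discourse

    print(math.ceil(instance) <= lower_bound)   # False -> old code fell through toward the raise
    print(instance <= lower_bound)              # True  -> new code clamps to the first set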
@@ -262,9 +274,9 @@ class ProbabilisticFTS(ifts.IntervalFTS):
                 idx = np.ravel(tmp)  # flatten the array
 
                 if idx.size == 0:  # the element is out of the bounds of the Universe of Discourse
-                    if math.ceil(ndata[k]) <= self.sets[0].lower:
+                    if ndata[k] <= self.sets[0].lower:
                         idx = [0]
-                    elif math.ceil(ndata[k]) >= self.sets[-1].upper:
+                    elif ndata[k] >= self.sets[-1].upper:
                         idx = [len(self.sets) - 1]
                     else:
                         raise Exception(ndata[k])
@@ -312,7 +324,7 @@ class ProbabilisticFTS(ifts.IntervalFTS):
     def forecastAheadInterval(self, data, steps):
         ret = [[data[k], data[k]] for k in np.arange(len(data) - self.order, len(data))]
 
-        for k in np.arange(self.order - 1, steps):
+        for k in np.arange(self.order, steps+self.order):
 
             if ret[-1][0] <= self.sets[0].lower and ret[-1][1] >= self.sets[-1].upper:
                 ret.append(ret[-1])
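Note: forecastAheadInterval seeds ret with the last self.order observed values, so iterating from self.order to steps + self.order appends exactly steps new intervals; the old bounds produced only steps - order + 1 forecasts. The index arithmetic (order and steps are made up):

    import numpy as np

    order, steps = 2, 10
    old_iters = len(np.arange(order - 1, steps))      # 9 iterations for 10 requested steps
    new_iters = len(np.arange(order, steps + order))  # 10 iterations, one per requested step
    print(old_iters, new_iters)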
@@ -390,7 +402,7 @@ class ProbabilisticFTS(ifts.IntervalFTS):
 
         intervals = self.forecastAheadInterval(data, steps)
 
-        for k in np.arange(self.order, steps):
+        for k in np.arange(self.order, steps+self.order):
 
             grid = self.getGridClean(resolution)
             grid = self.gridCount(grid, resolution, intervals[k])