- FLRG improvements and refactorings on affected classes

This commit is contained in:
Petrônio Cândido 2017-10-06 15:09:10 -03:00
parent a89ec9dd46
commit 04b1ffea85
11 changed files with 131 additions and 106 deletions

View File

@@ -29,8 +29,6 @@ class ConventionalFLRG(flrg.FLRG):
         return tmp + tmp2
 class ConventionalFTS(fts.FTS):
     """Conventional Fuzzy Time Series"""
     def __init__(self, name, **kwargs):
@@ -74,9 +72,8 @@ class ConventionalFTS(fts.FTS):
                 ret.append(actual.centroid)
             else:
                 flrg = self.flrgs[actual.name]
-                mp = self.getMidpoints(flrg)
-                ret.append(sum(mp) / len(mp))
+                ret.append(flrg.get_midpoint())
         ret = self.doInverseTransformations(ret, params=[data[self.order - 1:]])

View File

@@ -1,3 +1,4 @@
+import numpy as np
 class FLRG(object):
@@ -10,6 +11,33 @@ class FLRG(object):
         self.lower = None
         self.upper = None
+    def get_midpoint(self):
+        if self.midpoint is None:
+            self.midpoint = sum(self.get_midpoints())/len(self.RHS)
+        return self.midpoint
+    def get_midpoints(self):
+        if isinstance(self.RHS, list):
+            return np.array([s.centroid for s in self.RHS])
+        elif isinstance(self.RHS, dict):
+            return np.array([self.RHS[s].centroid for s in self.RHS.keys()])
+    def get_lower(self):
+        if self.lower is None:
+            if isinstance(self.RHS, list):
+                self.lower = min([rhs.lower for rhs in self.RHS])
+            elif isinstance(self.RHS, dict):
+                self.lower = min([self.RHS[s].lower for s in self.RHS.keys()])
+        return self.lower
+    def get_upper(self, t):
+        if self.upper is None:
+            if isinstance(self.RHS, list):
+                self.upper = max([rhs.upper for rhs in self.RHS])
+            elif isinstance(self.RHS, dict):
+                self.upper = max([self.RHS[s].upper for s in self.RHS.keys()])
+        return self.upper
     def __len__(self):
         return len(self.RHS)
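
The hunk above centralizes midpoint and bound computation in the FLRG base class, lazily caching the result and accepting either a list or a dict as RHS. A minimal, self-contained sketch of that pattern (the FuzzySet stand-in below is hypothetical and only carries the attributes the getters read):

    import numpy as np

    class FuzzySet:
        # hypothetical stand-in: only the attributes the FLRG getters rely on
        def __init__(self, name, lower, centroid, upper):
            self.name = name
            self.lower = lower
            self.centroid = centroid
            self.upper = upper

    class FLRG:
        def __init__(self):
            self.RHS = []          # may be a list or a dict of FuzzySet
            self.midpoint = None   # lazily computed cache
            self.lower = None
            self.upper = None

        def get_midpoints(self):
            # handles both container types, mirroring the patch
            if isinstance(self.RHS, dict):
                return np.array([s.centroid for s in self.RHS.values()])
            return np.array([s.centroid for s in self.RHS])

        def get_midpoint(self):
            if self.midpoint is None:
                self.midpoint = self.get_midpoints().mean()
            return self.midpoint

    g = FLRG()
    g.RHS = [FuzzySet("A1", 0.0, 1.0, 2.0), FuzzySet("A2", 1.0, 2.0, 3.0)]
    print(g.get_midpoint())   # 1.5, computed once and then cached

With the caching in place, callers such as ConventionalFTS.forecast can simply ask flrg.get_midpoint() instead of recomputing the average of the RHS centroids on every step.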

View File

@@ -132,10 +132,6 @@ class FTS(object):
         """
         pass
-    def getMidpoints(self, flrg):
-        ret = np.array([s.centroid for s in flrg.RHS])
-        return ret
     def appendTransformation(self, transformation):
         if transformation is not None:
             self.transformations.append(transformation)

View File

@@ -84,10 +84,6 @@ class HighOrderFTS(fts.FTS):
         flrs = FLR.generateRecurrentFLRs(tmpdata)
         self.flrgs = self.generateFLRG(flrs)
-    def getMidpoints(self, flrg):
-        ret = np.array([self.setsDict[s].centroid for s in flrg.RHS])
-        return ret
     def forecast(self, data, **kwargs):
         ret = []
@@ -109,9 +105,7 @@ class HighOrderFTS(fts.FTS):
                 ret.append(tmpdata[-1].centroid)
             else:
                 flrg = self.flrgs[tmpflrg.strLHS()]
-                mp = self.getMidpoints(flrg)
-                ret.append(sum(mp) / len(mp))
+                ret.append(flrg.get_midpoint())
         ret = self.doInverseTransformations(ret, params=[data[self.order-1:]])

View File

@@ -23,27 +23,27 @@ class IntervalFTS(hofts.HighOrderFTS):
         self.has_interval_forecasting = True
         self.is_high_order = True
-    def getUpper(self, flrg):
+    def get_upper(self, flrg):
         if flrg.strLHS() in self.flrgs:
             tmp = self.flrgs[flrg.strLHS()]
-            ret = max(np.array([self.setsDict[s].upper for s in tmp.RHS]))
+            ret = tmp.get_upper()
         else:
             ret = flrg.LHS[-1].upper
         return ret
-    def getLower(self, flrg):
+    def get_lower(self, flrg):
         if flrg.strLHS() in self.flrgs:
             tmp = self.flrgs[flrg.strLHS()]
-            ret = min(np.array([self.setsDict[s].lower for s in tmp.RHS]))
+            ret = tmp.get_lower()
         else:
             ret = flrg.LHS[-1].lower
         return ret
-    def getSequenceMembership(self, data, fuzzySets):
+    def get_sequence_membership(self, data, fuzzySets):
         mb = [fuzzySets[k].membership(data[k]) for k in np.arange(0, len(data))]
         return mb
-    def buildTree(self, node, lags, level):
+    def build_tree(self, node, lags, level):
         if level >= self.order:
             return
@@ -51,7 +51,7 @@ class IntervalFTS(hofts.HighOrderFTS):
             node.appendChild(tree.FLRGTreeNode(s))
         for child in node.getChildren():
-            self.buildTree(child, lags, level + 1)
+            self.build_tree(child, lags, level + 1)
     def forecastInterval(self, data, **kwargs):
@@ -96,7 +96,7 @@ class IntervalFTS(hofts.HighOrderFTS):
             root = tree.FLRGTreeNode(None)
-            self.buildTree(root, lags, 0)
+            self.build_tree(root, lags, 0)
             # Traça os possíveis caminhos e costrói as HOFLRG's
@@ -132,8 +132,8 @@ class IntervalFTS(hofts.HighOrderFTS):
             count = 0
             for flrg in affected_flrgs:
                 # achar o os bounds de cada FLRG, ponderados pela pertinência
-                up.append(affected_flrgs_memberships[count] * self.getUpper(flrg))
-                lo.append(affected_flrgs_memberships[count] * self.getLower(flrg))
+                up.append(affected_flrgs_memberships[count] * self.get_upper(flrg))
+                lo.append(affected_flrgs_memberships[count] * self.get_lower(flrg))
                 count = count + 1
             # gerar o intervalo
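
The renamed get_upper/get_lower now delegate to the FLRG's own cached bounds, and each affected FLRG contributes bounds weighted by its membership before the interval is assembled. A rough numeric illustration of that weighting (the values are invented, and the final normalization by the sum of memberships follows the surrounding forecastInterval logic):

    import numpy as np

    # hypothetical memberships and bounds of three affected FLRGs
    memberships = np.array([0.2, 0.5, 0.3])
    uppers = np.array([3.0, 4.0, 5.0])
    lowers = np.array([1.0, 2.0, 3.0])

    up = memberships * uppers   # membership-weighted upper bounds
    lo = memberships * lowers   # membership-weighted lower bounds

    norm = memberships.sum()
    print([lo.sum() / norm, up.sum() / norm])   # [2.1, 4.1]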

View File

@@ -70,10 +70,6 @@ class ImprovedWeightedFTS(fts.FTS):
         flrs = FLR.generateRecurrentFLRs(tmpdata)
         self.flrgs = self.generateFLRG(flrs)
-    def getMidpoints(self, flrg):
-        ret = np.array([self.setsDict[s].centroid for s in flrg.RHS])
-        return ret
     def forecast(self, data, **kwargs):
         l = 1
@@ -94,7 +90,7 @@ class ImprovedWeightedFTS(fts.FTS):
                 ret.append(actual.centroid)
             else:
                 flrg = self.flrgs[actual.name]
-                mp = self.getMidpoints(flrg)
+                mp = flrg.get_midpoints()
                 ret.append(mp.dot(flrg.weights()))
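
ImprovedWeightedFTS keeps its weighted aggregation but now asks the FLRG for its midpoints. A tiny illustration of the mp.dot(flrg.weights()) step with made-up numbers (the weights are assumed to already sum to 1):

    import numpy as np

    mp = np.array([1.0, 2.0, 3.0])        # flrg.get_midpoints()
    weights = np.array([0.2, 0.3, 0.5])   # flrg.weights()
    print(mp.dot(weights))                # 2.3, the weighted forecast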

View File

@@ -11,24 +11,16 @@ class NonStationaryFLRG(flrg.FLRG):
     def get_midpoint(self, t):
         if self.midpoint is None:
-            tmp = []
-            for r in self.RHS:
-                tmp.append(r.get_midpoint(t))
+            tmp = [r.get_midpoint(t) for r in self.RHS]
             self.midpoint = sum(tmp) / len(tmp)
         return self.midpoint
     def get_lower(self, t):
         if self.lower is None:
-            tmp = []
-            for r in self.RHS:
-                tmp.append(r.get_midpoint(t))
-            self.lower = min(tmp)
+            self.lower = min([r.get_lower(t) for r in self.RHS])
         return self.lower
     def get_upper(self, t):
         if self.upper is None:
-            tmp = []
-            for r in self.RHS:
-                tmp.append(r.get_midpoint(t))
-            self.upper = max(tmp)
+            self.upper = min([r.get_upper(t) for r in self.RHS])
         return self.upper
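
In the non-stationary variant the right-hand-side sets are time-dependent, so the aggregation takes the time index t. A small sketch with a hypothetical set whose midpoint drifts linearly in time, mirroring the list comprehension introduced above:

    class NonStationarySet:
        # hypothetical set whose midpoint drifts linearly with time t
        def __init__(self, base, drift):
            self.base = base
            self.drift = drift

        def get_midpoint(self, t):
            return self.base + self.drift * t

    rhs = [NonStationarySet(1.0, 0.1), NonStationarySet(2.0, 0.2)]
    t = 10
    tmp = [r.get_midpoint(t) for r in rhs]   # same comprehension as the patch
    print(sum(tmp) / len(tmp))               # 3.0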

View File

@@ -1,55 +1,64 @@
 import numpy as np
 from pyFTS.common import FuzzySet, FLR
 from pyFTS import fts, hofts
-from pyFTS.nonstationary import common
+from pyFTS.nonstationary import common, flrg
-class HighOrderNonStationaryFLRG(hofts.HighOrderFLRG):
+class HighOrderNonStationaryFLRG(flrg.NonStationaryFLRG):
     """First Order NonStationary Fuzzy Logical Relationship Group"""
-    def __init__(self, order):
-        super(HighOrderNonStationaryFLRG, self).__init__(order)
-    def get_midpoint(self, t):
-        if self.midpoint is None:
-            tmp = []
-            for r in self.RHS:
-                tmp.append(r.get_midpoint(t))
-            self.midpoint = sum(tmp)/len(tmp)
-        return self.midpoint
-    def get_lower(self, t):
-        if self.lower is None:
-            tmp = []
-            for r in self.RHS:
-                tmp.append(r.get_midpoint(t))
-            self.lower = min(tmp)
-        return self.lower
-    def get_upper(self, t):
-        if self.upper is None:
-            tmp = []
-            for r in self.RHS:
-                tmp.append(r.get_midpoint(t))
-            self.upper = max(tmp)
-        return self.upper
+    def __init__(self, order, **kwargs):
+        super(HighOrderNonStationaryFLRG, self).__init__(order, **kwargs)
+        self.LHS = []
+        self.RHS = {}
+        self.strlhs = ""
+    def appendRHS(self, c):
+        if c.name not in self.RHS:
+            self.RHS[c.name] = c
+    def strLHS(self):
+        if len(self.strlhs) == 0:
+            for c in self.LHS:
+                if len(self.strlhs) > 0:
+                    self.strlhs += ", "
+                self.strlhs = self.strlhs + c.name
+        return self.strlhs
+    def appendLHS(self, c):
+        self.LHS.append(c)
+    def __str__(self):
+        tmp = ""
+        for c in sorted(self.RHS):
+            if len(tmp) > 0:
+                tmp = tmp + ","
+            tmp = tmp + c
+        return self.strLHS() + " -> " + tmp
-class NonStationaryFTS(fts.FTS):
+class HighOrderNonStationaryFTS(hofts.HighOrderFLRG):
     """NonStationaryFTS Fuzzy Time Series"""
     def __init__(self, name, **kwargs):
-        super(NonStationaryFTS, self).__init__(1, "NSFTS " + name, **kwargs)
-        self.name = "Non Stationary FTS"
+        super(HighOrderNonStationaryFTS, self).__init__(1, "HONSFTS " + name, **kwargs)
+        self.name = "High Order Non Stationary FTS"
         self.detail = ""
         self.flrgs = {}
     def generateFLRG(self, flrs):
         flrgs = {}
-        for flr in flrs:
-            if flr.LHS.name in flrgs:
-                flrgs[flr.LHS.name].append(flr.RHS)
+        l = len(flrs)
+        for k in np.arange(self.order + 1, l):
+            flrg = HighOrderNonStationaryFLRG(self.order)
+            for kk in np.arange(k - self.order, k):
+                flrg.appendLHS(flrs[kk].LHS)
+            if flrg.strLHS() in flrgs:
+                flrgs[flrg.strLHS()].appendRHS(flrs[k].RHS)
             else:
-                flrgs[flr.LHS.name] = NonStationaryFLRG(flr.LHS)
-                flrgs[flr.LHS.name].append(flr.RHS)
+                flrgs[flrg.strLHS()] = flrg;
+                flrgs[flrg.strLHS()].appendRHS(flrs[k].RHS)
         return (flrgs)
     def train(self, data, sets=None,order=1,parameters=None):
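
The new generateFLRG builds high-order groups by sliding a window of length self.order over the FLR sequence and keying each group by its left-hand-side string. A simplified sketch of that grouping, using plain strings instead of fuzzy sets and starting the window at order (the patch itself starts at self.order + 1):

    import numpy as np

    # hypothetical FLRs as (LHS set name, RHS set name) pairs
    flrs = [("A1", "A2"), ("A2", "A2"), ("A2", "A3"), ("A3", "A1"), ("A1", "A2")]
    order = 2
    flrgs = {}

    for k in np.arange(order, len(flrs)):
        lhs = [flrs[kk][0] for kk in np.arange(k - order, k)]   # window of LHS names
        key = ", ".join(lhs)                                    # same role as strLHS()
        flrgs.setdefault(key, set()).add(flrs[k][1])            # appendRHS keeps unique sets

    for key, rhs in sorted(flrgs.items()):
        print(key, "->", sorted(rhs))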

View File

@@ -15,25 +15,28 @@ class ProbabilisticWeightedFLRG(hofts.HighOrderFLRG):
     def __init__(self, order):
         super(ProbabilisticWeightedFLRG, self).__init__(order)
         self.RHS = {}
+        self.rhs_count = {}
         self.frequency_count = 0.0
         self.Z = None
     def appendRHS(self, c):
         self.frequency_count += 1.0
         if c.name in self.RHS:
-            self.RHS[c.name] += 1.0
+            self.rhs_count[c.name] += 1.0
         else:
-            self.RHS[c.name] = 1.0
+            self.RHS[c.name] = c
+            self.rhs_count[c.name] = 1.0
     def appendRHSFuzzy(self, c, mv):
         self.frequency_count += mv
         if c.name in self.RHS:
-            self.RHS[c.name] += mv
+            self.rhs_count[c.name] += mv
         else:
-            self.RHS[c.name] = mv
+            self.RHS[c.name] = c
+            self.rhs_count[c.name] = mv
     def get_RHSprobability(self, c):
-        return self.RHS[c] / self.frequency_count
+        return self.rhs_count[c] / self.frequency_count
     def lhs_probability(self, x, norm, uod, nbins):
         pk = self.frequency_count / norm
@@ -44,8 +47,8 @@ class ProbabilisticWeightedFLRG(hofts.HighOrderFLRG):
     def rhs_conditional_probability(self, x, sets, uod, nbins):
         total = 0.0
-        for rhs in self.RHS:
-            set = sets[rhs]
+        for rhs in self.RHS.keys():
+            set = self.RHS[rhs]
             wi = self.get_RHSprobability(rhs)
             mv = set.membership(x) / set.partition_function(uod, nbins=nbins)
             total += wi * mv
@@ -58,7 +61,7 @@ class ProbabilisticWeightedFLRG(hofts.HighOrderFLRG):
             mv.append(set.membership(x[count]))
         min_mv = np.prod(mv)
         return min_mv
     def partition_function(self, uod, nbins=100):
         if self.Z is None:
@@ -69,12 +72,24 @@ class ProbabilisticWeightedFLRG(hofts.HighOrderFLRG):
         return self.Z
+    def get_midpoint(self):
+        return sum(np.array([self.get_RHSprobability(s.name) * self.RHS[s].centroid
+                             for s in self.RHS.keys()]))
+    def get_upper(self):
+        return sum(np.array([self.get_RHSprobability(s.name) * self.RHS[s].upper
+                             for s in self.RHS.keys()]))
+    def get_lower(self):
+        return sum(np.array([self.get_RHSprobability(s.name) * self.RHS[s].lower
+                             for s in self.RHS.keys()]))
     def __str__(self):
         tmp2 = ""
-        for c in sorted(self.RHS):
+        for c in sorted(self.RHS.keys()):
             if len(tmp2) > 0:
                 tmp2 = tmp2 + ", "
-            tmp2 = tmp2 + "(" + str(round(self.RHS[c] / self.frequency_count, 3)) + ")" + c
+            tmp2 = tmp2 + "(" + str(round(self.rhs_count[c] / self.frequency_count, 3)) + ")" + c
         return self.strLHS() + " -> " + tmp2
@@ -136,7 +151,7 @@ class ProbabilisticWeightedFTS(ifts.IntervalFTS):
         root = tree.FLRGTreeNode(None)
-        self.buildTreeWithoutOrder(root, lags, 0)
+        self.build_tree_without_order(root, lags, 0)
         # Trace the possible paths
         for p in root.paths():
@@ -214,10 +229,10 @@ class ProbabilisticWeightedFTS(ifts.IntervalFTS):
            self.add_new_PWFLGR(flrg)
        return self.get_flrg_global_probability(flrg)
-    def getMidpoints(self, flrg):
+    def get_midpoint(self, flrg):
         if flrg.strLHS() in self.flrgs:
             tmp = self.flrgs[flrg.strLHS()]
-            ret = sum(np.array([tmp.get_RHSprobability(s) * self.setsDict[s].centroid for s in tmp.RHS]))
+            ret = tmp.get_midpoint() #sum(np.array([tmp.get_RHSprobability(s) * self.setsDict[s].centroid for s in tmp.RHS]))
         else:
             pi = 1 / len(flrg.LHS)
             ret = sum(np.array([pi * s.centroid for s in flrg.LHS]))
@@ -241,25 +256,25 @@ class ProbabilisticWeightedFTS(ifts.IntervalFTS):
             ret = sum(np.array([pi * self.setsDict[s].membership(x) for s in flrg.LHS]))
         return ret
-    def getUpper(self, flrg):
+    def get_upper(self, flrg):
         if flrg.strLHS() in self.flrgs:
             tmp = self.flrgs[flrg.strLHS()]
-            ret = sum(np.array([tmp.get_RHSprobability(s) * self.setsDict[s].upper for s in tmp.RHS]))
+            ret = tmp.get_upper()
         else:
             pi = 1 / len(flrg.LHS)
             ret = sum(np.array([pi * s.upper for s in flrg.LHS]))
         return ret
-    def getLower(self, flrg):
+    def get_lower(self, flrg):
         if flrg.strLHS() in self.flrgs:
             tmp = self.flrgs[flrg.strLHS()]
-            ret = sum(np.array([tmp.get_RHSprobability(s) * self.setsDict[s].lower for s in tmp.RHS]))
+            ret = tmp.get_lower()
         else:
             pi = 1 / len(flrg.LHS)
             ret = sum(np.array([pi * s.lower for s in flrg.LHS]))
         return ret
-    def buildTreeWithoutOrder(self, node, lags, level):
+    def build_tree_without_order(self, node, lags, level):
         if level not in lags:
             return
@@ -268,7 +283,7 @@ class ProbabilisticWeightedFTS(ifts.IntervalFTS):
             node.appendChild(tree.FLRGTreeNode(s))
         for child in node.getChildren():
-            self.buildTreeWithoutOrder(child, lags, level + 1)
+            self.build_tree_without_order(child, lags, level + 1)
     def forecast(self, data, **kwargs):
@@ -316,7 +331,7 @@ class ProbabilisticWeightedFTS(ifts.IntervalFTS):
             root = tree.FLRGTreeNode(None)
-            self.buildTree(root, lags, 0)
+            self.build_tree(root, lags, 0)
             # Trace the possible paths and build the PFLRG's
@@ -331,7 +346,7 @@ class ProbabilisticWeightedFTS(ifts.IntervalFTS):
                 affected_flrgs.append(flrg)
                 # Find the general membership of FLRG
-                affected_flrgs_memberships.append(min(self.getSequenceMembership(subset, flrg.LHS)))
+                affected_flrgs_memberships.append(min(self.get_sequence_membership(subset, flrg.LHS)))
             else:
@@ -358,7 +373,7 @@ class ProbabilisticWeightedFTS(ifts.IntervalFTS):
             norm = self.get_flrg_global_probability(flrg) * affected_flrgs_memberships[count]
             if norm == 0:
                 norm = self.get_flrg_global_probability(flrg) # * 0.001
-            mp.append(norm * self.getMidpoints(flrg))
+            mp.append(norm * self.get_midpoint(flrg))
             norms.append(norm)
         # gerar o intervalo
@@ -438,7 +453,7 @@ class ProbabilisticWeightedFTS(ifts.IntervalFTS):
             root = tree.FLRGTreeNode(None)
-            self.buildTree(root, lags, 0)
+            self.build_tree(root, lags, 0)
             # Trace the possible paths and build the PFLRG's
@@ -453,7 +468,7 @@ class ProbabilisticWeightedFTS(ifts.IntervalFTS):
                 affected_flrgs.append(flrg)
                 # Find the general membership of FLRG
-                affected_flrgs_memberships.append(min(self.getSequenceMembership(subset, flrg.LHS)))
+                affected_flrgs_memberships.append(min(self.get_sequence_membership(subset, flrg.LHS)))
             else:
@@ -480,8 +495,8 @@ class ProbabilisticWeightedFTS(ifts.IntervalFTS):
             norm = self.get_flrg_global_probability(flrg) * affected_flrgs_memberships[count]
             if norm == 0:
                 norm = self.get_flrg_global_probability(flrg) # * 0.001
-            up.append(norm * self.getUpper(flrg))
-            lo.append(norm * self.getLower(flrg))
+            up.append(norm * self.get_upper(flrg))
+            lo.append(norm * self.get_lower(flrg))
             norms.append(norm)
         # gerar o intervalo
@@ -613,7 +628,7 @@ class ProbabilisticWeightedFTS(ifts.IntervalFTS):
         root = tree.FLRGTreeNode(None)
-        self.buildTreeWithoutOrder(root, lags, 0)
+        self.build_tree_without_order(root, lags, 0)
         # Trace the possible paths
         for p in root.paths():
@@ -661,7 +676,7 @@ class ProbabilisticWeightedFTS(ifts.IntervalFTS):
         root = tree.FLRGTreeNode(None)
-        self.buildTreeWithoutOrder(root, lags, 0)
+        self.build_tree_without_order(root, lags, 0)
         # Trace the possible paths
         for p in root.paths():
@@ -683,8 +698,6 @@ class ProbabilisticWeightedFTS(ifts.IntervalFTS):
         return ret
     def __str__(self):
         tmp = self.name + ":\n"
         for r in sorted(self.flrgs):
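
With RHS now holding the fuzzy sets themselves and rhs_count holding their frequencies, a ProbabilisticWeightedFLRG can compute its own probability-weighted midpoint and bounds, which the renamed get_midpoint/get_upper/get_lower on the FTS side delegate to. A compact standalone sketch of that bookkeeping (FuzzySet is a hypothetical stand-in, and the iteration is written slightly differently than in the patch):

    class FuzzySet:
        # hypothetical stand-in carrying only a name and a centroid
        def __init__(self, name, centroid):
            self.name = name
            self.centroid = centroid

    class ProbabilisticWeightedFLRG:
        def __init__(self):
            self.RHS = {}              # set name -> FuzzySet (after the patch)
            self.rhs_count = {}        # set name -> occurrence count
            self.frequency_count = 0.0

        def appendRHS(self, c):
            self.frequency_count += 1.0
            if c.name in self.RHS:
                self.rhs_count[c.name] += 1.0
            else:
                self.RHS[c.name] = c
                self.rhs_count[c.name] = 1.0

        def get_RHSprobability(self, name):
            return self.rhs_count[name] / self.frequency_count

        def get_midpoint(self):
            # expected centroid under the empirical RHS distribution
            return sum(self.get_RHSprobability(name) * s.centroid
                       for name, s in self.RHS.items())

    a2, a3 = FuzzySet("A2", 2.0), FuzzySet("A3", 3.0)
    g = ProbabilisticWeightedFLRG()
    for s in (a2, a2, a3):
        g.appendRHS(s)
    print(g.get_RHSprobability("A2"))   # 0.666...
    print(g.get_midpoint())             # 2.333...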

View File

@@ -92,7 +92,7 @@ class ExponentialyWeightedFTS(fts.FTS):
                 ret.append(actual.centroid)
             else:
                 flrg = self.flrgs[actual.name]
-                mp = self.getMidpoints(flrg)
+                mp = flrg.get_midpoints()
                 ret.append(mp.dot(flrg.weights()))

View File

@@ -84,7 +84,7 @@ class WeightedFTS(fts.FTS):
                 ret.append(actual.centroid)
             else:
                 flrg = self.flrgs[actual.name]
-                mp = self.getMidpoints(flrg)
+                mp = flrg.get_midpoints()
                 ret.append(mp.dot(flrg.weights()))