Introducing alpha_cut on fuzzyfication processes
parent 393388f722
commit aeb8236a7f
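The alpha_cut introduced in this commit is a minimum membership threshold used during fuzzyfication: every fuzzy set whose membership degree for a data point exceeds the cut is kept, instead of (or in addition to) the single maximum-membership set. A minimal standalone sketch of the idea, with illustrative triangular sets (not pyFTS code):

def triangular(x, a, b, c):
    # membership of x in a triangular fuzzy set with support [a, c] and peak at b
    return max(min((x - a) / (b - a), (c - x) / (c - b)), 0.0)

sets = {'A0': (0, 5, 10), 'A1': (5, 10, 15), 'A2': (10, 15, 20)}
x = 9.0
mv = {name: triangular(x, *abc) for name, abc in sets.items()}
# mv == {'A0': 0.2, 'A1': 0.8, 'A2': 0.0}

alpha_cut = 0.3
fuzzyfied = [name for name, m in mv.items() if m > alpha_cut]
print(fuzzyfied)   # ['A1'] -- 'A0' is discarded by the cut, 'A2' has zero membership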
@@ -38,7 +38,7 @@ class IndexedFLR(FLR):
         self.index = index

     def __str__(self):
-        return str(self.index) + ": "+ self.LHS + " -> " + self.RHS
+        return str(self.index) + ": "+ str(self.LHS) + " -> " + str(self.RHS)


 def generate_high_order_recurrent_flr(fuzzyData):
@@ -70,24 +70,12 @@ def generate_recurrent_flrs(fuzzyData):
     """
     flrs = []
     for i in np.arange(1,len(fuzzyData)):
-        lhs = fuzzyData[i - 1]
-        rhs = fuzzyData[i]
-        if isinstance(lhs, list) and isinstance(rhs, list):
-            for l in lhs:
-                for r in rhs:
-                    tmp = FLR(l, r)
-                    flrs.append(tmp)
-        elif isinstance(lhs, list) and not isinstance(rhs, list):
-            for l in lhs:
-                tmp = FLR(l, rhs)
-                flrs.append(tmp)
-        elif not isinstance(lhs, list) and isinstance(rhs, list):
-            for r in rhs:
-                tmp = FLR(lhs, r)
-                flrs.append(tmp)
-        else:
-            tmp = FLR(lhs,rhs)
-            flrs.append(tmp)
+        lhs = [fuzzyData[i - 1]]
+        rhs = [fuzzyData[i]]
+        for l in np.array(lhs).flatten():
+            for r in np.array(rhs).flatten():
+                tmp = FLR(l, r)
+                flrs.append(tmp)
     return flrs


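After this simplification every fuzzyfied point is handled as a (possibly singleton) collection, so one pairing loop covers both the classic single-set case and the multi-set case produced by an alpha_cut. A rough standalone sketch of the pairing (the FLR stand-in below is illustrative, not the pyFTS class):

import numpy as np

class FLR:  # minimal stand-in: a fuzzy logical relationship LHS -> RHS
    def __init__(self, lhs, rhs):
        self.LHS, self.RHS = lhs, rhs
    def __str__(self):
        return str(self.LHS) + " -> " + str(self.RHS)

# each element is the list of fuzzy sets admitted for that data point
fuzzyData = [['A1'], ['A1', 'A2'], ['A2'], ['A3']]

flrs = []
for i in np.arange(1, len(fuzzyData)):
    for l in np.array(fuzzyData[i - 1]).flatten():
        for r in np.array(fuzzyData[i]).flatten():
            flrs.append(FLR(l, r))

print([str(f) for f in flrs])
# ['A1 -> A1', 'A1 -> A2', 'A1 -> A2', 'A2 -> A2', 'A2 -> A3']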
@@ -104,7 +92,7 @@ def generate_non_recurrent_flrs(fuzzyData):
     return ret


-def generate_indexed_flrs(sets, indexer, data, transformation=None):
+def generate_indexed_flrs(sets, indexer, data, transformation=None, alpha_cut=0.0):
     """
     Create a season-indexed ordered FLR set from a list of fuzzy sets with recurrence
     :param sets: fuzzy sets
@@ -118,9 +106,11 @@ def generate_indexed_flrs(sets, indexer, data, transformation=None):
     if transformation is not None:
         ndata = transformation.apply(ndata)
     for k in np.arange(1,len(ndata)):
-        lhs = FuzzySet.get_maximum_membership_fuzzyset(ndata[k - 1], sets)
-        rhs = FuzzySet.get_maximum_membership_fuzzyset(ndata[k], sets)
+        lhs = FuzzySet.fuzzyfy_series([ndata[k - 1]], sets, method='fuzzy',alpha_cut=alpha_cut)
+        rhs = FuzzySet.fuzzyfy_series([ndata[k]], sets, method='fuzzy',alpha_cut=alpha_cut)
         season = index[k]
-        flr = IndexedFLR(season,lhs,rhs)
-        flrs.append(flr)
+        for _l in np.array(lhs).flatten():
+            for _r in np.array(rhs).flatten():
+                flr = IndexedFLR(season,_l,_r)
+                flrs.append(flr)
     return flrs
@@ -97,6 +97,21 @@ def fuzzyfy_instances(data, fuzzySets, ordered_sets=None):
     return ret


+def get_fuzzysets(inst, fuzzySets, ordered_sets=None, alpha_cut=0.0):
+    """
+    Return the fuzzy sets which membership value for a inst is greater than the alpha_cut
+    :param inst: data point
+    :param fuzzySets: dict of fuzzy sets
+    :param alpha_cut: Minimal membership to be considered on fuzzyfication process
+    :return: array of membership values
+    """
+
+    if ordered_sets is None:
+        ordered_sets = set_ordered(fuzzySets)
+
+    fs = [key for key in ordered_sets if fuzzySets[key].membership(inst) > alpha_cut]
+    return fs
+
+
 def get_maximum_membership_fuzzyset(inst, fuzzySets, ordered_sets=None):
     """
     Fuzzify a data point, returning the fuzzy set with maximum membership value
@@ -129,7 +144,7 @@ def fuzzyfy_series_old(data, fuzzySets, method='maximum'):
     return fts


-def fuzzyfy_series(data, fuzzySets, method='maximum'):
+def fuzzyfy_series(data, fuzzySets, method='maximum', alpha_cut=0.0):
     fts = []
     ordered_sets = set_ordered(fuzzySets)
     for t, i in enumerate(data):
@@ -138,7 +153,7 @@ def fuzzyfy_series(data, fuzzySets, method='maximum'):
             sets = check_bounds(i, fuzzySets.items(), ordered_sets)
         else:
             if method == 'fuzzy':
-                ix = np.ravel(np.argwhere(mv > 0.0))
+                ix = np.ravel(np.argwhere(mv > alpha_cut))
                 sets = [fuzzySets[ordered_sets[i]].name for i in ix]
             elif method == 'maximum':
                 mx = max(mv)
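For reference, a minimal numpy sketch of the two fuzzyfication methods as they behave after this change (the membership vector mv is made up for illustration):

import numpy as np

# membership of one data point against sets A0..A3 (illustrative values)
mv = np.array([0.10, 0.45, 0.80, 0.00])
ordered_sets = ['A0', 'A1', 'A2', 'A3']
alpha_cut = 0.3

# method='fuzzy': every set above the alpha cut is kept
ix = np.ravel(np.argwhere(mv > alpha_cut))
print([ordered_sets[i] for i in ix])      # ['A1', 'A2']

# method='maximum': only the set with the highest membership is kept
print(ordered_sets[int(np.argmax(mv))])   # 'A2'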
@@ -10,10 +10,31 @@ class FTS(object):
     def __init__(self, **kwargs):
         """
         Create a Fuzzy Time Series model
-        :param order: model order
-        :param name: model name
         :param kwargs: model specific parameters
+
+        alpha_cut: Minimal membership to be considered on fuzzyfication process
+        auto_update: Boolean, indicate that model is incremental
+        benchmark_only: Boolean, indicates a façade for external (non-FTS) model used on benchmarks or ensembles.
+        indexer: SeasonalIndexer used for SeasonalModels, default: None
+        is_high_order: Boolean, if the model support orders greater than 1, default: False
+        is_multivariate = False
+        has_seasonality: Boolean, if the model support seasonal indexers, default: False
+        has_point_forecasting: Boolean, if the model support point forecasting, default: True
+        has_interval_forecasting: Boolean, if the model support interval forecasting, default: False
+        has_probability_forecasting: Boolean, if the model support probabilistic forecasting, default: False
+        min_order: Integer, minimal order supported for the model, default: 1
+        name: Model name
+        order: model order (number of past lags are used on forecasting)
+        original_max: Real, the upper limit of the Universe of Discourse, the maximal value found on training data
+        original_min: Real, the lower limit of the Universe of Discourse, the minimal value found on training data
+        partitioner: partitioner object
+        sets: List, fuzzy sets used on this model
+        shortname: Acronymn for the model
+        transformations: List, data transformations (common.Transformations) applied on model pre and post processing, default: []
+        transformations_param: List, specific parameters for each data transformation
+        uod_clip: If the test data will be clipped inside the training Universe of Discourse
         """
+
         self.sets = {}
         self.flrgs = {}
         self.order = kwargs.get('order',"")
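A hedged usage sketch of the new keyword, following the docstring above and the fit() call used in the test script at the end of this commit. The model class path, the synthetic data and the parameter values are illustrative assumptions, not taken from this diff:

import numpy as np
from pyFTS.partitioners import Grid
from pyFTS.models import hofts   # assumed module path for the high-order model

# synthetic univariate series, for illustration only
train = (np.sin(np.arange(0, 50, 0.1)) * 10 + 50).tolist()

fs = Grid.GridPartitioner(data=train, npart=15)
model = hofts.HighOrderFTS(partitioner=fs, order=2, alpha_cut=0.25)
model.fit(train)
forecasts = model.forecast(train[-model.order:])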
@@ -39,6 +60,7 @@ class FTS(object):
         self.benchmark_only = False
         self.indexer = kwargs.get("indexer", None)
         self.uod_clip = kwargs.get("uod_clip", True)
+        self.alpha_cut = kwargs.get("alpha_cut", 0.0)

     def fuzzy(self, data):
         """
@@ -52,7 +52,8 @@ class HighOrderFTS(fts.FTS):
        flrgs = []

        for o in np.arange(0, self.order):
-            lhs = [key for key in self.partitioner.ordered_sets if self.sets[key].membership(sample[o]) > 0.0]
+            lhs = [key for key in self.partitioner.ordered_sets
+                   if self.sets[key].membership(sample[o]) > self.alpha_cut]
            lags[o] = lhs

        root = tree.FLRGTreeNode(None)
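With an alpha_cut greater than zero, each lag may keep several candidate sets, and the FLRG tree enumerates every combination of them. An illustrative standalone equivalent of that enumeration (itertools.product plays the role of the tree traversal here):

from itertools import product

# candidate sets per lag for an order-2 sample, after the alpha cut
lags = {0: ['A1', 'A2'], 1: ['A2']}

# each path through the tree is one candidate LHS for a high-order FLRG
for path in product(*[lags[o] for o in sorted(lags)]):
    print(path)
# ('A1', 'A2')
# ('A2', 'A2')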
@@ -78,7 +79,8 @@ class HighOrderFTS(fts.FTS):

            sample = data[k - self.order: k]

-            rhs = [key for key in self.partitioner.ordered_sets if self.sets[key].membership(data[k]) > 0.0]
+            rhs = [key for key in self.partitioner.ordered_sets
+                   if self.sets[key].membership(data[k]) > self.alpha_cut]

            flrgs = self.generate_lhs_flrg(sample)

@@ -2,9 +2,9 @@ import numpy as np
 import pandas as pd


-def fuzzyfy_instance(data_point, var):
+def fuzzyfy_instance(data_point, var, alpha_cut=0.0):
     mv = np.array([var.partitioner.sets[key].membership(data_point) for key in var.partitioner.ordered_sets])
-    ix = np.ravel(np.argwhere(mv > 0.0))
+    ix = np.ravel(np.argwhere(mv > alpha_cut))
     sets = [(var.name, var.partitioner.ordered_sets[i]) for i in ix]
     return sets

@@ -39,7 +39,7 @@ class MVFTS(fts.FTS):
        lags = {}
        for vc, var in enumerate(self.explanatory_variables):
            data_point = data[var.data_label]
-            lags[vc] = common.fuzzyfy_instance(data_point, var)
+            lags[vc] = common.fuzzyfy_instance(data_point, var, self.alpha_cut)

        root = tree.FLRGTreeNode(None)

@@ -125,7 +125,8 @@ class ProbabilisticWeightedFTS(ifts.IntervalFTS):
        flrgs = []

        for o in np.arange(0, self.order):
-            lhs = [key for key in self.partitioner.ordered_sets if self.sets[key].membership(sample[o]) > 0.0]
+            lhs = [key for key in self.partitioner.ordered_sets
+                   if self.sets[key].membership(sample[o]) > self.alpha_cut]
            lags[o] = lhs

        root = tree.FLRGTreeNode(None)
@@ -161,7 +162,8 @@ class ProbabilisticWeightedFTS(ifts.IntervalFTS):
                self.flrgs[flrg.get_key()] = flrg;

            fuzzyfied = [(s, self.sets[s].membership(data[k]))
-                         for s in self.sets.keys() if self.sets[s].membership(data[k]) > 0]
+                         for s in self.sets.keys()
+                         if self.sets[s].membership(data[k]) > self.alpha_cut]

            mvs = []
            for set, mv in fuzzyfied:
@@ -13,11 +13,12 @@ class ContextualSeasonalFLRG(sfts.SeasonalFLRG):
        self.RHS = {}

    def append_rhs(self, flr, **kwargs):
-        if flr.LHS.name in self.RHS:
-            self.RHS[flr.LHS.name].append_rhs(flr.RHS.name)
+        print(flr)
+        if flr.LHS in self.RHS:
+            self.RHS[flr.LHS].append_rhs(flr.RHS)
        else:
-            self.RHS[flr.LHS.name] = chen.ConventionalFLRG(flr.LHS.name)
-            self.RHS[flr.LHS.name].append_rhs(flr.RHS.name)
+            self.RHS[flr.LHS] = chen.ConventionalFLRG(flr.LHS)
+            self.RHS[flr.LHS].append_rhs(flr.RHS)

    def __str__(self):
        tmp = str(self.LHS) + ": \n "
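After this change the FLRG dictionary is keyed directly by the LHS fuzzy set name (a plain string) instead of a set object's .name attribute. A standalone sketch of the grouping idea, with plain dicts standing in for chen.ConventionalFLRG:

from collections import defaultdict

# toy indexed FLRs: (season, LHS set name, RHS set name)
flrs = [(1, 'A1', 'A2'), (1, 'A1', 'A3'), (2, 'A2', 'A2')]

# group by season, then by LHS name -> list of RHS names
flrgs = defaultdict(lambda: defaultdict(list))
for season, lhs, rhs in flrs:
    flrgs[season][lhs].append(rhs)

print(dict(flrgs[1]))   # {'A1': ['A2', 'A3']}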
@@ -57,15 +58,20 @@ class ContextualMultiSeasonalFTS(sfts.SeasonalFTS):
        self.sets = kwargs.get('sets', None)
        if kwargs.get('parameters', None) is not None:
            self.seasonality = kwargs.get('parameters', None)
-        flrs = FLR.generate_indexed_flrs(self.sets, self.indexer, data)
+        flrs = FLR.generate_indexed_flrs(self.sets, self.indexer, data,
+                                         transformation=self.partitioner.transformation,
+                                         alpha_cut=self.alpha_cut)
        self.generate_flrg(flrs)

    def get_midpoints(self, flrg, data):
-        if data.name in flrg.RHS:
-            ret = np.array([self.sets[s].centroid for s in flrg.RHS[data.name].RHS])
-            return ret
-        else:
-            return np.array([self.sets[data.name].centroid])
+        ret = []
+        for d in data:
+            if d in flrg.RHS:
+                ret.extend([self.sets[s].centroid for s in flrg.RHS[d].RHS])
+            else:
+                ret.extend([self.sets[d].centroid])
+
+        return np.array(ret)

    def forecast(self, data, **kwargs):
        ordered_sets = FuzzySet.set_ordered(self.sets)
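get_midpoints now receives a list of candidate fuzzy sets for the current observation (rather than a single maximum-membership set) and collects the centroids of every matching rule. A standalone sketch with toy centroids and a plain dict in place of the FLRG:

import numpy as np

centroids = {'A1': 5.0, 'A2': 15.0, 'A3': 25.0}    # set name -> centroid
flrg_rhs = {'A1': ['A2'], 'A2': ['A2', 'A3']}      # LHS name -> RHS names

def get_midpoints(flrg_rhs, candidates):
    ret = []
    for d in candidates:                 # candidates come from the alpha-cut fuzzyfication
        if d in flrg_rhs:
            ret.extend([centroids[s] for s in flrg_rhs[d]])
        else:
            ret.extend([centroids[d]])   # fall back to the set's own centroid
    return np.array(ret)

print(get_midpoints(flrg_rhs, ['A1', 'A2']))   # [15. 15. 25.]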
@@ -79,7 +85,7 @@ class ContextualMultiSeasonalFTS(sfts.SeasonalFTS):

            flrg = self.flrgs[str(index[k])]

-            d = FuzzySet.get_maximum_membership_fuzzyset(ndata[k], self.sets, ordered_sets)
+            d = FuzzySet.get_fuzzysets(ndata[k], self.sets, ordered_sets, alpha_cut=self.alpha_cut)

            mp = self.get_midpoints(flrg, d)

|
@ -45,6 +45,9 @@ class Partitioner(object):
|
|||||||
else:
|
else:
|
||||||
ndata = data
|
ndata = data
|
||||||
|
|
||||||
|
if self.indexer is not None:
|
||||||
|
ndata = self.indexer.get_data(ndata)
|
||||||
|
|
||||||
_min = min(ndata)
|
_min = min(ndata)
|
||||||
if _min < 0:
|
if _min < 0:
|
||||||
self.min = _min * 1.1
|
self.min = _min * 1.1
|
||||||
|
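When a seasonal indexer is attached, the partitioner first extracts the plain numeric series from the (possibly structured) training data before computing the Universe of Discourse bounds. A minimal pandas sketch of that extraction step (the column names are illustrative, mirroring the test script below):

import pandas as pd

# structured training data: a date column plus the measured value 'a'
df = pd.DataFrame({'date': pd.date_range('2018-01-01', periods=5, freq='D'),
                   'a': [3.0, 7.5, 2.1, 9.9, 4.4]})

# what indexer.get_data() conceptually returns: just the numeric series
ndata = df['a'].tolist()

_min, _max = min(ndata), max(ndata)
print(_min, _max)   # 2.1 9.9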
@@ -12,23 +12,25 @@ os.chdir("/home/petronio/Downloads")

 data = pd.read_csv("dress_data.csv", sep=",")

-#sonda['data'] = pd.to_datetime(sonda['data'])
+data["date"] = pd.to_datetime(data["date"], format='%Y%m%d')
 #data.index = np.arange(0,len(data.index))

-data = data["a"].tolist()
+#data = data["a"].tolist()

-from pyFTS.models.seasonal import sfts, cmsfts, SeasonalIndexer
+from pyFTS.models.seasonal import sfts, cmsfts, SeasonalIndexer, common
+
+# ix = SeasonalIndexer.LinearSeasonalIndexer([7],[1])

-ix = SeasonalIndexer.LinearSeasonalIndexer([7],[1])
+ix = SeasonalIndexer.DateTimeSeasonalIndexer("date", [common.DateTime.day_of_week],
+                                             [None, None], 'a', name="weekday")

 from pyFTS.partitioners import Grid

-fs = Grid.GridPartitioner(data=data,npart=10)
+fs = Grid.GridPartitioner(data=data,npart=10,indexer=ix)

-model = sfts.SeasonalFTS(indexer=ix, partitioner=fs)
-#model = cmsfts.ContextualMultiSeasonalFTS(indexer=ix, partitioner=fs)
+#model = sfts.SeasonalFTS(indexer=ix, partitioner=fs)
+model = cmsfts.ContextualMultiSeasonalFTS(indexer=ix, partitioner=fs)

 model.fit(data)
