- Deep refactor on function names for
parent 09e5415929
commit 6e4df0ce33
@@ -303,7 +303,7 @@ def get_point_statistics(data, model, indexer=None):
 def get_interval_statistics(original, model):
     """Condensate all measures for point_to_interval forecasters"""
     ret = list()
-    forecasts = model.forecastInterval(original)
+    forecasts = model.forecast_interval(original)
     ret.append(round(sharpness(forecasts), 2))
     ret.append(round(resolution(forecasts), 2))
     ret.append(round(coverage(original[model.order:], forecasts[:-1]), 2))
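Every call site must follow the camelCase-to-snake_case rename. A minimal before/after sketch for calling code (the model and original objects here are stand-ins, not part of this diff):

    # before this commit:
    #   forecasts = model.forecastInterval(original)
    # after this commit:
    forecasts = model.forecast_interval(original)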
@@ -318,7 +318,7 @@ def get_distribution_statistics(original, model, steps, resolution):
     ret = list()
     try:
         _s1 = time.time()
-        densities1 = model.forecastAheadDistribution(original, steps, parameters=3)
+        densities1 = model.forecast_ahead_distribution(original, steps, parameters=3)
         _e1 = time.time()
         ret.append(round(crps(original, densities1), 3))
         ret.append(round(_e1 - _s1, 3))
@@ -329,7 +329,7 @@ def get_distribution_statistics(original, model, steps, resolution):

     try:
         _s2 = time.time()
-        densities2 = model.forecastAheadDistribution(original, steps, parameters=2)
+        densities2 = model.forecast_ahead_distribution(original, steps, parameters=2)
         _e2 = time.time()
         ret.append( round(crps(original, densities2), 3))
         ret.append(round(_e2 - _s2, 3))
@@ -94,7 +94,7 @@ def plotResiduals(targets, models, tam=[8, 8], save=False, file=None):

     plt.tight_layout()

-    Util.showAndSaveImage(fig, file, save)
+    Util.show_and_save_image(fig, file, save)


 def plot_residuals(targets, models, tam=[8, 8], save=False, file=None):
@@ -127,7 +127,7 @@ def plot_residuals(targets, models, tam=[8, 8], save=False, file=None):

     plt.tight_layout()

-    Util.showAndSaveImage(fig, file, save)
+    Util.show_and_save_image(fig, file, save)


 def single_plot_residuals(targets, forecasts, order, tam=[8, 8], save=False, file=None):
@@ -153,4 +153,4 @@ def single_plot_residuals(targets, forecasts, order, tam=[8, 8], save=False, fil

     plt.tight_layout()

-    Util.showAndSaveImage(fig, file, save)
+    Util.show_and_save_image(fig, file, save)
@@ -317,7 +317,7 @@ def unified_scaled_point(experiments, tam, save=False, file=None,

     plt.tight_layout()

-    Util.showAndSaveImage(fig, file, save)
+    Util.show_and_save_image(fig, file, save)


 def plot_dataframe_point(file_synthetic, file_analytic, experiments, tam, save=False, file=None,
@@ -372,7 +372,7 @@ def plot_dataframe_point(file_synthetic, file_analytic, experiments, tam, save=F

     plt.tight_layout()

-    Util.showAndSaveImage(fig, file, save)
+    Util.show_and_save_image(fig, file, save)


 def check_replace_list(m, replace):
@@ -640,7 +640,7 @@ def unified_scaled_interval(experiments, tam, save=False, file=None,

     plt.tight_layout()

-    Util.showAndSaveImage(fig, file, save)
+    Util.show_and_save_image(fig, file, save)


 def plot_dataframe_interval(file_synthetic, file_analytic, experiments, tam, save=False, file=None,
@@ -695,7 +695,7 @@ def plot_dataframe_interval(file_synthetic, file_analytic, experiments, tam, sav

     plt.tight_layout()

-    Util.showAndSaveImage(fig, file, save)
+    Util.show_and_save_image(fig, file, save)


 def unified_scaled_interval_pinball(experiments, tam, save=False, file=None,
@@ -793,7 +793,7 @@ def unified_scaled_interval_pinball(experiments, tam, save=False, file=None,

     plt.tight_layout()

-    Util.showAndSaveImage(fig, file, save)
+    Util.show_and_save_image(fig, file, save)

 def plot_dataframe_interval_pinball(file_synthetic, file_analytic, experiments, tam, save=False, file=None,
                                     sort_columns=['COVAVG','SHARPAVG','COVSTD','SHARPSTD'],
@@ -843,7 +843,7 @@ def plot_dataframe_interval_pinball(file_synthetic, file_analytic, experiments,

     plt.tight_layout()

-    Util.showAndSaveImage(fig, file, save)
+    Util.show_and_save_image(fig, file, save)


 def save_dataframe_ahead(experiments, file, objs, crps_interval, crps_distr, times1, times2, save, synthetic):
@@ -1067,7 +1067,7 @@ def unified_scaled_ahead(experiments, tam, save=False, file=None,

     plt.tight_layout()

-    Util.showAndSaveImage(fig, file, save)
+    Util.show_and_save_image(fig, file, save)


 def plot_dataframe_ahead(file_synthetic, file_analytic, experiments, tam, save=False, file=None,
@@ -1110,5 +1110,5 @@ def plot_dataframe_ahead(file_synthetic, file_analytic, experiments, tam, save=F
     axes[1].boxplot(crps2, labels=labels, autorange=True, showmeans=True)

     plt.tight_layout()
-    Util.showAndSaveImage(fig, file, save)
+    Util.show_and_save_image(fig, file, save)

@@ -43,7 +43,7 @@ class ARIMA(fts.FTS):
        if self.indexer is not None:
            data = self.indexer.get_data(data)

-       data = self.doTransformations(data, updateUoD=True)
+       data = self.apply_transformations(data, updateUoD=True)

        old_fit = self.model_fit
        try:
@@ -66,7 +66,7 @@ class ARIMA(fts.FTS):
        if self.indexer is not None and isinstance(data, pd.DataFrame):
            data = self.indexer.get_data(data)

-       ndata = np.array(self.doTransformations(data))
+       ndata = np.array(self.apply_transformations(data))

        l = len(ndata)

@@ -86,18 +86,18 @@ class ARIMA(fts.FTS):
        else:
            ret = ar

-       ret = self.doInverseTransformations(ret, params=[data[self.order - 1:]])
+       ret = self.apply_inverse_transformations(ret, params=[data[self.order - 1:]])

        return ret

-   def forecastInterval(self, data, **kwargs):
+   def forecast_interval(self, data, **kwargs):

        if self.model_fit is None:
            return np.nan

        sigma = np.sqrt(self.model_fit.sigma2)

-       #ndata = np.array(self.doTransformations(data))
+       #ndata = np.array(self.apply_transformations(data))

        l = len(data)

@@ -118,11 +118,11 @@ class ARIMA(fts.FTS):

        ret.append(tmp)

-       #ret = self.doInverseTransformations(ret, params=[data[self.order - 1:]], point_to_interval=True)
+       #ret = self.apply_inverse_transformations(ret, params=[data[self.order - 1:]], point_to_interval=True)

        return ret

-   def forecastAheadInterval(self, data, steps, **kwargs):
+   def forecast_ahead_interval(self, data, steps, **kwargs):
        if self.model_fit is None:
            return np.nan

@@ -130,11 +130,11 @@ class ARIMA(fts.FTS):

        sigma = np.sqrt(self.model_fit.sigma2)

-       ndata = np.array(self.doTransformations(data))
+       ndata = np.array(self.apply_transformations(data))

        l = len(ndata)

-       nmeans = self.forecastAhead(ndata, steps, **kwargs)
+       nmeans = self.forecast_ahead(ndata, steps, **kwargs)

        ret = []

@@ -148,14 +148,14 @@ class ARIMA(fts.FTS):

        ret.append(tmp)

-       ret = self.doInverseTransformations(ret, params=[[data[-1] for a in np.arange(0,steps)]], interval=True)
+       ret = self.apply_inverse_transformations(ret, params=[[data[-1] for a in np.arange(0, steps)]], interval=True)

        return ret

    def empty_grid(self, resolution):
        return self.get_empty_grid(-(self.original_max*2), self.original_max*2, resolution)

-   def forecastDistribution(self, data, **kwargs):
+   def forecast_distribution(self, data, **kwargs):

        if self.indexer is not None and isinstance(data, pd.DataFrame):
            data = self.indexer.get_data(data)
@@ -185,14 +185,14 @@ class ARIMA(fts.FTS):

            intervals.append([qt1, qt2])

-           dist.appendInterval(intervals)
+           dist.append_interval(intervals)

            ret.append(dist)

        return ret


-   def forecastAheadDistribution(self, data, steps, **kwargs):
+   def forecast_ahead_distribution(self, data, steps, **kwargs):
        smoothing = kwargs.get("smoothing", 0.5)

        sigma = np.sqrt(self.model_fit.sigma2)
@@ -201,7 +201,7 @@ class ARIMA(fts.FTS):

        ret = []

-       nmeans = self.forecastAhead(data, steps, **kwargs)
+       nmeans = self.forecast_ahead(data, steps, **kwargs)

        for k in np.arange(0, steps):
            dist = ProbabilityDistribution.ProbabilityDistribution(type="histogram",
@@ -217,7 +217,7 @@ class ARIMA(fts.FTS):

            intervals.append(tmp)

-           dist.appendInterval(intervals)
+           dist.append_interval(intervals)

            ret.append(dist)

@@ -87,7 +87,7 @@ def run_point(mfts, partitioner, train_data, test_data, window_key=None, transfo
    _key = mfts.shortname + " n = " + str(mfts.order) + " " + pttr + " q = " + str(partitioner.partitions)
    mfts.partitioner = partitioner
    if transformation is not None:
-       mfts.appendTransformation(transformation)
+       mfts.append_transformation(transformation)

    _start = time.time()
    mfts.train(train_data, partitioner.sets, order=mfts.order)
@@ -272,7 +272,7 @@ def all_point_forecasters(data_train, data_test, partitions, max_order=3, statis
    for count, model in enumerate(models, start=0):
        #print(model)
        if transformation is not None:
-           model.appendTransformation(transformation)
+           model.append_transformation(transformation)
        model.train(data_train, data_train_fs.sets, order=model.order)
        objs.append(model)
        lcolors.append( colors[count % ncol] )
@@ -380,7 +380,7 @@ def interval_sliding_window(data, windowsize, train=0.8, models=None, partitione
            times[_key] = []

            if transformation is not None:
-               mfts.appendTransformation(transformation)
+               mfts.append_transformation(transformation)

            _start = time.time()
            mfts.train(training, data_train_fs.sets)
@@ -414,7 +414,7 @@ def interval_sliding_window(data, windowsize, train=0.8, models=None, partitione
                times[_key] = []

                if transformation is not None:
-                   mfts.appendTransformation(transformation)
+                   mfts.append_transformation(transformation)

                _start = time.time()
                mfts.train(training, data_train_fs.sets, order=order)
@@ -473,7 +473,7 @@ def all_interval_forecasters(data_train, data_test, partitions, max_order=3,save

    for count, model in Util.enumerate2(models, start=0, step=2):
        if transformation is not None:
-           model.appendTransformation(transformation)
+           model.append_transformation(transformation)
        model.train(data_train, data_train_fs, order=model.order)
        objs.append(model)
        lcolors.append( colors[count % ncol] )
@@ -552,7 +552,7 @@ def plot_compared_series(original, models, colors, typeonlegend=False, save=Fals
            ax.plot(forecasts, color=colors[count], label=lbl, ls="-",linewidth=linewidth)

        if fts.has_interval_forecasting and intervals:
-           forecasts = fts.forecastInterval(original)
+           forecasts = fts.forecast_interval(original)
            lbl = fts.shortname + " " + str(fts.order if fts.is_high_order and not fts.benchmark_only else "")
            if not points and intervals:
                ls = "-"
@@ -573,7 +573,7 @@ def plot_compared_series(original, models, colors, typeonlegend=False, save=Fals
    ax.set_xlabel('T')
    ax.set_xlim([0, len(original)])

-   Util.showAndSaveImage(fig, file, save, lgd=legends)
+   Util.show_and_save_image(fig, file, save, lgd=legends)


def plot_probability_distributions(pmfs, lcolors, tam=[15, 7]):
@@ -627,7 +627,7 @@ def ahead_sliding_window(data, windowsize, train, steps, models=None, resolution
            times2[_key] = []

            if transformation is not None:
-               mfts.appendTransformation(transformation)
+               mfts.append_transformation(transformation)

            _start = time.time()
            mfts.train(train, data_train_fs.sets)
@@ -662,7 +662,7 @@ def ahead_sliding_window(data, windowsize, train, steps, models=None, resolution
                times2[_key] = []

                if transformation is not None:
-                   mfts.appendTransformation(transformation)
+                   mfts.append_transformation(transformation)

                _start = time.time()
                mfts.train(train, data_train_fs.sets, order=order)
@@ -699,7 +699,7 @@ def all_ahead_forecasters(data_train, data_test, partitions, start, steps, resol
        mfts = model("")
        if not mfts.is_high_order:
            if transformation is not None:
-               mfts.appendTransformation(transformation)
+               mfts.append_transformation(transformation)
            mfts.train(data_train, data_train_fs)
            objs.append(mfts)
            lcolors.append( colors[count % ncol] )
@@ -708,7 +708,7 @@ def all_ahead_forecasters(data_train, data_test, partitions, start, steps, resol
            if order >= mfts.min_order:
                mfts = model(" n = " + str(order))
                if transformation is not None:
-                   mfts.appendTransformation(transformation)
+                   mfts.append_transformation(transformation)
                mfts.train(data_train, data_train_fs, order=order)
                objs.append(mfts)
                lcolors.append(colors[count % ncol])
@@ -771,14 +771,14 @@ def plot_compared_intervals_ahead(original, models, colors, distributions, time_

    for count, fts in enumerate(models, start=0):
        if fts.has_probability_forecasting and distributions[count]:
-           density = fts.forecastAheadDistribution(original[time_from - fts.order:time_from], time_to,
-                                                   resolution=resolution)
+           density = fts.forecast_ahead_distribution(original[time_from - fts.order:time_from], time_to,
+                                                     resolution=resolution)

            #plot_density_scatter(ax, cmap, density, fig, resolution, time_from, time_to)
            plot_density_rectange(ax, cm, density, fig, resolution, time_from, time_to)

        if fts.has_interval_forecasting and intervals:
-           forecasts = fts.forecastAheadInterval(original[time_from - fts.order:time_from], time_to)
+           forecasts = fts.forecast_ahead_interval(original[time_from - fts.order:time_from], time_to)
            lower = [kk[0] for kk in forecasts]
            upper = [kk[1] for kk in forecasts]
            mi.append(min(lower))
@@ -811,7 +811,7 @@ def plot_compared_intervals_ahead(original, models, colors, distributions, time_
    ax.set_xlabel('T')
    ax.set_xlim([0, len(original)])

-   Util.showAndSaveImage(fig, file, save, lgd=lgd)
+   Util.show_and_save_image(fig, file, save, lgd=lgd)


def plot_density_rectange(ax, cmap, density, fig, resolution, time_from, time_to):
@@ -1043,7 +1043,7 @@ def simpleSearch_RMSE(train, test, model, partitions, orders, save=False, file=N
        sets = partitioner(train, p, transformation=transformation).sets
        for oc, o in enumerate(orders, start=0):
            fts = model("q = " + str(p) + " n = " + str(o))
-           fts.appendTransformation(transformation)
+           fts.append_transformation(transformation)
            fts.train(train, sets, o, parameters=parameters)
            if not intervals:
                forecasted = fts.forecast(test)
@@ -1055,7 +1055,7 @@ def simpleSearch_RMSE(train, test, model, partitions, orders, save=False, file=N
                    forecasted.insert(0, None)
                if plotforecasts: ax0.plot(forecasted, label=fts.name)
            else:
-               forecasted = fts.forecastInterval(test)
+               forecasted = fts.forecast_interval(test)
                error = 1.0 - Measures.rmse_interval(np.array(test[o:]), np.array(forecasted[:-1]))
            errors[oc, pc] = error
            if error < min_rmse:
@@ -1090,7 +1090,7 @@ def simpleSearch_RMSE(train, test, model, partitions, orders, save=False, file=N

    # plt.tight_layout()

-   Util.showAndSaveImage(fig, file, save)
+   Util.show_and_save_image(fig, file, save)

    return ret

@@ -1131,7 +1131,7 @@ def sliding_window_simple_search(data, windowsize, model, partitions, orders, sa
                    forecasted.insert(0, None)
                if plotforecasts: ax0.plot(forecasted, label=fts.name)
            else:
-               forecasted = fts.forecastInterval(test)
+               forecasted = fts.forecast_interval(test)
                _error.append( 1.0 - Measures.rmse_interval(np.array(test[o:]), np.array(forecasted[:-1])) )
            error = np.nanmean(_error)
            errors[oc, pc] = error
@@ -1166,7 +1166,7 @@ def sliding_window_simple_search(data, windowsize, model, partitions, orders, sa

    # plt.tight_layout()

-   Util.showAndSaveImage(fig, file, save)
+   Util.show_and_save_image(fig, file, save)

    return ret

@@ -1185,7 +1185,7 @@ def pftsExploreOrderAndPartitions(data,save=False, file=None):
        fts.shortname = "n = " + str(order)
        fts.train(data, data_fs1, order=order)
        point_forecasts = fts.forecast(data)
-       interval_forecasts = fts.forecastInterval(data)
+       interval_forecasts = fts.forecast_interval(data)
        lower = [kk[0] for kk in interval_forecasts]
        upper = [kk[1] for kk in interval_forecasts]
        mi.append(min(lower) * 0.95)
@@ -1207,7 +1207,7 @@ def pftsExploreOrderAndPartitions(data,save=False, file=None):
        fts.shortname = "q = " + str(partitions)
        fts.train(data, data_fs, 1)
        point_forecasts = fts.forecast(data)
-       interval_forecasts = fts.forecastInterval(data)
+       interval_forecasts = fts.forecast_interval(data)
        lower = [kk[0] for kk in interval_forecasts]
        upper = [kk[1] for kk in interval_forecasts]
        mi.append(min(lower) * 0.95)
@@ -1230,5 +1230,5 @@ def pftsExploreOrderAndPartitions(data,save=False, file=None):

    plt.tight_layout()

-   Util.showAndSaveImage(fig, file, save)
+   Util.show_and_save_image(fig, file, save)

@@ -57,7 +57,7 @@ def run_point(mfts, partitioner, train_data, test_data, window_key=None, transfo
    mfts.partitioner = partitioner

    if transformation is not None:
-       mfts.appendTransformation(transformation)
+       mfts.append_transformation(transformation)

    _start = time.time()
    mfts.train(train_data, partitioner.sets, order=mfts.order)
@@ -243,7 +243,7 @@ def run_interval(mfts, partitioner, train_data, test_data, window_key=None, tran
    mfts.partitioner = partitioner

    if transformation is not None:
-       mfts.appendTransformation(transformation)
+       mfts.append_transformation(transformation)

    _start = time.time()
    mfts.train(train_data, partitioner.sets, order=mfts.order)
@@ -443,7 +443,7 @@ def run_ahead(mfts, partitioner, train_data, test_data, steps, resolution, windo
    mfts.partitioner = partitioner

    if transformation is not None:
-       mfts.appendTransformation(transformation)
+       mfts.append_transformation(transformation)

    if mfts.has_seasonality:
        mfts.indexer = indexer
@@ -31,7 +31,7 @@ def run_point(mfts, partitioner, train_data, test_data, transformation=None, ind
    _key = mfts.shortname + " n = " + str(mfts.order) + " " + pttr + " q = " + str(partitioner.partitions)
    mfts.partitioner = partitioner
    if transformation is not None:
-       mfts.appendTransformation(transformation)
+       mfts.append_transformation(transformation)

    try:
        _start = time.time()
@@ -157,7 +157,7 @@ def run_interval(mfts, partitioner, train_data, test_data, transformation=None,
    _key = mfts.shortname + " n = " + str(mfts.order) + " " + pttr + " q = " + str(partitioner.partitions)
    mfts.partitioner = partitioner
    if transformation is not None:
-       mfts.appendTransformation(transformation)
+       mfts.append_transformation(transformation)

    try:
        _start = time.time()
@@ -285,7 +285,7 @@ def run_ahead(mfts, partitioner, train_data, test_data, steps, resolution, trans
    _key = mfts.shortname + " n = " + str(mfts.order) + " " + pttr + " q = " + str(partitioner.partitions)
    mfts.partitioner = partitioner
    if transformation is not None:
-       mfts.appendTransformation(transformation)
+       mfts.append_transformation(transformation)

    try:
        _start = time.time()
@@ -35,7 +35,7 @@ class QuantileRegression(fts.FTS):
        if self.indexer is not None and isinstance(data, pd.DataFrame):
            data = self.indexer.get_data(data)

-       tmp = np.array(self.doTransformations(data, updateUoD=True))
+       tmp = np.array(self.apply_transformations(data, updateUoD=True))

        lagdata, ndata = lagmat(tmp, maxlag=order, trim="both", original='sep')

@@ -82,7 +82,7 @@ class QuantileRegression(fts.FTS):
        if self.indexer is not None and isinstance(data, pd.DataFrame):
            data = self.indexer.get_data(data)

-       ndata = np.array(self.doTransformations(data))
+       ndata = np.array(self.apply_transformations(data))
        l = len(ndata)

        ret = []
@@ -92,16 +92,16 @@ class QuantileRegression(fts.FTS):

            ret.append(self.linearmodel(sample, self.mean_qt))

-       ret = self.doInverseTransformations(ret, params=[data[self.order - 1:]])
+       ret = self.apply_inverse_transformations(ret, params=[data[self.order - 1:]])

        return ret

-   def forecastInterval(self, data, **kwargs):
+   def forecast_interval(self, data, **kwargs):

        if self.indexer is not None and isinstance(data, pd.DataFrame):
            data = self.indexer.get_data(data)

-       ndata = np.array(self.doTransformations(data))
+       ndata = np.array(self.apply_transformations(data))

        l = len(ndata)

@@ -111,16 +111,16 @@ class QuantileRegression(fts.FTS):
            sample = ndata[k - self.order: k]
            ret.append(self.point_to_interval(sample, self.lower_qt, self.upper_qt))

-       ret = self.doInverseTransformations(ret, params=[data[self.order - 1:]], interval=True)
+       ret = self.apply_inverse_transformations(ret, params=[data[self.order - 1:]], interval=True)

        return ret

-   def forecastAheadInterval(self, data, steps, **kwargs):
+   def forecast_ahead_interval(self, data, steps, **kwargs):

        if self.indexer is not None and isinstance(data, pd.DataFrame):
            data = self.indexer.get_data(data)

-       ndata = np.array(self.doTransformations(data))
+       ndata = np.array(self.apply_transformations(data))

        smoothing = kwargs.get("smoothing", 0.9)

@@ -128,7 +128,7 @@ class QuantileRegression(fts.FTS):

        ret = []

-       nmeans = self.forecastAhead(ndata, steps, **kwargs)
+       nmeans = self.forecast_ahead(ndata, steps, **kwargs)

        for k in np.arange(0, self.order):
            nmeans.insert(k,ndata[-(k+1)])
@@ -138,16 +138,16 @@ class QuantileRegression(fts.FTS):

            ret.append([intl[0]*(1 + k*smoothing), intl[1]*(1 + k*smoothing)])

-       ret = self.doInverseTransformations(ret, params=[[data[-1] for a in np.arange(0, steps + self.order)]], interval=True)
+       ret = self.apply_inverse_transformations(ret, params=[[data[-1] for a in np.arange(0, steps + self.order)]], interval=True)

        return ret[-steps:]

-   def forecastDistribution(self, data, **kwargs):
+   def forecast_distribution(self, data, **kwargs):

        if self.indexer is not None and isinstance(data, pd.DataFrame):
            data = self.indexer.get_data(data)

-       ndata = np.array(self.doTransformations(data))
+       ndata = np.array(self.apply_transformations(data))

        ret = []

@@ -162,18 +162,18 @@ class QuantileRegression(fts.FTS):
            intl = self.point_to_interval(sample, qt[0], qt[1])
            intervals.append(intl)

-           dist.appendInterval(intervals)
+           dist.append_interval(intervals)

            ret.append(dist)

        return ret

-   def forecastAheadDistribution(self, data, steps, **kwargs):
+   def forecast_ahead_distribution(self, data, steps, **kwargs):

        if self.indexer is not None and isinstance(data, pd.DataFrame):
            data = self.indexer.get_data(data)

-       ndata = np.array(self.doTransformations(data))
+       ndata = np.array(self.apply_transformations(data))

        ret = []

@@ -184,7 +184,7 @@ class QuantileRegression(fts.FTS):
            for qt in self.dist_qt:
                intl = self.interval_to_interval([intervals[x] for x in np.arange(k - self.order, k)], qt[0], qt[1])
                intervals.append(intl)
-           dist.appendInterval(intervals)
+           dist.append_interval(intervals)

            ret.append(dist)

@@ -49,14 +49,14 @@ class ConventionalFTS(fts.FTS):

    def train(self, data, sets,order=1,parameters=None):
        self.sets = sets
-       ndata = self.doTransformations(data)
-       tmpdata = FuzzySet.fuzzySeries(ndata, sets)
+       ndata = self.apply_transformations(data)
+       tmpdata = FuzzySet.fuzzyfy_series_old(ndata, sets)
        flrs = FLR.generateNonRecurrentFLRs(tmpdata)
        self.flrgs = self.generateFLRG(flrs)

    def forecast(self, data, **kwargs):

-       ndata = np.array(self.doTransformations(data))
+       ndata = np.array(self.apply_transformations(data))

        l = len(ndata)

@@ -64,7 +64,7 @@ class ConventionalFTS(fts.FTS):

        for k in np.arange(0, l):

-           mv = FuzzySet.fuzzyInstance(ndata[k], self.sets)
+           mv = FuzzySet.fuzzyfy_instance(ndata[k], self.sets)

            actual = self.sets[np.argwhere(mv == max(mv))[0, 0]]

@@ -75,6 +75,6 @@ class ConventionalFTS(fts.FTS):

        ret.append(_flrg.get_midpoint())

-       ret = self.doInverseTransformations(ret, params=[data[self.order - 1:]])
+       ret = self.apply_inverse_transformations(ret, params=[data[self.order - 1:]])

        return ret
@@ -49,7 +49,7 @@ class TrendWeightedFTS(yu.WeightedFTS):
        self.detail = "Cheng"
        self.is_high_order = False

-   def generateFLRG(self, flrs):
+   def generate_FLRG(self, flrs):
        flrgs = {}
        for flr in flrs:
            if flr.LHS.name in flrgs:
@@ -111,8 +111,8 @@ def generateIndexedFLRs(sets, indexer, data, transformation=None):
    if transformation is not None:
        ndata = transformation.apply(ndata)
    for k in np.arange(1,len(ndata)):
-       lhs = FuzzySet.getMaxMembershipFuzzySet(ndata[k-1],sets)
-       rhs = FuzzySet.getMaxMembershipFuzzySet(ndata[k], sets)
+       lhs = FuzzySet.get_maximum_membership_fuzzyset(ndata[k - 1], sets)
+       rhs = FuzzySet.get_maximum_membership_fuzzyset(ndata[k], sets)
        season = index[k]
        flr = IndexedFLR(season,lhs,rhs)
        flrs.append(flr)
@@ -55,7 +55,7 @@ class FuzzySet(object):
        return self.name + ": " + str(self.mf.__name__) + "(" + str(self.parameters) + ")"


-def fuzzyInstance(inst, fuzzySets):
+def fuzzyfy_instance(inst, fuzzySets):
    """
    Calculate the membership values for a data point given fuzzy sets
    :param inst: data point
@@ -66,7 +66,7 @@ def fuzzyInstance(inst, fuzzySets):
    return mv


-def fuzzyInstances(data, fuzzySets):
+def fuzzyfy_instances(data, fuzzySets):
    """
    Calculate the membership values for a data point given fuzzy sets
    :param inst: data point
@@ -80,36 +80,36 @@ def fuzzyInstances(data, fuzzySets):
    return ret


-def getMaxMembershipFuzzySet(inst, fuzzySets):
+def get_maximum_membership_fuzzyset(inst, fuzzySets):
    """
    Fuzzify a data point, returning the fuzzy set with maximum membership value
    :param inst: data point
    :param fuzzySets: list of fuzzy sets
    :return: fuzzy set with maximum membership
    """
-   mv = fuzzyInstance(inst, fuzzySets)
+   mv = fuzzyfy_instance(inst, fuzzySets)
    return fuzzySets[np.argwhere(mv == max(mv))[0, 0]]


-def getMaxMembershipFuzzySetIndex(inst, fuzzySets):
+def get_maximum_membership_fuzzyset_index(inst, fuzzySets):
    """
    Fuzzify a data point, returning the fuzzy set with maximum membership value
    :param inst: data point
    :param fuzzySets: list of fuzzy sets
    :return: fuzzy set with maximum membership
    """
-   mv = fuzzyInstance(inst, fuzzySets)
+   mv = fuzzyfy_instance(inst, fuzzySets)
    return np.argwhere(mv == max(mv))[0, 0]


-def fuzzySeries(data, fuzzySets, method='maximum'):
+def fuzzyfy_series_old(data, fuzzySets, method='maximum'):
    fts = []
    for item in data:
-       fts.append(getMaxMembershipFuzzySet(item, fuzzySets))
+       fts.append(get_maximum_membership_fuzzyset(item, fuzzySets))
    return fts


-def fuzzifySeries(data, fuzzySets, method='maximum'):
+def fuzzify_series(data, fuzzySets, method='maximum'):
    fts = []
    for t, i in enumerate(data):
        mv = np.array([fs.membership(i) for fs in fuzzySets])
@@ -15,7 +15,7 @@ def uniquefilename(name):
    return name + str(current_milli_time())


-def showAndSaveImage(fig,file,flag,lgd=None):
+def show_and_save_image(fig, file, flag, lgd=None):
    """
    Show and image and save on file
    :param fig: Matplotlib Figure object
@@ -117,7 +117,7 @@ class EnsembleFTS(fts.FTS):

        return ret

-   def forecastInterval(self, data, **kwargs):
+   def forecast_interval(self, data, **kwargs):

        if "method" in kwargs:
            self.interval_method = kwargs.get('method','quantile')
@@ -139,7 +139,7 @@ class EnsembleFTS(fts.FTS):

        return ret

-   def forecastAheadInterval(self, data, steps, **kwargs):
+   def forecast_ahead_interval(self, data, steps, **kwargs):

        if 'method' in kwargs:
            self.interval_method = kwargs.get('method','quantile')
@@ -180,7 +180,7 @@ class EnsembleFTS(fts.FTS):
    def empty_grid(self, resolution):
        return self.get_empty_grid(-(self.original_max*2), self.original_max*2, resolution)

-   def forecastAheadDistribution(self, data, steps, **kwargs):
+   def forecast_ahead_distribution(self, data, steps, **kwargs):
        if 'method' in kwargs:
            self.point_method = kwargs.get('method','mean')

@@ -232,7 +232,7 @@ class AllMethodEnsembleFTS(EnsembleFTS):

    def set_transformations(self, model):
        for t in self.transformations:
-           model.appendTransformation(t)
+           model.append_transformation(t)

    def train(self, data, sets, order=1, parameters=None):
        self.original_max = max(data)
@@ -26,7 +26,7 @@ def train_individual_model(partitioner, train_data, indexer):
    print(_key)

    model = cmsfts.ContextualMultiSeasonalFTS(_key, indexer=indexer)
-   model.appendTransformation(partitioner.transformation)
+   model.append_transformation(partitioner.transformation)
    model.train(train_data, partitioner.sets, order=1)

    cUtil.persist_obj(model, "models/"+_key+".pkl")
@@ -70,7 +70,7 @@ class SeasonalEnsembleFTS(ensemble.EnsembleFTS):

        cUtil.persist_obj(self, "models/"+self.name+".pkl")

-   def forecastDistribution(self, data, **kwargs):
+   def forecast_distribution(self, data, **kwargs):

        ret = []

pyFTS/fts.py
@@ -54,6 +54,32 @@ class FTS(object):

        return best

+   def predict(self, data, **kwargs):
+       """
+       Forecast using trained model
+       :param data: time series with minimal length to the order of the model
+       :param kwargs:
+       :return:
+       """
+       type = kwargs.get("type", 'point')
+       steps_ahead = kwargs.get("steps_ahead", None)
+
+       if type == 'point' and steps_ahead == None:
+           return self.forecast(data, **kwargs)
+       elif type == 'point' and steps_ahead != None:
+           return self.forecast_ahead(data, steps_ahead, **kwargs)
+       elif type == 'interval' and steps_ahead == None:
+           return self.forecast_interval(data, **kwargs)
+       elif type == 'interval' and steps_ahead != None:
+           return self.forecast_ahead_interval(data, steps_ahead, **kwargs)
+       elif type == 'distribution' and steps_ahead == None:
+           return self.forecast_distribution(data, **kwargs)
+       elif type == 'distribution' and steps_ahead != None:
+           return self.forecast_ahead_distribution(data, steps_ahead, **kwargs)
+       else:
+           raise ValueError('The argument \'type\' has an unknown value.')
+
+
    def forecast(self, data, **kwargs):
        """
        Point forecast one step ahead
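The new predict() method is a single entry point that dispatches on the type and steps_ahead keyword arguments to the renamed forecast_* methods above. A minimal usage sketch, assuming an already trained model and a test series (both hypothetical, not part of this diff):

    model.predict(series)                                       # point forecast, one step ahead
    model.predict(series, type='interval')                      # interval forecast, one step ahead
    model.predict(series, type='distribution', steps_ahead=10)  # distribution forecast, 10 steps ahead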
@@ -61,27 +87,27 @@ class FTS(object):
        :param kwargs:
        :return:
        """
-       pass
+       raise NotImplementedError('This model do not perform one step ahead point forecasts!')

-   def forecastInterval(self, data, **kwargs):
+   def forecast_interval(self, data, **kwargs):
        """
        Interval forecast one step ahead
        :param data:
        :param kwargs:
        :return:
        """
-       pass
+       raise NotImplementedError('This model do not perform one step ahead interval forecasts!')

-   def forecastDistribution(self, data, **kwargs):
+   def forecast_distribution(self, data, **kwargs):
        """
        Probabilistic forecast one step ahead
        :param data:
        :param kwargs:
        :return:
        """
-       pass
+       raise NotImplementedError('This model do not perform one step ahead distribution forecasts!')

-   def forecastAhead(self, data, steps, **kwargs):
+   def forecast_ahead(self, data, steps, **kwargs):
        """
        Point forecast n steps ahead
        :param data:
|
||||
|
||||
return ret
|
||||
|
||||
def forecastAheadInterval(self, data, steps, **kwargs):
|
||||
def forecast_ahead_interval(self, data, steps, **kwargs):
|
||||
"""
|
||||
Interval forecast n steps ahead
|
||||
:param data:
|
||||
@@ -109,9 +135,9 @@ class FTS(object):
        :param kwargs:
        :return:
        """
-       pass
+       raise NotImplementedError('This model do not perform multi step ahead interval forecasts!')

-   def forecastAheadDistribution(self, data, steps, **kwargs):
+   def forecast_ahead_distribution(self, data, steps, **kwargs):
        """
        Probabilistic forecast n steps ahead
        :param data:
@@ -119,7 +145,7 @@ class FTS(object):
        :param kwargs:
        :return:
        """
-       pass
+       raise NotImplementedError('This model do not perform multi step ahead distribution forecasts!')

    def train(self, data, sets, order=1, parameters=None):
        """
@@ -132,11 +158,20 @@ class FTS(object):
        """
        pass

-   def appendTransformation(self, transformation):
+   def fit(self, data, **kwargs):
+       """
+
+       :param data:
+       :param kwargs:
+       :return:
+       """
+       self.train(data, sets=None)
+
+   def append_transformation(self, transformation):
        if transformation is not None:
            self.transformations.append(transformation)

-   def doTransformations(self,data,params=None,updateUoD=False, **kwargs):
+   def apply_transformations(self, data, params=None, updateUoD=False, **kwargs):
        ndata = data
        if updateUoD:
            if min(data) < 0:
@@ -158,7 +193,7 @@ class FTS(object):

        return ndata

-   def doInverseTransformations(self, data, params=None, **kwargs):
+   def apply_inverse_transformations(self, data, params=None, **kwargs):
        if len(self.transformations) > 0:
            if params is None:
                params = [None for k in self.transformations]
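Together with predict(), the new fit() gives the base class a scikit-learn-like train/apply surface over the snake_case internals. A hedged usage sketch (the concrete model class and transformation object are hypothetical; note that fit() as added here calls train(data, sets=None), so the model must be able to derive its fuzzy sets itself, e.g. from a partitioner):

    model.append_transformation(diff)  # e.g. a Differential(1) transformation (hypothetical)
    model.fit(train_data)
    forecasts = model.predict(test_data, type='interval')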
@@ -133,7 +133,7 @@ class HighOrderFTS(fts.FTS):

    def train(self, data, sets, order=1,parameters=None):

-       data = self.doTransformations(data, updateUoD=True)
+       data = self.apply_transformations(data, updateUoD=True)

        self.order = order
        self.sets = sets
@@ -149,10 +149,10 @@ class HighOrderFTS(fts.FTS):
        if l <= self.order:
            return data

-       ndata = self.doTransformations(data)
+       ndata = self.apply_transformations(data)

        for k in np.arange(self.order, l+1):
-           tmpdata = FuzzySet.fuzzySeries(ndata[k - self.order: k], self.sets)
+           tmpdata = FuzzySet.fuzzyfy_series_old(ndata[k - self.order: k], self.sets)
            tmpflrg = HighOrderFLRG(self.order)

            for s in tmpdata: tmpflrg.appendLHS(s)
@@ -163,6 +163,6 @@ class HighOrderFTS(fts.FTS):
            flrg = self.flrgs[tmpflrg.strLHS()]
            ret.append(flrg.get_midpoint())

-       ret = self.doInverseTransformations(ret, params=[data[self.order-1:]])
+       ret = self.apply_inverse_transformations(ret, params=[data[self.order - 1:]])

        return ret
@@ -21,7 +21,7 @@ class HighOrderFTS(fts.FTS):

    def forecast(self, data, **kwargs):

-       ndata = self.doTransformations(data)
+       ndata = self.apply_transformations(data)

        cn = np.array([0.0 for k in range(len(self.sets))])
        ow = np.array([[0.0 for k in range(len(self.sets))] for z in range(self.order - 1)])
@@ -47,7 +47,7 @@ class HighOrderFTS(fts.FTS):
            count += 1.0
            ret.append(out / count)

-       ret = self.doInverseTransformations(ret, params=[data[self.order - 1:]])
+       ret = self.apply_inverse_transformations(ret, params=[data[self.order - 1:]])

        return ret

@@ -43,9 +43,9 @@ class IntervalFTS(hofts.HighOrderFTS):
        mb = [fuzzySets[k].membership(data[k]) for k in np.arange(0, len(data))]
        return mb

-   def forecastInterval(self, data, **kwargs):
+   def forecast_interval(self, data, **kwargs):

-       ndata = np.array(self.doTransformations(data))
+       ndata = np.array(self.apply_transformations(data))

        l = len(ndata)

@@ -66,7 +66,7 @@ class IntervalFTS(hofts.HighOrderFTS):
                subset = ndata[k - (self.order - 1): k + 1]

                for instance in subset:
-                   mb = FuzzySet.fuzzyInstance(instance, self.sets)
+                   mb = FuzzySet.fuzzyfy_instance(instance, self.sets)
                    tmp = np.argwhere(mb)
                    idx = np.ravel(tmp)  # flat the array

@@ -101,7 +101,7 @@ class IntervalFTS(hofts.HighOrderFTS):
                    affected_flrgs_memberships.append(min(self.getSequenceMembership(subset, flrg.LHS)))
            else:

-               mv = FuzzySet.fuzzyInstance(ndata[k], self.sets)
+               mv = FuzzySet.fuzzyfy_instance(ndata[k], self.sets)
                tmp = np.argwhere(mv)
                idx = np.ravel(tmp)

@@ -132,6 +132,6 @@ class IntervalFTS(hofts.HighOrderFTS):
            up_ = sum(up) / norm
            ret.append([lo_, up_])

-       ret = self.doInverseTransformations(ret, params=[data[self.order - 1:]], interval=True)
+       ret = self.apply_inverse_transformations(ret, params=[data[self.order - 1:]], interval=True)

        return ret
@@ -66,9 +66,9 @@ class ImprovedWeightedFTS(fts.FTS):

        for s in self.sets: self.setsDict[s.name] = s

-       ndata = self.doTransformations(data)
+       ndata = self.apply_transformations(data)

-       tmpdata = FuzzySet.fuzzySeries(ndata, self.sets)
+       tmpdata = FuzzySet.fuzzyfy_series_old(ndata, self.sets)
        flrs = FLR.generateRecurrentFLRs(tmpdata)
        self.flrgs = self.generateFLRG(flrs)

@@ -76,7 +76,7 @@ class ImprovedWeightedFTS(fts.FTS):
        l = 1

        data = np.array(data)
-       ndata = self.doTransformations(data)
+       ndata = self.apply_transformations(data)

        l = len(ndata)

@@ -84,7 +84,7 @@ class ImprovedWeightedFTS(fts.FTS):

        for k in np.arange(0, l):

-           mv = FuzzySet.fuzzyInstance(ndata[k], self.sets)
+           mv = FuzzySet.fuzzyfy_instance(ndata[k], self.sets)

            actual = self.sets[np.argwhere(mv == max(mv))[0, 0]]

@@ -96,6 +96,6 @@ class ImprovedWeightedFTS(fts.FTS):

        ret.append(mp.dot(flrg.weights()))

-       ret = self.doInverseTransformations(ret, params=[data[self.order - 1:]])
+       ret = self.apply_inverse_transformations(ret, params=[data[self.order - 1:]])

        return ret
@@ -10,14 +10,14 @@ class ConditionalVarianceFTS(chen.ConventionalFTS):
        self.name = "Conditional Variance FTS"
        self.detail = ""
        self.flrgs = {}
-       #self.appendTransformation(Transformations.Differential(1))
+       #self.append_transformation(Transformations.Differential(1))
        if self.partitioner is None:
            self.min_tx = None
            self.max_tx = None
        else:
            self.min_tx = self.partitioner.min
            self.max_tx = self.partitioner.max
-           self.appendTransformation(self.partitioner.transformation)
+           self.append_transformation(self.partitioner.transformation)

        self.min_stack = [0,0,0]
        self.max_stack = [0,0,0]
@@ -28,7 +28,7 @@ class ConditionalVarianceFTS(chen.ConventionalFTS):
        else:
            self.sets = self.partitioner.sets

-       ndata = self.doTransformations(data)
+       ndata = self.apply_transformations(data)

        self.min_tx = min(ndata)
        self.max_tx = max(ndata)
@@ -89,7 +89,7 @@ class ConditionalVarianceFTS(chen.ConventionalFTS):
        return affected_sets

    def forecast(self, data, **kwargs):
-       ndata = np.array(self.doTransformations(data))
+       ndata = np.array(self.apply_transformations(data))

        l = len(ndata)

@@ -127,13 +127,13 @@ class ConditionalVarianceFTS(chen.ConventionalFTS):

            ret.append(pto)

-       ret = self.doInverseTransformations(ret, params=[data[self.order - 1:]])
+       ret = self.apply_inverse_transformations(ret, params=[data[self.order - 1:]])

        return ret


-   def forecastInterval(self, data, **kwargs):
-       ndata = np.array(self.doTransformations(data))
+   def forecast_interval(self, data, **kwargs):
+       ndata = np.array(self.apply_transformations(data))

        l = len(ndata)

@@ -175,6 +175,6 @@ class ConditionalVarianceFTS(chen.ConventionalFTS):

            ret.append(itvl)

-       ret = self.doInverseTransformations(ret, params=[data[self.order - 1:]])
+       ret = self.apply_inverse_transformations(ret, params=[data[self.order - 1:]])

        return ret
@@ -104,8 +104,8 @@ class HighOrderNonStationaryFTS(hofts.HighOrderFTS):
        else:
            self.sets = self.partitioner.sets

-       ndata = self.doTransformations(data)
-       #tmpdata = common.fuzzySeries(ndata, self.sets)
+       ndata = self.apply_transformations(data)
+       #tmpdata = common.fuzzyfy_series_old(ndata, self.sets)
        #flrs = FLR.generateRecurrentFLRs(ndata)
        window_size = parameters if parameters is not None else 1
        self.flrgs = self.generate_flrg(ndata, window_size=window_size)
@@ -175,7 +175,7 @@ class HighOrderNonStationaryFTS(hofts.HighOrderFTS):

        window_size = kwargs.get("window_size", 1)

-       ndata = np.array(self.doTransformations(data))
+       ndata = np.array(self.apply_transformations(data))

        l = len(ndata)

@@ -215,17 +215,17 @@ class HighOrderNonStationaryFTS(hofts.HighOrderFTS):

            ret.append(pto)

-       ret = self.doInverseTransformations(ret, params=[data[self.order - 1:]])
+       ret = self.apply_inverse_transformations(ret, params=[data[self.order - 1:]])

        return ret

-   def forecastInterval(self, data, **kwargs):
+   def forecast_interval(self, data, **kwargs):

        time_displacement = kwargs.get("time_displacement", 0)

        window_size = kwargs.get("window_size", 1)

-       ndata = np.array(self.doTransformations(data))
+       ndata = np.array(self.apply_transformations(data))

        l = len(ndata)

@@ -273,6 +273,6 @@ class HighOrderNonStationaryFTS(hofts.HighOrderFTS):
            ret.append([sum(lower), sum(upper)])


-       ret = self.doInverseTransformations(ret, params=[data[self.order - 1:]])
+       ret = self.apply_inverse_transformations(ret, params=[data[self.order - 1:]])

        return ret
@@ -51,7 +51,7 @@ class NonStationaryFTS(fts.FTS):
        else:
            self.sets = self.partitioner.sets

-       ndata = self.doTransformations(data)
+       ndata = self.apply_transformations(data)
        window_size = parameters if parameters is not None else 1
        tmpdata = common.fuzzySeries(ndata, self.sets, window_size, method=self.method)
        #print([k[0].name for k in tmpdata])
@@ -65,7 +65,7 @@ class NonStationaryFTS(fts.FTS):

        window_size = kwargs.get("window_size", 1)

-       ndata = np.array(self.doTransformations(data))
+       ndata = np.array(self.apply_transformations(data))

        l = len(ndata)

@@ -120,17 +120,17 @@ class NonStationaryFTS(fts.FTS):

            ret.append(pto)

-       ret = self.doInverseTransformations(ret, params=[data[self.order - 1:]])
+       ret = self.apply_inverse_transformations(ret, params=[data[self.order - 1:]])

        return ret

-   def forecastInterval(self, data, **kwargs):
+   def forecast_interval(self, data, **kwargs):

        time_displacement = kwargs.get("time_displacement", 0)

        window_size = kwargs.get("window_size", 1)

-       ndata = np.array(self.doTransformations(data))
+       ndata = np.array(self.apply_transformations(data))

        l = len(ndata)

@@ -181,6 +181,6 @@ class NonStationaryFTS(fts.FTS):

        ret.append([sum(lower), sum(upper)])

-       ret = self.doInverseTransformations(ret, params=[data[self.order - 1:]])
+       ret = self.apply_inverse_transformations(ret, params=[data[self.order - 1:]])

        return ret
@@ -50,7 +50,7 @@ def plot_sets(sets, start=0, end=10, step=1, tam=[5, 5], colors=None,

    plt.tight_layout()

-   Util.showAndSaveImage(fig, file, save)
+   Util.show_and_save_image(fig, file, save)


def plot_sets_conditional(model, data, start=0, end=10, step=1, tam=[5, 5], colors=None,
@@ -88,4 +88,4 @@ def plot_sets_conditional(model, data, start=0, end=10, step=1, tam=[5, 5], colo

    plt.tight_layout()

-   Util.showAndSaveImage(fig, file, save)
+   Util.show_and_save_image(fig, file, save)
@@ -38,7 +38,7 @@ def plot_sets(data, sets, titles, tam=[12, 10], save=False, file=None):

    plt.tight_layout()

-   Util.showAndSaveImage(fig, file, save)
+   Util.show_and_save_image(fig, file, save)


def plot_partitioners(data, objs, tam=[12, 10], save=False, file=None):
@@ -65,7 +65,7 @@ class ProbabilityDistribution(object):
        for v,d in enumerate(dens):
            self.distribution[self.bins[v]] = d

-   def appendInterval(self, intervals):
+   def append_interval(self, intervals):
        if self.type == "histogram":
            for interval in intervals:
                for k in self.bin_index.inside(interval[0], interval[1]):
@@ -113,7 +113,7 @@ class ProbabilisticWeightedFTS(ifts.IntervalFTS):

    def train(self, data, sets, order=1,parameters='Fuzzy'):

-       data = self.doTransformations(data, updateUoD=True)
+       data = self.apply_transformations(data, updateUoD=True)

        self.order = order
        if sets is None and self.partitioner is not None:
@@ -124,7 +124,7 @@ class ProbabilisticWeightedFTS(ifts.IntervalFTS):
        self.sets = sets
        for s in self.sets: self.setsDict[s.name] = s
        if parameters == 'Monotonic':
-           tmpdata = FuzzySet.fuzzySeries(data, sets)
+           tmpdata = FuzzySet.fuzzyfy_series_old(data, sets)
            flrs = FLR.generateRecurrentFLRs(tmpdata)
            self.flrgs = self.generateFLRG(flrs)
        else:
@@ -138,10 +138,10 @@ class ProbabilisticWeightedFTS(ifts.IntervalFTS):

                sample = data[k - self.order: k]

-               mvs = FuzzySet.fuzzyInstances(sample, self.sets)
+               mvs = FuzzySet.fuzzyfy_instances(sample, self.sets)
                lags = {}

-               mv = FuzzySet.fuzzyInstance(data[k], self.sets)
+               mv = FuzzySet.fuzzyfy_instance(data[k], self.sets)
                tmp = np.argwhere(mv)
                idx = np.ravel(tmp)  # flatten the array

@@ -201,7 +201,7 @@ class ProbabilisticWeightedFTS(ifts.IntervalFTS):

    def update_model(self,data):

-       fzzy = FuzzySet.fuzzySeries(data, self.sets)
+       fzzy = FuzzySet.fuzzyfy_series_old(data, self.sets)

        flrg = ProbabilisticWeightedFLRG(self.order)

@@ -277,7 +277,7 @@ class ProbabilisticWeightedFTS(ifts.IntervalFTS):

    def forecast(self, data, **kwargs):

-       ndata = np.array(self.doTransformations(data))
+       ndata = np.array(self.apply_transformations(data))

        l = len(ndata)

@@ -299,7 +299,7 @@ class ProbabilisticWeightedFTS(ifts.IntervalFTS):
                subset = ndata[k - (self.order - 1): k + 1]

                for count, instance in enumerate(subset):
-                   mb = FuzzySet.fuzzyInstance(instance, self.sets)
+                   mb = FuzzySet.fuzzyfy_instance(instance, self.sets)
                    tmp = np.argwhere(mb)
                    idx = np.ravel(tmp)  # flatten the array

@@ -332,11 +332,11 @@ class ProbabilisticWeightedFTS(ifts.IntervalFTS):
                    affected_flrgs.append(flrg)

                    # Find the general membership of FLRG
-                   affected_flrgs_memberships.append(flrg.get_membership())
+                   affected_flrgs_memberships.append(flrg.get_membership(subset))

            else:

-               mv = FuzzySet.fuzzyInstance(ndata[k], self.sets)  # get all membership values
+               mv = FuzzySet.fuzzyfy_instance(ndata[k], self.sets)  # get all membership values
                tmp = np.argwhere(mv)  # get the indices of values > 0
                idx = np.ravel(tmp)  # flatten the array

@@ -371,11 +371,11 @@ class ProbabilisticWeightedFTS(ifts.IntervalFTS):

            if self.auto_update and k > self.order+1: self.update_model(ndata[k - self.order - 1 : k])

-       ret = self.doInverseTransformations(ret, params=[data[self.order - 1:]])
+       ret = self.apply_inverse_transformations(ret, params=[data[self.order - 1:]])

        return ret

-   def forecastInterval(self, data, **kwargs):
+   def forecast_interval(self, data, **kwargs):

        if 'method' in kwargs:
            self.interval_method = kwargs.get('method','quantile')
@@ -383,7 +383,7 @@ class ProbabilisticWeightedFTS(ifts.IntervalFTS):
        if 'alpha' in kwargs:
            self.alpha = kwargs.get('alpha', 0.05)

-       ndata = np.array(self.doTransformations(data))
+       ndata = np.array(self.apply_transformations(data))

        l = len(ndata)

@@ -396,12 +396,12 @@ class ProbabilisticWeightedFTS(ifts.IntervalFTS):
            else:
                self.interval_quantile(k, ndata, ret)

-       ret = self.doInverseTransformations(ret, params=[data[self.order - 1:]], interval=True)
+       ret = self.apply_inverse_transformations(ret, params=[data[self.order - 1:]], interval=True)

        return ret

    def interval_quantile(self, k, ndata, ret):
-       dist = self.forecastDistribution(ndata)
+       dist = self.forecast_distribution(ndata)
        lo_qt = dist[0].quantile(self.alpha)
        up_qt = dist[0].quantile(1.0 - self.alpha)
        ret.append([lo_qt, up_qt])
@@ -419,7 +419,7 @@ class ProbabilisticWeightedFTS(ifts.IntervalFTS):
                subset = ndata[k - (self.order - 1): k + 1]

                for instance in subset:
-                   mb = FuzzySet.fuzzyInstance(instance, self.sets)
+                   mb = FuzzySet.fuzzyfy_instance(instance, self.sets)
                    tmp = np.argwhere(mb)
                    idx = np.ravel(tmp)  # flatten the array

@@ -458,7 +458,7 @@ class ProbabilisticWeightedFTS(ifts.IntervalFTS):

            else:

-               mv = FuzzySet.fuzzyInstance(ndata[k], self.sets)  # get all membership values
+               mv = FuzzySet.fuzzyfy_instance(ndata[k], self.sets)  # get all membership values
                tmp = np.argwhere(mv)  # get the indices of values > 0
                idx = np.ravel(tmp)  # flatten the array

@@ -494,7 +494,7 @@ class ProbabilisticWeightedFTS(ifts.IntervalFTS):
            up_ = sum(up) / norm
            ret.append([lo_, up_])

-   def forecastDistribution(self, data, **kwargs):
+   def forecast_distribution(self, data, **kwargs):

        if not isinstance(data, (list, set, np.ndarray)):
            data = [data]
@@ -502,7 +502,7 @@ class ProbabilisticWeightedFTS(ifts.IntervalFTS):
        smooth = kwargs.get("smooth", "none")
        nbins = kwargs.get("num_bins", 100)

-       ndata = np.array(self.doTransformations(data))
+       ndata = np.array(self.apply_transformations(data))

        l = len(ndata)

@@ -532,7 +532,7 @@ class ProbabilisticWeightedFTS(ifts.IntervalFTS):

        return ret

-   def forecastAhead(self, data, steps, **kwargs):
+   def forecast_ahead(self, data, steps, **kwargs):
        ret = [data[k] for k in np.arange(len(data) - self.order, len(data))]

        for k in np.arange(self.order - 1, steps):
@@ -546,7 +546,7 @@ class ProbabilisticWeightedFTS(ifts.IntervalFTS):

        return ret

-   def forecastAheadInterval(self, data, steps, **kwargs):
+   def forecast_ahead_interval(self, data, steps, **kwargs):

        l = len(data)

@@ -559,14 +559,14 @@ class ProbabilisticWeightedFTS(ifts.IntervalFTS):
                          1] >= self.original_max):
                ret.append(ret[-1])
            else:
-               lower = self.forecastInterval([ret[x][0] for x in np.arange(k - self.order, k)])
-               upper = self.forecastInterval([ret[x][1] for x in np.arange(k - self.order, k)])
+               lower = self.forecast_interval([ret[x][0] for x in np.arange(k - self.order, k)])
+               upper = self.forecast_interval([ret[x][1] for x in np.arange(k - self.order, k)])

                ret.append([np.min(lower), np.max(upper)])

        return ret

-   def forecastAheadDistribution(self, data, steps, **kwargs):
+   def forecast_ahead_distribution(self, data, steps, **kwargs):

        ret = []

@@ -578,7 +578,7 @@ class ProbabilisticWeightedFTS(ifts.IntervalFTS):
        _bins = np.linspace(uod[0], uod[1], nbins).tolist()

        if method != 4:
-           intervals = self.forecastAheadInterval(data, steps)
+           intervals = self.forecast_ahead_interval(data, steps)
        else:
            l = len(data)
            for k in np.arange(l - self.order, l):
@@ -623,7 +623,7 @@ class ProbabilisticWeightedFTS(ifts.IntervalFTS):
                for p in root.paths():
                    path = list(reversed(list(filter(None.__ne__, p))))

-                   qtle = np.ravel(self.forecastInterval(path))
+                   qtle = np.ravel(self.forecast_interval(path))

                    data.extend(np.linspace(qtle[0],qtle[1],100).tolist())

@@ -631,17 +631,17 @@ class ProbabilisticWeightedFTS(ifts.IntervalFTS):

                for qt in np.arange(0, 50, 1):
                    # print(qt)
-                   qtle_lower = self.forecastInterval(
+                   qtle_lower = self.forecast_interval(
                        [intervals[x][0] + qt * ((intervals[x][1] - intervals[x][0]) / 100) for x in
                         np.arange(k - self.order, k)])
                    qtle_lower = np.ravel(qtle_lower)
                    data.extend(np.linspace(qtle_lower[0], qtle_lower[1], 100).tolist())
-                   qtle_upper = self.forecastInterval(
+                   qtle_upper = self.forecast_interval(
                        [intervals[x][1] - qt * ((intervals[x][1] - intervals[x][0]) / 100) for x in
                         np.arange(k - self.order, k)])
                    qtle_upper = np.ravel(qtle_upper)
                    data.extend(np.linspace(qtle_upper[0], qtle_upper[1], 100).tolist())
-               qtle_mid = self.forecastInterval(
+               qtle_mid = self.forecast_interval(
                    [intervals[x][0] + (intervals[x][1] - intervals[x][0]) / 2 for x in np.arange(k - self.order, k)])
                qtle_mid = np.ravel(qtle_mid)
                data.extend(np.linspace(qtle_mid[0], qtle_mid[1], 100).tolist())
@ -674,7 +674,7 @@ class ProbabilisticWeightedFTS(ifts.IntervalFTS):
|
||||
pk = np.prod([ret[k - self.order + o].density(path[o])
|
||||
for o in np.arange(0,self.order)])
|
||||
|
||||
d = self.forecastDistribution(path)[0]
|
||||
d = self.forecast_distribution(path)[0]
|
||||
|
||||
for bin in _bins:
|
||||
dist.set(bin, dist.density(bin) + pk * d.density(bin))
|
||||
|
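
In the last hunk, each path through the FLRG tree contributes one component distribution, weighted by pk, the joint density of the lagged inputs along that path; the forecast density at every bin is the pk-weighted mixture of the components. A schematic sketch of that accumulation over a fixed bin grid (the names are assumed for illustration, not taken from pyFTS):

import numpy as np

def mix_path_distributions(paths, bins):
    # paths: list of (pk, density_fn) pairs; bins: grid over the universe of discourse
    mixture = np.zeros(len(bins))
    for pk, density in paths:
        # each path adds its component density, scaled by the path weight pk
        mixture += pk * np.array([density(b) for b in bins])
    # normalize so the discrete densities sum to one (guard against empty input)
    total = mixture.sum()
    return mixture / total if total > 0 else mixture
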
@ -66,8 +66,8 @@ class ExponentialyWeightedFTS(fts.FTS):
def train(self, data, sets,order=1,parameters=1.05):
self.c = parameters
self.sets = sets
ndata = self.doTransformations(data)
tmpdata = FuzzySet.fuzzySeries(ndata, sets)
ndata = self.apply_transformations(data)
tmpdata = FuzzySet.fuzzyfy_series_old(ndata, sets)
flrs = FLR.generateRecurrentFLRs(tmpdata)
self.flrgs = self.generateFLRG(flrs, self.c)

@ -76,7 +76,7 @@ class ExponentialyWeightedFTS(fts.FTS):

data = np.array(data)

ndata = self.doTransformations(data)
ndata = self.apply_transformations(data)

l = len(ndata)

@ -84,7 +84,7 @@ class ExponentialyWeightedFTS(fts.FTS):

for k in np.arange(0, l):

mv = FuzzySet.fuzzyInstance(ndata[k], self.sets)
mv = FuzzySet.fuzzyfy_instance(ndata[k], self.sets)

actual = self.sets[np.argwhere(mv == max(mv))[0, 0]]

@ -96,6 +96,6 @@ class ExponentialyWeightedFTS(fts.FTS):

ret.append(mp.dot(flrg.weights()))

ret = self.doInverseTransformations(ret, params=[data[self.order - 1:]])
ret = self.apply_inverse_transformations(ret, params=[data[self.order - 1:]])

return ret
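
Sadaei's exponentially weighted scheme behind this class gives more recent recurrences geometrically larger weights, controlled by the parameter c (the 1.05 default in train above). A hedged sketch of how such weights could be built and applied to the consequent midpoints, in the same spirit as mp.dot(flrg.weights()):

import numpy as np

def exponential_weights(count, c=1.05):
    # the k-th recurrence is weighted c**k, then the vector is normalized
    w = np.array([c ** k for k in np.arange(0.0, count)])
    return w / w.sum()

# usage: the defuzzified forecast is the weighted average of the midpoints
midpoints = np.array([10.0, 12.5, 15.0])
print(midpoints.dot(exponential_weights(len(midpoints), c=1.05)))
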
@ -79,7 +79,7 @@ class ContextualMultiSeasonalFTS(sfts.SeasonalFTS):

flrg = self.flrgs[str(index[k])]

d = FuzzySet.getMaxMembershipFuzzySet(ndata[k], self.sets)
d = FuzzySet.get_maximum_membership_fuzzyset(ndata[k], self.sets)

mp = self.getMidpoints(flrg, d)

@ -54,11 +54,11 @@ class MultiSeasonalFTS(sfts.SeasonalFTS):

ret.append(sum(mp) / len(mp))

ret = self.doInverseTransformations(ret, params=[ndata])
ret = self.apply_inverse_transformations(ret, params=[ndata])

return ret

def forecastAhead(self, data, steps, **kwargs):
def forecast_ahead(self, data, steps, **kwargs):
ret = []
for i in steps:
flrg = self.flrgs[str(i)]
@ -67,6 +67,6 @@ class MultiSeasonalFTS(sfts.SeasonalFTS):

ret.append(sum(mp) / len(mp))

ret = self.doInverseTransformations(ret, params=data)
ret = self.apply_inverse_transformations(ret, params=data)

return ret
@ -63,14 +63,14 @@ class SeasonalFTS(fts.FTS):

def train(self, data, sets, order=1, parameters=None):
self.sets = sets
ndata = self.doTransformations(data)
tmpdata = FuzzySet.fuzzySeries(ndata, sets)
ndata = self.apply_transformations(data)
tmpdata = FuzzySet.fuzzyfy_series_old(ndata, sets)
flrs = FLR.generateRecurrentFLRs(tmpdata)
self.flrgs = self.generateFLRG(flrs)

def forecast(self, data, **kwargs):

ndata = np.array(self.doTransformations(data))
ndata = np.array(self.apply_transformations(data))

l = len(ndata)

@ -86,6 +86,6 @@ class SeasonalFTS(fts.FTS):

ret.append(np.percentile(mp, 50))

ret = self.doInverseTransformations(ret, params=[data[self.order - 1:]])
ret = self.apply_inverse_transformations(ret, params=[data[self.order - 1:]])

return ret
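
The seasonal models key their FLRGs by season index instead of lagged fuzzy sets, and the forecast is a central statistic of that season's midpoints, the median via np.percentile(mp, 50) above. A toy sketch of the idea, assuming a simple modular season index (pyFTS also supports date-based indexers):

import numpy as np

def seasonal_forecast_sketch(season_midpoints, t, seasonality):
    # season_midpoints: dict mapping season index -> midpoints learned for it
    season = t % seasonality  # assumed indexing scheme for this sketch
    return np.percentile(season_midpoints[season], 50)

# usage: with seasonality 12, step 14 falls in season 2 -> median 12.0
print(seasonal_forecast_sketch({2: [10.0, 12.0, 20.0]}, 14, 12))
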
@ -40,14 +40,14 @@ class ConventionalFTS(fts.FTS):

def train(self, data, sets,order=1,parameters=None):
self.sets = sets
ndata = self.doTransformations(data)
tmpdata = FuzzySet.fuzzySeries(ndata, sets)
ndata = self.apply_transformations(data)
tmpdata = FuzzySet.fuzzyfy_series_old(ndata, sets)
flrs = FLR.generateNonRecurrentFLRs(tmpdata)
self.R = self.operation_matrix(flrs)

def forecast(self, data, **kwargs):

ndata = np.array(self.doTransformations(data))
ndata = np.array(self.apply_transformations(data))

l = len(ndata)
npart = len(self.sets)
@ -55,7 +55,7 @@ class ConventionalFTS(fts.FTS):
ret = []

for k in np.arange(0, l):
mv = FuzzySet.fuzzyInstance(ndata[k], self.sets)
mv = FuzzySet.fuzzyfy_instance(ndata[k], self.sets)

r = [max([ min(self.R[i][j], mv[j]) for j in np.arange(0,npart) ]) for i in np.arange(0,npart)]

@ -68,6 +68,6 @@ class ConventionalFTS(fts.FTS):

ret.append( sum(mp)/len(mp))

ret = self.doInverseTransformations(ret, params=[data[self.order - 1:]])
ret = self.apply_inverse_transformations(ret, params=[data[self.order - 1:]])

return ret
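
The r = [...] line above is Song and Chissom's max-min composition: the fuzzified input mv is composed with the learned relation matrix R, so each output membership is the best min(R[i][j], mv[j]) over all inputs j. A self-contained sketch of the same operation:

import numpy as np

def max_min_composition(R, mv):
    # R: (n, n) fuzzy relation matrix; mv: membership vector of the input
    n = len(mv)
    return np.array([max(min(R[i][j], mv[j]) for j in range(n)) for i in range(n)])

# usage: two fuzzy sets, current value mostly in set 0
R = np.array([[1.0, 0.5], [0.3, 1.0]])
print(max_min_composition(R, np.array([0.8, 0.2])))  # -> [0.8, 0.3]
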
pyFTS/tests/distributed.py (new file, 129 lines)
@ -0,0 +1,129 @@
from pyFTS.partitioners import Grid
from pyFTS import fts, flrg, song, chen, yu, sadaei, ismailefendi, cheng, hofts
from pyFTS.benchmarks import Measures
from pyFTS.common import Util as cUtil
import pandas as pd
import numpy as np
import os
from pyFTS.common import Transformations
from copy import deepcopy
from pyFTS.nonstationary import common, flrg, util, perturbation, nsfts, honsfts, partitioners

bc = Transformations.BoxCox(0)

import dispy
import dispy.httpd

os.chdir("/home/petronio/Dropbox/Doutorado/Codigos/")


def evaluate_individual_model(model, partitioner, train, test, window_size, time_displacement):
import numpy as np
from pyFTS.common import FLR, FuzzySet
from pyFTS.partitioners import Grid
from pyFTS.benchmarks import Measures
from pyFTS.nonstationary import common, flrg, util, perturbation, nsfts, honsfts, partitioners

try:
model.train(train, sets=partitioner.sets, order=model.order, parameters=window_size)
forecasts = model.forecast(test, time_displacement=time_displacement, window_size=window_size)
_rmse = Measures.rmse(test[model.order:], forecasts[:-1])
_mape = Measures.mape(test[model.order:], forecasts[:-1])
_u = Measures.UStatistic(test[model.order:], forecasts[:-1])
except Exception as e:
print(e)
_rmse = np.nan
_mape = np.nan
_u = np.nan

return {'model': model.shortname, 'partitions': partitioner.partitions, 'order': model.order,
'rmse': _rmse, 'mape': _mape, 'u': _u}


data = pd.read_csv("DataSets/synthetic_nonstationary_dataset_A.csv", sep=";")
data = np.array(data["0"][:])

cluster = dispy.JobCluster(evaluate_individual_model, nodes=['192.168.0.108', '192.168.0.110'])
http_server = dispy.httpd.DispyHTTPServer(cluster)

jobs = []

models = []

for order in [1, 2, 3]:
if order == 1:
model = nsfts.NonStationaryFTS("")
else:
model = honsfts.HighOrderNonStationaryFTS("")

model.order = order

models.append(model)

for ct, train, test in cUtil.sliding_window(data, 300):
for partition in np.arange(5, 100, 1):
tmp_partitioner = Grid.GridPartitioner(train, partition)
partitioner = partitioners.PolynomialNonStationaryPartitioner(train, tmp_partitioner,
window_size=35, degree=1)
for model in models:
# print(model.shortname, partition, model.order)
#job = evaluate_individual_model(model, train, test)
job = cluster.submit(deepcopy(model), deepcopy(partitioner), train, test, 35, 240)
job.id = ct + model.order*100
jobs.append(job)

results = {}

for job in jobs:
tmp = job()
# print(tmp)
if job.status == dispy.DispyJob.Finished and tmp is not None:
_m = tmp['model']
_o = tmp['order']
_p = tmp['partitions']
if _m not in results:
results[_m] = {}
if _o not in results[_m]:
results[_m][_o] = {}
if _p not in results[_m][_o]:
results[_m][_o][_p] = {}
results[_m][_o][_p]['rmse'] = []
results[_m][_o][_p]['mape'] = []
results[_m][_o][_p]['u'] = []

results[_m][_o][_p]['rmse'].append(tmp['rmse'])
results[_m][_o][_p]['mape'].append(tmp['mape'])
results[_m][_o][_p]['u'].append(tmp['u'])

cluster.wait() # wait for all jobs to finish

cluster.print_status()

http_server.shutdown() # this waits until browser gets all updates
cluster.close()

dados = []
ncolunas = None

for m in sorted(results.keys()):
for o in sorted(results[m].keys()):
for p in sorted(results[m][o].keys()):
for r in ['rmse', 'mape', 'u']:
tmp = []
tmp.append(m)
tmp.append(o)
tmp.append(p)
tmp.append(r)
tmp.extend(results[m][o][p][r])

dados.append(tmp)

if ncolunas is None:
ncolunas = len(results[m][o][p][r])

colunas = ["model", "order", "partitions", "metric"]
for k in np.arange(0, ncolunas):
colunas.append(str(k))

dat = pd.DataFrame(dados, columns=colunas)
dat.to_csv("experiments/nsfts_partitioning_A.csv", sep=";")
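
The new test script follows the canonical dispy pattern: the evaluation function is shipped to worker nodes through JobCluster, submit returns DispyJob handles, and calling job() blocks until that job's remote result is back. A condensed sketch of the same round trip (the node address is a placeholder):

import dispy

def square(x):
    # runs on a worker node; anything it needs must be imported inside it
    return x * x

cluster = dispy.JobCluster(square, nodes=['192.168.0.*'])
jobs = [cluster.submit(n) for n in range(10)]
results = [job() for job in jobs]  # job() waits for and returns the result
cluster.wait()  # block until all submitted jobs have finished
cluster.close()
print(results)
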
@ -34,7 +34,7 @@ ho_methods = [hofts.HighOrderFTS, hwang.HighOrderFTS]

fs = Grid.GridPartitioner(passengers, 10, transformation=diff)

e.appendTransformation(diff)
e.append_transformation(diff)

e.train(passengers, fs.sets, order=3)

@ -42,7 +42,7 @@ e.train(passengers, fs.sets, order=3)

for method in fo_methods:
model = method("")
model.appendTransformation(diff)
model.append_transformation(diff)
model.train(passengers, fs.sets)
e.appendModel(model)

@ -50,25 +50,25 @@ for method in fo_methods:
for method in ho_methods:
for order in [1,2,3]:
model = method("")
model.appendTransformation(diff)
model.append_transformation(diff)
model.train(passengers, fs.sets, order=order)
e.appendModel(model)


arima100 = arima.ARIMA("", alpha=0.25)
#tmp.appendTransformation(diff)
#tmp.append_transformation(diff)
arima100.train(passengers, None, order=(1,0,0))

arima101 = arima.ARIMA("", alpha=0.25)
#tmp.appendTransformation(diff)
#tmp.append_transformation(diff)
arima101.train(passengers, None, order=(1,0,1))

arima200 = arima.ARIMA("", alpha=0.25)
#tmp.appendTransformation(diff)
#tmp.append_transformation(diff)
arima200.train(passengers, None, order=(2,0,0))

arima201 = arima.ARIMA("", alpha=0.25)
#tmp.appendTransformation(diff)
#tmp.append_transformation(diff)
arima201.train(passengers, None, order=(2,0,1))

@ -87,34 +87,34 @@ _median = e.forecast(passengers, method="median")
print(_median)
"""
"""
_extremum = e.forecastInterval(passengers, method="extremum")
_extremum = e.forecast_interval(passengers, method="extremum")
print(_extremum)

_quantile = e.forecastInterval(passengers, method="quantile", alpha=0.25)
_quantile = e.forecast_interval(passengers, method="quantile", alpha=0.25)
print(_quantile)


_normal = e.forecastInterval(passengers, method="normal", alpha=0.25)
_normal = e.forecast_interval(passengers, method="normal", alpha=0.25)
print(_normal)
"""

#"""
_extremum = e.forecastAheadInterval(passengers, 10, method="extremum")
_extremum = e.forecast_ahead_interval(passengers, 10, method="extremum")
print(_extremum)

_quantile = e.forecastAheadInterval(passengers[:50], 10, method="quantile", alpha=0.05)
_quantile = e.forecast_ahead_interval(passengers[:50], 10, method="quantile", alpha=0.05)
print(_quantile)

_quantile = e.forecastAheadInterval(passengers[:50], 10, method="quantile", alpha=0.25)
_quantile = e.forecast_ahead_interval(passengers[:50], 10, method="quantile", alpha=0.25)
print(_quantile)

_normal = e.forecastAheadInterval(passengers[:50], 10, method="normal", alpha=0.05)
_normal = e.forecast_ahead_interval(passengers[:50], 10, method="normal", alpha=0.05)
print(_normal)
_normal = e.forecastAheadInterval(passengers[:50], 10, method="normal", alpha=0.25)
_normal = e.forecast_ahead_interval(passengers[:50], 10, method="normal", alpha=0.25)
print(_normal)
#"""

#dist = e.forecastAheadDistribution(passengers, 20)
#dist = e.forecast_ahead_distribution(passengers, 20)

#print(dist)
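
The ensemble's interval methods named above differ in how member forecasts are pooled: "extremum" takes the envelope over all members, while "quantile" and "normal" derive the bounds from the empirical alpha quantiles or from a Gaussian fit of the member point forecasts. A rough sketch of those three aggregations for a single step (an illustration of the idea, not the pyFTS implementation):

import numpy as np
from scipy.stats import norm

def pool_interval(member_forecasts, method="extremum", alpha=0.25):
    f = np.array(member_forecasts)
    if method == "extremum":
        return [f.min(), f.max()]  # widest envelope over the members
    if method == "quantile":
        return [np.quantile(f, alpha), np.quantile(f, 1 - alpha)]
    if method == "normal":
        z = norm.ppf(1 - alpha)  # Gaussian fit to the member spread
        return [f.mean() - z * f.std(), f.mean() + z * f.std()]
    raise ValueError(method)

print(pool_interval([110.0, 118.0, 125.0], method="quantile", alpha=0.25))
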
@ -94,7 +94,7 @@ methods = [song.ConventionalFTS, chen.ConventionalFTS, yu.WeightedFTS, sadaei.Ex

for method in methods:
model = method("")
model.appendTransformation(bc)
model.append_transformation(bc)
model.train(train, sets=fs.sets)

forecasts = model.forecast(test)
@ -113,7 +113,7 @@ for method in methods:

#obj = msfts.MultiSeasonalFTS("sonda_msfts_Entropy40_Mhm15", indexer=ix)

#obj.appendTransformation(diff)
#obj.append_transformation(diff)

#obj.train(sonda_treino, fs.sets)

@ -121,7 +121,7 @@ for method in methods:

#ftse = cUtil.load_obj("models/sonda_ensemble_msfts.pkl")

#tmp = ftse.forecastDistribution(sonda_teste[850:860], h=0.5, method="gaussian")
#tmp = ftse.forecast_distribution(sonda_teste[850:860], h=0.5, method="gaussian")

#print(tmp[0])

@ -168,7 +168,7 @@ fts.train(sonda_treino, sets=None)

#ftse.update_uod(sonda_treino)

#tmp = ftse.forecastDistribution(sonda_teste,h=1)
#tmp = ftse.forecast_distribution(sonda_teste,h=1)

#tmp = ftse.forecast(sonda_teste,h=1)

@ -187,27 +187,27 @@ from pyFTS.benchmarks import arima, quantreg, Measures
#Util.plot_dataframe_point("experiments/taiex_point_sintetic.csv","experiments/taiex_point_analitic.csv",11)
"""
arima100 = arima.ARIMA("", alpha=0.25)
#tmp.appendTransformation(diff)
#tmp.append_transformation(diff)
arima100.train(passengers, None, order=(1,0,0))

arima101 = arima.ARIMA("", alpha=0.25)
#tmp.appendTransformation(diff)
#tmp.append_transformation(diff)
arima101.train(passengers, None, order=(1,0,1))

arima200 = arima.ARIMA("", alpha=0.25)
#tmp.appendTransformation(diff)
#tmp.append_transformation(diff)
arima200.train(passengers, None, order=(2,0,0))

arima201 = arima.ARIMA("", alpha=0.25)
#tmp.appendTransformation(diff)
#tmp.append_transformation(diff)
arima201.train(passengers, None, order=(2,0,1))


#tmp = quantreg.QuantileRegression("", alpha=0.25, dist=True)
#tmp.appendTransformation(diff)
#tmp.append_transformation(diff)
#tmp.train(sunspots[:150], None, order=1)
#teste = tmp.forecastAheadInterval(sunspots[150:155], 5)
#teste = tmp.forecastAheadDistribution(nasdaq[1600:1604], steps=5, resolution=50)
#teste = tmp.forecast_ahead_interval(sunspots[150:155], 5)
#teste = tmp.forecast_ahead_distribution(nasdaq[1600:1604], steps=5, resolution=50)

bchmk.plot_compared_series(enrollments,[tmp], ['blue','red'], points=False, intervals=True)

@ -282,7 +282,7 @@ from pyFTS.partitioners import Grid


#model = pwfts.ProbabilisticWeightedFTS("FTS 1")
#model.appendTransformation(diff)
#model.append_transformation(diff)
#model.train(best[0:1600],fs.sets, order=3)

#bchmk.plot_compared_intervals_ahead(best[1600:1700],[model], ['blue','red'],
@ -370,11 +370,11 @@ fs = Grid.GridPartitioner(sonda[:9000], 10, transformation=diff)

tmp = sfts.SeasonalFTS("")
tmp.indexer = ix
tmp.appendTransformation(diff)
tmp.append_transformation(diff)

#tmp = pwfts.ProbabilisticWeightedFTS("")

#tmp.appendTransformation(diff)
#tmp.append_transformation(diff)

tmp.train(sonda[:9000], fs.sets, order=1)

@ -109,7 +109,7 @@ nsftsp.train(trainp, order=2, parameters=ws)
#print(nsftsp)

tmpp = nsftsp.forecast(passengers[101:104], time_displacement=101, window_size=ws)
tmpi = nsftsp.forecastInterval(passengers[101:104], time_displacement=101, window_size=ws)
tmpi = nsftsp.forecast_interval(passengers[101:104], time_displacement=101, window_size=ws)

#print(passengers[101:104])
print([k[0] for k in tmpi])
@ -56,9 +56,9 @@ pfts1.shortname = "1st Order"

#tmp = pfts1.forecast(data[3000:3020])

#tmp = pfts1.forecastInterval(data[3000:3020])
#tmp = pfts1.forecast_interval(data[3000:3020])

tmp = pfts1.forecastDistribution(data[3500])
tmp = pfts1.forecast_distribution(data[3500])

p = 0
for b in tmp[0].bins:
@ -66,9 +66,9 @@ for b in tmp[0].bins:

print(p)

#tmp = pfts1.forecastAheadInterval(data[3000:3020],20)
#tmp = pfts1.forecast_ahead_interval(data[3000:3020],20)

#tmp = pfts1.forecastAheadDistribution(data[3000:3020],20, method=3, h=0.45, kernel="gaussian")
#tmp = pfts1.forecast_ahead_distribution(data[3000:3020],20, method=3, h=0.45, kernel="gaussian")
#print(tmp[0])

#print(tmp[0].quantile([0.05, 0.95]))
pyFTS/yu.py (14 lines changed)
@ -46,7 +46,7 @@ class WeightedFTS(fts.FTS):
self.name = "Weighted FTS"
self.detail = "Yu"

def generateFLRG(self, flrs):
def generate_FLRG(self, flrs):
flrgs = {}
for flr in flrs:
if flr.LHS.name in flrgs:
@ -58,17 +58,17 @@ class WeightedFTS(fts.FTS):

def train(self, data, sets,order=1,parameters=None):
self.sets = sets
ndata = self.doTransformations(data)
tmpdata = FuzzySet.fuzzySeries(ndata, sets)
ndata = self.apply_transformations(data)
tmpdata = FuzzySet.fuzzyfy_series_old(ndata, sets)
flrs = FLR.generateRecurrentFLRs(tmpdata)
self.flrgs = self.generateFLRG(flrs)
self.flrgs = self.generate_FLRG(flrs)

def forecast(self, data, **kwargs):
l = 1

data = np.array(data)

ndata = self.doTransformations(data)
ndata = self.apply_transformations(data)

l = len(ndata)

@ -76,7 +76,7 @@ class WeightedFTS(fts.FTS):

for k in np.arange(0, l):

mv = FuzzySet.fuzzyInstance(ndata[k], self.sets)
mv = FuzzySet.fuzzyfy_instance(ndata[k], self.sets)

actual = self.sets[np.argwhere(mv == max(mv))[0, 0]]

@ -88,6 +88,6 @@ class WeightedFTS(fts.FTS):

ret.append(mp.dot(flrg.weights()))

ret = self.doInverseTransformations(ret, params=[data[self.order - 1:]])
ret = self.apply_inverse_transformations(ret, params=[data[self.order - 1:]])

return ret
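
Yu's weighted FTS gives recurrences of a fuzzy logical relationship linearly increasing weights, so more recent right-hand sides count more: with k recurrences, the i-th weight is i / (1 + 2 + ... + k). A small sketch of that weighting, in the same spirit as mp.dot(flrg.weights()) above:

import numpy as np

def yu_weights(count):
    # the i-th recurrence gets weight i / (1 + 2 + ... + count)
    w = np.arange(1.0, count + 1)
    return w / w.sum()

# usage: three recurrences -> weights [1/6, 2/6, 3/6]
midpoints = np.array([10.0, 12.5, 15.0])
print(midpoints.dot(yu_weights(len(midpoints))))
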
setup.py (31 lines changed)
@ -1,16 +1,21 @@
from distutils.core import setup

setup(
name = 'pyFTS',
packages = ['pyFTS','pyFTS.benchmarks','pyFTS.common','pyFTS.data', 'pyFTS.ensemble',
'pyFTS.models','pyFTS.seasonal','pyFTS.partitioners','pyFTS.probabilistic',
'pyFTS.tests','pyFTS.nonstationary'],
package_data = {'benchmarks':['*'], 'common':['*'], 'data':['*'], 'ensemble':['*'], 'models':['*'], 'seasonal':['*'], 'partitioners':['*'], 'probabilistic':['*'], 'tests':['*']},
version = '1.1.1',
description = 'Fuzzy Time Series for Python',
author = 'Petronio Candido L. e Silva',
author_email = 'petronio.candido@gmail.com',
url = 'https://github.com/petroniocandido/pyFTS',
download_url = 'https://github.com/petroniocandido/pyFTS/archive/pkg1.1.1.tar.gz',
keywords = ['forecasting', 'fuzzy time series', 'fuzzy', 'time series forecasting'],
classifiers = [],
name='pyFTS',
packages=['pyFTS', 'pyFTS.benchmarks', 'pyFTS.common', 'pyFTS.data', 'pyFTS.ensemble',
'pyFTS.models', 'pyFTS.seasonal', 'pyFTS.partitioners', 'pyFTS.probabilistic',
'pyFTS.tests', 'pyFTS.nonstationary'],
package_data={'benchmarks': ['*'], 'common': ['*'], 'data': ['*'], 'ensemble': ['*'], 'models': ['*'],
'seasonal': ['*'], 'partitioners': ['*'], 'probabilistic': ['*'], 'tests': ['*']},
version='1.1.1',
description='Fuzzy Time Series for Python',
author='Petronio Candido L. e Silva',
author_email='petronio.candido@gmail.com',
url='https://github.com/petroniocandido/pyFTS',
download_url='https://github.com/petroniocandido/pyFTS/archive/pkg1.1.1.tar.gz',
keywords=['forecasting', 'fuzzy time series', 'fuzzy', 'time series forecasting'],
classifiers=[],
install_requires=[
'numpy','pandas','matplotlib','dill','dispy','joblib'  # 'copy' and 'multiprocessing' are stdlib modules, not pip packages, so they are dropped here
],
)