- More data files and artificial data generators

parent fc2e118576
commit 37862c661d

MANIFEST (4 changed lines)
@@ -1,2 +1,6 @@
 include data/Enrollments.csv
 include data/AirPassengers.csv
+include data/NASDAQ.csv
+include data/SP500.csv
+include data/sunspots.csv
+include data/TAIEX.csv
@@ -17,7 +17,7 @@ def residuals(targets, forecasts, order=1):
     return np.array(targets[order:]) - np.array(forecasts[:-1])


-def ChiSquared(q,h):
+def chi_squared(q, h):
     """
     Chi-Squared value
     :param q:
@@ -28,7 +28,7 @@ def ChiSquared(q,h):
     return p


-def compareResiduals(data, models):
+def compare_residuals(data, models):
     """
     Compare residual's statistics of several models
     :param data:
@@ -49,7 +49,7 @@ def compareResiduals(data, models):
         ret += str(round(q1,2)) + " & "
         q2 = Measures.BoxLjungStatistic(res, 10)
         ret += str(round(q2,2)) + " & "
-        ret += str(ChiSquared(q2, 10))
+        ret += str(chi_squared(q2, 10))
         ret += " \\\\ \n"
     return ret

@@ -131,9 +131,8 @@ def plot_residuals(targets, models, tam=[8, 8], save=False, file=None):


 def single_plot_residuals(targets, forecasts, order, tam=[8, 8], save=False, file=None):
-    fig, axes = plt.subplots(nrows=1, ncols=3, figsize=tam)
+    fig, ax = plt.subplots(nrows=1, ncols=3, figsize=tam)

-    ax = axes
     res = residuals(targets, forecasts, order)

     ax[0].set_title("Residuals", size='large')
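The helpers renamed above feed the residual report: compare_residuals builds one LaTeX-style table row per model from the Box-Ljung statistic (Measures.BoxLjungStatistic), and chi_squared turns that statistic into a p-value. A minimal standalone sketch of the same check follows; it is not part of the commit, and scipy plus the approximate lag correlations below are assumptions made only for illustration.

    import numpy as np
    import scipy.stats as st

    def chi_squared(q, h):
        # one plausible reading of the helper renamed above: p-value of the
        # Box-Ljung Q statistic under a chi-squared law with h degrees of freedom
        return 1.0 - st.chi2.cdf(q, df=h)

    res = np.random.normal(0, 1, 200)          # stand-in for forecast residuals
    n, h = len(res), 10
    q = n * (n + 2) * sum(                     # Box-Ljung Q over lags 1..h,
        np.corrcoef(res[:-k], res[k:])[0, 1] ** 2 / (n - k)   # approximate autocorrelations
        for k in range(1, h + 1))
    print(round(q, 2), round(chi_squared(q, h), 2))  # a large p-value suggests white-noise residuals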
@@ -13,7 +13,7 @@ import matplotlib.cm as cmx
 import matplotlib.colors as pltcolors
 import matplotlib.pyplot as plt
 import numpy as np
-#from mpl_toolkits.mplot3d import Axes3D
+from mpl_toolkits.mplot3d import Axes3D

 from pyFTS.probabilistic import ProbabilityDistribution
 from pyFTS.models import song, chen, yu, ismailefendi, sadaei, hofts, pwfts, ifts, cheng, hwang
@@ -30,8 +30,8 @@ rc('font',**{'family':'sans-serif','sans-serif':['Helvetica']})
 #rc('font',**{'family':'serif','serif':['Palatino']})
 rc('text', usetex=True)

-colors = ['grey', 'rosybrown', 'maroon', 'red','orange', 'yellow', 'olive', 'green',
-          'cyan', 'blue', 'darkblue', 'purple', 'darkviolet']
+colors = ['grey', 'darkgrey', 'rosybrown', 'maroon', 'red','orange', 'gold', 'yellow', 'olive', 'green',
+          'darkgreen', 'cyan', 'lightblue','blue', 'darkblue', 'purple', 'darkviolet' ]

 ncol = len(colors)

@@ -281,7 +281,7 @@ def all_point_forecasters(data_train, data_test, partitions, max_order=3, statis
     print_point_statistics(data_test, objs)

     if residuals:
-        print(ResidualAnalysis.compareResiduals(data_test, objs))
+        print(ResidualAnalysis.compare_residuals(data_test, objs))
         ResidualAnalysis.plot_residuals(data_test, objs, save=save, file=file, tam=tam)

     if series:
@@ -303,6 +303,8 @@ def all_point_forecasters(data_train, data_test, partitions, max_order=3, statis

         plot_probability_distributions(pmfs, lcolors, tam=tam)

+    return models
+

 def print_point_statistics(data, models, externalmodels = None, externalforecasts = None, indexers=None):
     ret = "Model & Order & RMSE & SMAPE & Theil's U \\\\ \n"
@@ -539,6 +541,7 @@ def plot_compared_series(original, models, colors, typeonlegend=False, save=Fals
     ax.plot(original, color='black', label="Original", linewidth=linewidth*1.5)

     for count, fts in enumerate(models, start=0):
+        try:
         if fts.has_point_forecasting and points:
             forecasts = fts.forecast(original)
             if isinstance(forecasts, np.ndarray):
@@ -562,6 +565,8 @@ def plot_compared_series(original, models, colors, typeonlegend=False, save=Fals
                      color=colors[count], ls=ls, linewidth=linewidth)
             mi.append(tmpmi)
             ma.append(tmpma)
+        except ValueError as ex:
+            print(fts.shortname)

     handles0, labels0 = ax.get_legend_handles_labels()
     lgd = ax.legend(handles0, labels0, loc=2, bbox_to_anchor=(1, 1))
@@ -573,7 +578,7 @@ def plot_compared_series(original, models, colors, typeonlegend=False, save=Fals
     ax.set_xlabel('T')
     ax.set_xlim([0, len(original)])

-    Util.show_and_save_image(fig, file, save, lgd=legends)
+    #Util.show_and_save_image(fig, file, save, lgd=legends)


 def plot_probability_distributions(pmfs, lcolors, tam=[15, 7]):
@@ -1022,7 +1027,10 @@ def simpleSearch_RMSE(train, test, model, partitions, orders, save=False, file=N
                      partitioner=Grid.GridPartitioner,transformation=None,indexer=None):
     _3d = len(orders) > 1
     ret = []
+    if _3d:
         errors = np.array([[0 for k in range(len(partitions))] for kk in range(len(orders))])
+    else:
+        errors = []
     forecasted_best = []
     fig = plt.figure(figsize=tam)
     # fig.suptitle("Comparação de modelos ")
@@ -1055,7 +1063,10 @@ def simpleSearch_RMSE(train, test, model, partitions, orders, save=False, file=N
             else:
                 forecasted = fts.forecast_interval(test)
                 error = 1.0 - Measures.rmse_interval(np.array(test[o:]), np.array(forecasted[:-1]))
+            if _3d:
                 errors[oc, pc] = error
+            else:
+                errors.append( error )
             if error < min_rmse:
                 min_rmse = error
                 best = fts
@@ -1067,9 +1078,8 @@ def simpleSearch_RMSE(train, test, model, partitions, orders, save=False, file=N
         # ax0.legend(handles0, labels0)
         ax0.plot(test, label="Original", linewidth=3.0, color="black")
-    if _3d: ax1 = Axes3D(fig, rect=[0, 1, 0.9, 0.9], elev=elev, azim=azim)
-    if not plotforecasts: ax1 = Axes3D(fig, rect=[0, 1, 0.9, 0.9], elev=elev, azim=azim)
     # ax1 = fig.add_axes([0.6, 0.5, 0.45, 0.45], projection='3d')
-    if _3d:
+    if _3d and not plotforecasts:
+        ax1 = Axes3D(fig, rect=[0, 1, 0.9, 0.9], elev=elev, azim=azim)
         ax1.set_title('Error Surface')
         ax1.set_ylabel('Model order')
         ax1.set_xlabel('Number of partitions')
@@ -1079,9 +1089,9 @@ def simpleSearch_RMSE(train, test, model, partitions, orders, save=False, file=N
     else:
         ax1 = fig.add_axes([0, 1, 0.9, 0.9])
         ax1.set_title('Error Curve')
-        ax1.set_ylabel('Number of partitions')
-        ax1.set_xlabel('RMSE')
-        ax0.plot(errors,partitions)
+        ax1.set_xlabel('Number of partitions')
+        ax1.set_ylabel('RMSE')
+        ax1.plot(partitions, errors)
     ret.append(best)
     ret.append(forecasted_best)
     ret.append(min_rmse)
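With the changes above, simpleSearch_RMSE appears to keep a 2-D error surface only when more than one order is searched, falls back to a flat error curve otherwise, and gets its curve-plot axis labels corrected. A hedged usage sketch follows; it is not from the commit, and the data module, model class and the assumption that `model` receives the class itself are taken from signatures visible elsewhere in this diff.

    from pyFTS.benchmarks import benchmarks
    from pyFTS.models import chen
    from pyFTS.data import TAIEX

    data = TAIEX.get_data()
    train, test = data[:3000], data[3000:3200]

    # search over partition counts and model orders; with len(orders) > 1 the
    # function renders the error surface, otherwise the error curve
    ret = benchmarks.simpleSearch_RMSE(train, test, chen.ConventionalFTS,
                                       partitions=range(10, 50, 10),
                                       orders=[1, 2, 3])
    best_model, best_forecasts, min_rmse = ret[0], ret[1], ret[2]
    print(min_rmse)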
@@ -31,8 +31,7 @@ def run_point(mfts, partitioner, train_data, test_data, window_key=None, transfo
     :return: a dictionary with the benchmark results
     """
     import time
-    from pyFTS import yu, hofts, pwfts,ismailefendi,sadaei, song, cheng, hwang
-    from pyFTS.models import chen
+    from pyFTS.models import yu, chen, hofts, pwfts,ismailefendi,sadaei, song, cheng, hwang
     from pyFTS.partitioners import Grid, Entropy, FCM
     from pyFTS.benchmarks import Measures, naive, arima, quantreg
     from pyFTS.common import Transformations
@@ -223,7 +222,7 @@ def run_interval(mfts, partitioner, train_data, test_data, window_key=None, tran
     :return: a dictionary with the benchmark results
     """
     import time
-    from pyFTS import hofts,ifts,pwfts
+    from pyFTS.models import hofts,ifts,pwfts
     from pyFTS.partitioners import Grid, Entropy, FCM
     from pyFTS.benchmarks import Measures, arima, quantreg

@@ -424,8 +423,8 @@ def run_ahead(mfts, partitioner, train_data, test_data, steps, resolution, windo
     """
     import time
     import numpy as np
-    from pyFTS import hofts, ifts, pwfts
-    from pyFTS.models import ensemble
+    from pyFTS.models import hofts, ifts, pwfts
+    from pyFTS.models.ensemble import ensemble
     from pyFTS.partitioners import Grid, Entropy, FCM
     from pyFTS.benchmarks import Measures, arima
     from pyFTS.models.seasonal import SeasonalIndexer
@@ -10,7 +10,7 @@ from copy import deepcopy
 import numpy as np
 from joblib import Parallel, delayed

-from pyFTS.benchmarks import benchmarks, Util
+from pyFTS.benchmarks import benchmarks, Util as bUtil
 from pyFTS.common import Util
 from pyFTS.partitioners import Grid

@@ -35,6 +35,7 @@ class IndexedFLR(FLR):
     def __str__(self):
         return str(self.index) + ": "+ self.LHS.name + " -> " + self.RHS.name

+
 def generate_high_order_recurrent_flr(fuzzyData):
     """
     Create a ordered FLR set from a list of fuzzy sets with recurrence
@@ -55,7 +56,8 @@ def generate_high_order_recurrent_flr(fuzzyData):
         flrs.append(tmp)
     return flrs

-def generateRecurrentFLRs(fuzzyData):
+
+def generate_recurrent_flrs(fuzzyData):
     """
     Create a ordered FLR set from a list of fuzzy sets with recurrence
     :param fuzzyData: ordered list of fuzzy sets
@@ -84,20 +86,20 @@ def generateRecurrentFLRs(fuzzyData):
     return flrs


-def generateNonRecurrentFLRs(fuzzyData):
+def generate_non_recurrent_flrs(fuzzyData):
     """
     Create a ordered FLR set from a list of fuzzy sets without recurrence
     :param fuzzyData: ordered list of fuzzy sets
     :return: ordered list of FLR
     """
-    flrs = generateRecurrentFLRs(fuzzyData)
+    flrs = generate_recurrent_flrs(fuzzyData)
     tmp = {}
     for flr in flrs: tmp[str(flr)] = flr
     ret = [value for key, value in tmp.items()]
     return ret


-def generateIndexedFLRs(sets, indexer, data, transformation=None):
+def generate_indexed_flrs(sets, indexer, data, transformation=None):
     """
     Create a season-indexed ordered FLR set from a list of fuzzy sets with recurrence
     :param sets: fuzzy sets
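The renames above settle the FLR generator API on snake_case: generateRecurrentFLRs becomes generate_recurrent_flrs, generateNonRecurrentFLRs becomes generate_non_recurrent_flrs, and generateIndexedFLRs becomes generate_indexed_flrs; every model train() method in the hunks further down is updated to match. A hedged usage sketch follows (not part of the commit); it assumes the generators live in pyFTS.common.FLR and that GridPartitioner takes the data and a partition count, as the rest of this diff suggests.

    from pyFTS.common import FLR, FuzzySet
    from pyFTS.partitioners import Grid
    from pyFTS.data import Enrollments

    data = Enrollments.get_data()
    sets = Grid.GridPartitioner(data, 10).sets           # assumed constructor signature

    fuzzified = FuzzySet.fuzzyfy_series_old(data, sets)  # same call the train() methods use
    recurrent = FLR.generate_recurrent_flrs(fuzzified)       # keeps repeated A_i -> A_j transitions
    unique = FLR.generate_non_recurrent_flrs(fuzzified)      # de-duplicated rule set
    print(len(recurrent), len(unique))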
@@ -56,11 +56,11 @@ class Differential(Transformation):
         if not isinstance(data, list):
             data = [data]

         if not isinstance(param, list):
             param = [param]

         n = len(data)

         # print(n)
         # print(len(param))

         if not interval:
             inc = [data[t] + param[t] for t in np.arange(0, n)]
         else:
@@ -1,7 +1,6 @@
 import numpy as np
 import pandas as pd
-from pyFTS import tree
-from pyFTS.common import FuzzySet, SortedCollection
+from pyFTS.common import FuzzySet, SortedCollection, tree


 class FTS(object):
@@ -34,6 +33,8 @@ class FTS(object):
         self.original_max = 0
         self.original_min = 0
         self.partitioner = kwargs.get("partitioner", None)
+        if self.partitioner != None:
+            self.sets = self.partitioner.sets
         self.auto_update = False
         self.benchmark_only = False
         self.indexer = None
@@ -199,7 +200,6 @@ class FTS(object):
             params = [None for k in self.transformations]

         for c, t in enumerate(reversed(self.transformations), start=0):
-            print(c)
             ndata = t.inverse(data, params[c], **kwargs)

         return ndata
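The FTS base class now accepts an optional partitioner keyword and, when one is given, copies its fuzzy sets into the model; the stray debug print in the inverse-transformation loop is also dropped. A sketch of how the new hook might be used follows; it assumes the concrete model forwards **kwargs to fts.FTS and takes a name as its first argument, neither of which is shown in this diff.

    from pyFTS.partitioners import Grid
    from pyFTS.models import chen
    from pyFTS.data import Enrollments

    data = Enrollments.get_data()
    part = Grid.GridPartitioner(data, 10)

    model = chen.ConventionalFTS("Chen", partitioner=part)  # assumed constructor signature
    print(len(model.sets) == len(part.sets))                # True: sets copied by the new kwargs hook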
@@ -1,8 +1,10 @@
 import pandas as pd
 import numpy as np
+import pkg_resources


 def get_data():
-    passengers = pd.read_csv("DataSets/AirPassengers.csv", sep=",")
+    filename = pkg_resources.resource_filename('pyFTS', 'data/AirPassengers.csv')
+    passengers = pd.read_csv(filename, sep=",")
     passengers = np.array(passengers["Passengers"])
     return passengers
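The data modules now resolve their CSV files through pkg_resources instead of a hard-coded DataSets/ path, which is what allows the files listed in the MANIFEST hunk at the top to ship inside the installed package. Usage is simply the following (a sketch, assuming the pyFTS.data package layout shown in this diff):

    from pyFTS.data import AirPassengers, Enrollments

    passengers = AirPassengers.get_data()    # numpy array of the 'Passengers' column
    enrollments = Enrollments.get_data()     # numpy array of the 'Enrollments' column
    print(len(passengers), len(enrollments))

    # the equivalent manual lookup of a bundled CSV:
    import pkg_resources
    import pandas as pd
    fname = pkg_resources.resource_filename('pyFTS', 'data/AirPassengers.csv')
    df = pd.read_csv(fname, sep=",")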
@@ -3,9 +3,8 @@ import numpy as np
 import os
 import pkg_resources


 def get_data():
-    #data_path = os.path.dirname(__file__)
-    #filename = os.path.join(data_path,"Enrollments.csv")
+    filename = pkg_resources.resource_filename('pyFTS', 'data/Enrollments.csv')
     enrollments = pd.read_csv(filename, sep=";")
     enrollments = np.array(enrollments["Enrollments"])
pyFTS/data/NASDAQ.csv (new file, 3927 lines; diff suppressed because it is too large)

pyFTS/data/NASDAQ.py (new file, 11 lines)
@@ -0,0 +1,11 @@
import pandas as pd
import numpy as np
import os
import pkg_resources


def get_data():
    filename = pkg_resources.resource_filename('pyFTS', 'data/NASDAQ.csv')
    dat = pd.read_csv(filename, sep=";")
    dat = np.array(dat["avg"])
    return dat
pyFTS/data/SP500.csv (new file, 16924 lines; diff suppressed because it is too large)

pyFTS/data/SP500.py (new file, 11 lines)
@@ -0,0 +1,11 @@
import pandas as pd
import numpy as np
import os
import pkg_resources


def get_data():
    filename = pkg_resources.resource_filename('pyFTS', 'data/SP500.csv')
    dat = pd.read_csv(filename, sep=",")
    dat = np.array(dat["Avg"])
    return dat
pyFTS/data/TAIEX.csv (new file, 5261 lines; diff suppressed because it is too large)

pyFTS/data/TAIEX.py (new file, 11 lines)
@@ -0,0 +1,11 @@
import pandas as pd
import numpy as np
import os
import pkg_resources


def get_data():
    filename = pkg_resources.resource_filename('pyFTS', 'data/TAIEX.csv')
    dat = pd.read_csv(filename, sep=";")
    dat = np.array(dat["avg"])
    return dat
pyFTS/data/artificial.py (new file, 56 lines)
@@ -0,0 +1,56 @@
import numpy as np


def generate_gaussian_linear(mu_ini, sigma_ini, mu_inc, sigma_inc, it=100, num=10, vmin=None, vmax=None):
    """
    Generate data sampled from Gaussian distribution, with constant or linear changing parameters
    :param mu_ini: Initial mean
    :param sigma_ini: Initial variance
    :param mu_inc: Mean increment after 'num' samples
    :param sigma_inc: Variance increment after 'num' samples
    :param it: Number of iterations
    :param num: Number of samples generated on each iteration
    :param vmin: Lower bound value of generated data
    :param vmax: Upper bound value of generated data
    :return: A list of it*num float values
    """
    mu = mu_ini
    sigma = sigma_ini
    ret = []
    for k in np.arange(0,it):
        tmp = np.random.normal(mu, sigma, num)
        if vmin is not None:
            tmp = np.maximum(np.full(num, vmin), tmp)
        if vmax is not None:
            tmp = np.minimum(np.full(num, vmax), tmp)
        ret.extend(tmp)
        mu += mu_inc
        sigma += sigma_inc
    return ret

def generate_uniform_linear(min_ini, max_ini, min_inc, max_inc, it=100, num=10, vmin=None, vmax=None):
    """
    Generate data sampled from Uniform distribution, with constant or linear changing bounds
    :param mu_ini: Initial mean
    :param sigma_ini: Initial variance
    :param mu_inc: Mean increment after 'num' samples
    :param sigma_inc: Variance increment after 'num' samples
    :param it: Number of iterations
    :param num: Number of samples generated on each iteration
    :param vmin: Lower bound value of generated data
    :param vmax: Upper bound value of generated data
    :return: A list of it*num float values
    """
    _min = min_ini
    _max = max_ini
    ret = []
    for k in np.arange(0,it):
        tmp = np.random.uniform(_min, _max, num)
        if vmin is not None:
            tmp = np.maximum(np.full(num, vmin), tmp)
        if vmax is not None:
            tmp = np.minimum(np.full(num, vmax), tmp)
        ret.extend(tmp)
        _min += min_inc
        _max += max_inc
    return ret
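The new artificial module produces simple non-stationary test signals: each iteration draws num samples and then shifts the distribution parameters by the given increments, so increments of zero yield a stationary series. Note that generate_uniform_linear's docstring reuses the Gaussian parameter names; its arguments are really the initial bounds and their per-iteration increments. A usage sketch (not part of the commit):

    from pyFTS.data import artificial

    # mean drifts upward by 0.5 every 50 samples, spread stays fixed,
    # and vmin=0 clips the series at zero
    drifting = artificial.generate_gaussian_linear(mu_ini=10, sigma_ini=2,
                                                   mu_inc=0.5, sigma_inc=0,
                                                   it=10, num=50, vmin=0)

    # uniform noise whose upper bound widens by 1 every 50 samples
    widening = artificial.generate_uniform_linear(min_ini=0, max_ini=10,
                                                  min_inc=0, max_inc=1,
                                                  it=10, num=50)
    print(len(drifting), len(widening))   # 500 500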
pyFTS/data/sunspots.csv (new file, 310 lines)
@@ -0,0 +1,310 @@
YEAR,SUNACTIVITY
1700,5
1701,11
1702,16
1703,23
1704,36
1705,58
1706,29
1707,20
1708,10
1709,8
1710,3
1711,0
1712,0
1713,2
1714,11
1715,27
1716,47
1717,63
1718,60
1719,39
1720,28
1721,26
1722,22
1723,11
1724,21
1725,40
1726,78
1727,122
1728,103
1729,73
1730,47
1731,35
1732,11
1733,5
1734,16
1735,34
1736,70
1737,81
1738,111
1739,101
1740,73
1741,40
1742,20
1743,16
1744,5
1745,11
1746,22
1747,40
1748,60
1749,80.9
1750,83.4
1751,47.7
1752,47.8
1753,30.7
1754,12.2
1755,9.6
1756,10.2
1757,32.4
1758,47.6
1759,54
1760,62.9
1761,85.9
1762,61.2
1763,45.1
1764,36.4
1765,20.9
1766,11.4
1767,37.8
1768,69.8
1769,106.1
1770,100.8
1771,81.6
1772,66.5
1773,34.8
1774,30.6
1775,7
1776,19.8
1777,92.5
1778,154.4
1779,125.9
1780,84.8
1781,68.1
1782,38.5
1783,22.8
1784,10.2
1785,24.1
1786,82.9
1787,132
1788,130.9
1789,118.1
1790,89.9
1791,66.6
1792,60
1793,46.9
1794,41
1795,21.3
1796,16
1797,6.4
1798,4.1
1799,6.8
1800,14.5
1801,34
1802,45
1803,43.1
1804,47.5
1805,42.2
1806,28.1
1807,10.1
1808,8.1
1809,2.5
1810,0
1811,1.4
1812,5
1813,12.2
1814,13.9
1815,35.4
1816,45.8
1817,41.1
1818,30.1
1819,23.9
1820,15.6
1821,6.6
1822,4
1823,1.8
1824,8.5
1825,16.6
1826,36.3
1827,49.6
1828,64.2
1829,67
1830,70.9
1831,47.8
1832,27.5
1833,8.5
1834,13.2
1835,56.9
1836,121.5
1837,138.3
1838,103.2
1839,85.7
1840,64.6
1841,36.7
1842,24.2
1843,10.7
1844,15
1845,40.1
1846,61.5
1847,98.5
1848,124.7
1849,96.3
1850,66.6
1851,64.5
1852,54.1
1853,39
1854,20.6
1855,6.7
1856,4.3
1857,22.7
1858,54.8
1859,93.8
1860,95.8
1861,77.2
1862,59.1
1863,44
1864,47
1865,30.5
1866,16.3
1867,7.3
1868,37.6
1869,74
1870,139
1871,111.2
1872,101.6
1873,66.2
1874,44.7
1875,17
1876,11.3
1877,12.4
1878,3.4
1879,6
1880,32.3
1881,54.3
1882,59.7
1883,63.7
1884,63.5
1885,52.2
1886,25.4
1887,13.1
1888,6.8
1889,6.3
1890,7.1
1891,35.6
1892,73
1893,85.1
1894,78
1895,64
1896,41.8
1897,26.2
1898,26.7
1899,12.1
1900,9.5
1901,2.7
1902,5
1903,24.4
1904,42
1905,63.5
1906,53.8
1907,62
1908,48.5
1909,43.9
1910,18.6
1911,5.7
1912,3.6
1913,1.4
1914,9.6
1915,47.4
1916,57.1
1917,103.9
1918,80.6
1919,63.6
1920,37.6
1921,26.1
1922,14.2
1923,5.8
1924,16.7
1925,44.3
1926,63.9
1927,69
1928,77.8
1929,64.9
1930,35.7
1931,21.2
1932,11.1
1933,5.7
1934,8.7
1935,36.1
1936,79.7
1937,114.4
1938,109.6
1939,88.8
1940,67.8
1941,47.5
1942,30.6
1943,16.3
1944,9.6
1945,33.2
1946,92.6
1947,151.6
1948,136.3
1949,134.7
1950,83.9
1951,69.4
1952,31.5
1953,13.9
1954,4.4
1955,38
1956,141.7
1957,190.2
1958,184.8
1959,159
1960,112.3
1961,53.9
1962,37.6
1963,27.9
1964,10.2
1965,15.1
1966,47
1967,93.8
1968,105.9
1969,105.5
1970,104.5
1971,66.6
1972,68.9
1973,38
1974,34.5
1975,15.5
1976,12.6
1977,27.5
1978,92.5
1979,155.4
1980,154.6
1981,140.4
1982,115.9
1983,66.6
1984,45.9
1985,17.9
1986,13.4
1987,29.4
1988,100.2
1989,157.6
1990,142.6
1991,145.7
1992,94.3
1993,54.6
1994,29.9
1995,17.5
1996,8.6
1997,21.5
1998,64.3
1999,93.3
2000,119.6
2001,111
2002,104
2003,63.7
2004,40.4
2005,29.8
2006,15.2
2007,7.5
2008,2.9
pyFTS/data/sunspots.py (new file, 11 lines)
@@ -0,0 +1,11 @@
import pandas as pd
import numpy as np
import os
import pkg_resources


def get_data():
    filename = pkg_resources.resource_filename('pyFTS', 'data/sunspots.csv')
    dat = pd.read_csv(filename, sep=",")
    dat = np.array(dat["SUNACTIVITY"])
    return dat
@@ -50,7 +50,7 @@ class ConventionalFTS(fts.FTS):
         self.sets = sets
         ndata = self.apply_transformations(data)
         tmpdata = FuzzySet.fuzzyfy_series_old(ndata, sets)
-        flrs = FLR.generateNonRecurrentFLRs(tmpdata)
+        flrs = FLR.generate_non_recurrent_flrs(tmpdata)
         self.flrgs = self.generateFLRG(flrs)

     def forecast(self, data, **kwargs):
@@ -68,7 +68,7 @@ class ImprovedWeightedFTS(fts.FTS):
         ndata = self.apply_transformations(data)

         tmpdata = FuzzySet.fuzzyfy_series_old(ndata, self.sets)
-        flrs = FLR.generateRecurrentFLRs(tmpdata)
+        flrs = FLR.generate_recurrent_flrs(tmpdata)
         self.flrgs = self.generateFLRG(flrs)

     def forecast(self, data, **kwargs):
@@ -34,7 +34,7 @@ class ConditionalVarianceFTS(chen.ConventionalFTS):
         self.max_tx = max(ndata)

         tmpdata = common.fuzzySeries(ndata, self.sets, method='fuzzy', const_t=0)
-        flrs = FLR.generateNonRecurrentFLRs(tmpdata)
+        flrs = FLR.generate_non_recurrent_flrs(tmpdata)
         self.flrgs = self.generate_flrg(flrs)

     def generate_flrg(self, flrs, **kwargs):
@@ -105,7 +105,7 @@ class HighOrderNonStationaryFTS(hofts.HighOrderFTS):

         ndata = self.apply_transformations(data)
         #tmpdata = common.fuzzyfy_series_old(ndata, self.sets)
-        #flrs = FLR.generateRecurrentFLRs(ndata)
+        #flrs = FLR.generate_recurrent_flrs(ndata)
         window_size = parameters if parameters is not None else 1
         self.flrgs = self.generate_flrg(ndata, window_size=window_size)

@@ -54,7 +54,7 @@ class NonStationaryFTS(fts.FTS):
         window_size = parameters if parameters is not None else 1
         tmpdata = common.fuzzySeries(ndata, self.sets, window_size, method=self.method)
         #print([k[0].name for k in tmpdata])
-        flrs = FLR.generateRecurrentFLRs(tmpdata)
+        flrs = FLR.generate_recurrent_flrs(tmpdata)
         #print([str(k) for k in flrs])
         self.flrgs = self.generate_flrg(flrs)

@@ -125,7 +125,7 @@ class ProbabilisticWeightedFTS(ifts.IntervalFTS):
         for s in self.sets:    self.setsDict[s.name] = s
         if parameters == 'Monotonic':
             tmpdata = FuzzySet.fuzzyfy_series_old(data, sets)
-            flrs = FLR.generateRecurrentFLRs(tmpdata)
+            flrs = FLR.generate_recurrent_flrs(tmpdata)
             self.flrgs = self.generateFLRG(flrs)
         else:
             self.flrgs = self.generate_flrg(data)
@@ -67,7 +67,7 @@ class ExponentialyWeightedFTS(fts.FTS):
         self.sets = sets
         ndata = self.apply_transformations(data)
         tmpdata = FuzzySet.fuzzyfy_series_old(ndata, sets)
-        flrs = FLR.generateRecurrentFLRs(tmpdata)
+        flrs = FLR.generate_recurrent_flrs(tmpdata)
         self.flrgs = self.generateFLRG(flrs, self.c)

     def forecast(self, data, **kwargs):
@@ -59,7 +59,7 @@ class ContextualMultiSeasonalFTS(sfts.SeasonalFTS):
     def train(self, data, sets, order=1, parameters=None):
         self.sets = sets
         self.seasonality = parameters
-        flrs = FLR.generateIndexedFLRs(self.sets, self.indexer, data)
+        flrs = FLR.generate_indexed_flrs(self.sets, self.indexer, data)
         self.flrgs = self.generateFLRG(flrs)

     def getMidpoints(self, flrg, data):
@@ -36,7 +36,7 @@ class MultiSeasonalFTS(sfts.SeasonalFTS):
         self.sets = sets
         self.seasonality = parameters
         #ndata = self.indexer.set_data(data,self.doTransformations(self.indexer.get_data(data)))
-        flrs = FLR.generateIndexedFLRs(self.sets, self.indexer, data)
+        flrs = FLR.generate_indexed_flrs(self.sets, self.indexer, data)
         self.flrgs = self.generateFLRG(flrs)

     def forecast(self, data, **kwargs):
@@ -64,7 +64,7 @@ class SeasonalFTS(fts.FTS):
         self.sets = sets
         ndata = self.apply_transformations(data)
         tmpdata = FuzzySet.fuzzyfy_series_old(ndata, sets)
-        flrs = FLR.generateRecurrentFLRs(tmpdata)
+        flrs = FLR.generate_recurrent_flrs(tmpdata)
         self.flrgs = self.generateFLRG(flrs)

     def forecast(self, data, **kwargs):
@@ -38,10 +38,11 @@ class ConventionalFTS(fts.FTS):
         return r

     def train(self, data, sets,order=1,parameters=None):
+        if sets != None:
             self.sets = sets
         ndata = self.apply_transformations(data)
-        tmpdata = FuzzySet.fuzzyfy_series_old(ndata, sets)
-        flrs = FLR.generateNonRecurrentFLRs(tmpdata)
+        tmpdata = FuzzySet.fuzzyfy_series_old(ndata, self.sets)
+        flrs = FLR.generate_non_recurrent_flrs(tmpdata)
         self.R = self.operation_matrix(flrs)

     def forecast(self, data, **kwargs):
@@ -67,6 +68,6 @@ class ConventionalFTS(fts.FTS):

         ret.append( sum(mp)/len(mp))

-        ret = self.apply_inverse_transformations(ret, params=[data[self.order - 1:]])
+        ret = self.apply_inverse_transformations(ret, params=[data])

         return ret
@@ -60,7 +60,7 @@ class WeightedFTS(fts.FTS):
         self.sets = sets
         ndata = self.apply_transformations(data)
         tmpdata = FuzzySet.fuzzyfy_series_old(ndata, sets)
-        flrs = FLR.generateRecurrentFLRs(tmpdata)
+        flrs = FLR.generate_recurrent_flrs(tmpdata)
         self.flrgs = self.generate_FLRG(flrs)

     def forecast(self, data, **kwargs):
@@ -21,11 +21,11 @@ def plot_sets(data, sets, titles, tam=[12, 10], save=False, file=None):
     #print(h)
     fig, axes = plt.subplots(nrows=num, ncols=1,figsize=tam)
     for k in np.arange(0,num):
         #ax = fig.add_axes([0.05, 1-(k*h), 0.9, h*0.7]) # left, bottom, width, height
+        ticks = []
+        x = []
         ax = axes[k]
         ax.set_title(titles[k])
         ax.set_ylim([0, 1.1])
         ax.set_xlim([minx, maxx])
         for s in sets[k]:
             if s.mf == Membership.trimf:
                 ax.plot(s.parameters,[0,1,0])
@@ -35,6 +35,10 @@ def plot_sets(data, sets, titles, tam=[12, 10], save=False, file=None):
                 ax.plot(tmpx, tmpy)
             elif s.mf == Membership.trapmf:
                 ax.plot(s.parameters, [0, 1, 1, 0])
+            ticks.append(str(round(s.centroid, 0)) + '\n' + s.name)
+            x.append(s.centroid)
+        ax.xaxis.set_ticklabels(ticks)
+        ax.xaxis.set_ticks(x)

     plt.tight_layout()

@@ -59,6 +63,9 @@ def explore_partitioners(data, npart, methods=None, mf=None, tam=[12, 10], save=
     for p in methods:
         for m in mf:
             obj = p(data, npart,m)
+            obj.name = obj.name + " - " + obj.membership_function.__name__
             objs.append(obj)

     plot_partitioners(data, objs, tam, save, file)
+
+    return objs
@@ -1,5 +1,6 @@
 from pyFTS.common import FuzzySet, Membership
 import numpy as np
+import matplotlib.pylab as plt


 class Partitioner(object):
@@ -70,6 +71,8 @@ class Partitioner(object):
         ax.set_title(self.name)
         ax.set_ylim([0, 1])
         ax.set_xlim([self.min, self.max])
+        ticks = []
+        x = []
         for s in self.sets:
             if s.mf == Membership.trimf:
                 ax.plot([s.parameters[0], s.parameters[1], s.parameters[2]], [0, 1, 0])
@@ -77,6 +80,10 @@ class Partitioner(object):
                 tmpx = [kk for kk in np.arange(s.lower, s.upper)]
                 tmpy = [s.membership(kk) for kk in np.arange(s.lower, s.upper)]
                 ax.plot(tmpx, tmpy)
+            ticks.append(str(round(s.centroid,0))+'\n'+s.name)
+            x.append(s.centroid)
+        plt.xticks(x,ticks)
+

     def __str__(self):
         tmp = self.name + ":\n"
@@ -1,10 +1,9 @@
 import numpy as np
 import pandas as pd
 import matplotlib.pyplot as plt
-from pyFTS.common import FuzzySet,SortedCollection
+from pyFTS.common import FuzzySet,SortedCollection,tree
 from pyFTS.probabilistic import kde
-from pyFTS import tree
-from pyFTS.common import SortedCollection


 class ProbabilityDistribution(object):
     """