Improvements of FCM_FTS

This commit is contained in:
Petrônio Cândido 2020-01-28 15:13:11 -03:00
parent dbfa1ac86e
commit be119604a9
4 changed files with 17 additions and 7 deletions

View File

@@ -4,7 +4,6 @@ import math
 import time
 from functools import reduce
 from operator import itemgetter
-import dispy
 import random
 from pyFTS.common import Util
@@ -13,7 +12,6 @@ from pyFTS.partitioners import Grid, Entropy  # , Huarng
 from pyFTS.models import hofts
 from pyFTS.common import Membership
 from pyFTS.hyperparam import Util as hUtil
-from pyFTS.distributed import dispy as dUtil
 from pyFTS.fcm import common, fts
@@ -267,6 +265,8 @@ def GeneticAlgorithm(dataset, **kwargs):
             ret = evaluate(dataset, individual, **kwargs)
             individual['rmse'] = ret['rmse']
     elif distributed=='dispy':
+        import dispy
+        from pyFTS.distributed import dispy as dUtil
         jobs = []
         for ct, individual in enumerate(population):
             job = cluster.submit(dataset, individual, **kwargs)
@@ -396,13 +396,17 @@ def log_result(conn, datasetname, result):
 def execute(datasetname, dataset, **kwargs):
-    conn = hUtil.open_hyperparam_db('hyperparam.db')
+    file = kwargs.get('file', 'hyperparam.db')
+    conn = hUtil.open_hyperparam_db(file)
     experiments = kwargs.get('experiments', 30)
     distributed = kwargs.get('distributed', False)
     if distributed == 'dispy':
+        import dispy
+        from pyFTS.distributed import dispy as dUtil
         nodes = kwargs.get('nodes', ['127.0.0.1'])
         cluster, http_server = dUtil.start_dispy_cluster(evaluate, nodes=nodes)
         kwargs['cluster'] = cluster

View File

@@ -1,6 +1,6 @@
 from pyFTS.common import fts
 from pyFTS.models import hofts
-from pyFTS.fcm import common
+from pyFTS.fcm import common, GA, Activations
 import numpy as np
@@ -10,6 +10,10 @@ class FCM_FTS(hofts.HighOrderFTS):
         super(FCM_FTS, self).__init__(**kwargs)
         self.fcm = common.FuzzyCognitiveMap(**kwargs)

+    def train(self, data, **kwargs):
+        ret = GA.execute(data, **kwargs)
+        self.fcm.weights = ret['weights']
+
     def forecast(self, ndata, **kwargs):
         ret = []

View File

@@ -562,7 +562,7 @@ def execute(datasetname, dataset, **kwargs):
     :param datasetname:
     :param dataset: The time series to optimize the FTS
-    :keyword database_file:
+    :keyword file:
     :keyword experiments:
     :keyword distributed:
     :keyword ngen: An integer value with the maximum number of generations, default value: 30
@@ -591,7 +591,7 @@ def execute(datasetname, dataset, **kwargs):
     :return: the best genotype
     """
-    file = kwargs.get('database_file', 'hyperparam.db')
+    file = kwargs.get('file', 'hyperparam.db')
     conn = hUtil.open_hyperparam_db(file)

View File

@@ -127,7 +127,9 @@ def execute(hyperparams, datasetname, dataset, **kwargs):
     print("Evaluation values: \n {}".format(hp_values))
     cluster, http_server = dUtil.start_dispy_cluster(cluster_method, nodes=nodes)
-    conn = hUtil.open_hyperparam_db('hyperparam.db')
+    file = kwargs.get('file', 'hyperparam.db')
+    conn = hUtil.open_hyperparam_db(file)
     for instance in product(*hp_values):
         partitions = instance[index['partitions']]