Removing direct dispy import from hyperparam modules
commit 6a1ee719b7
parent 783a77ec0f
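The hunks below replace the module-level `import dispy` and `from pyFTS.distributed import dispy as dUtil` statements in the hyperparam modules with imports placed inside the functions and branches that actually use dispy (the `distributed == 'dispy'` paths, `process_jobs`, and the `execute` entry points). With the imports deferred this way, the modules can still be imported and run in local mode on machines where dispy is not installed. A minimal sketch of the deferred-import pattern, using illustrative names (`fitness`, `evaluate_population`) that are not part of pyFTS:

def fitness(individual):
    # Placeholder objective, used only for illustration.
    return sum(individual)

def evaluate_population(population, distributed=False, nodes=None):
    if not distributed:
        # Local path: dispy is never imported, so the module works without it.
        return [fitness(ind) for ind in population]
    elif distributed == 'dispy':
        # Deferred imports: executed only when the dispy backend is requested.
        import dispy  # presumably needed by the real code (e.g. job status constants)
        from pyFTS.distributed import dispy as dUtil
        cluster, http_server = dUtil.start_dispy_cluster(fitness, nodes=nodes or ['127.0.0.1'])
        jobs = [cluster.submit(ind) for ind in population]
        # Calling a finished dispy job returns its result, as in the hunks below.
        return [job() for job in jobs]

# The local path runs even on a machine without dispy installed:
print(evaluate_population([[1, 2], [3, 4]]))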
@@ -8,7 +8,6 @@ import math
 import time
 from functools import reduce
 from operator import itemgetter
-import dispy
 
 import random
 from pyFTS.common import Util
@@ -17,7 +16,7 @@ from pyFTS.partitioners import Grid, Entropy # , Huarng
 from pyFTS.common import Membership
 from pyFTS.models import hofts, ifts, pwfts
 from pyFTS.hyperparam import Util as hUtil
-from pyFTS.distributed import dispy as dUtil
+
 
 __measures = ['f1', 'f2', 'rmse', 'size']
 
@@ -415,6 +414,8 @@ def GeneticAlgorithm(dataset, **kwargs):
             for key in __measures:
                 individual[key] = ret[key]
     elif distributed=='dispy':
+        from pyFTS.distributed import dispy as dUtil
+        import dispy
         jobs = []
         for ct, individual in enumerate(population):
             job = cluster.submit(dataset, individual, **kwargs)
@@ -602,6 +603,7 @@ def execute(datasetname, dataset, **kwargs):
     shortname = str(fts_method.__module__).split('.')[-1]
 
     if distributed == 'dispy':
+        from pyFTS.distributed import dispy as dUtil
         nodes = kwargs.get('nodes', ['127.0.0.1'])
         cluster, http_server = dUtil.start_dispy_cluster(evaluate, nodes=nodes)
         kwargs['cluster'] = cluster
@@ -4,8 +4,7 @@ from pyFTS.models import hofts
 from pyFTS.partitioners import Grid, Entropy
 from pyFTS.benchmarks import Measures
 from pyFTS.hyperparam import Util as hUtil
-from pyFTS.distributed import dispy as dUtil
-import dispy
+
 import numpy as np
 from itertools import product
 
@@ -73,6 +72,8 @@ def cluster_method(individual, dataset, **kwargs):
 
 
 def process_jobs(jobs, datasetname, conn):
+    from pyFTS.distributed import dispy as dUtil
+    import dispy
     for ct, job in enumerate(jobs):
         print("Processing job {}".format(ct))
         result = job()
@@ -98,6 +99,8 @@ def process_jobs(jobs, datasetname, conn):
 
 
 def execute(hyperparams, datasetname, dataset, **kwargs):
+    from pyFTS.distributed import dispy as dUtil
+    import dispy
 
     nodes = kwargs.get('nodes',['127.0.0.1'])
 
@@ -28,7 +28,7 @@ from pyFTS.partitioners import Grid, Entropy # , Huarng
 from pyFTS.common import Membership
 from pyFTS.models import hofts, ifts, pwfts
 from pyFTS.hyperparam import Util as hUtil
-from pyFTS.distributed import dispy as dUtil
+
 from pyFTS.hyperparam import Evolutionary, random_search as RS
 from pyFTS.models.multivariate import mvfts, wmvfts, variable
 from pyFTS.models.seasonal import partitioner as seasonal
@@ -458,6 +458,8 @@ def execute(datasetname, dataset, **kwargs):
     kwargs['random_individual'] = random_genotype
 
     if distributed == 'dispy':
+        from pyFTS.distributed import dispy as dUtil
+        import dispy
         nodes = kwargs.get('nodes', ['127.0.0.1'])
        cluster, http_server = dUtil.start_dispy_cluster(evaluate, nodes=nodes)
         kwargs['cluster'] = cluster
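With the imports deferred, merely importing the hyperparam modules (e.g. `from pyFTS.hyperparam import Evolutionary`) no longer requires dispy; it is needed only when the dispy backend is explicitly requested. A hedged usage sketch, assuming the keyword arguments read in the hunks above (`distributed`, `nodes`) are how the backend is selected and that the remaining hyperparameters fall back to the module defaults:

from pyFTS.hyperparam import Evolutionary

data = list(range(1000))   # illustrative stand-in for a real time series

# Local mode: runs even on a machine without dispy installed
# (assumes the other hyperparameters of Evolutionary.execute have usable defaults).
ret = Evolutionary.execute('toy_dataset', data)

# Distributed mode: requires dispy and dispynode workers on the listed hosts.
ret = Evolutionary.execute('toy_dataset', data,
                           distributed='dispy', nodes=['127.0.0.1'])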