Example #1
import numpy as np
import pandas as pd


def as_array(array_data):  # convert data to numpy.ndarray
	from hypernets.utils import logging
	logger = logging.get_logger(__name__)

	def _is_numpy_ndarray(obj):
		return isinstance(obj, np.ndarray)

	def _is_pylist(obj):
		return isinstance(obj, list)

	def _is_pd_series(obj):
		return isinstance(obj, pd.Series)

	def _is_cudf_series(obj):
		try:
			import cudf
			return isinstance(obj, cudf.Series)  # cudf is optional and may not be installed
		except Exception:
			return False

	def _is_cupy_array(obj):
		try:
			import cupy
			return isinstance(obj, cupy.ndarray)
		except Exception:
			return False

	if _is_pd_series(array_data):
		return array_data.values
	elif _is_numpy_ndarray(array_data):
		return array_data
	elif _is_pylist(array_data):
		return np.array(array_data)
	elif _is_cudf_series(array_data):
		return array_data.to_numpy()
	elif _is_cupy_array(array_data):
		return np.array(array_data.tolist())
	else:
		logger.warning(f"unseen data type {type(array_data)} convert to numpy ndarray")
		return array_data
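
A minimal usage sketch of the function above (assuming numpy and pandas are installed and as_array is in scope); each call below yields a numpy.ndarray:

import numpy as np
import pandas as pd

print(type(as_array([1, 2, 3])))             # python list -> np.array(...)
print(type(as_array(np.arange(3))))          # ndarray     -> returned unchanged
print(type(as_array(pd.Series([1, 2, 3]))))  # pd.Series   -> .values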
Example #2
import math
import queue
import time
from concurrent.futures import ThreadPoolExecutor, as_completed

import dask
from dask.distributed import Client, default_client

from hypernets.core.callbacks import EarlyStoppingError
from hypernets.core.dispatcher import Dispatcher
from hypernets.core.trial import Trial
from hypernets.utils import logging, fs
from hypernets.utils.common import config, Counter

logger = logging.get_logger(__name__)


class DaskTrialItem(Trial):
    def __init__(self,
                 space_sample,
                 trial_no,
                 reward=math.nan,
                 elapsed=math.nan,
                 model_file=None):
        super(DaskTrialItem, self).__init__(space_sample, trial_no, reward,
                                            elapsed, model_file)

        self.space_id = space_sample.space_id
        self.queue_at = time.time()
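
For illustration only, a rough sketch of constructing the item above; the SimpleNamespace object is a hypothetical stand-in for a real search-space sample (only its space_id attribute is used here), and the sketch assumes Trial.__init__ simply stores the arguments shown:

import time
from types import SimpleNamespace

fake_sample = SimpleNamespace(space_id='space-0001')  # hypothetical stand-in, not a real space sample

item = DaskTrialItem(fake_sample, trial_no=1)
print(item.space_id)                # 'space-0001'
print(time.time() - item.queue_at)  # seconds since the item was queued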
Example #3
def get_logger():
    from hypernets.utils import logging
    logger = logging.get_logger(__name__)
    return logger
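
The helper mirrors the standard library's logging.getLogger(name) pattern: fetch a module-level logger once and reuse it. A small sketch using the helper above:

logger = get_logger()
logger.warning("something noteworthy happened")  # same logger API as used in Example #1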
Example #4
import os
import subprocess
import tempfile
from multiprocessing import cpu_count
from pathlib import Path
from typing import List

from paramiko import SFTPClient

from hypernets.hyperctl import consts, get_context
from hypernets.hyperctl.batch import ShellJob
from hypernets.hyperctl.dao import change_job_status
from hypernets.utils import logging as hyn_logging
from hypernets.utils import ssh_utils

logger = hyn_logging.get_logger(__name__)


class NoResourceException(Exception):
    pass


class ShellExecutor:
    def __init__(self, job: ShellJob):
        self.job = job

    def run(self):
        pass

    def post(self):
        pass
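
ShellExecutor above reads as a base class whose run() and post() hooks are meant to be overridden. A minimal sketch of a subclass; LoggingShellExecutor is hypothetical and not part of hypernets:

class LoggingShellExecutor(ShellExecutor):
    def run(self):
        logger.info(f"starting job {self.job}")

    def post(self):
        logger.info(f"cleaning up after job {self.job}")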
Example #5
# -*- coding:utf-8 -*-
__author__ = 'yangjian'
"""

"""
from hypernets.utils.logging import get_logger

from ._base import BaseDiscriminator, get_percentile_score
import numpy as np
logger = get_logger(__name__)


class PercentileDiscriminator(BaseDiscriminator):
    def __init__(self,
                 percentile,
                 min_trials=5,
                 min_steps=5,
                 stride=1,
                 history=None,
                 optimize_direction='min'):
        assert 0.0 <= percentile <= 100.0, f'percentile must be between 0 and 100 inclusive, got {percentile}'

        BaseDiscriminator.__init__(self, min_trials, min_steps, stride,
                                   history, optimize_direction)
        self.percentile = percentile

    def _is_promising(self,
                      iteration_trajectory,
                      group_id,
                      end_iteration=None):
        n_step = len(iteration_trajectory) - 1