Example #1
    def to_json(self):
        gmm_dict = dict()
        gmm_dict['num_components'] = self.num_components
        gmm_dict['inflate_coef_cov'] = self.inflate_coef_cov
        gmm_dict['cov_regularization'] = self.cov_regularization

        gmm_dict['obs_dim'] = self.obs_dim
        prior_params_pattern = \
            get_prior_params_pattern(
                self.obs_dim, self.num_components)
        gmm_dict['prior_params_flat'] = \
            list(prior_params_pattern.flatten(
                 self.prior_params, free=False))

        hess_dim = self.gmm_params_pattern.flat_length(free=True)
        gmm_dict['preconditioner_json'] = \
            json_tricks.dumps(
                self.get_kl_conditioned.get_preconditioner(hess_dim))

        # Because the svd is not unique, save these to make sure they
        # are the same.
        gmm_dict['transform_mat_json'] = \
            json_tricks.dumps(self.transform_mat)
        gmm_dict['unrotate_transform_mat_json'] = \
            json_tricks.dumps(self.unrotate_transform_mat)

        return json.dumps(gmm_dict)
Example #2
    def generate_multiple_parameters(self, parameter_id_list):
        '''
        generate multiple instances of hyperparameters
        '''

        if self.first_one:
            params = []
            for one_id in parameter_id_list:
                init_challenger = self.smbo_solver.nni_smac_start()
                self.total_data[one_id] = init_challenger
                json_tricks.dumps(init_challenger.get_dictionary())
                params.append(
                    self.convert_loguniform_categorical(
                        init_challenger.get_dictionary()))
        else:
            challengers = self.smbo_solver.nni_smac_request_challengers()
            cnt = 0
            params = []
            for challenger in challengers:
                if cnt >= len(parameter_id_list):
                    break
                self.total_data[parameter_id_list[cnt]] = challenger
                json_tricks.dumps(challenger.get_dictionary())
                params.append(
                    self.convert_loguniform_categorical(
                        challenger.get_dictionary()))
                cnt += 1
        return params
Example #3
 def generate_parameters(self, parameter_id):
     """generate one instance of hyperparameters
     
     Parameters
     ----------
     parameter_id: int
         parameter id
     
     Returns
     -------
     dict
         newly generated parameters
     """
     if self.first_one:
         init_challenger = self.smbo_solver.nni_smac_start()
         self.total_data[parameter_id] = init_challenger
         json_tricks.dumps(init_challenger.get_dictionary())
         return self.convert_loguniform_categorical(
             init_challenger.get_dictionary())
     else:
         challengers = self.smbo_solver.nni_smac_request_challengers()
         for challenger in challengers:
             self.total_data[parameter_id] = challenger
             json_tricks.dumps(challenger.get_dictionary())
             return self.convert_loguniform_categorical(
                 challenger.get_dictionary())
Example #4
def is_jsonable(x):
    try:
        # plain json.dumps(x) would reject numpy types; json_tricks handles them
        json_tricks.dumps(x)
        return True
    except Exception:
        return False
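A minimal usage sketch for the helper above (the payloads are illustrative); json_tricks accepts numpy arrays that the standard json module rejects, while unserializable objects such as functions still return False:

import numpy as np
import json_tricks

print(is_jsonable({'weights': np.arange(3)}))  # True: json_tricks encodes numpy arrays
print(is_jsonable(lambda x: x))                # False: no encoder for plain functions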
Example #5
    def _request_one_trial_job(self):
        """get one trial job, i.e., one hyperparameter configuration.

        When this function is called, BOHB will send a command:
        a. If there is a parameter ready to run, it will return "NewTrialJob" with a dict:
        {
            'parameter_id': id of the new hyperparameter
            'parameter_source': 'algorithm'
            'parameters': value of the new hyperparameter
        }
        b. If BOHB has no parameter waiting, it will return "NoMoreTrialJobs" with
        {
            'parameter_id': '-1_0_0',
            'parameter_source': 'algorithm',
            'parameters': ''
        }
        """
        if not self.generated_hyper_configs:
            ret = {
                'parameter_id': '-1_0_0',
                'parameter_source': 'algorithm',
                'parameters': ''
            }
            send(CommandType.NoMoreTrialJobs, json_tricks.dumps(ret))
            return
        assert self.generated_hyper_configs
        params = self.generated_hyper_configs.pop()
        ret = {
            'parameter_id': params[0],
            'parameter_source': 'algorithm',
            'parameters': params[1]
        }
        self.parameters[params[0]] = params[1]
        send(CommandType.NewTrialJob, json_tricks.dumps(ret))
        self.credit -= 1
Example #6
    def validateAction(value):

        action = json.loads(value)

        if (not "type" in action):
            raise ValidationError("An action requires a 'type' property. " +
                                  dumps(action))

        if (action["type"] != "assign"):
            raise ValidationError("The action type must be 'assign'. " +
                                  dumps(action))

        if (not "value" in action):
            raise ValidationError(
                "An 'assign' action requires a value property. " +
                dumps(action))

        if (action["value"]):
            if ("workflow" in action["value"]):
                if ("target" in action):
                    raise ValidationError(
                        "A 'workflow' action mustn't have a target. " +
                        dumps(action))
            else:
                if (not "target" in action or len(action["target"]) == 0):
                    raise ValidationError(
                        "An 'assign' action requires a target property. " +
                        dumps(action))

        return ActionValidator.validateFields(action["value"])
Example #7
def get_image_as_128vector(image_base64):
    temp_image_name = get_random_string() + '.png'
    image_path = '/var/www/facerecognition/FaceRecognitionBackend/.temp_images/' + temp_image_name

    convert_and_save(image_base64, image_path)

    tf.reset_default_graph()

    with tf.Session() as sess:
        # Setup models
        mtcnn = detect_and_align.create_mtcnn(sess, None)

        # Locate faces and landmarks in frame
        image = misc.imread(os.path.expanduser(image_path), mode='RGB')
        face_patches, padded_bounding_boxes, landmarks = detect_and_align.detect_faces(image, mtcnn)

    face_patches = dumps(face_patches, primitives=True)
    face_patches = json.loads(face_patches)
    feed_dict = {'inputs': {'input': face_patches, 'phase_train': False}}

    response = requests.post(SERVER_URL, json=feed_dict)  # , headers=headers)

    if response.status_code == 200:
        outputs = response.json()
        output_vector_128 = dumps(outputs['outputs'][0])  # convert to string
    else:
        output_vector_128 = None

    if os.path.exists(image_path):  # delete temporary file after use
        os.remove(image_path)

    return output_vector_128
Example #8
def base_convert(
    read_obj,
    read_index: int = -1,
    read_format=None,
    write_filename=None,
    write_format=None,
    data=None,
    calc_data=None,
    compress: bool = True,
    compresslevel: int = 1,
):
    """
    base convert: convert anything from one type to another
    Input:
        read_obj: filename/StringIO/Atoms/Structure like
        read_index: int, which frame to read
        read_format: format of object
        write_filename: used for generating output
        write_format: what type to write
        data: dict, extra data written to arrays
        calc_data: dict, extra data write to calc_arrays
        compress: whether to compress the file, default True
        compresslevel: int, level of compress
    Output:
        string: transformed structure from read_format to write_format
    """
    rawbytes, read_filename, compressed = parse_input_obj(read_obj)
    if compresslevel == 0:
        compress = False
    if not compressed and compress and len(rawbytes) > 8 * 1024:
        rawbytes = gzip.compress(rawbytes, compresslevel)
        compressed = True
    if read_filename is None:
        assert read_format is not None
        read_filename = f"{atomtools.name.randString()}.{read_format}"
    files = {
        'read_file': (read_filename, BytesIO(rawbytes)),
    }
    payload = {
        'read_index': read_index,
        'read_format': read_format,
        'write_format': write_format,
        'write_filename': write_filename,
        'compressed': compressed,
        'data': json_tricks.dumps(data, allow_nan=True),
        'calc_data': json_tricks.dumps(calc_data, allow_nan=True),
    }
    default_url = "https://io.autochemistry.com/convert"
    url = os.environ.get("CHEMIO_SERVER_URL", default_url)
    logger.debug(f"url: {url}, files: {files}, payload: {payload}")
    response = requests.post(url, files=files, data=payload)
    result = response.json()
    logger.debug(f"result: {result}")
    if result['success']:
        return result['data']
    raise ChemioReadError(result['message'])
Example #9
def coo_matrix_to_json(sp_mat):
    if not sp.sparse.isspmatrix_coo(sp_mat):
        raise ValueError('sp_mat must be a scipy.sparse.coo_matrix')
    sp_mat_dict = {
        'data': json_tricks.dumps(sp_mat.data),
        'row': json_tricks.dumps(sp_mat.row),
        'col': json_tricks.dumps(sp_mat.col),
        'shape': sp_mat._shape,
        'type': 'coo_matrix' }
    return json.dumps(sp_mat_dict)
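A short usage sketch for the helper above (assumes scipy imported as sp and json/json_tricks imported as in the snippet; the matrix values are illustrative):

import numpy as np
import scipy as sp
import scipy.sparse

mat = sp.sparse.coo_matrix(
    (np.array([1.0, 2.0]), (np.array([0, 1]), np.array([1, 0]))),
    shape=(2, 2))
# The 'data', 'row' and 'col' fields of the result hold json_tricks-encoded arrays.
print(coo_matrix_to_json(mat))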
Example #10
 def return_all_as_dict(self):
     return_dict = {}
     for key in dir(self):
         if key.isupper() or 'DEFAULT' in key:
             attr = getattr(self, key)
             try:
                 json.dumps(dict(key=attr))
             except TypeError as e:
                 attr = 'cannot be json dumped'
             return_dict[key] = attr
     return return_dict
Example #11
def get_temps():
    while True:
        data = []
        now = datetime.datetime.now()
        temperature = (sensor.get_temperature())
        data.append({"temp": temperature, 'date': now})
        js_data = dumps(data)
        print(dumps(data))
        time.sleep(10)
        #sending post request and saving response as response object
        r = requests.post(url=API_ENDPOINT, data={'data': js_data})
        print(r)
Example #12
def test_tzaware_naive_date_time():
	json = dumps(DTOBJ, primitives=True)
	back = loads(json)
	for orig, bck in zip(DTOBJ, back):
		if isinstance(bck, (date, time, datetime,)):
			assert isinstance(bck, str if is_py3 else (str, unicode))
			assert bck == orig.isoformat()
		elif isinstance(bck, (timedelta,)):
			assert isinstance(bck, float)
			assert bck == orig.total_seconds()
	dt = pytz.timezone('Europe/Amsterdam').localize(datetime(year=1988, month=3, day=15, hour=8, minute=3, second=59, microsecond=7))
	assert dumps(dt, primitives=True).strip('"') == '1988-03-15T08:03:59.000007+01:00'
Example #13
def test_memory_order():
    arrC = array([[1., 2.], [3., 4.]], order='C')
    json = dumps(arrC)
    arr = loads(json)
    assert array_equal(arrC, arr)
    assert arrC.flags['C_CONTIGUOUS'] == arr.flags['C_CONTIGUOUS'] and \
     arrC.flags['F_CONTIGUOUS'] == arr.flags['F_CONTIGUOUS']
    arrF = array([[1., 2.], [3., 4.]], order='F')
    json = dumps(arrF)
    arr = loads(json)
    assert array_equal(arrF, arr)
    assert arrF.flags['C_CONTIGUOUS'] == arr.flags['C_CONTIGUOUS'] and \
     arrF.flags['F_CONTIGUOUS'] == arr.flags['F_CONTIGUOUS']
Example #14
def cut_send(name):
        print("Analyzing image " + name)
        h, w, xOffSet, yOffSet = 120, 120, 60, 60
        path_imagens = "/media/administrador/SuperLeggera:0/Imagens_Habilitadas/imagens/"
        path_imagem = path_imagens+name
        imgOri = cv2.imread(path_imagem)
        ds = gdal.Open(path_imagem)
        producer = KafkaProducer(bootstrap_servers=['192.168.25.102:9092'])

        xOrigin, px_w, rot1, yOrigin, rot2, px_h = ds.GetGeoTransform()

        imagem = cv2.cvtColor(imgOri, cv2.COLOR_BGR2GRAY)

        Y = imagem.shape[0]  # image height
        X = imagem.shape[1]  # image width

        y0, y1 = 0, h
        
        while Y >= y1:
            x0, x1 = 0, w

            while X >= x1:
                imageCropped = imagem[y0:y1, x0:x1]
                
                json_imagem = {
                    "nome_arquivo":name,
                    "projection_ref":ds.GetProjectionRef(),
                    "x_origin":xOrigin,
                    "y_origin":yOrigin,
                    "px_w":px_w,
                    "px_h":px_h,
                    "X0":x0,
                    "Y0":y0,
                    "imagem":dumps(imageCropped)
                }
                
                '''
                json_imagem = {
                    "nome_arquivo":name,
                    "ds":ds.,
                    "imagem":dumps(imageCropped)
                }
                '''
                json_object = dumps(json_imagem)

                producer.send('landingqueue',bytes(json_object,'utf-8'))
                producer.flush()

                x0, x1 = (x0+xOffSet), (x1+xOffSet)

            y0, y1 = (y0+yOffSet),(y1+yOffSet)
Example #15
def write_json(json_obj, filename, pretty=True):
    """write_json will write a json object to file, pretty printed

       Arguments:
        - json_obj (dict) : the dict to print to json
        - filename (str) : the output file to write to
        - pretty (bool): if True, will use nicer formatting
    """
    with open(filename, "w") as filey:
        if pretty:
            filey.writelines(dumps(json_obj, indent=4, separators=(",", ": ")))
        else:
            filey.writelines(dumps(json_obj))
    return filename
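A brief usage sketch (filename and payload are illustrative; dumps is assumed to be json_tricks.dumps, which forwards indent/separators to the underlying json encoder):

import numpy as np
from json_tricks import dumps

# Numpy values in the payload are handled by json_tricks.
write_json({'run': 1, 'scores': np.array([0.25, 0.75])}, 'results.json')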
Example #16
def test_pandas_series():
    for name, col in COLUMNS.items():
        ds = Series(data=col, name=name)
        txt = dumps(ds, allow_nan=True)
        back = loads(txt)
        assert (ds.equals(back))
        assert ds.dtype == back.dtype
    for name, col in COLUMNS.items():
        ds = Series(data=col, name=name)
        txt = dumps(ds, primitives=True, allow_nan=True)
        back = loads(txt)
        assert isinstance(back, dict)
        assert_equal(ds.index.values, back['index'])
        assert_equal(ds.values, back['data'])
Example #17
def test_with_value():
    obj = [
        CombineComplexTypesEnum.class_inst, CombineComplexTypesEnum.timepoint
    ]
    encoder = partial(enum_instance_encode, with_enum_value=True)
    txt = dumps(obj, extra_obj_encoders=(encoder, ))
    assert '"value":' in txt
    back = loads(txt, obj_pairs_hooks=())
    class_inst_encoding = loads(dumps(
        CombineComplexTypesEnum.class_inst.value),
                                obj_pairs_hooks=())
    timepoint_encoding = loads(dumps(CombineComplexTypesEnum.timepoint.value),
                               obj_pairs_hooks=())
    assert back[0]['__enum__']['value'] == class_inst_encoding
    assert back[1]['__enum__']['value'] == timepoint_encoding
Example #18
 def generate_parameters(self, parameter_id):
     '''
     generate one instance of hyperparameters
     '''
     if self.first_one:
         init_challenger = self.smbo_solver.nni_smac_start()
         self.total_data[parameter_id] = init_challenger
         json_tricks.dumps(init_challenger.get_dictionary())
         return init_challenger.get_dictionary()
     else:
         challengers = self.smbo_solver.nni_smac_request_challengers()
         for challenger in challengers:
             self.total_data[parameter_id] = challenger
             json_tricks.dumps(challenger.get_dictionary())
             return challenger.get_dictionary()
Example #19
def test_dump_np_scalars():
    data = [
        int8(-27),
        complex64(exp(1) + 37j),
        (
            {
                'alpha': float64(-exp(10)),
                'str-only': complex64(-1 - 1j),
            },
            uint32(123456789),
            float16(exp(-1)),
            set((
                int64(37),
                uint64(-0),
            )),
        ),
    ]
    replaced = encode_scalars_inplace(deepcopy(data))
    json = dumps(replaced)
    rec = loads(json)
    print(data)
    print(rec)
    assert data[0] == rec[0]
    assert data[1] == rec[1]
    assert data[2][0] == rec[2][0]
    assert data[2][1] == rec[2][1]
    assert data[2][2] == rec[2][2]
    assert data[2][3] == rec[2][3]
    assert data[2] == tuple(rec[2])
Example #20
def to_json(dm):
    """
	desc: |
		*Requires json_tricks*

		Creates (serializes) a `json` string from a DataMatrix.

	arguments:
		dm:
			desc:	A DataMatrix to serialize.
			type:	DataMatrix

	returns:
		desc:	A json string.
		type:	str
	"""

    import json_tricks

    return json_tricks.dumps(collections.OrderedDict([
        ('rowid', dm._rowid._l),
        ('columns',
         collections.OrderedDict([(name, (type(column).__name__, column._seq))
                                  for name, column in dm.columns]))
    ]),
                             allow_nan=True)
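A hypothetical usage sketch (assumes the datamatrix package and that collections and json_tricks are importable; the column name rt is illustrative):

from datamatrix import DataMatrix

dm = DataMatrix(length=2)
dm.rt = 312, 278        # creates a numeric column with two cells
print(to_json(dm))      # JSON string with 'rowid' and 'columns' entries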
Example #21
 def __init__(self):
     print('Basic-v0 environment')
     dir_path = os.path.dirname(os.path.realpath(__file__))
     # ------------ load data ------------------ #
     self.data = pd.read_csv(dir_path+'/data/notifications.csv' )
             
     self.contexts = self.data[['postedDayOfWeek', 'postedTimeOfDay', 'contactSignificantContext']]
     self.notifications = self.data[['appPackage', 'category', 'priority', 'numberUpdates']]
     self.engagements = self.data[['action', 'response']]
     
     self.action_space = spaces.Discrete(2)
     self.observation_space = spaces.Discrete(len(self.data))
     
     self.epoch = 0
     self.openedNotifications = []
     self.openedActions = []
     self.openedContexts = []
     self.dismissedNotifications = []
     self.dismissedActions = []
     self.dismissedContexts = []
     self.correctlyOpened = 0
     self.correctlyDismissed = 0
     
     # ------------ prep ui ------------------ #
     eel.init(dir_path+'/web')
     eel.start('main.html', mode='chrome', block=False)
     eel.sleep(3)
     eel.initial_state(dumps({'notification': self.notifications.iloc[self.epoch].to_dict(),
                       'context':self.contexts.iloc[self.epoch].to_dict(), 
                       'size':len(self.data)}))
Example #22
    def _handle_intermediate_metric_data(self, data):
        if data['type'] != 'PERIODICAL':
            return True
        if self.assessor is None:
            return True

        trial_job_id = data['trial_job_id']
        if trial_job_id in _ended_trials:
            return True

        history = _trial_history[trial_job_id]
        history[data['sequence']] = data['value']
        ordered_history = _sort_history(history)
        if len(ordered_history) < data['sequence']:
            # no user-visible update since last time
            return True

        try:
            result = self.assessor.assess_trial(trial_job_id, ordered_history)
        except Exception:
            _logger.exception('Assessor error')
            return True  # no result to act on if the assessor failed

        if isinstance(result, bool):
            result = AssessResult.Good if result else AssessResult.Bad
        elif not isinstance(result, AssessResult):
            msg = 'Result of Assessor.assess_trial must be an object of AssessResult, not %s'
            raise RuntimeError(msg % type(result))

        if result is AssessResult.Bad:
            _logger.debug('BAD, kill %s', trial_job_id)
            send(CommandType.KillTrialJob, json_tricks.dumps(trial_job_id))
        else:
            _logger.debug('GOOD')
Example #23
def report_intermediate_result(metric):
    """
    Reports intermediate result to NNI.

    Parameters
    ----------
    metric:
        serializable object.
    """
    global _intermediate_seq
    assert _params or trial_env_vars.NNI_PLATFORM is None, \
        'nni.get_next_parameter() needs to be called before report_intermediate_result'
    metric = json_tricks.dumps({
        'parameter_id': _params['parameter_id'] if _params else None,
        'trial_job_id': trial_env_vars.NNI_TRIAL_JOB_ID,
        'type': 'PERIODICAL',
        'sequence': _intermediate_seq,
        'value': metric
    })
    _intermediate_seq += 1
    platform.send_metric(metric)
Example #24
def test_pandas_dataframe():
    df = DataFrame(COLUMNS, columns=tuple(COLUMNS.keys()))
    txt = dumps(df, allow_nan=True)
    back = loads(txt)
    assert isnan(back.ix[0, -1])
    assert (df.equals(back))
    assert (df.dtypes == back.dtypes).all()
    df = DataFrame(COLUMNS, columns=tuple(COLUMNS.keys()))
    txt = dumps(df, primitives=True, allow_nan=True)
    back = loads(txt)
    assert isinstance(back, dict)
    assert isnan(back['special'][0])
    assert all(df.index.values == tuple(back.pop('index')))
    for name, col in back.items():
        assert name in COLUMNS
        assert_equal(list(COLUMNS[name]), col)
Example #25
def cluster_analysis(input_json):

    json = loads(input_json, preserve_order=True)

    csv = pd.read_csv(StringIO(json['Csv']))
    countCluster = json['CountCluster']

    raw1 = csv['X']
    raw2 = csv['Y']

    x = [[] for i in range(countCluster)]
    y = [[] for i in range(countCluster)]

    X = np.array(list(zip(raw1, raw2)))
    kmeans = KMeans(n_clusters=countCluster, random_state=0).fit(X)

    centers = kmeans.cluster_centers_
    Y = X[:, 1]
    X = X[:, 0]
    for i in range(len(Y)):
        distances = np.zeros(countCluster)
        for j in range(countCluster):
            distances[j] = dist(X[i], centers[j][0], Y[i], centers[j][1])
        indx = np.argmin(distances)
        x[indx].append(X[i])
        y[indx].append(Y[i])

    return dumps(Cluster(x, y, centers), primitives=True)
Example #26
 def run(args):
     if not args.filename:
         raise ValueError("No filename is given")
     data = utils.parse_args_data(args.data)
     calc_data = utils.parse_args_data(args.calc_data)
     import chemio
     if args.debug:
         chemio.set_loglevel('debug')
     for filename in args.filename:
         arrays = chemio.read(filename,
                              index=args.index,
                              compresslevel=args.compresslevel,
                              data=data,
                              calc_data=calc_data)
         if args.verbose:
             print(arrays)
         if args.key:
             # print(filename)
             try:
                 data = ExtDict(arrays)[args.key]
                 if args.no_show_filename:
                     print(data)
                 else:
                     print(filename, data)
             except:
                 print(None)
         else:
             print(json_tricks.dumps(arrays, allow_nan=True, indent=4))
Example #27
    def run_all_tests(self):
        '''
        Run all DropBot on-board self-diagnostic tests.

        Record test results as JSON and results summary as a Word document.

        .. versionadded:: 0.14

        .. versionchanged:: 0.16
            Prompt user to insert DropBot test board.
        '''
        results = db.self_test.self_test(self.control_board)
        results_dir = ph.path(self.diagnostics_results_dir)
        results_dir.makedirs_p()

        # Create unique output filenames based on current timestamp.
        timestamp = dt.datetime.now().isoformat().replace(':', '_')
        json_path = results_dir.joinpath('results-%s.json' % timestamp)
        report_path = results_dir.joinpath('results-%s.docx' % timestamp)

        # Write test results encoded as JSON.
        with json_path.open('w') as output:
            # XXX Use `json_tricks` rather than standard `json` to support
            # serializing [Numpy arrays and scalars][1].
            #
            # [1]: http://json-tricks.readthedocs.io/en/latest/#numpy-arrays
            output.write(json_tricks.dumps(results, indent=4))

        # Generate test result summary report as Word document.
        db.self_test.generate_report(results,
                                     output_path=report_path,
                                     force=True)
        # Launch Word document report.
        report_path.launch()
Example #28
def _dump(*, obj: Any, fp: Optional[Any], use_trace: bool,
          pickle_size_limit: int, allow_nan: bool,
          **json_tricks_kwargs) -> Union[str, bytes]:
    encoders = [
        # we don't need to check for dependency as many of those have already been required by NNI
        json_tricks.pathlib_encode,  # pathlib is a required dependency for NNI
        json_tricks.pandas_encode,  # pandas is a required dependency
        json_tricks.numpy_encode,  # required
        json_tricks.encoders.enum_instance_encode,
        json_tricks.json_date_time_encode,  # same as json_tricks
        json_tricks.json_complex_encode,
        json_tricks.json_set_encode,
        json_tricks.numeric_types_encode,
        functools.partial(_json_tricks_serializable_object_encode,
                          use_trace=use_trace),
        functools.partial(_json_tricks_func_or_cls_encode,
                          pickle_size_limit=pickle_size_limit),
        functools.partial(_json_tricks_any_object_encode,
                          pickle_size_limit=pickle_size_limit),
    ]

    json_tricks_kwargs['allow_nan'] = allow_nan

    if fp is not None:
        return json_tricks.dump(obj,
                                fp,
                                obj_encoders=encoders,
                                **json_tricks_kwargs)
    else:
        return json_tricks.dumps(obj,
                                 obj_encoders=encoders,
                                 **json_tricks_kwargs)
Example #29
    def _request_one_trial_job(self):
        """get one trial job, i.e., one hyperparameter configuration."""
        if not self.generated_hyper_configs:
            if self.curr_s < 0:
                self.curr_s = self.s_max
            _logger.debug('create a new bracket, self.curr_s=%d', self.curr_s)
            self.brackets[self.curr_s] = Bracket(self.curr_s, self.s_max,
                                                 self.eta, self.R,
                                                 self.optimize_mode)
            next_n, next_r = self.brackets[self.curr_s].get_n_r()
            _logger.debug('new bracket, next_n=%d, next_r=%d', next_n, next_r)
            assert self.searchspace_json is not None and self.random_state is not None
            generated_hyper_configs = self.brackets[
                self.curr_s].get_hyperparameter_configurations(
                    next_n, next_r, self.searchspace_json, self.random_state)
            self.generated_hyper_configs = generated_hyper_configs.copy()
            self.curr_s -= 1

        assert self.generated_hyper_configs
        params = self.generated_hyper_configs.pop()
        ret = {
            'parameter_id': params[0],
            'parameter_source': 'algorithm',
            'parameters': params[1]
        }
        send(CommandType.NewTrialJob, json_tricks.dumps(ret))
Example #30
    def handle_trial_end(self, data):
        '''
        data: it has three keys: trial_job_id, event, hyper_params
            trial_job_id: the id generated by training service
            event: the job's state
            hyper_params: the hyperparameters (a string) generated and returned by tuner
        '''
        hyper_params = json_tricks.loads(data['hyper_params'])
        bracket_id, i, _ = hyper_params['parameter_id'].split('_')
        hyper_configs = self.brackets[int(bracket_id)].inform_trial_end(int(i))
        if hyper_configs is not None:
            _logger.debug('bracket %s next round %s, hyper_configs: %s',
                          bracket_id, i, hyper_configs)
            self.generated_hyper_configs = self.generated_hyper_configs + hyper_configs
            for _ in range(self.credit):
                if not self.generated_hyper_configs:
                    break
                params = self.generated_hyper_configs.pop()
                ret = {
                    'parameter_id': params[0],
                    'parameter_source': 'algorithm',
                    'parameters': params[1]
                }
                send(CommandType.NewTrialJob, json_tricks.dumps(ret))
                self.credit -= 1

        return True
Example #31
 def storeFeaturesForFrame(self, features, timeframe):
     """
     Stores feature data
     """
     assert(self.server_address is not None)
     assert(self.uuid is not None)
     node_service = DVIDNodeService(self.server_address, self.uuid)
     node_service.create_keyvalue(self.keyvalue_store)
     node_service.put(self.keyvalue_store, "frame-{}".format(timeframe), json.dumps(features))
Example #32
def main(fp):
    '''
    takes a filepath to print out the parameters in it
    Args:
        fp: FilePath of the pickle or json file with params

    Returns:
        None, prints parameters in the shell

    '''
    try:
        from helper import depickler
        import json_tricks as json

        #get the extension
        ext=fp.split('.')[-1]

        #depickle it if in pickle
        if ext=='pickle':
            a = depickler(fp)
            try:
                parameters = a['parameters']
            except KeyError:
                print "key error"
                parameters = a['metadata']


        #json load if json
        elif ext=='json':
            with open(fp) as j:
                parameters=json.load(j)['parameters']

        #print it neatly using json dumps in a sorted fashion with nice indents
        print json.dumps(parameters, indent=4, sort_keys=True)

    except IOError as e:
        print 'no such file', e
Example #33
    def saveAsJson(self,
                   fileName=None,
                   encoding='utf-8',
                   fileCollisionMethod='rename'):
        """
        Serialize the object to the JSON format.

        Parameters
        ----------
        fileName: string, or None
            the name of the file to create or append. Can include a relative or
            absolute path. If `None`, will not write to a file, but return an
            in-memory JSON object.

        encoding : string, optional
            The encoding to use when writing the file. This parameter will be
            ignored if `append` is `False` and `fileName` ends with `.psydat`
            or `.npy` (i.e. if a binary file is to be written).

        fileCollisionMethod : string
            Collision method passed to
            :func:`~psychopy.tools.fileerrortools.handleFileCollision`. Can be
            either of `'rename'`, `'overwrite'`, or `'fail'`.

        Notes
        -----
        Currently, a copy of the object is created, and the copy's .origin
        attribute is set to an empty string before serializing
        because loading the created JSON file would sometimes fail otherwise.

        """
        self_copy = copy.deepcopy(self)
        self_copy.origin = ''
        msg = ('Setting attribute .origin to empty string during JSON '
               'serialization.')
        logging.warn(msg)

        if (fileName is None) or (fileName == 'stdout'):
            return json_tricks.dumps(self_copy)
        else:
            with openOutputFile(fileName=fileName,
                                fileCollisionMethod=fileCollisionMethod,
                                encoding=encoding) as f:
                json_tricks.dump(self_copy, f)

            logging.info('Saved JSON data to %s' % f.name)
Example #34
    def addMetadata(self):
        self.zipRepo()
        models=[parameters['object1'],parameters['object2'],parameters['modelTextureMap'],parameters['skyMap'],
                parameters['modelTextureMapNull'],parameters['skyMapNull'],
                parameters['redTexPath'],parameters['treeTexPath'],parameters['greenTexPath'],
                parameters['modelHeightMap']]
        self.appendModels(models)

        print "allo " + self.bagFilename
        bagName = self.bagFilename + ".bag"
        if self.bagType == 'full':
            time.sleep(5)  # so that bag file can be transferred from memory

        # self.metadata= self.metadataGen()
        try:
            metadata = (json.dumps(self.metadata,ensure_ascii=False))
        except UnicodeDecodeError as e:
            print "FIX this: unicode error in stop", e

            metadata = "empty"
        metadata = String(metadata)
        # print "metadata is:", metadata

        def saver():
            with rosbag.Bag(bagName, 'a') as bag:
                i = 0
                for _, _, t in bag.read_messages():
                    if i == 0:
                        tstamp = t
                    i += 1
                    break
                bag.write('/metadata', metadata, tstamp)
                # datasave

        # large bags take more than 5 seconds to save; if so, wait and then try again
        try:
            saver()
        except rosbag.bag.ROSBagException:
            time.sleep(10)
            saver()
Example #35
def act():
    socketio.emit("updateModel", json_tricks.dumps(ooda.act(), primitives=True))
Example #36
def stepSim():
    socketio.emit("updateModel", json_tricks.dumps(ooda.stepSim(), primitives=True))