Example #1
    def _write_coco_keypoint_results(self, keypoints, res_file):
        data_pack = [
            {
                "cat_id": self._class_to_coco_ind[cls],
                "cls_ind": cls_ind,
                "cls": cls,
                "ann_type": "keypoints",
                "keypoints": keypoints,
            }
            for cls_ind, cls in enumerate(self.classes)
            if cls != "__background__"
        ]

        results = self._coco_keypoint_results_one_category_kernel(data_pack[0])
        logger.info("=> writing results json to %s" % res_file)
        with open(res_file, "w") as f:
            json.dump(results, f, sort_keys=True, indent=4)
        try:
            with open(res_file) as f:
                json.load(f)
        except Exception:
            content = []
            with open(res_file, "r") as f:
                for line in f:
                    content.append(line)
            content[-1] = "]"
            with open(res_file, "w") as f:
                for c in content:
                    f.write(c)
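The try/except tail above guards against a dump that does not parse back cleanly by rewriting the last line to close the JSON array. One way to avoid needing the repair is to convert numpy values up front; a minimal sketch (the function name and `default` hook are illustrative, not from the original repo):

import json

def dump_results(results, res_file):
    # numpy scalars and arrays are not JSON-serializable by default;
    # .tolist() turns them into plain Python numbers and lists.
    with open(res_file, "w") as f:
        json.dump(results, f, sort_keys=True, indent=4,
                  default=lambda o: o.tolist())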
Example #2
 def _write_coco_keypoint_results(self, keypoints, res_file):
     data_pack = [
         {
             'cat_id': self._class_to_coco_ind[cls],
             'cls_ind': cls_ind,
             'cls': cls,
             'ann_type': 'keypoints',
             'keypoints': keypoints
         }
         for cls_ind, cls in enumerate(self.classes) if cls != '__background__'
     ]
     results = self._coco_keypoint_results_one_category_kernel(data_pack[0])
     logger.info('=> Writing results json to %s' % res_file)
     with open(res_file, 'w') as f:
         json.dump(results, f, sort_keys=True, indent=4)
     try:
         with open(res_file) as f:
             json.load(f)
     except Exception:
         content = []
         with open(res_file, 'r') as f:
             for line in f:
                 content.append(line)
         content[-1] = ']'
         with open(res_file, 'w') as f:
             for c in content:
                 f.write(c)
Example #3
    def generate_tasks(self, task_result: TaskResult) -> List[Task]:
        # initial/update temp config list
        if not self._temp_config_list:
            self._init_temp_config_list()
        else:
            score = self._tasks[task_result.task_id].score
            assert score is not None, 'SimulatedAnnealingTaskGenerator requires a non-None score for every task.'
            if not self._current_sparsity_list:
                self._current_sparsity_list = deepcopy(self._temp_sparsity_list)
                self._current_score = score
            else:
                delta_E = np.abs(score - self._current_score)
                probability = np.exp(-1 * delta_E / self.current_temperature)
                if self._current_score < score or np.random.uniform(0, 1) < probability:
                    self._current_score = score
                    self._current_sparsity_list = deepcopy(self._temp_sparsity_list)
                    self.current_temperature *= self.cool_down_rate
            if self.current_temperature < self.stop_temperature:
                return []
            self._update_with_perturbations()

        task_id = self._task_id_candidate
        new_config_list = self._recover_real_sparsity(deepcopy(self._temp_config_list))
        config_list_path = Path(self._intermediate_result_dir, '{}_config_list.json'.format(task_id))

        with config_list_path.open('w') as f:
            json_tricks.dump(new_config_list, f, indent=4)

        task = Task(task_id, self.temp_model_path, self.temp_masks_path, config_list_path)

        self._tasks[task_id] = task

        self._task_id_candidate += 1

        return [task]
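The acceptance rule in this generator is the Metropolis criterion from simulated annealing: a better score is always kept, and a worse one is kept with probability exp(-ΔE / T), which shrinks as the temperature cools. A standalone sketch of that rule (names are illustrative, not NNI's API):

import numpy as np

def metropolis_accept(current_score, candidate_score, temperature):
    # strictly better candidates are always accepted
    if candidate_score > current_score:
        return True
    # worse candidates are accepted with exponentially decaying probability
    delta_e = abs(candidate_score - current_score)
    return np.random.uniform(0, 1) < np.exp(-delta_e / temperature)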
Example #4
def cli(train_size, test_size, output_file):
    result = preprocess_imdb(train_size=train_size, test_size=test_size)

    if output_file:
        json.dump(result, output_file, **JSON_ARGS)
    else:
        print(json.dumps(result, **JSON_ARGS))
Example #5
File: base.py Project: dercaft/XNAS
 def _dump_tasks_info(self):
     tasks = {
         task_id: task.to_dict()
         for task_id, task in self._tasks.items()
     }
     with Path(self._log_dir_root, '.tasks').open('w') as f:
         json_tricks.dump(tasks, f, indent=4)
Example #6
    def _write_keypoint_results(self, keypoints, output_file):
        # TODO turn list into numpy arrays
        if output_file.endswith('.json'):
            # Convert numpy arrays to Python lists
            for img_name, poses in keypoints.items():
                for pose in poses:
                    pose['center'] = pose['center'].tolist()
                    pose['scale'] = pose['scale'].tolist()
                    pose['keypoints'] = pose['keypoints'].ravel().tolist()
                    pose['origbox'] = pose['origbox'].tolist()

            with open(output_file, 'w') as f:
                json.dump(keypoints, f, sort_keys=True, indent=4)

        elif output_file.endswith('.npy'):
            frame_ind = keypoints.keys()
            assert all([f.startswith('videocap#') for f in frame_ind])
            frame_ind = sorted(frame_ind,
                               key=lambda x: int(x[len('videocap#'):]))

            kps = []
            for f in frame_ind:
            assert len(keypoints[f]) == 1, (
                'Only images with a single pose are supported in numpy '
                'save mode, found: ' + str(keypoints[f]))
                kps.append(keypoints[f][0]['keypoints'])

            kps = np.stack(kps, axis=0)
            print("shape:" + str(kps.shape))
            np.save(output_file, kps)
        else:
            raise NotImplementedError('Unknown file ending: ' + output_file)
Example #7
def compute_labels_and_activations(model,
                                   dataset,
                                   files,
                                   batch_size,
                                   save_path=None):
    """ Load labels and compute activations with the provided model on the provided data loader.
        Optionally store them.

        :param model: tf.keras model
        :param dataset: TrainingDataset (non-shuffleable, to ensure that labels and activations match)
        :param files: files from which the ground-truth labels are loaded
        :param batch_size: batch size used when running prediction
        :param save_path: if provided, store the labels and activations in json format.
                          Use load_labels_and_activations to reload them without needing to
                          recompute them.

        :return: labels, activations (numpy arrays)
    """
    assert not dataset._do_shuffle, "Use a non-shuffled dataset to ensure that labels and predictions match"
    labels = load_labels(files)
    tf_dataset = dataset.dataset()
    tf_dataset = tf_dataset.batch(batch_size)  # drop_remainder=True?
    tf_dataset = tf_dataset.repeat()
    tf_dataset = tf_dataset.prefetch(tf.contrib.data.AUTOTUNE)

    activations = model.predict(tf_dataset,
                                steps=dataset.num_samples() // batch_size + 1,
                                verbose=1).squeeze()

    if save_path:
        results = {'labels': labels, 'activations': activations}
        with open(save_path, 'w') as json_file:
            jt.dump(results, json_file)

    return labels, activations
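Note that tf.contrib was removed in TensorFlow 2.x; if this snippet is ported, the prefetch line would presumably become:

tf_dataset = tf_dataset.prefetch(tf.data.AUTOTUNE)  # TF 2.x home of the AUTOTUNE constant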
Example #8
def log_results(results, output_dir):
    '''
    .. versionchanged:: 1.28
        Use json_tricks.dumps_ to dump results.

        .. _json_tricks.dumps: http://json-tricks.readthedocs.io/en/latest/#dumps

    Parameters
    ----------
    results : dict
        Test results.
    output_dir : str
        Path to output directory.
    '''
    output_dir = ph.path(output_dir)

    # Make output directory if it doesn't exist.
    output_dir.makedirs_p()

    # Construct filename based on current UTC date and time.
    filepath = output_dir.joinpath(
        'results-%s.json' % dt.datetime.utcnow().isoformat().replace(':', '.'))

    # write the results to a file
    with filepath.open('w') as output:
        # XXX Use `json_tricks` rather than standard `json` to support
        # serializing [Numpy arrays and scalars][1].
        #
        # [1]: http://json-tricks.readthedocs.io/en/latest/#numpy-arrays
        json_tricks.dump(results, output)
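json_tricks is used here instead of the standard json module because it can serialize numpy arrays and scalars. A minimal round-trip sketch (not from the project above):

import numpy as np
import json_tricks

payload = {'scores': np.arange(4, dtype=np.float32)}
text = json_tricks.dumps(payload)   # the array is stored as a tagged dict ({"__ndarray__": ...})
restored = json_tricks.loads(text)  # ...and comes back as a real ndarray
assert (restored['scores'] == payload['scores']).all()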
Example #9
    def _save_as_v1(self, fp):
        """Save copy to specified path.

        This mainly exists for testing and making sure we still read the old
        saved files.
        """
        def _rewrite_dir(state):
            "Rewrite directories in the old format."
            state['files'] = [x[0] for x in state['files']]
            state['directories'] = [_rewrite_dir(d)
                                    for d in state['directories']]
            state.pop('relpath')
            state.pop('name')
            return state

        fp = open_file(fp, 'wb')
        media = [(key, self.get(key).to_dict()) for key in self._relpath2index]
        tags = [(t.name, t.type) for t in self.tags]
        root = _rewrite_dir(self.root.__getstate__())
        processors = [processor.dump(x) for x in self.processors]
        for k, m in media:
            m['_ctime'] = long_to_datetime(m['_ctime'])
            m['_mtime'] = long_to_datetime(m['_mtime'])
        data = dict(
            version=1, path=self.path, name=self.name,
            description=self.description, tags=tags, media=media,
            root=root, processors=processors
        )
        json_tricks.dump(data, fp, compression=True)
        fp.close()
        logger.info('Saved project: %s', self.name)
Example #10
def cli(data_file, output_file):
    data = json.loads(data_file.read())
    model = baseline_model(data["train_texts"], data["train_labels"])
    args = (model, data["test_texts"], data["test_labels"])
    if output_file:
        json.dump(evaluate(*args, output_dict=True), output_file)
    else:
        print(evaluate(*args))
Example #11
 def write_file(self):
     '''save config to local file'''
     try:
         with open(self.experiment_file, 'w') as file:
             json_tricks.dump(self.experiments, file, indent=4)
     except IOError as error:
         print('Error:', error)
         return ''
Example #12
 def save_model(self, file_name):
     file_extension = os.path.splitext(file_name)[1]
     filename = file_name if len(file_extension) > 0 else file_name+'.roa'
     fs_dict = {}
     fs_dict['optical_model'] = self
     with open(filename, 'w') as f:
         json_tricks.dump(fs_dict, f, indent=1,
                          separators=(',', ':'), allow_nan=True)
Example #13
 def dump_measurement(self, measurement, data, title='dump'):
     fname = '{0}_{1}.json'.format(title, self._counter)
     path = join(self._output_directory, fname)
     results = dict()
     results[measurement] = self.analyze_measurement(measurement, data)
     with open(path, 'w') as f:
         dump(results, f)
     self._counter += 1
     return path
Example #14
def save_annotations(annotations, opt):
    os.makedirs(opt.annot_folder, exist_ok=True)

    for a in annotations:
        fname = get_output_filename(a, opt.annot_folder, 'json')
        with open(fname, 'w') as f:
            dump(a, f, indent=2)

    print(f'saved annotations to "{opt.annot_folder}" directory')
Example #15
    def save_agents(self, name) -> None:
        data = {
            'agents': [agent.__class__.__name__
                       for agent in self.agents.values()]
        }

        filename = f'saves/{name}.agents.json'
        with open(filename, mode='w') as file:
            jt.dump(data, file, indent=4)
Example #16
def cli(data_file, model_file, output_file):
    data = json.load(data_file)
    model = BertBinaryClassifier().cuda()
    model.load_state_dict(torch.load(model_file))
    predictions, _ = predict(model, data["test_tokens_ids"], BATCH_SIZE)

    json.dump(
        classification_report(data["test_y"], predictions, output_dict=True),
        output_file, **JSON_ARGS)
Example #17
 def _saveJSON(self):
     thisFileName = os.path.join(monitorFolder, self.name + ".json")
     # convert time structs to timestamps (floats)
     for calibName in self.calibs:
         calib = self.calibs[calibName]
         if isinstance(calib['calibDate'], time.struct_time):
             calib['calibDate'] = time.mktime(calib['calibDate'])
     with open(thisFileName, 'w') as outfile:
         json_tricks.dump(self.calibs, outfile, indent=2, allow_nan=True)
Example #18
def save_json(obj, outfile, allow_nan=True, compression=False):
    """Save an ssbio object as a JSON file using json_tricks"""
    if compression:
        with open(outfile, 'wb') as f:
            dump(obj, f, allow_nan=allow_nan, compression=compression)
    else:
        with open(outfile, 'w') as f:
            dump(obj, f, allow_nan=allow_nan, compression=compression)
    log.info('Saved {} (id: {}) to {}'.format(type(obj), obj.id, outfile))
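The mode switch matters: with compression=True json_tricks writes gzip-compressed bytes, so the file must be opened in binary mode ('wb'); without it, plain text ('w'). A usage sketch (the object and paths are illustrative):

save_json(protein, 'protein.json')                        # plain-text JSON
save_json(protein, 'protein.json.gz', compression=True)   # gzipped JSON, binary file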
Example #19
def track_with_manual_label_all(des_dir,
                                now_split=0,
                                label_name='object',
                                global_tracker_=''):
    global global_tracker
    global_tracker = global_tracker_

    info_list = generate_tracking_info(des_dir)

    info_list = info_list[now_split:]

    for info in info_list:
        print("NOW split:{}".format(now_split))

        frame_split_list, start_bbox, end_bbox = info

        r1, forward_bbox_l = track_with_manual_label(frame_split_list,
                                                     start_bbox, now_split)
        r2, backward_bbox_l = track_with_manual_label(frame_split_list[::-1],
                                                      end_bbox, now_split)

        now_split += 1

        if not r1 or not r2:  # tracking failed in either direction; leave this sequence unchanged
            continue

        # drop the last element: it is the manually labeled frame
        forward_bbox_l = forward_bbox_l[:-1]
        backward_bbox_l = backward_bbox_l[:-1]

        backward_bbox_l_to_forward = backward_bbox_l[::-1]

        for frame_f, forward_b, backward_b in zip(frame_split_list[1:-1],
                                                  forward_bbox_l,
                                                  backward_bbox_l_to_forward):
            print("NOW file:{}".format(frame_f))
            f_basename, f_no_suffix = split_the_abs_filename(frame_f)
            des_f = des_dir + '/' + f_no_suffix + '.json'
            with open(des_f, 'r') as f:
                info = json_tricks.load(f)

            assert 'manual_label' not in info or not info['manual_label']

            info['manual_label'] = False
            info['track_label'] = True

            info['label_list'] = [label_name]
            info['forward_bbox'] = forward_b
            info['backward_bbox'] = backward_b
            info['BBox_list'] = [(forward_b + backward_b) / 2]

            print(info['BBox_list'][0].shape)

            assert info['BBox_list'][0].shape == (4, 2)

            with open(des_f, 'w') as f:
                json_tricks.dump(info, f, sort_keys=True, indent=4)
Example #20
 def saveJSON(self):
     thisFileName = os.path.join(monitorFolder, self.name + ".json")
     # convert time structs to timestamps (floats)
     for calibName in self.calibs:
         calib = self.calibs[calibName]
         if isinstance(calib['calibDate'], time.struct_time):
             calib['calibDate'] = time.mktime(calib['calibDate'])
     with open(thisFileName, 'w') as outfile:
         json_tricks.dump(self.calibs, outfile, indent=2,
                          allow_nan=True)
Example #21
def write_json(json_obj, file_name):
    """
    Writes json object to a json file
    :param json_obj: json object
    :param file_name: str
    :return: None
    """
    with open(file_name, 'w') as f:
        json_tricks.dump(json_obj, f, indent=3)
    return None
Example #22
 def save(self):
     store_dict = {
         k: v
         for k, v in self.__dict__.items() if k in self.store_variables
     }
     filename = get_next_filename(self.storage_path,
                                  self.base_filename,
                                  extension="json")
     json_tricks.dump(store_dict, filename)
     print(f"Stored results in {filename} at {datetime.datetime.now()}")
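Unlike the standard library's json.dump, json_tricks.dump also accepts a file path in place of an open file handle, which is what the call above relies on. A minimal sketch (path is illustrative):

import json_tricks

# passing a path string lets json_tricks open and close the file itself
json_tricks.dump({'answer': 42}, '/tmp/results.json', indent=4)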
Example #23
def save_to_json(obj: (list, dict), path=None, fp=None, file_name=None):
    jsonable_dict = convert_to_jsonable(dict_or_list=obj)
    if fp:
        json.dump(obj=jsonable_dict, fp=fp, indent=4, sort_keys=True)
        fp.close()
    else:
        if file_name is not None:
            path = os.path.join(path, file_name)
        with open(path, 'w') as f:
            json.dump(obj=jsonable_dict, fp=f, indent=4, sort_keys=True)
Example #24
File: base.py Project: dercaft/XNAS
 def _save_data(self, folder_name: str, model: Module,
                masks: Dict[str, Dict[str, Tensor]],
                config_list: List[Dict]):
     Path(self._log_dir_root, folder_name).mkdir(parents=True, exist_ok=True)
     torch.save(model, Path(self._log_dir_root, folder_name, 'model.pth'))
     torch.save(masks, Path(self._log_dir_root, folder_name, 'masks.pth'))
     with Path(self._log_dir_root, folder_name,
               'config_list.json').open('w') as f:
         json_tricks.dump(config_list, f, indent=4)
Example #25
    def write_json(self, filename):
        data = {}
        data["map_obstacle"] = self.get_map_numpy()
        data["list_cities"] = self.get()
        print(type(data["map_obstacle"]))
        print(type(data["list_cities"]))

        # print(json_tricks.dumps(data, indent=4))

        with open(filename, "w") as json_file:
            json_tricks.dump(data, fp=json_file, indent=4)
Example #26
    def _do_python_keypoint_eval(self, res_file, keypoint_accs, cfg):
        coco_dt = self.coco.loadRes(res_file)
        coco_eval = CustomEval(self.coco, coco_dt, 'keypoints')
        coco_eval.params.useSegm = None
        part, tfms_name, crop_size = self._get_occluded(cfg)
        # coco_eval.params.occlusion = part
        stats_names = ['AP', 'Ap .5', 'AP .75', 'AP (M)', 'AP (L)', 'AR', 'AR .5', 'AR .75', 'AR (M)', 'AR (L)']

        # Per-keypoint evaluation
        metrics = {}
        if self.per_keypoint_eval:
            for i, keypoint in enumerate(keypoint_names["coco"]):
                coco_eval.params.keypoint = keypoint
                coco_eval.params.print = False

                coco_eval.evaluate()
                coco_eval.accumulate()
                coco_eval.summarize()
                info_str = []
                for ind, name in enumerate(stats_names):
                    info_str.append((name, coco_eval.stats[ind]))
                info_str.append(("[email protected]", keypoint_accs[i + 1].avg))
                metrics[keypoint] = dict(info_str)

        # All keypoint evaluation
        coco_eval.params.keypoint = "all"
        coco_eval.params.print = True

        coco_eval.evaluate()
        coco_eval.accumulate()
        coco_eval.summarize()

        info_str = []
        for ind, name in enumerate(stats_names):
            info_str.append((name, coco_eval.stats[ind]))

        info_str.append(("[email protected]", keypoint_accs[0].avg))

        if self.per_keypoint_eval:
            metrics["all"] = dict(info_str)
            file_path = self._create_results_save_folder()
            if crop_size != 0:
                file_path = os.path.join(file_path, "crop_size_" + str(crop_size))
                if not os.path.isdir(file_path):
                    os.mkdir(file_path)

            filename = part + '_' + tfms_name + '.json'
            file_path = os.path.join(file_path, filename)
            print(file_path)
            with open(file_path, 'w', encoding='utf-8') as f:
                json.dump(metrics, f, ensure_ascii=False, indent=4)

        return info_str
Example #27
def cli(texts_file, model_file, output_file, batch_size):
    texts: List[str] = json.load(texts_file)
    _, tokens_ids = tokenize(texts)

    model = BertBinaryClassifier().cuda()
    model.load_state_dict(torch.load(model_file))
    predictions, logits = predict(model, tokens_ids, batch_size)

    json.dump({
        "predictions": predictions,
        "logits": logits
    }, output_file, **JSON_ARGS)
Example #28
def save_calibration(channels, filename):
    from psi.util import get_tagged_values
    from json_tricks import dump
    settings = {}
    for channel in channels:
        metadata = get_tagged_values(channel.calibration, 'metadata')
        metadata['calibration_type'] = channel.calibration.__class__.__name__
        if 'source' in metadata:
            metadata['source'] = str(metadata['source'])
        settings[channel.name] = metadata
    with open(filename, 'w') as fh:
        dump(settings, fh, indent=4)
Example #29
def main(args):
    paths = list_dir(args.data_dir)

    for path in paths:
        print(path)
        x = loadmat(path)  # load mat file
        # x = h5py.File(path, 'r')
        # tables.openFile(path)
        # x = mat4py.loadmat(path)
        print(x["annolist"]["annorect"])
        json_fn = os.path.splitext(path)[0] + ".json"
        with open(json_fn, 'wt') as json_f:
            json.dump(x, json_f)
Example #30
def change_des_base_folder(des_folder, old_base_folder, new_base_folder, win2linux=True):
    des_all_files = preprocess.generate_all_abs_filenames(des_folder)
    for des_f in des_all_files:
        print("{} ...".format(des_f))
        with open(des_f, 'r') as f:
            info = json_tricks.load(f)

        abs_file_name = info['abs_file_name'] # type: str
        abs_file_name = change_file_prefix(abs_file_name, old_base_folder, new_base_folder, win2linux=win2linux)
        info['abs_file_name'] = abs_file_name

        with open(des_f, 'w') as f:
            json_tricks.dump(info, f, sort_keys=True, indent=4)
Example #31
    def startBag(self):
        self.bagger()
        self.metadata = self.metadataGen()
        # pickler(self.metadata, self.bagFilename)

        try:
            pickler(self.metadata, self.bagFilename + '.pickle')

            with open(self.bagFilename + ".json", 'w') as outfile:
                json.dump(self.metadata, outfile, indent=4, sort_keys=True, separators=(',', ':'), ensure_ascii=False)

        except UnicodeDecodeError as e:
            print("error with unicode FIX IT", e)
Example #32
    def _write_coco_keypoint_results(self, keypoints, res_file):
        data_pack = [{
            'cat_id': self._class_to_coco_ind[cls],
            'cls_ind': cls_ind,
            'cls': cls,
            'ann_type': 'keypoints',
            'keypoints': keypoints
        } for cls_ind, cls in enumerate(self.classes) if cls != '__background__']

        results = self._coco_keypoint_results_one_category_kernel(data_pack[0])
        logger.info('=> Writing results json to %s' % res_file)
        with open(res_file, 'w') as f:
            json.dump(results, f, sort_keys=True, indent=4)
Example #33
def write_json(data, path, file_name):
    """
    Write out data to a json file.

    Args:
        data: A dictionary representation of the data to write out
        path: The directory to output the file in
        file_name: The name of the file to write out
    """
    if os.path.exists(path) and not os.path.isdir(path):
        return
    elif not os.path.exists(path):
        mkdir_p(path)
    with open(os.path.join(path, file_name), 'w') as f:
        json_tricks.dump(data, f, indent=4, primitives=True, allow_nan=True)
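primitives=True makes json_tricks write numpy values as plain JSON lists and numbers instead of tagged dicts: the output is readable by any JSON parser, but dtype and shape are not restored on reload. A quick sketch:

import numpy as np
import json_tricks

print(json_tricks.dumps({'arr': np.eye(2)}, primitives=True))
# -> {"arr": [[1.0, 0.0], [0.0, 1.0]]} (exact formatting may differ)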
Example #34
    def saveAsJson(self,
                   fileName=None,
                   encoding='utf-8',
                   fileCollisionMethod='rename'):
        """
        Serialize the object to the JSON format.

        Parameters
        ----------
        fileName: string, or None
            the name of the file to create or append. Can include a relative or
            absolute path. If `None`, will not write to a file, but return an
            in-memory JSON object.

        encoding : string, optional
            The encoding to use when writing the file. This parameter will be
            ignored if `append` is `False` and `fileName` ends with `.psydat`
            or `.npy` (i.e. if a binary file is to be written).

        fileCollisionMethod : string
            Collision method passed to
            :func:`~psychopy.tools.fileerrortools.handleFileCollision`. Can be
            either of `'rename'`, `'overwrite'`, or `'fail'`.

        Notes
        -----
        Currently, a copy of the object is created, and the copy's .origin
        attribute is set to an empty string before serializing
        because loading the created JSON file would sometimes fail otherwise.

        """
        self_copy = copy.deepcopy(self)
        self_copy.origin = ''
        msg = ('Setting attribute .origin to empty string during JSON '
               'serialization.')
        logging.warn(msg)

        if (fileName is None) or (fileName == 'stdout'):
            return json_tricks.dumps(self_copy)
        else:
            with openOutputFile(fileName=fileName,
                                fileCollisionMethod=fileCollisionMethod,
                                encoding=encoding) as f:
                json_tricks.dump(self_copy, f)

            logging.info('Saved JSON data to %s' % f.name)
Example #35
ax[2].grid()
ax[2].legend(frameon=False)
ax[2].set_xlabel('Wavelength [nm]')
ax[2].set_xlim(250,5000)
ax[2].set_xticks([320,400,500,600,750,1000,1240,1600,2500,4000])
ax[2].set_xticklabels([320,400,500,600,750,1000,1240,1600,2500,4000])


plt.savefig('/mnt/c/Users/sleblanc/Research/ORACLES/aero_v4_yr.png',dpi=600,transparent=True)


# In[78]:


n['asy'][0,:] = new_asy
n['ssa'][0,:] = new_ssa
n['ext'][0,:] = new_ext


# In[79]:


help(dump)


# In[80]:


dump(n,'/mnt/c/Users/sleblanc/Research/ORACLES/aero_file_v4.txt')