Example #1
    def get_schema(self):
        basis_list = self.basis_list_to_precompute
        nbins = self.nbins
        bin_agg_function = self.bin_agg_function
        result = SimpleData.schema(self.adata)
        markers = result.get('markers', [])

        if self.markers is not None:  # add from file
            markers += get_markers(self.markers)

        markers = filter_markers(self.adata, markers)

        for marker in markers:
            if marker.get('id') is None:
                marker['id'] = unique_id()
            marker['readonly'] = True
        result['markers'] = markers
        result['format'] = self.output_format
        if self.stats:
            result['precomputed'] = True  # has stats
        if nbins is not None:
            result['embeddings'] = []
            for basis_name in basis_list:
                result['embeddings'].append({
                    'precomputed': True,
                    'name': basis_name,
                    'nbins': nbins,
                    'agg': bin_agg_function,
                    'dimensions': 2
                })
        return result
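
Every example on this page mints identifiers with unique_id(). A minimal sketch of such a helper, assuming a UUID4-based implementation (the project's actual helper may differ):

    import uuid

    def unique_id():
        # Assumed implementation: a random UUID rendered as a 32-character hex string.
        return uuid.uuid4().hex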
Example #2
    def create_job(self, email, dataset_id, job_name, job_type, params):
        job_id = unique_id()
        self.job_id_to_job[job_id] = dict(id=job_id,
                                          dataset_id=dataset_id,
                                          name=job_name,
                                          type=job_type,
                                          params=params,
                                          status=None,
                                          submitted=str(
                                              datetime.datetime.utcnow()))
        return job_id
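
Note that datetime.datetime.utcnow() is deprecated since Python 3.12. A timezone-aware equivalent, assuming downstream consumers of the 'submitted' string can tolerate the "+00:00" suffix it gains, would be:

    import datetime

    # Timezone-aware replacement for utcnow(); str() of this value includes "+00:00".
    submitted = str(datetime.datetime.now(datetime.timezone.utc))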
Example #3
    def __upsert_entity(self, dataset_id, entity_id, kind, entity_dict):
        if entity_id is None:
            entity_id = unique_id()  # insert: mint a fresh id
        json_data = self.dataset_to_info[dataset_id]['json_data']
        entity = json_data[kind].get(entity_id)
        if entity is None:  # first write for this id: create the entity
            entity = {}
            json_data[kind][entity_id] = entity
        entity.update(entity_dict)  # merge new or changed fields into the entity
        write_json(json_data, self.dataset_to_info[dataset_id]['json_path'])
        return entity_id
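
Examples #3, #6, and #7 persist the whole document with write_json(). A plausible sketch of that helper, assuming it simply serializes the dict to the given path (the real implementation may differ, e.g. by compressing or writing atomically):

    import json

    def write_json(data, path):
        # Assumed helper: dump the in-memory JSON document back to disk.
        with open(path, 'w') as f:
            json.dump(data, f)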
Example #4
    def get_schema(self):
        result = dataset_schema(self.dataset, n_features=self.group_nfeatures)
        markers = result.get('markers', [])

        if self.markers is not None:  # add results specified from file
            markers += get_markers(self.markers)
            markers = filter_markers(self.dataset, markers)

        for marker in markers:
            if marker.get('id') is None:
                marker['id'] = unique_id()
            marker['readonly'] = True
        result['markers'] = markers
        result['format'] = self.output_format
        return result
Example #5
    def get_schema(self):
        result = dataset_schema(self.dataset, n_features=self.group_nfeatures)
        markers = result.get("markers", [])

        if self.markers is not None:  # add results specified from file
            markers += get_markers(self.markers)
            markers = filter_markers(self.dataset, markers)

        for marker in markers:
            if marker.get("id") is None:
                marker["id"] = unique_id()
            marker["readonly"] = True
        result["markers"] = markers
        result["format"] = self.output_format
        return result
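
get_markers() and filter_markers() are project helpers whose implementations are not shown here. The sketch below is only an assumption of what the filtering step might do, namely dropping features that are not present in the dataset (marker dicts carry 'features', 'name', and 'category' keys, as in Example #6 below):

    def filter_markers(dataset, markers):
        # Hypothetical sketch: keep only features that exist in the dataset's variables.
        available = set(dataset.var.index)  # assumes an AnnData-like object
        filtered = []
        for marker in markers:
            features = [f for f in marker.get('features', []) if f in available]
            if features:
                filtered.append(dict(marker, features=features))
        return filtered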
Example #6
    def upsert_feature_set(self, email, dataset_id, set_id, category, name,
                           features):
        if set_id is None:
            set_id = unique_id()
        else:  # updating an existing set: drop the old entry first
            self.delete_feature_set(email=email,
                                    dataset_id=dataset_id,
                                    set_id=set_id)
        json_data = self.dataset_to_info[dataset_id]['json_data']
        markers = json_data.get('markers')
        if markers is None:
            markers = []
            json_data['markers'] = markers
        markers.append(
            dict(id=set_id, features=features, name=name, category=category))
        write_json(json_data, self.dataset_to_info[dataset_id]['json_path'])
        return set_id
Example #7
    def upsert_dataset_filter(self, email, dataset_id, filter_id, filter_name,
                              filter_notes, dataset_filter):
        if filter_id is None:
            filter_id = unique_id()
        json_data = self.dataset_to_info[dataset_id]['json_data']
        entity = json_data['filters'].get(filter_id)
        if entity is None:
            entity = {}
            json_data['filters'][filter_id] = entity
        if filter_name is not None:
            entity['name'] = filter_name
        if dataset_filter is not None:
            entity['value'] = json.dumps(dataset_filter)  # stored as a JSON string
        if email is not None:
            entity['email'] = email
        if dataset_id is not None:
            entity['dataset_id'] = dataset_id
        if filter_notes is not None:
            entity['notes'] = filter_notes
        write_json(json_data, self.dataset_to_info[dataset_id]['json_path'])
        return filter_id
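
Because upsert_dataset_filter stores the filter with json.dumps, the read path has to decode it again. A minimal sketch of that read side (the helper name is hypothetical):

    import json

    def read_dataset_filter(json_data, filter_id):
        # The stored 'value' field is a JSON string, so decode it back into a dict.
        entity = json_data['filters'][filter_id]
        return json.loads(entity['value'])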