def select(self, model_name, raw_filters=None):
    """
    Select data of a certain type, e.g. blocks or spiketrains, from the
    G-Node REST API and convert the results to a list of model objects.

    :param model_name: The name of the model as string.
    :type model_name: str
    :param raw_filters: Filters as defined by the G-Node REST API.
    :type raw_filters: dict

    :returns: A list of (filtered) results.
    :rtype: list
    """
    results = []

    raw_filters = {} if raw_filters is None else raw_filters

    location = Model.get_location(model_name)
    url = urlparse.urljoin(self.location, location)

    headers = {}
    future = self.__session.get(url, headers=headers, params=raw_filters)
    response = future.result()
    self.raise_for_status(response)

    raw_results = convert.json_to_collections(response.content, as_list=True)
    for obj in raw_results:
        results.append(convert.collections_to_model(obj))

    return results
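# Hedged usage sketch for select(): assumes a connected store instance exposing
# this method and "block" as a valid model name; the filter key below is purely
# illustrative and must match what the G-Node REST API actually accepts.
def list_blocks_by_name(store, name_fragment):
    # Filtering happens server-side; the result is a list of Model objects.
    return store.select("block", raw_filters={"name__icontains": name_fragment})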
def set(self, entity, avoid_collisions=False):
    """
    Update or create an entity on the G-Node REST API. If the entity provides
    an etag/guid and avoid_collisions is True, it is sent along in the
    'If-match' header.

    :param entity: The entity to persist.
    :type entity: Model
    :param avoid_collisions: Try to avoid collisions (lost update problem).
    :type avoid_collisions: bool

    :returns: The updated entity.
    :rtype: Model

    :raises: RuntimeError if the changes collide with remote changes of the entity.
    """
    if hasattr(entity, "location") and entity.location is not None:
        method = 'put'
        url = urlparse.urljoin(self.location, entity.location)
    else:
        method = 'post'
        url = urlparse.urljoin(self.location, Model.get_location(entity.model))

    data = convert.model_to_json_response(entity)
    headers = {'Content-Type': 'application/json'}
    if avoid_collisions and entity.guid is not None:
        headers['If-match'] = entity.guid

    future = getattr(self.__session, method)(url, data=data, headers=headers)
    response = future.result()

    if response.status_code == 304:
        result = entity
    else:
        self.raise_for_status(response)
        result = convert.collections_to_model(convert.json_to_collections(response.content))

    return result
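# Hedged usage sketch for set(): assumes `store` is a connected store and the
# entity was fetched earlier, so it may carry a guid/etag usable for optimistic
# locking via the 'If-match' header.
def safe_update(store, entity):
    # With avoid_collisions=True a stale guid makes the server reject the
    # update instead of silently overwriting newer remote changes; a 304
    # response returns the entity unchanged.
    return store.set(entity, avoid_collisions=True)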
def collections_to_model(collection, as_list=False):
    """
    Converts objects of nested collections (list, dict) as produced by the json
    module into a model object.

    :param collection: The object or list of objects to convert.
    :type collection: dict|list
    :param as_list: If True the result is always a list.
    :type as_list: bool

    :returns: The converted object or a list of converted objects.
    :rtype: Model|list

    :raises: ValueError
    """
    def clean(ref):
        if ref.lower().startswith('http'):
            return urlparse.urlparse(ref).path
        return ref

    models = []

    # adjust json object
    if isinstance(collection, list):
        objects = collection
    elif 'selected' in collection:
        objects = collection['selected']
    else:
        objects = [collection]

    # convert
    for obj in objects:
        if 'resource_uri' not in obj:
            raise ValueError("Object identifier is missing")

        location = urlparse.urlparse(obj['resource_uri']).path
        api, version, category, model_name, obj_id = location.strip('/').split('/')
        model_obj = Model.create(model_name)

        for field_name in model_obj:
            field = model_obj.get_field(field_name)

            if field.is_child:
                obj_field_name = field.name_mapping or field.type_info + '_set'
            else:
                obj_field_name = field.name_mapping or field_name

            if obj_field_name not in obj:
                continue

            if field.type_info in ['data', 'datafile']:
                value = obj[obj_field_name]
                field_val = {
                    "units": obj.get(obj_field_name + '__unit', None),
                    "data": float(value) if field.type_info == 'data' else value
                }
            elif field_name == 'model':
                field_val = model_name
            elif field.is_child and obj[obj_field_name] is not None:
                field_val = [clean(ref) for ref in obj[obj_field_name]]
            else:
                field_val = obj[obj_field_name]

            if field_val is not None:
                model_obj[field_name] = field_val

        models.append(model_obj)

    if not as_list:
        models = models[0] if len(models) > 0 else None

    return models
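# Hedged usage sketch for collections_to_model(): the payload below is
# illustrative only. The function requires 'resource_uri', whose path segments
# (api/version/category/model/id) determine the model type, here "block".
raw = {"resource_uri": "/api/v1/electrophysiology/block/101", "name": "demo block"}
single = collections_to_model(raw)                    # one Model instance
as_list = collections_to_model([raw], as_list=True)   # always a list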
def to_model(self, obj, in_memory=False):
    """
    Converts a native neo or odml object into a model object.

    :param obj: The object to convert.
    :type obj: object
    :param in_memory: Store array data to disk (False) or keep it inside the
                      converted model (True).

    :returns: A new model object.
    :rtype: Model
    """
    # TODO detect unbound (newly created and not persisted) related objects and throw an error

    # get type name and create a model
    model_obj = None
    for model_name in NativeDriver.RW_MAP:
        cls = NativeDriver.RW_MAP[model_name]

        if isinstance(obj, cls):
            model_obj = Model.create(model_name)
            break

    if model_obj is None:
        raise TypeError("The type of the native object (%s) is not a compatible type!" % type(obj))

    # iterate over fields and set them on the model
    for field_name in model_obj:
        field = model_obj.get_field(field_name)

        if field.type_info != "datafile":
            # non-data fields
            if model_obj.model == 'section' and field_name in ['document', 'section']:
                if model_obj.document is not None or model_obj.section is not None:
                    continue

                parent = getattr(obj, 'parent')
                if parent is not None and hasattr(parent, 'location'):
                    if isinstance(parent, odml.doc.Document):
                        model_obj['document'] = parent.location
                    elif isinstance(parent, odml.section.BaseSection):
                        model_obj['section'] = parent.location

            elif hasattr(obj, field_name):
                field_val = getattr(obj, field_name, field.default)

                # special treatment for the location field
                if field_name == "location" and field_val is not None:
                    model_obj.location = field_val
                    model_obj.id = field_val.split("/")[-1]

                # process all child relationships
                elif field.is_child:
                    if field_val is not None:
                        locations = []
                        for val in field_val:
                            if hasattr(val, 'location'):
                                locations.append(val.location)
                        model_obj[field_name] = locations

                # process all parent relationships
                elif field.is_parent:
                    if field_val is not None and hasattr(field_val, 'location'):
                        model_obj[field_name] = field_val.location

                # data fields
                elif field.type_info == "data":
                    if field_val is not None:
                        data = float(field_val)
                        units = field_val.dimensionality.string
                        model_obj[field_name] = {"data": data, "units": units}

                # default
                else:
                    if isinstance(field_val, numpy.ndarray):
                        field_val = list(field_val)
                    model_obj[field_name] = field_val

        else:
            # datafile fields
            is_array_1 = (
                field_name == "signal" and
                model_obj.model in (Model.ANALOGSIGNAL, Model.ANALOGSIGNALARRAY,
                                    Model.IRREGULARLYSAMPLEDSIGNAL)
            )
            is_array_2 = (
                field_name == "times" and model_obj.model == Model.SPIKETRAIN
            )

            data_array = None
            units = None

            if is_array_1 or is_array_2:
                data_array = obj
            elif hasattr(obj, field_name):
                data_array = getattr(obj, field_name, field.default)

            if data_array is None:
                continue

            if hasattr(data_array, "dimensionality"):
                units = data_array.dimensionality.string

            if in_memory:
                location_or_obj = data_array
            else:
                location_or_obj = self.store.set_array(data_array, temporary=True)

            model_obj[field_name] = {"units": units, "data": location_or_obj}

    return model_obj
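# Hedged usage sketch for to_model(): assumes neo and quantities are installed
# and that NativeDriver.RW_MAP maps neo.SpikeTrain to Model.SPIKETRAIN. The
# "times" array then takes the datafile branch; in_memory=True keeps it inside
# the model instead of writing a temporary array via self.store.set_array().
import neo
import quantities as pq

def spiketrain_to_model(driver):
    train = neo.SpikeTrain([0.1, 0.5, 1.2] * pq.s, t_stop=2.0 * pq.s)
    return driver.to_model(train, in_memory=True)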
def get_model_by_obj(cls, obj):
    """
    Return an empty model object matching the type of a native neo or odml
    object, or None if the object's type is not present in RW_MAP.
    """
    for model_name, model in cls.RW_MAP.items():
        if isinstance(obj, model):
            return Model.create(model_name)
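# Hedged usage sketch for get_model_by_obj(): assumes it is a classmethod on
# NativeDriver and that neo.Block is registered in RW_MAP; the result is an
# empty model of the matching type, or None for unknown native types.
import neo

model = NativeDriver.get_model_by_obj(neo.Block(name="demo"))
if model is None:
    raise TypeError("no matching model for this native object type")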