def get_attribute_data(attr_ids, node_ids, **kwargs):
    """For the given attributes, return the matching node resource
    attributes and their resource scenarios.

    Args:
        attr_ids: iterable of attribute IDs to match.
        node_ids: iterable of node IDs to match.
        **kwargs: expected to carry 'user_id', used for the dataset
            read-permission check on hidden datasets.

    Returns:
        Tuple ``(node_attrs, resource_scenarios)`` where ``node_attrs``
        is the list of matching ResourceAttr rows and
        ``resource_scenarios`` the ResourceScenario rows for them,
        ordered by scenario ID. Hidden datasets the user may not read
        have their value/frequency/start_time blanked out.
    """
    node_attrs = DBSession.query(ResourceAttr).\
        options(joinedload_all('attr')).\
        filter(ResourceAttr.node_id.in_(node_ids),
               ResourceAttr.attr_id.in_(attr_ids)).all()

    ra_ids = [ra.resource_attr_id for ra in node_attrs]

    resource_scenarios = DBSession.query(ResourceScenario).filter(
        ResourceScenario.resource_attr_id.in_(ra_ids)).options(
        joinedload('resourceattr')).options(
        joinedload_all('dataset.metadata')).order_by(
        ResourceScenario.scenario_id).all()

    for rs in resource_scenarios:
        if rs.dataset.hidden == 'Y':
            try:
                rs.dataset.check_read_permission(kwargs.get('user_id'))
            except Exception:
                # Best-effort: the user may not read this hidden dataset,
                # so blank out its payload instead of failing the whole
                # request. (Was a bare `except:`, which also swallowed
                # SystemExit/KeyboardInterrupt.)
                rs.dataset.value = None
                rs.dataset.frequency = None
                rs.dataset.start_time = None
        # Detach from the session so later session activity cannot
        # flush the blanked-out values back to the DB.
        DBSession.expunge(rs)

    return node_attrs, resource_scenarios
def get_attribute_data(attr_ids, node_ids, **kwargs):
    """For the given attributes, return the matching node resource
    attributes and their resource scenarios.

    Args:
        attr_ids: iterable of attribute IDs to match.
        node_ids: iterable of node IDs to match.
        **kwargs: expected to carry 'user_id', used for the dataset
            read-permission check on hidden datasets.

    Returns:
        Tuple ``(node_attrs, resource_scenarios)`` where ``node_attrs``
        is the list of matching ResourceAttr rows and
        ``resource_scenarios`` the ResourceScenario rows for them,
        ordered by scenario ID. Hidden datasets the user may not read
        have their value/frequency/start_time blanked out.
    """
    node_attrs = DBSession.query(ResourceAttr).\
        options(joinedload_all('attr')).\
        filter(ResourceAttr.node_id.in_(node_ids),
               ResourceAttr.attr_id.in_(attr_ids)).all()

    ra_ids = [ra.resource_attr_id for ra in node_attrs]

    resource_scenarios = DBSession.query(ResourceScenario).filter(
        ResourceScenario.resource_attr_id.in_(ra_ids)).options(
        joinedload('resourceattr')).options(
        joinedload_all('dataset.metadata')).order_by(
        ResourceScenario.scenario_id).all()

    for rs in resource_scenarios:
        if rs.dataset.hidden == 'Y':
            try:
                rs.dataset.check_read_permission(kwargs.get('user_id'))
            except Exception:
                # Best-effort: the user may not read this hidden dataset,
                # so blank out its payload instead of failing the whole
                # request. (Was a bare `except:`, which also swallowed
                # SystemExit/KeyboardInterrupt.)
                rs.dataset.value = None
                rs.dataset.frequency = None
                rs.dataset.start_time = None
        # Detach from the session so later session activity cannot
        # flush the blanked-out values back to the DB.
        DBSession.expunge(rs)

    return node_attrs, resource_scenarios
def clone_dataset(dataset_id, **kwargs):
    """Clone a single dataset, by ID.

    The caller (``kwargs['user_id']``) must be the dataset's creator or
    one of its owners. The clone gets "(Clone)" appended to its name, a
    fresh ID/creation date, and ``clone_of`` / ``cloned_by`` metadata
    entries recording its provenance.

    Args:
        dataset_id: ID of the dataset to clone; ``None`` returns None.
        **kwargs: must carry 'user_id' of the requesting user.

    Returns:
        The newly persisted cloned Dataset, or None if dataset_id is None.

    Raises:
        HydraError: if the dataset does not exist.
        PermissionError: if the user is neither creator nor owner.
    """
    user_id = int(kwargs.get('user_id'))

    if dataset_id is None:
        return None

    dataset = DBSession.query(Dataset).filter(
        Dataset.dataset_id == dataset_id).options(
        joinedload_all('metadata')).first()

    if dataset is None:
        raise HydraError("Dataset %s does not exist." % (dataset_id))

    if dataset.created_by != user_id:
        # BUG FIX: was `DatasetOwner.dataset_id == Dataset.dataset_id`
        # (a column-to-column comparison), which matched if the user
        # owned *any* dataset. Filter on the specific dataset instead.
        owner = DBSession.query(DatasetOwner).filter(
            DatasetOwner.dataset_id == dataset_id,
            DatasetOwner.user_id == user_id).first()
        if owner is None:
            raise PermissionError(
                "User %s is not an owner of dataset %s and therefore cannot clone it."
                % (user_id, dataset_id))

    # Detach the row and make it transient so re-adding it inserts a copy.
    DBSession.expunge(dataset)
    make_transient(dataset)

    dataset.data_name = dataset.data_name + "(Clone)"
    dataset.dataset_id = None
    dataset.cr_date = None

    # Avoid duplicate provenance entries when cloning a clone.
    # BUG FIX: the original `del (m)` only unbound the loop variable and
    # left the entries in the collection; actually remove them.
    for m in list(dataset.metadata):
        if m.metadata_name in ("clone_of", "cloned_by"):
            dataset.metadata.remove(m)

    cloned_meta = Metadata()
    cloned_meta.metadata_name = "clone_of"
    cloned_meta.metadata_val = str(dataset_id)
    dataset.metadata.append(cloned_meta)

    cloned_meta = Metadata()
    cloned_meta.metadata_name = "cloned_by"
    cloned_meta.metadata_val = str(user_id)
    dataset.metadata.append(cloned_meta)

    dataset.set_hash()
    DBSession.add(dataset)
    DBSession.flush()

    cloned_dataset = DBSession.query(Dataset).filter(
        Dataset.dataset_id == dataset.dataset_id).first()

    return cloned_dataset
def clone_dataset(dataset_id, **kwargs):
    """Clone a single dataset, by ID.

    The caller (``kwargs['user_id']``) must be the dataset's creator or
    one of its owners. The clone gets "(Clone)" appended to its name, a
    fresh ID/creation date, and ``clone_of`` / ``cloned_by`` metadata
    entries recording its provenance.

    Args:
        dataset_id: ID of the dataset to clone; ``None`` returns None.
        **kwargs: must carry 'user_id' of the requesting user.

    Returns:
        The newly persisted cloned Dataset, or None if dataset_id is None.

    Raises:
        HydraError: if the dataset does not exist.
        PermissionError: if the user is neither creator nor owner.
    """
    user_id = int(kwargs.get('user_id'))

    if dataset_id is None:
        return None

    dataset = DBSession.query(Dataset).filter(
        Dataset.dataset_id == dataset_id).options(
        joinedload_all('metadata')).first()

    if dataset is None:
        raise HydraError("Dataset %s does not exist." % (dataset_id))

    if dataset.created_by != user_id:
        # BUG FIX: was `DatasetOwner.dataset_id == Dataset.dataset_id`
        # (a column-to-column comparison), which matched if the user
        # owned *any* dataset. Filter on the specific dataset instead.
        owner = DBSession.query(DatasetOwner).filter(
            DatasetOwner.dataset_id == dataset_id,
            DatasetOwner.user_id == user_id).first()
        if owner is None:
            raise PermissionError(
                "User %s is not an owner of dataset %s and therefore cannot clone it."
                % (user_id, dataset_id))

    # Detach the row and make it transient so re-adding it inserts a copy.
    DBSession.expunge(dataset)
    make_transient(dataset)

    dataset.data_name = dataset.data_name + "(Clone)"
    dataset.dataset_id = None
    dataset.cr_date = None

    # Avoid duplicate provenance entries when cloning a clone.
    # BUG FIX: the original `del(m)` only unbound the loop variable and
    # left the entries in the collection; actually remove them.
    for m in list(dataset.metadata):
        if m.metadata_name in ("clone_of", "cloned_by"):
            dataset.metadata.remove(m)

    cloned_meta = Metadata()
    cloned_meta.metadata_name = "clone_of"
    cloned_meta.metadata_val = str(dataset_id)
    dataset.metadata.append(cloned_meta)

    cloned_meta = Metadata()
    cloned_meta.metadata_name = "cloned_by"
    cloned_meta.metadata_val = str(user_id)
    dataset.metadata.append(cloned_meta)

    dataset.set_hash()
    DBSession.add(dataset)
    DBSession.flush()

    cloned_dataset = DBSession.query(Dataset).filter(
        Dataset.dataset_id == dataset.dataset_id).first()

    return cloned_dataset