Example #1
0
def _make_new_dataset(dataset_dict):
    """Return a deep copy of ``dataset_dict`` carrying a fresh data hash.

    When the caller may not reuse the existing dataset, a new one is
    created.  Injecting a unique 'created_at' timestamp into the metadata
    guarantees the hash computed over the copy differs from the original's.
    """
    dataset_copy = copy.deepcopy(dataset_dict)
    # A unique piece of metadata forces a unique hash.
    dataset_copy['metadata']['created_at'] = datetime.datetime.now()
    dataset_copy['data_hash'] = generate_data_hash(dataset_copy)
    return dataset_copy
Example #2
0
def _make_new_dataset(dataset_dict):
    """Build a brand-new dataset dict from ``dataset_dict``.

    The returned dataset must hash differently from its source, so a
    unique timestamp is stamped into its metadata before the hash is
    recomputed and stored under 'data_hash'.
    """
    result = copy.deepcopy(dataset_dict)
    result['metadata']['created_at'] = datetime.datetime.now()
    fresh_hash = generate_data_hash(result)
    result['data_hash'] = fresh_hash
    return result
Example #3
0
def _process_incoming_data(data, user_id=None, source=None):
    """Convert incoming data objects into dataset dicts keyed by data hash.

    Items whose value cannot be parsed are logged and skipped.  For each
    remaining item a dataset dict is built, tagged with provenance
    metadata ('user_id' / 'source' when not already present), hashed, and
    stored in the returned mapping under that hash.
    """
    datasets = {}

    for item in data:
        parsed = item.parse_value()
        if parsed is None:
            log.info(
                "Cannot parse data (dataset_id=%s). "
                "Value not available.", item)
            continue

        # Derive the dimension from the unit when none was supplied.
        if item.unit is not None and item.dimension in (None, 'dimensionless'):
            dimension = hydra_units.get_unit_dimension(item.unit)
        else:
            dimension = item.dimension

        dataset = {
            'data_type': item.type,
            'data_name': item.name,
            'data_units': item.unit,
            'created_by': user_id,
            'frequency': None,
            'start_time': None,
            'data_dimen': dimension,
            'value': _get_db_val(item.type, parsed),
        }

        # Metadata may arrive as a JSON string or as a dict already.
        if item.metadata is None:
            metadata = {}
        elif isinstance(item.metadata, (str, unicode)):
            metadata = json.loads(item.metadata)
        else:
            metadata = item.metadata

        # Tag provenance unless the caller already supplied it
        # (key comparison is case-insensitive).
        lowered = [key.lower() for key in metadata]
        if user_id is not None and 'user_id' not in lowered:
            metadata[u'user_id'] = unicode(user_id)
        if source is not None and 'source' not in lowered:
            metadata[u'source'] = unicode(source)

        dataset['metadata'] = metadata

        item.data_hash = generate_data_hash(dataset)
        dataset['data_hash'] = item.data_hash
        datasets[item.data_hash] = dataset

    return datasets
Example #4
0
def _process_incoming_data(data, user_id=None, source=None):
    """Turn incoming data objects into a hash-keyed dict of dataset dicts.

    Entries whose value cannot be parsed are logged and skipped.  Each
    surviving entry is normalised into a dataset record, its metadata is
    stamped with 'user_id'/'source' provenance when absent, and the record
    is indexed by its generated data hash.
    """
    datasets = {}

    for entry in data:
        value = entry.parse_value()
        if value is None:
            log.info(
                "Cannot parse data (dataset_id=%s). "
                "Value not available.", entry)
            continue

        record = dict(
            data_type=entry.type,
            data_name=entry.name,
            data_units=entry.unit,
            created_by=user_id,
            frequency=None,
            start_time=None,
        )

        # Fall back to the unit's dimension when none was given.
        missing_dimension = entry.dimension in (None, 'dimensionless')
        if entry.unit is not None and missing_dimension:
            record['data_dimen'] = hydra_units.get_unit_dimension(entry.unit)
        else:
            record['data_dimen'] = entry.dimension

        record['value'] = _get_db_val(entry.type, value)

        # Normalise metadata to a dict (it may arrive as a JSON string).
        metadata = entry.metadata
        if metadata is None:
            metadata = {}
        elif isinstance(metadata, (str, unicode)):
            metadata = json.loads(metadata)

        # Provenance tagging; the presence check is case-insensitive.
        existing_keys = [k.lower() for k in metadata]
        if user_id is not None and 'user_id' not in existing_keys:
            metadata[u'user_id'] = unicode(user_id)
        if source is not None and 'source' not in existing_keys:
            metadata[u'source'] = unicode(source)

        record['metadata'] = metadata

        entry.data_hash = generate_data_hash(record)
        record['data_hash'] = entry.data_hash
        datasets[entry.data_hash] = record

    return datasets
Example #5
0
    def get_hash(self, val, metadata):
        """Compute and return the data hash for this dataset.

        Falls back to ``self.get_metadata_as_dict()`` when *metadata* is
        None, and to ``self.parse_value()`` when *val* is None, before
        hashing the assembled dataset dict.
        """
        meta = self.get_metadata_as_dict() if metadata is None else metadata
        value = self.parse_value() if val is None else val

        # NOTE(review): the type is lower-cased here but not everywhere a
        # hash is generated elsewhere — confirm callers agree on casing.
        hash_source = {
            'data_name': self.name,
            'data_units': self.unit,
            'data_dimen': self.dimension,
            'data_type': self.type.lower(),
            'value': value,
            'metadata': meta,
        }

        return generate_data_hash(hash_source)