def from_hash(self, obj):
    """Populate this TransactionRelease from a hash of attributes."""
    super(TransactionRelease, self).from_hash(obj)

    def _lookup_transaction():
        # Resolve the released transaction row only when the key is present.
        return Transactions.get(Transactions.id == obj['transaction'])

    def _lookup_authorized_person():
        # Resolve the authorizing user row only when the key is present.
        return Users.get(Users.id == obj['authorized_person'])

    self._set_only_if(
        'transaction', obj, 'transaction', _lookup_transaction)
    self._set_only_if(
        'authorized_person', obj, 'authorized_person',
        _lookup_authorized_person)
def from_hash(self, obj):
    """Populate this AToolTransaction from a hash of attributes."""
    super(AToolTransaction, self).from_hash(obj)

    def _lookup_tool():
        # Resolve the analytical tool row only when the key is present.
        return AnalyticalTools.get(
            AnalyticalTools.id == obj['analytical_tool_id'])

    def _lookup_transaction():
        # Resolve the transaction row only when the key is present.
        return Transactions.get(Transactions.id == obj['transaction_id'])

    self._set_only_if(
        'analytical_tool_id', obj, 'analytical_tool', _lookup_tool)
    self._set_only_if(
        'transaction_id', obj, 'transaction', _lookup_transaction)
def from_hash(self, obj):
    """Populate this TransactionKeyValue from a hash of attributes."""
    super(TransactionKeyValue, self).from_hash(obj)

    def _lookup_transaction():
        # Resolve the transaction row only when the key is present.
        return Transactions.get(Transactions.id == obj['transaction_id'])

    def _lookup_value():
        # Resolve the value row only when the key is present.
        return Values.get(Values.id == obj['value_id'])

    def _lookup_key():
        # Resolve the key row only when the key is present.
        return Keys.get(Keys.id == obj['key_id'])

    self._set_only_if(
        'transaction_id', obj, 'transaction', _lookup_transaction)
    self._set_only_if('value_id', obj, 'value', _lookup_value)
    self._set_only_if('key_id', obj, 'key', _lookup_key)
def base_create_dep_objs(cls):
    """Create all objects that AToolTransaction depends on.

    Builds and saves the Transactions and AnalyticalTools rows (and their
    own dependencies) referenced by the sample tool-transaction hash.

    Fix: the previous docstring was copy-pasted from the
    TransactionKeyValue dependency builder and misdescribed this method.
    """
    trans = Transactions()
    TestTransactions.base_create_dep_objs()
    trans.from_hash(SAMPLE_TRANSACTION_HASH)
    trans.save(force_insert=True)
    tool = AnalyticalTools()
    TestAnalyticalTools.base_create_dep_objs()
    tool.from_hash(SAMPLE_TOOL_HASH)
    tool.save(force_insert=True)
def base_create_dep_objs(cls):
    """Build the object and make dependent user object."""
    # Create each dependency's own prerequisites, then insert the row.
    dependencies = (
        (Users, TestUsers, SAMPLE_CREATOR_HASH),
        (Transactions, TestTransactions, SAMPLE_TRANSACTION_HASH),
    )
    for model_cls, test_cls, sample_hash in dependencies:
        obj = model_cls()
        test_cls.base_create_dep_objs()
        obj.from_hash(sample_hash)
        obj.save(force_insert=True)
def base_create_dep_objs(cls):
    """Create all objects that TransactionKeyValue need."""
    # Create each dependency's own prerequisites, then insert the row.
    dependencies = (
        (Transactions, TestTransactions, SAMPLE_TRANSACTION_HASH),
        (Keys, TestKeys, SAMPLE_KEY_HASH),
        (Values, TestValues, SAMPLE_VALUE_HASH),
    )
    for model_cls, test_cls, sample_hash in dependencies:
        obj = model_cls()
        test_cls.base_create_dep_objs()
        obj.from_hash(sample_hash)
        obj.save(force_insert=True)
def base_create_dep_objs(cls):
    """Build the object and make dependent user object."""
    # Create each dependency's own prerequisites, then insert the row.
    dependencies = (
        (Relationships, TestRelationships, SAMPLE_RELATIONSHIP_HASH),
        (Users, TestUsers, SAMPLE_CREATOR_HASH),
        (Transactions, TestTransactions, SAMPLE_TRANSACTION_HASH),
    )
    for model_cls, test_cls, sample_hash in dependencies:
        obj = model_cls()
        test_cls.base_create_dep_objs()
        obj.from_hash(sample_hash)
        obj.save(force_insert=True)
def base_create_dep_objs(cls):
    """Build the object and make dependent user object."""
    # Create each dependency's own prerequisites, then insert the row.
    dependencies = (
        (Projects, TestProjects, SAMPLE_PROJECT_HASH),
        (Users, TestUsers, SAMPLE_SUBMITTER_HASH),
        (AnalyticalTools, TestAnalyticalTools, SAMPLE_TOOL_HASH),
        (Transactions, TestTransactions, SAMPLE_TRANSACTION_HASH),
    )
    for model_cls, test_cls, sample_hash in dependencies:
        obj = model_cls()
        test_cls.base_create_dep_objs()
        obj.from_hash(sample_hash)
        obj.save(force_insert=True)
def base_create_dep_objs(cls):
    """Build the object and make dependent user object."""
    # The submitter row may already exist, so fetch-or-create it and
    # save without force_insert (an update is fine here).
    submitter, _created = Users().get_or_create(
        id=SAMPLE_SUBMITTER_HASH['_id'])
    TestUsers.base_create_dep_objs()
    submitter.from_hash(SAMPLE_SUBMITTER_HASH)
    submitter.save()
    # The remaining dependencies are always fresh inserts.
    dependencies = (
        (Proposals, TestProposals, SAMPLE_PROPOSAL_HASH),
        (Instruments, TestInstruments, SAMPLE_INSTRUMENT_HASH),
        (Transactions, TestTransactions, SAMPLE_TRANSACTION_HASH),
    )
    for model_cls, test_cls, sample_hash in dependencies:
        obj = model_cls()
        test_cls.base_create_dep_objs()
        obj.from_hash(sample_hash)
        obj.save(force_insert=True)
def PUT():
    """Ingest a transaction and its related rows from the request JSON.

    The request body is a list of parts, each tagged with a
    ``destinationTable``; the parts are fanned out into Transactions,
    TransSIP, TransactionKeyValue, Files and FileKeyValue inserts.
    Optionally emits a notification event unless disabled in config.
    """
    def pull_value_by_attr(json, table, attr):
        """Pull a value out of the json hash."""
        # First matching part wins; raises IndexError if the table is
        # absent from the payload.
        return [
            part[attr]
            for part in json if part['destinationTable'] == table
        ][0]

    def pull_kv_by_attr(json):
        """Yield key value pairs from the json hash."""
        for part in json:
            if part['destinationTable'] == 'TransactionKeyValue':
                yield (part['key'], part['value'])

    def pull_fkv_by_attr(json):
        """Yield key values pairs for the file_key_value store from the json hash."""
        for part in json:
            if part['destinationTable'] == 'FileKeyValue':
                yield (part['key'], part['value'], part['file_id'])

    def pull_file_by_attr(json):
        """Yield the files as a hash from the json hash."""
        for part in json:
            if part['destinationTable'] == 'Files':
                ret = {}
                parts = [
                    'name', 'subdir', 'mtime', 'ctime', 'size', 'mimetype',
                    '_id', 'hashsum', 'hashtype'
                ]
                for key in parts:
                    # KeyError here means the client omitted a required
                    # file attribute.
                    ret[key] = part[key]
                if not IngestAPI.validate_file_meta(ret):
                    raise ValueError(
                        'Invalid metadata for file {0}'.format(ret['_id']))
                yield ret

    def generate_tkvs(json):
        """Extract TransactionKeyValues as a hash from the json hash."""
        keys = []
        values = []
        tkvs = []
        # First pass: make sure every key/value string exists as a row.
        for key, value in pull_kv_by_attr(json):
            keys.append({'key': key})
            values.append({'value': value})
        # pylint: disable=protected-access
        Keys()._set_or_create(keys)
        Values()._set_or_create(values)
        # pylint: enable=protected-access
        # Second pass: link the now-existing rows to the transaction.
        for key, value in pull_kv_by_attr(json):
            tkvs.append({
                'key': Keys.get(key=key).id,
                'transaction': transaction_hash['_id'],
                'value': Values.get(value=value).id
            })
        return tkvs

    def generate_fkvs(json):
        """Extract FileKeyValues as a hash from the json hash."""
        file_keys = []
        file_values = []
        fkvs = []
        # First pass: make sure every key/value string exists as a row.
        for key, value, file_id in pull_fkv_by_attr(json):
            file_keys.append({'key': key})
            file_values.append({'value': value})
        # pylint: disable=protected-access
        Keys()._set_or_create(file_keys)
        Values()._set_or_create(file_values)
        # pylint: enable=protected-access
        # Second pass: link the now-existing rows to each file.
        for key, value, file_id in pull_fkv_by_attr(json):
            fkvs.append({
                'key': Keys.get(key=key).id,
                'value': Values.get(value=value).id,
                'file': file_id
            })
        return fkvs

    def extract_files(json):
        """Extract file entries as a hash from the json hash."""
        files = []
        for file_hash in pull_file_by_attr(json):
            # Attach every file to the transaction being ingested.
            file_hash['transaction'] = transaction_hash['_id']
            files.append(file_hash)
        return files

    transaction_hash = {
        '_id': pull_value_by_attr(request.json, 'Transactions._id', 'value')
    }
    transsip_hash = {
        '_id': pull_value_by_attr(
            request.json, 'Transactions._id', 'value'),
        'submitter': pull_value_by_attr(
            request.json, 'Transactions.submitter', 'value'),
        'instrument': pull_value_by_attr(
            request.json, 'Transactions.instrument', 'value'),
        'project': pull_value_by_attr(
            request.json, 'Transactions.project', 'value')
    }
    # Insert order matters: the transaction row must exist before the
    # rows that reference it.
    # pylint: disable=protected-access
    Transactions()._insert(transaction_hash)
    TransSIP()._insert(transsip_hash)
    TransactionKeyValue()._insert(generate_tkvs(request.json))
    Files()._insert(extract_files(request.json))
    FileKeyValue()._insert(generate_fkvs(request.json))
    # pylint: enable=protected-access
    if not get_config().getboolean('notifications', 'disabled'):
        emit_event(
            eventType=get_config().get('notifications', 'ingest_eventtype'),
            source=get_config().get(
                'notifications', 'ingest_source').format(
                    _id=pull_value_by_attr(
                        request.json, 'Transactions._id', 'value')),
            eventID=get_config().get(
                'notifications', 'ingest_eventid').format(
                    _id=pull_value_by_attr(
                        request.json, 'Transactions._id', 'value')),
            data=request.json)
    return {'status': 'success'}
def base_create_dep_objs(cls):
    """Create all objects that Files depend on."""
    # Build the transaction's own prerequisites, then insert its row.
    TestTransactions.base_create_dep_objs()
    transaction = Transactions()
    transaction.from_hash(SAMPLE_TRANSACTION_HASH)
    transaction.save(force_insert=True)
def trans_func():
    """Return the transaction for the obj id."""
    # ``obj`` is captured from the enclosing scope.
    wanted_id = obj['transaction_id']
    return Transactions.get(Transactions.id == wanted_id)
def PUT():
    """Ingest a transaction and its related rows from the request JSON.

    The request body is a list of parts, each tagged with a
    ``destinationTable``; the parts are fanned out into Transactions,
    TransSIP, TransactionKeyValue, Files and FileKeyValue inserts.
    Optionally POSTs a CloudEvents notification unless disabled in config.
    """
    def pull_value_by_attr(json, table, attr):
        """Pull a value out of the json hash."""
        # First matching part wins; raises IndexError if the table is
        # absent from the payload.
        return [
            part[attr]
            for part in json if part['destinationTable'] == table
        ][0]

    def pull_kv_by_attr(json):
        """Yield key value pairs from the json hash."""
        for part in json:
            if part['destinationTable'] == 'TransactionKeyValue':
                yield (part['key'], part['value'])

    def pull_fkv_by_attr(json):
        """Yield key values pairs for the file_key_value store from the json hash."""
        for part in json:
            if part['destinationTable'] == 'FileKeyValue':
                yield (part['key'], part['value'], part['file_id'])

    def pull_file_by_attr(json):
        """Yield the files as a hash from the json hash."""
        for part in json:
            if part['destinationTable'] == 'Files':
                ret = {}
                parts = [
                    'name', 'subdir', 'mtime', 'ctime', 'size', 'mimetype',
                    '_id', 'hashsum', 'hashtype'
                ]
                for key in parts:
                    # KeyError here means the client omitted a required
                    # file attribute.
                    ret[key] = part[key]
                if not IngestAPI.validate_file_meta(ret):
                    raise ValueError(
                        'Invalid metadata for file {0}'.format(ret['_id']))
                yield ret

    def generate_tkvs(json):
        """Extract TransactionKeyValues as a hash from the json hash."""
        keys = []
        values = []
        tkvs = []
        # First pass: make sure every key/value string exists as a row.
        for key, value in pull_kv_by_attr(json):
            keys.append({'key': key})
            values.append({'value': value})
        # pylint: disable=protected-access
        Keys()._set_or_create(keys)
        Values()._set_or_create(values)
        # pylint: enable=protected-access
        # Second pass: link the now-existing rows to the transaction.
        for key, value in pull_kv_by_attr(json):
            tkvs.append({
                'key_id': Keys.get(key=key).id,
                'transaction_id': transaction_hash['_id'],
                'value_id': Values.get(value=value).id
            })
        return tkvs

    def generate_fkvs(json):
        """Extract FileKeyValues as a hash from the json hash."""
        file_keys = []
        file_values = []
        fkvs = []
        # First pass: make sure every key/value string exists as a row.
        for key, value, file_id in pull_fkv_by_attr(json):
            file_keys.append({'key': key})
            file_values.append({'value': value})
        # pylint: disable=protected-access
        Keys()._set_or_create(file_keys)
        Values()._set_or_create(file_values)
        # pylint: enable=protected-access
        # Second pass: link the now-existing rows to each file.
        for key, value, file_id in pull_fkv_by_attr(json):
            fkvs.append({
                'key_id': Keys.get(key=key).id,
                'value_id': Values.get(value=value).id,
                'file_id': file_id
            })
        return fkvs

    def extract_files(json):
        """Extract file entries as a hash from the json hash."""
        files = []
        for file_hash in pull_file_by_attr(json):
            # Attach every file to the transaction being ingested.
            file_hash['transaction_id'] = transaction_hash['_id']
            files.append(file_hash)
        return files

    def emit_event(json):
        """Emit a cloud event that the data is now accepted."""
        # Best-effort notification: failures are logged, never raised,
        # so a notification outage cannot fail the ingest itself.
        try:
            resp = requests.post(
                get_config().get('notifications', 'url'),
                data=dumps({
                    'cloudEventsVersion': '0.1',
                    'eventType': getenv(
                        'CLOUDEVENT_TYPE', 'org.pacifica.metadata.ingest'),
                    'source': getenv(
                        'CLOUDEVENT_SOURCE_URL',
                        'http://metadata.pacifica.org/transactions?_id={}'.
                        format(
                            pull_value_by_attr(
                                json, 'Transactions._id', 'value'))),
                    'eventID': 'metadata.ingest.{}'.format(
                        pull_value_by_attr(
                            json, 'Transactions._id', 'value')),
                    'eventTime': datetime.now().replace(
                        microsecond=0).isoformat(),
                    'extensions': {},
                    'contentType': 'application/json',
                    'data': json
                }),
                headers={'Content-Type': 'application/json'})
            # Any 2xx status counts as delivered.
            resp_major = int(int(resp.status_code) / 100)
            assert resp_major == 2
        except (RequestException, AssertionError) as ex:
            logging.warning('Unable to send notification: %s', ex)

    transaction_hash = {
        '_id': pull_value_by_attr(request.json, 'Transactions._id', 'value')
    }
    transsip_hash = {
        '_id': pull_value_by_attr(
            request.json, 'Transactions._id', 'value'),
        'submitter': pull_value_by_attr(
            request.json, 'Transactions.submitter', 'value'),
        'instrument': pull_value_by_attr(
            request.json, 'Transactions.instrument', 'value'),
        'proposal': pull_value_by_attr(
            request.json, 'Transactions.proposal', 'value')
    }
    # Insert order matters: the transaction row must exist before the
    # rows that reference it.
    # pylint: disable=protected-access
    Transactions()._insert(transaction_hash)
    TransSIP()._insert(transsip_hash)
    TransactionKeyValue()._insert(generate_tkvs(request.json))
    Files()._insert(extract_files(request.json))
    FileKeyValue()._insert(generate_fkvs(request.json))
    # pylint: enable=protected-access
    if not get_config().getboolean('notifications', 'disabled'):
        emit_event(request.json)
    return {'status': 'success'}