def _get_staff_id(self): """Get staff id of associated staff if any.""" try: return self._payload[self._idx['pk_staff']] except KeyError: _log.debug('[%s]: .pk_staff should be added to the view', self.__class__.__name__) try: return self._payload[self._idx['pk_provider']] except KeyError: pass mod_by = None try: mod_by = self._payload[self._idx['modified_by_raw']] except KeyError: _log.debug('[%s]: .modified_by_raw should be added to the view', self.__class__.__name__) if mod_by is not None: # find by DB account args = {'db_u': mod_by} cmd = "SELECT pk FROM dem.staff WHERE db_user = %(db_u)s" rows, idx = gmPG2.run_ro_queries(queries=[{ 'cmd': cmd, 'args': args }], get_col_idx=False) if len(rows) > 0: # logically, they are all the same provider, because they share the DB account return rows[0][0] mod_by = self._payload[self._idx['modified_by']] # is .modified_by a "<DB-account>" ? if mod_by.startswith('<') and mod_by.endswith('>'): # find by DB account args = {'db_u': mod_by.lstrip('<').rstrip('>')} cmd = "SELECT pk FROM dem.staff WHERE db_user = %(db_u)s" rows, idx = gmPG2.run_ro_queries(queries=[{ 'cmd': cmd, 'args': args }], get_col_idx=False) if len(rows) > 0: # logically, they are all the same provider, because they share the DB account return rows[0][0] # .modified_by is probably dem.staff.short_alias args = {'alias': mod_by} cmd = "SELECT pk FROM dem.staff WHERE short_alias = %(alias)s" rows, idx = gmPG2.run_ro_queries(queries=[{ 'cmd': cmd, 'args': args }], get_col_idx=False) if len(rows) > 0: # logically, they are all the same provider, because they share the DB account return rows[0][0] _log.error('[%s]: cannot retrieve staff ID for [%s]', self.__class__.__name__, mod_by) return None
def get_document_type_pk(document_type=None):
    """Return the pk of a blobs.doc_type matching *document_type*.

    Tries the raw name first, then the localized name (via _()).
    Returns None when no type matches or document_type is None.
    """
    if document_type is None:
        # original code crashed on the default (None.strip())
        return None
    args = {'typ': document_type.strip()}
    cmd = 'SELECT pk FROM blobs.doc_type WHERE name = %(typ)s'
    rows, idx = gmPG2.run_ro_queries(queries = [{'cmd': cmd, 'args': args}], get_col_idx = False)
    if len(rows) == 0:
        cmd = 'SELECT pk FROM blobs.doc_type WHERE _(name) = %(typ)s'
        rows, idx = gmPG2.run_ro_queries(queries = [{'cmd': cmd, 'args': args}], get_col_idx = False)
    if len(rows) == 0:
        return None
    return rows[0]['pk']
def get_document_type_pk(document_type=None):
    """Look up the primary key of a document type by name.

    Falls back to the translated name when the literal name is unknown.
    A None *document_type* yields None instead of an AttributeError.
    """
    if document_type is None:
        return None
    args = {"typ": document_type.strip()}
    cmd = u"SELECT pk FROM blobs.doc_type WHERE name = %(typ)s"
    rows, idx = gmPG2.run_ro_queries(queries=[{"cmd": cmd, "args": args}], get_col_idx=False)
    if len(rows) == 0:
        cmd = u"SELECT pk FROM blobs.doc_type WHERE _(name) = %(typ)s"
        rows, idx = gmPG2.run_ro_queries(queries=[{"cmd": cmd, "args": args}], get_col_idx=False)
    if len(rows) == 0:
        return None
    return rows[0]["pk"]
def getAllParams(self, user = None, workplace = cfg_DEFAULT):
    """Get names of all stored parameters for a given workplace/(user)/cookie-key.

    This will be used by the ConfigEditor object to create a parameter tree.
    """
    # no workplace given -> any workplace (= cfg_DEFAULT)
    conditions = [
        u'cfg_template.pk=cfg_item.fk_template',
        u'cfg_item.workplace=%(wplace)s'
    ]
    query_args = {'wplace': workplace}
    # no user given -> current db user
    if user is None:
        conditions.append(u'cfg_item.owner=CURRENT_USER')
    else:
        conditions.append(u'cfg_item.owner=%(usr)s')
        query_args['usr'] = user
    cmd = u"""
select name, cookie, owner, type, description
from cfg.cfg_template, cfg.cfg_item
where %s""" % u' and '.join(conditions)
    # retrieve option definition
    rows, idx = gmPG2.run_ro_queries (
        link_obj = self.ro_conn,
        queries = [{'cmd': cmd, 'args': query_args}],
        return_data = True
    )
    return rows
def create_relationship_type(relationship=None, genetic=None):
    """Return the family-history relation type row, creating it on demand."""
    query_args = {'rel': relationship, 'gen': genetic}
    # already exists ?
    cmd = u"""
SELECT *, _(description) as l10n_description
FROM clin.fhx_relation_type
WHERE description = %(rel)s OR _(description) = %(rel)s
"""
    rows, idx = gmPG2.run_ro_queries(queries = [{'cmd': cmd, 'args': query_args}])
    if rows:
        return rows[0]
    # not found -> create it
    cmd = u"""
INSERT INTO clin.fhx_relation_type (
    description, is_genetic
) VALUES (
    i18n.i18n(gm.nullify_empty_string(%(rel)s)),
    %(gen)s
) RETURNING *
"""
    rows, idx = gmPG2.run_rw_queries(queries = [{'cmd': cmd, 'args': query_args}], return_data = True)
    return rows[0]
def loinc2data(loinc):
    """Return the ref.loinc row for *loinc*, or [] when the code is unknown."""
    query = u'SELECT * FROM ref.loinc WHERE code = %(loinc)s'
    rows, idx = gmPG2.run_ro_queries (
        queries = [{'cmd': query, 'args': {'loinc': loinc}}],
        get_col_idx = False
    )
    if not rows:
        # NOTE: this variant signals "not found" with an empty list
        return []
    return rows[0]
def get_patient_address_by_type(pk_patient=None, adr_type=None):
    """Return the first address of *adr_type* for a patient, or None."""
    query = u'SELECT * FROM dem.v_pat_addresses WHERE pk_identity = %(pat)s AND (address_type = %(typ)s OR l10n_address_type = %(typ)s)'
    query_args = {'pat': pk_patient, 'typ': adr_type}
    rows, idx = gmPG2.run_ro_queries(queries = [{'cmd': query, 'args': query_args}], get_col_idx = True)
    if not rows:
        return None
    return cPatientAddress(row = {'data': rows[0], 'idx': idx, 'pk_field': u'pk_address'})
def get_patient_address(pk_patient_address=None):
    """Instantiate a patient address by its link-table pk, or return None."""
    query_args = {'pk': pk_patient_address}
    query = u'SELECT * FROM dem.v_pat_addresses WHERE pk_lnk_person_org_address = %(pk)s'
    rows, idx = gmPG2.run_ro_queries(queries = [{'cmd': query, 'args': query_args}], get_col_idx = True)
    if not rows:
        return None
    return cPatientAddress(row = {'data': rows[0], 'idx': idx, 'pk_field': u'pk_address'})
def create_address_type(address_type=None):
    """Create an address type if needed and return its id.

    The INSERT is a no-op when a (possibly localized) type of that
    name already exists; the subsequent SELECT returns the id either way.
    """
    query_args = {'typ': address_type}
    insert_cmd = u'INSERT INTO dem.address_type (name) SELECT %(typ)s WHERE NOT EXISTS (SELECT 1 FROM dem.address_type WHERE name = %(typ)s OR _(name) = %(typ)s)'
    rows, idx = gmPG2.run_rw_queries(queries = [{'cmd': insert_cmd, 'args': query_args}])
    select_cmd = u'SELECT id FROM dem.address_type WHERE name = %(typ)s OR _(name) = %(typ)s'
    rows, idx = gmPG2.run_ro_queries(queries = [{'cmd': select_cmd, 'args': query_args}], get_col_idx = False)
    return rows[0][0]
def get_praxis_branch_by_org_unit(pk_org_unit=None):
    """Return the praxis branch attached to an org unit, or None."""
    query = _SQL_get_praxis_branches % 'pk_org_unit = %(pk_ou)s'
    rows, idx = gmPG2.run_ro_queries (
        queries = [{'cmd': query, 'args': {'pk_ou': pk_org_unit}}],
        get_col_idx = True
    )
    if not rows:
        return None
    return cPraxisBranch(row = {'data': rows[0], 'idx': idx, 'pk_field': 'pk_praxis_branch'})
def get_document_types():
    """Return all document types as a list of cDocumentType instances."""
    rows, idx = gmPG2.run_ro_queries (
        queries = [{"cmd": u"SELECT * FROM blobs.v_doc_type"}],
        get_col_idx = True
    )
    return [
        cDocumentType(row = {"pk_field": "pk_doc_type", "idx": idx, "data": r})
        for r in rows
    ]
def get_documents(self, doc_type=None, episodes=None, encounter=None, order_by=None, exclude_unsigned=False):
    """Return list of documents.

    doc_type may be a pk or a type name; episodes is a sequence of
    episode pks; exclude_unsigned filters to documents having at
    least one reviewed part.
    """
    query_args = {"pat": self.pk_patient, "type": doc_type, "enc": encounter}
    clauses = [u"pk_patient = %(pat)s"]
    if doc_type is not None:
        try:
            int(doc_type)
            clauses.append(u"pk_type = %(type)s")
        except (TypeError, ValueError):
            # not pk-like -> treat as a type name
            clauses.append(u"pk_type = (SELECT pk FROM blobs.doc_type WHERE name = %(type)s)")
    if (episodes is not None) and (len(episodes) > 0):
        clauses.append(u"pk_episode IN %(epi)s")
        query_args["epi"] = tuple(episodes)
    if encounter is not None:
        clauses.append(u"pk_encounter = %(enc)s")
    if exclude_unsigned:
        clauses.append(u"pk_doc IN (SELECT b_vo.pk_doc FROM blobs.v_obj4doc_no_data b_vo WHERE b_vo.pk_patient = %(pat)s AND b_vo.reviewed IS TRUE)")
    if order_by is None:
        order_by = u"ORDER BY clin_when"
    cmd = u"%s\n%s" % (_sql_fetch_document_fields % u" AND ".join(clauses), order_by)
    rows, idx = gmPG2.run_ro_queries(queries=[{"cmd": cmd, "args": query_args}], get_col_idx=True)
    return [cDocument(row={"pk_field": "pk_doc", "idx": idx, "data": r}) for r in rows]
def get_doc_list(self, doc_type=None):
    """return flat list of document IDs"""
    query_args = {"ID": self.pk_patient, "TYP": doc_type}
    template = u"""
select vdm.pk_doc
from blobs.v_doc_med vdm
where
    vdm.pk_patient = %%(ID)s
    %s
order by vdm.clin_when"""
    if doc_type is None:
        cmd = template % u""
    else:
        try:
            int(doc_type)
            cmd = template % u"and vdm.pk_type = %(TYP)s"
        except (TypeError, ValueError):
            # type given by name rather than pk
            cmd = template % u"and vdm.pk_type = (select pk from blobs.doc_type where name = %(TYP)s)"
    rows, idx = gmPG2.run_ro_queries(queries=[{"cmd": cmd, "args": query_args}])
    return [r[0] for r in rows]
def get_family_history(order_by=None, patient=None):
    """Return cFamilyHistory items, optionally for one patient / ordered."""
    query_args = {}
    clauses = []
    if patient is not None:
        clauses.append(u'pk_patient = %(pat)s')
        query_args['pat'] = patient
    # with an empty WHERE list we must inject a vacuous 'true' condition
    if order_by is None:
        order_by = u'true' if not clauses else u''
    else:
        if not clauses:
            order_by = u'true ORDER BY %s' % order_by
        else:
            order_by = u'ORDER BY %s' % order_by
    cmd = _SQL_get_family_history % u' AND '.join(clauses) + u' ' + order_by
    rows, idx = gmPG2.run_ro_queries(queries=[{'cmd': cmd, 'args': query_args}], get_col_idx=True)
    return [
        cFamilyHistory(row={'data': r, 'idx': idx, 'pk_field': 'pk_family_history'})
        for r in rows
    ]
def get_reminders(pk_patient=None, order_by=None, return_pks=False):
    """Return inbox reminders (messages with a due date) for a patient."""
    if order_by is None:
        order_by = '%s ORDER BY due_date, importance DESC, received_when DESC'
    else:
        order_by = '%%s ORDER BY %s' % order_by
    query_args = {'pat': pk_patient}
    clauses = ['pk_patient = %(pat)s', 'due_date IS NOT NULL']
    cmd = "SELECT * FROM dem.v_message_inbox WHERE %s" % (order_by % ' AND '.join(clauses))
    _log.debug('SQL: %s', cmd)
    _log.debug('args: %s', query_args)
    rows, idx = gmPG2.run_ro_queries(queries=[{'cmd': cmd, 'args': query_args}], get_col_idx=True)
    if return_pks:
        return [r['pk_inbox_message'] for r in rows]
    return [
        cInboxMessage(row={'data': r, 'idx': idx, 'pk_field': 'pk_inbox_message'})
        for r in rows
    ]
def lock_invoice_id(invoice_id):
    """Lock an invoice ID.

    The lock taken is an exclusive advisory lock in PostgreSQL.

    Because the data is short _and_ crc32/adler32 are fairly weak
    we assume that collisions can be created "easily". Therefore
    we apply both algorithms concurrently.

    NOT compatible with anything 1.8 or below.
    """
    _log.debug('locking invoice ID: %s', invoice_id)
    lock_token = __generate_invoice_id_lock_token(invoice_id)
    cmd = "SELECT pg_try_advisory_lock(%s)" % lock_token
    try:
        rows, idx = gmPG2.run_ro_queries(queries = [{'cmd': cmd}])
    except gmPG2.dbapi.ProgrammingError:
        _log.exception('cannot lock invoice ID: [%s] (%s)', invoice_id, lock_token)
        return False
    if rows[0][0]:
        return True
    _log.error('cannot lock invoice ID: [%s] (%s)', invoice_id, lock_token)
    return False
def delete_praxis_branches(pk_praxis_branches=None, except_pk_praxis_branches=None):
    """Delete praxis branches, taking exclusive locks first.

    pk_praxis_branches -- branches to delete (None = all)
    except_pk_praxis_branches -- branches to keep

    Returns False when any branch cannot be locked (all locks taken
    so far are released again), True otherwise.
    """
    if pk_praxis_branches is None:
        cmd = 'SELECT pk from dem.praxis_branch'
        rows, idx = gmPG2.run_ro_queries(queries = [{'cmd': cmd}], get_col_idx = False)
        pks_to_lock = [ r[0] for r in rows ]
    else:
        pks_to_lock = pk_praxis_branches[:]
    if except_pk_praxis_branches is not None:
        for pk in except_pk_praxis_branches:
            try:
                pks_to_lock.remove(pk)
            except ValueError:
                pass
    locked = []
    for pk in pks_to_lock:
        if not lock_praxis_branch(pk_praxis_branch = pk, exclusive = True):
            # FIX: release the locks we already hold rather than leaking them
            for locked_pk in locked:
                unlock_praxis_branch(pk_praxis_branch = locked_pk, exclusive = True)
            return False
        locked.append(pk)
    args = {}
    where_parts = []
    if pk_praxis_branches is not None:
        args['pks'] = pk_praxis_branches
        where_parts.append('pk = ANY(%(pks)s)')
    if except_pk_praxis_branches is not None:
        args['except'] = except_pk_praxis_branches
        where_parts.append('pk <> ALL(%(except)s)')
    if len(where_parts) == 0:
        cmd = "DELETE FROM dem.praxis_branch"
    else:
        cmd = "DELETE FROM dem.praxis_branch WHERE %s" % ' AND '.join(where_parts)
    gmPG2.run_rw_queries(queries = [{'cmd': cmd, 'args': args}])
    for pk in pks_to_lock:
        unlock_praxis_branch(pk_praxis_branch = pk, exclusive = True)
    return True
def unlock_invoice_id(invoice_id):
    """Release the advisory lock held on an invoice ID."""
    _log.debug('unlocking invoice ID: %s', invoice_id)
    # remove in 1.9 / DB v23:
    if not __unlock_invoice_id_1_7_legacy(invoice_id):
        return False
    # unlock: rebuild the crc32-chained adler32 token used at lock time
    unsigned_crc32 = zlib.crc32(bytes(invoice_id, 'utf8')) & 0xffffffff
    _log.debug('unsigned crc32: %s', unsigned_crc32)
    data4adler32 = u'%s---[%s]' % (invoice_id, unsigned_crc32)
    _log.debug('data for adler32: %s', data4adler32)
    unsigned_adler32 = zlib.adler32(bytes(data4adler32, 'utf8'), unsigned_crc32) & 0xffffffff
    _log.debug('unsigned (crc32-chained) adler32: %s', unsigned_adler32)
    cmd = u"SELECT pg_advisory_unlock(%s)" % (unsigned_adler32)
    try:
        rows, idx = gmPG2.run_ro_queries(queries=[{'cmd': cmd}])
    except gmPG2.dbapi.ProgrammingError:
        _log.exception('cannot unlock invoice ID: [%s] (%s)', invoice_id, unsigned_adler32)
        return False
    if rows[0][0]:
        return True
    _log.error('cannot unlock invoice ID: [%s] (%s)', invoice_id, unsigned_adler32)
    return False
def create_data_source(long_name=None, short_name=None, version=None, source=None, language=None):
    """Return pk of a ref.data_source row, creating the row if missing."""
    query_args = {
        'lname': long_name,
        'sname': short_name,
        'ver': version,
        'src': source,
        'lang': language
    }
    # does it exist already ?
    cmd = u"SELECT pk FROM ref.data_source WHERE name_long = %(lname)s AND name_short = %(sname)s AND version = %(ver)s"
    rows, idx = gmPG2.run_ro_queries(queries = [{'cmd': cmd, 'args': query_args}])
    if rows:
        return rows[0]['pk']
    # no -> create it
    cmd = u"""
INSERT INTO ref.data_source (name_long, name_short, version, source, lang)
VALUES (
    %(lname)s,
    %(sname)s,
    %(ver)s,
    %(src)s,
    %(lang)s
) RETURNING pk
"""
    rows, idx = gmPG2.run_rw_queries(queries = [{'cmd': cmd, 'args': query_args}], return_data = True)
    return rows[0]['pk']
def get_comm_channels(self, comm_medium=None):
    """Return comm channels of this org unit, optionally of one medium."""
    query_args = {'pk': self.pk_obj, 'medium': comm_medium}
    if comm_medium is None:
        cmd = """
SELECT * FROM dem.v_org_unit_comms
WHERE pk_org_unit = %(pk)s
"""
    else:
        cmd = """
SELECT * FROM dem.v_org_unit_comms
WHERE
    pk_org_unit = %(pk)s
    AND comm_type = %(medium)s
"""
    rows, idx = gmPG2.run_ro_queries(queries = [{'cmd': cmd, 'args': query_args}], get_col_idx = True)
    return [
        gmDemographicRecord.cOrgCommChannel(row = {
            'pk_field': 'pk_lnk_org_unit2comm',
            'data': r,
            'idx': idx
        }) for r in rows
    ]
def _get_revision_history(self, query, args, title):
    """Return formatted lines describing the audit-trail versions of a row.

    query/args -- SQL returning audit rows (needs row_version,
                  audit__action_when, pk_audit columns)
    title -- heading for the listing
    """
    rows, idx = gmPG2.run_ro_queries(queries=[{'cmd': query, 'args': args}], get_col_idx=True)
    lines = []
    # FIX: original compared the rows *list* to 0 ("rows == 0"), which is
    # never true, so the "(no versions)" branch was unreachable and an
    # empty result crashed on rows[0] below
    if len(rows) == 0:
        lines.append('%s (no versions)' % title)
    else:
        lines.append('%s (%s versions)' % (title, rows[0]['row_version'] + 1))
        headers = [
            'rev %s (%s)' % (
                r['row_version'],
                pydt_strftime(r['audit__action_when'], format='%Y %b %d %H:%M', none_str='live row')
            ) for r in rows
        ]
        lines.extend(dicts2table (
            rows,
            left_margin=1,
            eol=None,
            keys2ignore=['audit__action_when', 'row_version', 'pk_audit'],
            show_only_changes=True,
            headers=headers,
            date_format='%Y %b %d %H:%M',
            equality_value=u_left_arrow
        ))
    return lines
def get_overdue_messages(pk_patient=None, order_by=None):
    """Return overdue inbox messages for a patient."""
    if order_by is None:
        order_by = u'%s ORDER BY due_date, importance DESC, received_when DESC'
    else:
        order_by = u'%%s ORDER BY %s' % order_by
    query_args = {'pat': pk_patient}
    clauses = [u'pk_patient = %(pat)s', u'is_overdue IS TRUE']
    cmd = u"SELECT * FROM dem.v_message_inbox WHERE %s" % (order_by % u' AND '.join(clauses))
    _log.debug('SQL: %s', cmd)
    _log.debug('args: %s', query_args)
    rows, idx = gmPG2.run_ro_queries(queries=[{'cmd': cmd, 'args': query_args}], get_col_idx=True)
    return [
        cInboxMessage(row={'data': r, 'idx': idx, 'pk_field': 'pk_inbox_message'})
        for r in rows
    ]
def get_candidate_identities(self, can_create=False):
    """Return candidate identities, adding matches on the insuree number.

    Extends the name-based candidates from the base class with
    identities carrying a matching external insuree-number ID.
    """
    old_idents = gmPerson.cDTO_person.get_candidate_identities(self, can_create=can_create)
    cmd = """
select pk_identity from dem.v_external_ids4identity where
    value = %(val)s and
    name = %(name)s and
    issuer = %(kk)s
"""
    args = {
        'val': self.insuree_number,
        'name': EXTERNAL_ID_TYPE_VK_INSUREE_NUMBER,
        'kk': EXTERNAL_ID_ISSUER_TEMPLATE % (self.insurance_company, self.insurance_number)
    }
    rows, idx = gmPG2.run_ro_queries(queries=[{'cmd': cmd, 'args': args}])
    # weed out duplicates
    # FIX: the original inner loop appended regardless of whether a
    # matching candidate was found (no for/else); use a membership test
    known_pks = [oid.ID for oid in old_idents]
    new_idents = [
        gmPerson.cPerson(aPK_obj=r['pk_identity'])
        for r in rows
        if r[0] not in known_pks
    ]
    old_idents.extend(new_idents)
    return old_idents
def get_keyword_expansions(order_by=None, force_reload=False, return_pks=False):
    """Return keyword expansions, cached at module level.

    force_reload -- bypass and refresh the cache
    return_pks -- return only the pk_expansion values
    """
    global __keyword_expansions
    if (not force_reload) and (__keyword_expansions is not None):
        # FIX: honor return_pks on a cache hit too -- previously a warm
        # cache returned instances even when pks were requested
        if return_pks:
            return [e['pk_expansion'] for e in __keyword_expansions]
        return __keyword_expansions
    if order_by is None:
        order_by = 'true'
    else:
        order_by = 'true ORDER BY %s' % order_by
    cmd = _SQL_get_keyword_expansions % order_by
    rows, idx = gmPG2.run_ro_queries(queries=[{'cmd': cmd}], get_col_idx=True)
    # refresh the cache regardless of the requested return shape
    __keyword_expansions = [
        cKeywordExpansion(row={'data': r, 'idx': idx, 'pk_field': 'pk_expansion'})
        for r in rows
    ]
    if return_pks:
        return [r['pk_expansion'] for r in rows]
    return __keyword_expansions
def get_expansion(keyword=None, textual_only=True, binary_only=False):
    """Return the expansion for *keyword*, or None.

    textual_only -- restrict to textual expansions
    binary_only -- restrict to binary (non-textual) expansions;
                   previously accepted but silently ignored

    Raises ValueError when both restrictions are requested at once.
    """
    if False not in [textual_only, binary_only]:
        raise ValueError('one of <textual_only> and <binary_only> must be False')
    where_parts = ['keyword = %(kwd)s']
    args = {'kwd': keyword}
    if textual_only:
        where_parts.append('is_textual IS TRUE')
    if binary_only:
        # FIX: apply the binary restriction instead of dropping it
        where_parts.append('is_textual IS FALSE')
    cmd = _SQL_get_keyword_expansions % ' AND '.join(where_parts)
    rows, idx = gmPG2.run_ro_queries(queries=[{'cmd': cmd, 'args': args}], get_col_idx=True)
    if len(rows) == 0:
        return None
    return cKeywordExpansion(row={'data': rows[0], 'idx': idx, 'pk_field': 'pk_expansion'})
def get_coded_terms(coding_systems=None, languages=None, order_by=None):
    """Return rows from ref.v_coded_terms, optionally filtered/ordered.

    coding_systems -- list matched against short or long system name
    languages -- list of term languages
    """
    where_snippets = []
    args = {}
    if coding_systems is not None:
        # FIX: the snippet was missing its closing parenthesis,
        # producing invalid SQL whenever coding_systems was given
        where_snippets.append(
            "((coding_system = ANY(%(sys)s)) OR (coding_system_long = ANY(%(sys)s)))"
        )
        args['sys'] = coding_systems
    if languages is not None:
        where_snippets.append('lang = ANY(%(lang)s)')
        args['lang'] = languages
    cmd = 'select * from ref.v_coded_terms'
    if len(where_snippets) > 0:
        cmd += ' WHERE %s' % ' AND '.join(where_snippets)
    if order_by is not None:
        cmd += ' ORDER BY %s' % order_by
    rows, idx = gmPG2.run_ro_queries(queries=[{'cmd': cmd, 'args': args}], get_col_idx=False)
    return rows
def __init__(self, aPK_obj=None, row=None):
    """Instantiate a staff record.

    With neither aPK_obj nor row, loads the staff row matching the
    current database account (CURRENT_USER).
    """
    # by default get staff corresponding to CURRENT_USER
    if (aPK_obj is None) and (row is None):
        cmd = _SQL_get_staff_fields % "db_user = CURRENT_USER"
        try:
            rows, idx = gmPG2.run_ro_queries(queries = [{'cmd': cmd}], get_col_idx=True)
        except Exception:
            # FIX: was a bare "except:", which also swallowed
            # SystemExit/KeyboardInterrupt
            _log.exception('cannot instantiate staff instance')
            gmLog2.log_stack_trace()
            raise ValueError('cannot instantiate staff instance for database account CURRENT_USER')
        if len(rows) == 0:
            raise ValueError('no staff record for database account CURRENT_USER')
        row = {
            'pk_field': 'pk_staff',
            'idx': idx,
            'data': rows[0]
        }
        gmBusinessDBObject.cBusinessDBObject.__init__(self, row = row)
    else:
        gmBusinessDBObject.cBusinessDBObject.__init__(self, aPK_obj = aPK_obj, row = row)
    # are we SELF ?
    self.__is_current_user = (gmPG2.get_current_user() == self._payload[self._idx['db_user']])
    self.__inbox = None
def refetch_payload(self, ignore_changes=False, link_obj=None):
    """Fetch field values from backend.

    ignore_changes -- reload even if the in-memory payload was modified
    Returns False when modified-and-not-ignoring or when the row no
    longer exists.
    """
    if self._is_modified:
        compare_dict_likes(self.original_payload, self.fields_as_dict(date_format = None, none_string = None), 'original payload', 'modified payload')
        if ignore_changes:
            _log.critical('[%s:%s]: loosing payload changes' % (self.__class__.__name__, self.pk_obj))
            #_log.debug('most recently fetched: %s' % self.payload_most_recently_fetched)
            #_log.debug('modified: %s' % self._payload)
        else:
            _log.critical('[%s:%s]: cannot reload, payload changed' % (self.__class__.__name__, self.pk_obj))
            return False
    # FIX: use isinstance() rather than type() == dict (idiom, and
    # consistent with the sibling implementation of this method)
    if isinstance(self.pk_obj, dict):
        arg = self.pk_obj
    else:
        arg = [self.pk_obj]
    rows, self._idx = gmPG2.run_ro_queries (
        link_obj = link_obj,
        queries = [{'cmd': self.__class__._cmd_fetch_payload, 'args': arg}],
        get_col_idx = True
    )
    if len(rows) == 0:
        _log.error('[%s:%s]: no such instance' % (self.__class__.__name__, self.pk_obj))
        return False
    # a pk-based fetch must match at most one row (consistency with sibling)
    if len(rows) > 1:
        raise AssertionError('[%s:%s]: %s instances !' % (self.__class__.__name__, self.pk_obj, len(rows)))
    self._payload = rows[0]
    return True
def __unlock_invoice_id_1_7_legacy(invoice_id):
    """Release the pre-1.8 two-key advisory lock on an invoice ID."""
    _log.debug('legacy unlocking invoice ID: %s', invoice_id)
    py3_crc32 = zlib.crc32(bytes(invoice_id, 'utf8'))
    py3_adler32 = zlib.adler32(bytes(invoice_id, 'utf8'))
    # convert py3 unsigned checksums to the signed values py2.6+ produced
    signed_crc32 = py3_crc32 - (py3_crc32 & 0x80000000) * 2
    signed_adler32 = py3_adler32 - (py3_adler32 & 0x80000000) * 2
    _log.debug('crc32: %s (py3, unsigned) -> %s (py2.6+, signed)', py3_crc32, signed_crc32)
    _log.debug('adler32: %s (py3, unsigned) -> %s (py2.6+, signed)', py3_adler32, signed_adler32)
    cmd = u"""SELECT pg_advisory_unlock(%s, %s)""" % (signed_crc32, signed_adler32)
    try:
        rows, idx = gmPG2.run_ro_queries(queries=[{'cmd': cmd}])
    except gmPG2.dbapi.ProgrammingError:
        _log.exception('cannot unlock invoice ID: [%s] (%s/%s)', invoice_id, signed_crc32, signed_adler32)
        return False
    if rows[0][0]:
        return True
    _log.error('cannot unlock invoice ID: [%s] (%s/%s)', invoice_id, signed_crc32, signed_adler32)
    return False
def search_for_documents(patient_id=None, type_id=None, external_reference=None, pk_episode=None, pk_types=None):
    """Searches for documents with the given patient and type ID."""
    if (patient_id is None) and (pk_episode is None):
        raise ValueError('need patient_id or pk_episode to search for document')
    query_args = {
        'pat_id': patient_id,
        'type_id': type_id,
        'ref': external_reference,
        'pk_epi': pk_episode
    }
    clauses = []
    if patient_id is not None:
        clauses.append('pk_patient = %(pat_id)s')
    if type_id is not None:
        clauses.append('pk_type = %(type_id)s')
    if external_reference is not None:
        clauses.append('ext_ref = %(ref)s')
    if pk_episode is not None:
        clauses.append('pk_episode = %(pk_epi)s')
    if pk_types is not None:
        clauses.append('pk_type IN %(pk_types)s')
        query_args['pk_types'] = tuple(pk_types)
    cmd = _SQL_get_document_fields % ' AND '.join(clauses)
    rows, idx = gmPG2.run_ro_queries(queries = [{'cmd': cmd, 'args': query_args}], get_col_idx = True)
    return [ cDocument(row = {'data': r, 'idx': idx, 'pk_field': 'pk_doc'}) for r in rows ]
def get_external_care_items(order_by=None, pk_identity=None, pk_health_issue=None, exclude_inactive=False, return_pks=False):
    """Return external care items, optionally filtered.

    return_pks -- return only pk_external_care values (added for
                  parity with the sibling implementation; default
                  keeps the previous behavior)
    """
    args = {'pk_pat': pk_identity, 'pk_issue': pk_health_issue}
    where_parts = []
    if pk_identity is not None:
        where_parts.append('pk_identity = %(pk_pat)s')
    if pk_health_issue is not None:
        where_parts.append('pk_health_issue = %(pk_issue)s')
    if exclude_inactive is True:
        where_parts.append('inactive IS FALSE')
    if len(where_parts) == 0:
        where = 'TRUE'
    else:
        where = ' AND '.join(where_parts)
    if order_by is not None:
        where = '%s ORDER BY %s' % (where, order_by)
    cmd = _SQL_get_external_care_items % where
    rows, idx = gmPG2.run_ro_queries(queries=[{'cmd': cmd, 'args': args}], get_col_idx=True)
    if return_pks:
        return [r['pk_external_care'] for r in rows]
    return [
        cExternalCareItem(row={'data': r, 'idx': idx, 'pk_field': 'pk_external_care'})
        for r in rows
    ]
def get_latest_mugshot(self):
    """Return the most recent mugshot document part, or None."""
    cmd = "SELECT pk_obj FROM blobs.v_latest_mugshot WHERE pk_patient = %s"
    rows, idx = gmPG2.run_ro_queries(queries = [{'cmd': cmd, 'args': [self.pk_patient]}])
    if not rows:
        _log.info('no mugshots available for patient [%s]' % self.pk_patient)
        return None
    return cDocumentPart(aPK_obj = rows[0][0])
def get_latest_freediams_prescription(self):
    """Return the newest FreeDiams prescription document, or None."""
    cmd = """
SELECT pk_doc
FROM blobs.v_doc_med
WHERE
    pk_patient = %(pat)s
    AND type = %(typ)s
    AND ext_ref = %(ref)s
ORDER BY clin_when DESC
LIMIT 1
"""
    query_args = {
        'pat': self.pk_patient,
        'typ': DOCUMENT_TYPE_PRESCRIPTION,
        'ref': 'FreeDiams'
    }
    rows, idx = gmPG2.run_ro_queries(queries = [{'cmd': cmd, 'args': query_args}])
    if not rows:
        _log.info('no FreeDiams prescription available for patient [%s]' % self.pk_patient)
        return None
    return cDocument(aPK_obj = rows[0][0])
def _get_all_document_org_units(self):
    """Return org units referenced by any of this patient's documents."""
    condition = 'pk_org_unit IN (SELECT DISTINCT ON (pk_org_unit) pk_org_unit FROM blobs.v_doc_med WHERE pk_patient = %(pat)s)'
    cmd = gmOrganization._SQL_get_org_unit % (condition)
    rows, idx = gmPG2.run_ro_queries (
        queries = [{'cmd': cmd, 'args': {'pat': self.pk_patient}}],
        get_col_idx = True
    )
    return [
        gmOrganization.cOrgUnit(row = {'data': r, 'idx': idx, 'pk_field': 'pk_org_unit'})
        for r in rows
    ]
def get_export_items(order_by=None, pk_identity=None, designation=None):
    """Return export-area items, filtered by patient and/or designation."""
    query_args = {
        'pat': pk_identity,
        'desig': gmTools.coalesce(designation, PRINT_JOB_DESIGNATION)
    }
    clauses = []
    if pk_identity is not None:
        clauses.append(u'pk_identity = %(pat)s')
    # note that invalidly linked items will be
    # auto-healed when instantiated
    if designation is None:
        clauses.append(u"designation IS DISTINCT FROM %(desig)s")
    else:
        clauses.append(u'designation = %(desig)s')
    if order_by is None:
        order_by = u''
    else:
        order_by = u' ORDER BY %s' % order_by
    cmd = (_SQL_get_export_items % u' AND '.join(clauses)) + order_by
    rows, idx = gmPG2.run_ro_queries(queries=[{'cmd': cmd, 'args': query_args}], get_col_idx=True)
    return [
        cExportItem(row={'data': r, 'idx': idx, 'pk_field': 'pk_export_item'})
        for r in rows
    ]
def get_external_care_items(order_by=None, pk_identity=None, pk_health_issue=None, exclude_inactive=False, return_pks=False):
    """Return external care items (or their pks when return_pks is True)."""
    query_args = {
        'pk_pat': pk_identity,
        'pk_issue': pk_health_issue
    }
    clauses = []
    if pk_identity is not None:
        clauses.append('pk_identity = %(pk_pat)s')
    if pk_health_issue is not None:
        clauses.append('pk_health_issue = %(pk_issue)s')
    if exclude_inactive is True:
        clauses.append('inactive IS FALSE')
    where = 'TRUE' if not clauses else ' AND '.join(clauses)
    if order_by is not None:
        where = '%s ORDER BY %s' % (where, order_by)
    cmd = _SQL_get_external_care_items % where
    rows, idx = gmPG2.run_ro_queries(queries = [{'cmd': cmd, 'args': query_args}], get_col_idx = True)
    if return_pks:
        return [ r['pk_external_care'] for r in rows ]
    return [
        cExternalCareItem(row = {'data': r, 'idx': idx, 'pk_field': 'pk_external_care'})
        for r in rows
    ]
def get_org_units(order_by=None, org=None):
    """Return org units, optionally of one org and/or ordered."""
    order_by = '' if order_by is None else ' ORDER BY %s' % order_by
    where_part = 'TRUE' if org is None else 'pk_org = %(org)s'
    query_args = {'org': org}
    cmd = (_SQL_get_org_unit % where_part) + order_by
    rows, idx = gmPG2.run_ro_queries(queries=[{'cmd': cmd, 'args': query_args}], get_col_idx=True)
    return [
        cOrgUnit(row={'data': r, 'idx': idx, 'pk_field': 'pk_org_unit'})
        for r in rows
    ]
def get_praxis_branch_by_org_unit(pk_org_unit=None):
    """Return the praxis branch for an org unit, or None if not a branch."""
    query_args = {'pk_ou': pk_org_unit}
    cmd = _SQL_get_praxis_branches % u'pk_org_unit = %(pk_ou)s'
    rows, idx = gmPG2.run_ro_queries(queries = [{'cmd': cmd, 'args': query_args}], get_col_idx = True)
    if not rows:
        return None
    return cPraxisBranch(row = {'data': rows[0], 'idx': idx, 'pk_field': 'pk_praxis_branch'})
def get_doc_list(self, doc_type=None):
    """return flat list of document IDs"""
    query_args = { 'ID': self.pk_patient, 'TYP': doc_type }
    template = """
select vdm.pk_doc
from blobs.v_doc_med vdm
where
    vdm.pk_patient = %%(ID)s
    %s
order by vdm.clin_when"""
    if doc_type is None:
        type_filter = ''
    else:
        try:
            int(doc_type)
            type_filter = 'and vdm.pk_type = %(TYP)s'
        except (TypeError, ValueError):
            # a name was passed, not a pk
            type_filter = 'and vdm.pk_type = (select pk from blobs.doc_type where name = %(TYP)s)'
    cmd = template % type_filter
    rows, idx = gmPG2.run_ro_queries(queries = [{'cmd': cmd, 'args': query_args}])
    return [ r[0] for r in rows ]
def search_for_documents(patient_id=None, type_id=None, external_reference=None):
    """Searches for documents with the given patient and type ID."""
    if patient_id is None:
        raise ValueError('need patient id to search for document')
    query_args = {
        'pat_id': patient_id,
        'type_id': type_id,
        'ref': external_reference
    }
    clauses = [u'pk_patient = %(pat_id)s']
    if type_id is not None:
        clauses.append(u'pk_type = %(type_id)s')
    if external_reference is not None:
        clauses.append(u'ext_ref = %(ref)s')
    cmd = _sql_fetch_document_fields % u' AND '.join(clauses)
    rows, idx = gmPG2.run_ro_queries(queries=[{'cmd': cmd, 'args': query_args}], get_col_idx=True)
    return [
        cDocument(row={'data': r, 'idx': idx, 'pk_field': 'pk_doc'})
        for r in rows
    ]
def create_data_source(long_name=None, short_name=None, version=None, source=None, language=None):
    """Get-or-create a ref.data_source entry and return its pk."""
    query_args = {
        'lname': long_name,
        'sname': short_name,
        'ver': version,
        'src': source,
        'lang': language
    }
    cmd = "SELECT pk FROM ref.data_source WHERE name_long = %(lname)s AND name_short = %(sname)s AND version = %(ver)s"
    rows, idx = gmPG2.run_ro_queries(queries = [{'cmd': cmd, 'args': query_args}])
    if rows:
        # already known
        return rows[0]['pk']
    cmd = """
INSERT INTO ref.data_source (name_long, name_short, version, source, lang)
VALUES (
    %(lname)s,
    %(sname)s,
    %(ver)s,
    %(src)s,
    %(lang)s
) RETURNING pk
"""
    rows, idx = gmPG2.run_rw_queries(queries = [{'cmd': cmd, 'args': query_args}], return_data = True)
    return rows[0]['pk']
def get_bills(order_by=None, pk_patient=None, return_pks=False):
    """Return bills (or their pks), optionally for one patient."""
    query_args = {'pat': pk_patient}
    clauses = ['true']
    if pk_patient is not None:
        clauses.append('pk_patient = %(pat)s')
    order_by = '' if order_by is None else ' ORDER BY %s' % order_by
    cmd = (_SQL_get_bill_fields % ' AND '.join(clauses)) + order_by
    rows, idx = gmPG2.run_ro_queries(queries=[{'cmd': cmd, 'args': query_args}], get_col_idx=True)
    if return_pks:
        return [r['pk_bill'] for r in rows]
    return [
        cBill(row={'data': r, 'idx': idx, 'pk_field': 'pk_bill'})
        for r in rows
    ]
def loinc2data(loinc):
    """Return the ref.loinc row for *loinc*, or None when unknown."""
    query_args = {'loinc': loinc}
    cmd = 'SELECT * FROM ref.loinc WHERE code = %(loinc)s'
    rows, idx = gmPG2.run_ro_queries(queries = [{'cmd': cmd, 'args': query_args}], get_col_idx = False)
    if not rows:
        # NOTE: this variant signals "not found" with None
        return None
    return rows[0]
def get_candidate_identities(self, can_create = False):
    """Return candidate identities, adding insuree-number matches.

    Extends the base-class (name-based) candidates with identities
    carrying a matching external insuree-number ID.
    """
    old_idents = gmPerson.cDTO_person.get_candidate_identities(self, can_create = can_create)
    cmd = """
select pk_identity from dem.v_external_ids4identity where
    value = %(val)s and
    name = %(name)s and
    issuer = %(kk)s
"""
    args = {
        'val': self.insuree_number,
        'name': EXTERNAL_ID_TYPE_VK_INSUREE_NUMBER,
        'kk': EXTERNAL_ID_ISSUER_TEMPLATE % (self.insurance_company, self.insurance_number)
    }
    rows, idx = gmPG2.run_ro_queries(queries = [{'cmd': cmd, 'args': args}])
    # weed out duplicates
    # FIX: the original inner loop's append did not depend on whether a
    # match was found (break without for/else); use a membership test
    known_pks = [ oid.ID for oid in old_idents ]
    new_idents = [
        gmPerson.cPerson(aPK_obj = r['pk_identity'])
        for r in rows
        if r[0] not in known_pks
    ]
    old_idents.extend(new_idents)
    return old_idents
def get_latest_freediams_prescription(self):
    """Return the most recent FreeDiams prescription document, or None."""
    query = """
SELECT pk_doc
FROM blobs.v_doc_med
WHERE
    pk_patient = %(pat)s
    AND type = %(typ)s
    AND ext_ref = %(ref)s
ORDER BY clin_when DESC
LIMIT 1
"""
    query_args = {
        'pat': self.pk_patient,
        'typ': DOCUMENT_TYPE_PRESCRIPTION,
        'ref': 'FreeDiams'
    }
    rows, idx = gmPG2.run_ro_queries(queries=[{'cmd': query, 'args': query_args}])
    if not rows:
        _log.info('no FreeDiams prescription available for patient [%s]' % self.pk_patient)
        return None
    return cDocument(aPK_obj=rows[0][0])
def get_candidate_identities(self, can_create = False):
    """Return candidate identities, adding insuree-number matches from the card."""
    old_idents = gmPerson.cDTO_person.get_candidate_identities(self, can_create = can_create)
    # look for candidates based on their Insuree Number
    if not self.card_is_rejected:
        cmd = """
SELECT pk_identity FROM dem.v_external_ids4identity WHERE
    value = %(val)s AND
    name = %(name)s AND
    issuer = %(kk)s
"""
        query_args = {
            'val': self.insuree_number,
            'name': '%s (%s)' % ( EXTERNAL_ID_TYPE_VK_INSUREE_NUMBER, self.raw_data['Karte'] ),
            'kk': EXTERNAL_ID_ISSUER_TEMPLATE % (self.raw_data['KostentraegerName'], self.raw_data['Kostentraegerkennung'])
        }
        rows, idx = gmPG2.run_ro_queries(queries = [{'cmd': cmd, 'args': query_args}], get_col_idx = None)
        # weed out duplicates
        name_candidate_ids = [ o.ID for o in old_idents ]
        for r in rows:
            if r[0] in name_candidate_ids:
                continue
            old_idents.append(gmPerson.cPerson(aPK_obj = r[0]))
    return old_idents
def address_exists(country_code=None, region_code=None, urb=None, postcode=None, street=None, number=None, subunit=None):
    """Return the pk of a matching address via dem.address_exists(), or None."""
    query_args = {
        'country_code': country_code,
        'region_code': region_code,
        'urb': urb,
        'postcode': postcode,
        'street': street,
        'number': number,
        'subunit': subunit
    }
    cmd = """SELECT dem.address_exists(%(country_code)s, %(region_code)s, %(urb)s, %(postcode)s, %(street)s, %(number)s, %(subunit)s)"""
    rows, idx = gmPG2.run_ro_queries(queries=[{'cmd': cmd, 'args': query_args}])
    result = rows[0][0]
    if result is None:
        _log.debug('address does not exist')
        for key, val in query_args.items():
            _log.debug('%s: %s', key, val)
        return None
    return result
def get_doc_list(self, doc_type=None):
    """return flat list of document IDs"""
    query_args = {'ID': self.pk_patient, 'TYP': doc_type}
    sql_template = """
select vdm.pk_doc
from blobs.v_doc_med vdm
where
    vdm.pk_patient = %%(ID)s
    %s
order by vdm.clin_when"""
    if doc_type is None:
        snippet = ''
    else:
        try:
            int(doc_type)
            snippet = 'and vdm.pk_type = %(TYP)s'
        except (TypeError, ValueError):
            # doc_type is a name, not a pk
            snippet = 'and vdm.pk_type = (select pk from blobs.doc_type where name = %(TYP)s)'
    cmd = sql_template % snippet
    rows, idx = gmPG2.run_ro_queries(queries=[{'cmd': cmd, 'args': query_args}])
    return [ r[0] for r in rows ]
def getAllParams(self, user=None, workplace=cfg_DEFAULT):
	"""Get names of all stored parameters for a given workplace/(user)/cookie-key.
	This will be used by the ConfigEditor object to create a parameter tree.
	"""
	# if no workplace given: any workplace (= cfg_DEFAULT)
	conditions = [
		'cfg_template.pk=cfg_item.fk_template',
		'cfg_item.workplace=%(wplace)s'
	]
	args = {'wplace': workplace}

	# if no user given: current db user
	if user is None:
		conditions.append('cfg_item.owner=CURRENT_USER')
	else:
		conditions.append('cfg_item.owner=%(usr)s')
		args['usr'] = user

	cmd = """
select name, cookie, owner, type, description
from cfg.cfg_template, cfg.cfg_item
where %s""" % ' and '.join(conditions)

	# retrieve option definition
	rows, idx = gmPG2.run_ro_queries(queries=[{'cmd': cmd, 'args': args}], return_data=True)
	return rows
def refetch_payload(self, ignore_changes=False, link_obj=None):
	"""Fetch field values from backend.

	ignore_changes -- if True, discard any local modifications and
		reload anyway; if False, refuse to reload a modified payload
	link_obj -- optional connection/transaction to run the query on

	Returns True on success, False when the payload was modified
	(and ignore_changes is False) or the instance no longer exists.
	"""
	if self._is_modified:
		compare_dict_likes(self.original_payload, self.fields_as_dict(date_format = None, none_string = None), 'original payload', 'modified payload')
		if not ignore_changes:
			_log.critical('[%s:%s]: cannot reload, payload changed' % (self.__class__.__name__, self.pk_obj))
			return False
		_log.critical('[%s:%s]: loosing payload changes' % (self.__class__.__name__, self.pk_obj))
		#_log.debug('most recently fetched: %s' % self.payload_most_recently_fetched)
		#_log.debug('modified: %s' % self._payload)

	# a dict pk is passed as named args, a scalar pk as a positional one
	query_args = self.pk_obj if isinstance(self.pk_obj, dict) else [self.pk_obj]
	rows, self._idx = gmPG2.run_ro_queries (
		link_obj = link_obj,
		queries = [{'cmd': self.__class__._cmd_fetch_payload, 'args': query_args}],
		get_col_idx = True
	)
	if len(rows) == 0:
		_log.error('[%s:%s]: no such instance' % (self.__class__.__name__, self.pk_obj))
		return False
	if len(rows) > 1:
		raise AssertionError('[%s:%s]: %s instances !' % (self.__class__.__name__, self.pk_obj, len(rows)))

	self._payload = rows[0]
	return True
def _get_revision_history(self, query, args, title):
	"""Format the audit-trail revisions returned by <query> as text lines.

	With a single revision the row itself is formatted; with several,
	each adjacent pair of revisions is rendered as a comparison.
	"""
	rows, idx = gmPG2.run_ro_queries(queries = [{'cmd': query, 'args': args}], get_col_idx = True)

	lines = [u'%s (%s versions)' % (title, rows[0]['row_version'] + 1)]

	if len(rows) == 1:
		# only one revision: just dump its fields
		lines.append(u'')
		lines.extend(format_dict_like (
			rows[0],
			left_margin = 1,
			tabular = True,
			value_delimiters = None,
			eol = None
		))
		return lines

	# rows are ordered newest-first: pair each revision with its predecessor
	for newer, older in zip(rows, rows[1:]):
		lines.append(u'')
		lines.extend(format_dict_likes_comparison (
			older,
			newer,
			title_left = _('Revision #%s') % older['row_version'],
			title_right = _('Revision #%s') % newer['row_version'],
			left_margin = 0,
			key_delim = u' | ',
			data_delim = u' | ',
			missing_string = u'',
			ignore_diff_in_keys = ['audit__action_applied', 'audit__action_when', 'audit__action_by', 'pk_audit', 'row_version', 'modified_when', 'modified_by']
		))
	return lines
def refetch_payload(self, ignore_changes=False, link_obj=None):
	"""Fetch field values from backend.

	ignore_changes -- if True, discard any local modifications and
		reload anyway; if False, refuse to reload a modified payload
	link_obj -- optional connection/transaction to run the query on

	Returns True on success, False when the payload was modified
	(and ignore_changes is False) or the instance no longer exists.
	"""
	if self._is_modified:
		if ignore_changes:
			_log.critical('[%s:%s]: loosing payload changes' % (self.__class__.__name__, self.pk_obj))
			_log.debug('original: %s' % self.original_payload)
			_log.debug('modified: %s' % self._payload)
		else:
			_log.critical('[%s:%s]: cannot reload, payload changed' % (self.__class__.__name__, self.pk_obj))
			return False

	# `type(...) == types.DictType` broke under Python 3 (types.DictType
	# no longer exists); use isinstance(), as the sibling implementation
	# of refetch_payload() in this file already does
	if isinstance(self.pk_obj, dict):
		arg = self.pk_obj			# dict pk: named query args
	else:
		arg = [self.pk_obj]			# scalar pk: positional query arg
	rows, self._idx = gmPG2.run_ro_queries (
		link_obj = link_obj,
		queries = [{'cmd': self.__class__._cmd_fetch_payload, 'args': arg}],
		get_col_idx = True
	)
	if len(rows) == 0:
		_log.error('[%s:%s]: no such instance' % (self.__class__.__name__, self.pk_obj))
		return False
	self._payload = rows[0]
	return True
def get_bill_items(pk_patient=None, non_invoiced_only=False):
	"""Load the bill items of a patient.

	non_invoiced_only -- if True, restrict to items not yet
		attached to a bill

	Returns a list of cBillItem instances.
	"""
	where = u"pk_patient = %(pat)s AND pk_bill IS NULL" if non_invoiced_only else u"pk_patient = %(pat)s"
	rows, idx = gmPG2.run_ro_queries (
		queries = [{'cmd': _SQL_fetch_bill_item_fields % where, 'args': {'pat': pk_patient}}],
		get_col_idx = True
	)
	return [ cBillItem(row = {'data': row, 'idx': idx, 'pk_field': 'pk_bill_item'}) for row in rows ]
def get_incoming_data(order_by=None):
	"""Load all unmatched incoming data items.

	order_by -- optional SQL ORDER BY expression (without the
		'ORDER BY' keyword); items are unordered when None

	NOTE(review): order_by is interpolated into the SQL string, so it
	must never come from untrusted input — confirm all callers pass
	literals only.

	Returns a list of cIncomingData instances.
	"""
	if order_by is None:
		suffix = u'true'
	else:
		suffix = u'true ORDER BY %s' % order_by
	rows, idx = gmPG2.run_ro_queries (
		queries = [{'cmd': _SQL_get_incoming_data % suffix}],
		get_col_idx = True
	)
	return [ cIncomingData(row = {'data': row, 'idx': idx, 'pk_field': 'pk_incoming_data_unmatched'}) for row in rows ]
def _get_db_lang(self):
	"""Return the current i18n language configured for this user's DB account."""
	query = {
		'cmd': 'select i18n.get_curr_lang(%(usr)s)',
		'args': {'usr': self._payload[self._idx['db_user']]}
	}
	rows, idx = gmPG2.run_ro_queries(queries = [query])
	return rows[0][0]
def _get_generic_codes(self):
	"""Return the generic codes linked to this item as cGenericLinkedCode instances."""
	pks = self._payload[self._idx['pk_generic_codes']]
	if len(pks) == 0:
		return []

	rows, idx = gmPG2.run_ro_queries (
		queries = [{
			'cmd': gmCoding._SQL_get_generic_linked_codes % 'pk_generic_code IN %(pks)s',
			'args': {'pks': tuple(pks)}
		}],
		get_col_idx = True
	)
	return [
		gmCoding.cGenericLinkedCode(row = {'data': row, 'idx': idx, 'pk_field': 'pk_lnk_code2item'})
		for row in rows
	]
def __setitem__(self, attribute, value):
	"""Set an attribute, translating symbolic allergy types to their pk.

	When setting 'pk_type' to 'allergy' or 'sensitivity', the value
	is first resolved to the corresponding clin._enum_allergy_type
	primary key.
	"""
	if (attribute == 'pk_type') and (value in ['allergy', 'sensitivity']):
		cmd = 'select pk from clin._enum_allergy_type where value=%s'
		rows, idx = gmPG2.run_ro_queries(queries = [{'cmd': cmd, 'args': [value]}])
		value = rows[0][0]
	gmBusinessDBObject.cBusinessDBObject.__setitem__(self, attribute, value)