Example #1
0
def create_inbox_message(message_type=None, subject=None, patient=None, staff=None, message_category='clinical'):

	success, pk_type = gmTools.input2int(initial = message_type)
	if not success:
		pk_type = create_inbox_item_type(message_type = message_type, category = message_category)

	cmd = """
		INSERT INTO dem.message_inbox (
			fk_staff,
			fk_patient,
			fk_inbox_item_type,
			comment
		) VALUES (
			%(staff)s,
			%(pat)s,
			%(type)s,
			gm.nullify_empty_string(%(subject)s)
		)
		RETURNING pk
	"""
	args = {
		'staff': staff,
		'pat': patient,
		'type': pk_type,
		'subject': subject
	}
	rows, idx = gmPG2.run_rw_queries(queries = [{'cmd': cmd, 'args': args}], return_data = True, get_col_idx = False)

	return cInboxMessage(aPK_obj = rows[0]['pk'])
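A minimal usage sketch for create_inbox_message() above, under assumed values: the message type label, subject text, and the staff/patient primary keys are illustrative and not taken from the examples.

# Hypothetical call: passing a non-integer message_type makes the function
# resolve (or create) the matching dem.inbox_item_type via create_inbox_item_type().
msg = create_inbox_message(
	message_type = 'review results',		# assumed type label
	subject = 'please review the incoming lab results',
	patient = 12,							# assumed patient PK
	staff = 3,								# assumed staff PK
	message_category = 'clinical'
)
# msg is a cInboxMessage wrapping the freshly inserted dem.message_inbox row.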
Example #2
0
def create_inbox_item_type(message_type=None, category=u'clinical'):

    # determine category PK
    success, pk_cat = gmTools.input2int(initial=category)
    if not success:
        args = {u'cat': category}
        cmd = u"""SELECT COALESCE (
			(SELECT pk FROM dem.inbox_item_category WHERE _(description) = %(cat)s),
			(SELECT pk FROM dem.inbox_item_category WHERE description = %(cat)s)
		) AS pk"""
        rows, idx = gmPG2.run_ro_queries(queries=[{'cmd': cmd, 'args': args}])
        if rows[0]['pk'] is None:
            cmd = u"INSERT INTO dem.inbox_item_category (description) VALUES (%(cat)s) RETURNING pk"
            rows, idx = gmPG2.run_rw_queries(queries=[{
                'cmd': cmd,
                'args': args
            }],
                                             return_data=True)
            pk_cat = rows[0]['pk']
        else:
            pk_cat = rows[0]['pk']

    # find type PK or create type
    args = {u'pk_cat': pk_cat, u'type': message_type}
    cmd = u"""SELECT COALESCE (
		(SELECT pk FROM dem.inbox_item_type where fk_inbox_item_category = %(pk_cat)s AND _(description) = %(type)s),
		(SELECT pk FROM dem.inbox_item_type where fk_inbox_item_category = %(pk_cat)s AND description = %(type)s)
	) AS pk"""
    rows, idx = gmPG2.run_ro_queries(queries=[{'cmd': cmd, 'args': args}])
    if rows[0]['pk'] is None:
        cmd = u"""
			INSERT INTO dem.inbox_item_type (
				fk_inbox_item_category,
				description,
				is_user
			) VALUES (
				%(pk_cat)s,
				%(type)s,
				TRUE
			) RETURNING pk"""
        rows, idx = gmPG2.run_rw_queries(queries=[{
            'cmd': cmd,
            'args': args
        }],
                                         return_data=True)

    return rows[0]['pk']
Example #3
0
def create_inbox_item_type(message_type=None, category='clinical'):

	# determine category PK
	success, pk_cat = gmTools.input2int(initial = category)
	if not success:
		args = {'cat': category}
		cmd = """SELECT COALESCE (
			(SELECT pk FROM dem.inbox_item_category WHERE _(description) = %(cat)s),
			(SELECT pk FROM dem.inbox_item_category WHERE description = %(cat)s)
		) AS pk"""
		rows, idx = gmPG2.run_ro_queries(queries = [{'cmd': cmd, 'args': args}])
		if rows[0]['pk'] is None:
			cmd = "INSERT INTO dem.inbox_item_category (description) VALUES (%(cat)s) RETURNING pk"
			rows, idx = gmPG2.run_rw_queries(queries = [{'cmd': cmd, 'args': args}], return_data = True)
			pk_cat = rows[0]['pk']
		else:
			pk_cat = rows[0]['pk']

	# find type PK or create type
	args = {'pk_cat': pk_cat, 'type': message_type}
	cmd = """SELECT COALESCE (
		(SELECT pk FROM dem.inbox_item_type where fk_inbox_item_category = %(pk_cat)s AND _(description) = %(type)s),
		(SELECT pk FROM dem.inbox_item_type where fk_inbox_item_category = %(pk_cat)s AND description = %(type)s)
	) AS pk"""
	rows, idx = gmPG2.run_ro_queries(queries = [{'cmd': cmd, 'args': args}])
	if rows[0]['pk'] is None:
		cmd = """
			INSERT INTO dem.inbox_item_type (
				fk_inbox_item_category,
				description,
				is_user
			) VALUES (
				%(pk_cat)s,
				%(type)s,
				TRUE
			) RETURNING pk"""
		rows, idx = gmPG2.run_rw_queries(queries = [{'cmd': cmd, 'args': args}], return_data = True)

	return rows[0]['pk']
Example #4
0
def set_active_patient(patient=None, forced_reload=False):

	# already active ?
	if isinstance(patient, gmPerson.gmCurrentPatient):
		return True

	if isinstance(patient, gmPerson.cPatient):
		if patient['is_deleted']:
			_log.error('patient is disabled, will not use as active patient: %s', patient)
			return False
	elif isinstance(patient, gmPerson.cPerson):
		if patient['is_deleted']:
			_log.error('patient is disabled, will not use as active patient: %s', patient)
			return False
		patient = patient.as_patient
	elif patient == -1:
		pass
	else:
		# maybe integer ?
		success, pk = gmTools.input2int(initial = patient, minval = 1)
		if not success:
			raise ValueError('<patient> must be either -1, >0, or a cPatient, cPerson or gmCurrentPatient instance, is: %s' % patient)
		# but also valid patient ID ?
		try:
			patient = gmPerson.cPatient(aPK_obj = pk)
		except Exception:
			_log.exception('error changing active patient to [%s]' % patient)
			return False

	if not _verify_staff_chart_access(patient = patient):
		return False

	success = gmPerson.set_active_patient(patient = patient, forced_reload = forced_reload)

	if not success:
		return False

	wx.CallAfter(_do_after_setting_active_patient, patient)
	return True
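A rough usage sketch for set_active_patient() above: an integer (or anything gmTools.input2int() can convert) is turned into a gmPerson.cPatient, chart access is verified, and the UI follow-up runs via wx.CallAfter(). The primary key below is an assumed value.

# Hypothetical call with an integer patient PK (assumed value).
if not set_active_patient(patient = 42):
	_log.error('could not switch the active patient')

# Passing an already-active gmPerson.gmCurrentPatient instance short-circuits and returns True.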
Example #5
0
def create_inbox_message(message_type=None,
                         subject=None,
                         patient=None,
                         staff=None,
                         message_category=u'clinical'):

    success, pk_type = gmTools.input2int(initial=message_type)
    if not success:
        pk_type = create_inbox_item_type(message_type=message_type,
                                         category=message_category)

    cmd = u"""
		INSERT INTO dem.message_inbox (
			fk_staff,
			fk_patient,
			fk_inbox_item_type,
			comment
		) VALUES (
			%(staff)s,
			%(pat)s,
			%(type)s,
			gm.nullify_empty_string(%(subject)s)
		)
		RETURNING pk
	"""
    args = {
        u'staff': staff,
        u'pat': patient,
        u'type': pk_type,
        u'subject': subject
    }
    rows, idx = gmPG2.run_rw_queries(queries=[{
        'cmd': cmd,
        'args': args
    }],
                                     return_data=True,
                                     get_col_idx=False)

    return cInboxMessage(aPK_obj=rows[0]['pk'])
Example #6
0
def set_active_patient(patient=None, forced_reload=False):

	# already active ?
	if isinstance(patient, gmPerson.gmCurrentPatient):
		return True

	if isinstance(patient, gmPerson.cPatient):
		if patient['is_deleted']:
			_log.error('patient is disabled, will not use as active patient: %s', patient)
			return False
	elif isinstance(patient, gmPerson.cPerson):
		if patient['is_deleted']:
			_log.error('patient is disabled, will not use as active patient: %s', patient)
			return False
		patient = patient.as_patient
	elif patient == -1:
		pass
	else:
		# maybe integer ?
		success, pk = gmTools.input2int(initial = patient, minval = 1)
		if not success:
			raise ValueError('<patient> must be either -1, >0, or a cPatient, cPerson or gmCurrentPatient instance, is: %s' % patient)
		# but also valid patient ID ?
		try:
			patient = gmPerson.cPatient(aPK_obj = pk)
		except Exception:
			_log.exception('error changing active patient to [%s]' % patient)
			return False

	if not _verify_staff_chart_access(patient = patient):
		return False

	success = gmPerson.set_active_patient(patient = patient, forced_reload = forced_reload)

	if not success:
		return False

	wx.CallAfter(_do_after_setting_active_patient, patient)
	return True
Example #7
0
def load_data_packs_list():

	dbcfg = gmCfg.cCfgSQL()
	dpl_url = dbcfg.get2 (
		option = dpl_url_option,
		workplace = gmPraxis.gmCurrentPraxisBranch().active_workplace,
		bias = 'workplace',
		default = default_dpl_url
	)

	items = []
	data = []

	dpl_fname = gmNetworkTools.download_data_packs_list(dpl_url)
	if dpl_fname is None:
		return (items, data)
	try:
		_cfg.add_file_source(source = 'data-packs', file = dpl_fname)
	except UnicodeDecodeError:
		_log.exception("cannot read data pack list from [%s]", dpl_fname)
		return (items, data)

	packs = _cfg.get('data packs', 'data packs', source_order = [('data-packs', 'return')])
	if packs is None:
		_log.info('no data packs listed in data packs list file')
		_cfg.remove_source('data-packs')
		return (items, data)

	for pack in packs:
		_log.debug('reading pack [%s] metadata', pack)
		pack_group = u'pack %s' % pack
		name = _cfg.get(pack_group, u'name', source_order = [('data-packs', 'return')])
		pack_url = _cfg.get(pack_group, u'URL', source_order = [('data-packs', 'return')])
		md5_url = pack_url + u'.md5'
		db_min = _cfg.get(pack_group, u'minimum database version', source_order = [('data-packs', 'return')])
		converted, db_min = gmTools.input2int (
			db_min,
			# here we introduced data packs:
			#16,
			0,
			# no use looking at data packs requiring a database > the current database:
			_cfg.get(option = 'database_version')
		)
		if not converted:
			_log.error('cannot convert minimum database version [%s]', db_min)
			continue

		db_max = _cfg.get(pack_group, u'maximum database version', source_order = [('data-packs', 'return')])
		if db_max is None:
			db_max = sys.maxint
		converted, db_max = gmTools.input2int (
			db_max,
			db_min		# max version must be at least db_min
		)
		if not converted:
			_log.error('cannot convert maximum database version [%s]', db_max)
			continue

		if _cfg.get(option = 'database_version') < db_min:
			_log.error('ignoring data pack: current database version (%s) < minimum required database version (%s)', _cfg.get(option = 'database_version'), db_min)
			continue

		if _cfg.get(option = 'database_version') > db_max:
			_log.error('ignoring data pack: current database version (%s) > maximum allowable database version (%s)', _cfg.get(option = 'database_version'), db_max)
			continue

		items.append([name, u'v%s' % db_min, u'v%s' % db_max, pack_url])
		data.append ({
			'name': name,
			'pack_url': pack_url,
			'md5_url': md5_url,
			'db_min': db_min,
			'db_max': db_max
		})

	_cfg.remove_source('data-packs')
	return (items, data)
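A hedged sketch of how the (items, data) pair returned above might be consumed; the dictionary keys mirror those built in the loop, while the logging itself is illustrative only.

# Hypothetical consumer: log each pack applicable to the current database.
items, data = load_data_packs_list()
for pack in data:
	_log.info (
		'data pack [%s] (database v%s..v%s): %s',
		pack['name'], pack['db_min'], pack['db_max'], pack['pack_url']
	)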
Example #8
0
def process_options():

    if _cfg.get(option='-h', source_order=[('cli', 'return')]):
        show_usage()
        sys.exit(0)

    if _cfg.get(option='-?', source_order=[('cli', 'return')]):
        show_usage()
        sys.exit(0)

    if _cfg.get(option='--help', source_order=[('cli', 'return')]):
        show_usage()
        sys.exit(0)

    file2import = _cfg.get(option='--file2import',
                           source_order=[('cli', 'return')])
    if file2import is None:
        exit_with_message('ERROR: option --file2import missing')
    if file2import is True:
        exit_with_message('ERROR: data file missing in option --file2import=')
    try:
        open(file2import).close()
    except IOError:
        _log.exception('cannot open data file')
        exit_with_message(
            'ERROR: cannot open data file in option --file2import=%s' %
            file2import)

    datatype = _cfg.get(option='--data-type', source_order=[('cli', 'return')])
    if datatype is None:
        exit_with_message('ERROR: option --data-type missing')
    if datatype is True:
        exit_with_message('ERROR: data type missing in option --data-type=')
    if datatype.strip() == '':
        exit_with_message(
            'ERROR: invalid data type in option --data-type=>>>%s<<<' %
            datatype)

    db_user = _cfg.get(option='--user', source_order=[('cli', 'return')])
    if db_user is None:
        exit_with_message('ERROR: option --user missing')
    if db_user is True:
        exit_with_message('ERROR: user name missing in option --user=')
    if db_user.strip() == '':
        exit_with_message(
            'ERROR: invalid user name in option --user=>>>%s<<<' % db_user)

    db_host = _cfg.get(option='--host', source_order=[('cli', 'return')])
    if db_host is None:
        _log.debug(
            'option --host not set, using <UNIX domain socket> on <localhost>')
    elif db_host is True:
        exit_with_message('ERROR: host name missing in option --host=')
    elif db_host.strip() == '':
        _log.debug(
            'option --host set to "", using <UNIX domain socket> on <localhost>'
        )
        db_host = None

    db_port = _cfg.get(option='--port', source_order=[('cli', 'return')])
    if db_port is None:
        _log.debug(
            'option --port not set, using <UNIX domain socket> on <localhost>')
    elif db_port is True:
        exit_with_message('ERROR: port value missing in option --port=')
    elif db_port.strip() == '':
        _log.debug(
            'option --port set to "", using <UNIX domain socket> on <localhost>'
        )
        db_port = None
    else:
        converted, db_port = gmTools.input2int(initial=db_port,
                                               minval=1024,
                                               maxval=65535)
        if not converted:
            exit_with_message(
                'ERROR: invalid port in option --port=%s (must be 1024...65535)'
                % db_port)

    gmPG2.log_auth_environment()
    dsn = gmPG2.make_psycopg2_dsn(
        database=gmPG2.default_database,
        host=db_host,
        port=db_port,
        user=db_user,
        password=
        None  # None = force not-required (TRUST/IDENT/PEER) or use-.pgpass-or-$PGPASSFILE
    )
    gmPG2._default_dsn = dsn

    return datatype, file2import
Example #9
0
def process_options():

	if _cfg.get(option = '-h', source_order = [('cli', 'return')]):
		show_usage()
		sys.exit(0)

	if _cfg.get(option = '-?', source_order = [('cli', 'return')]):
		show_usage()
		sys.exit(0)

	if _cfg.get(option = '--help', source_order = [('cli', 'return')]):
		show_usage()
		sys.exit(0)

	file2import = _cfg.get(option = '--file2import', source_order = [('cli', 'return')])
	if file2import is None:
		exit_with_message('ERROR: option --file2import missing')
	if file2import is True:
		exit_with_message('ERROR: data file missing in option --file2import=')
	try:
		open(file2import).close()
	except IOError:
		_log.exception('cannot open data file')
		exit_with_message('ERROR: cannot open data file in option --file2import=%s' % file2import)

	datatype = _cfg.get(option = '--data-type', source_order = [('cli', 'return')])
	if datatype is None:
		exit_with_message('ERROR: option --data-type missing')
	if datatype is True:
		exit_with_message('ERROR: data type missing in option --data-type=')
	if datatype.strip() == '':
		exit_with_message('ERROR: invalid data type in option --data-type=>>>%s<<<' % datatype)

	db_user = _cfg.get(option = '--user', source_order = [('cli', 'return')])
	if db_user is None:
		exit_with_message('ERROR: option --user missing')
	if db_user is True:
		exit_with_message('ERROR: user name missing in option --user=')
	if db_user.strip() == '':
		exit_with_message('ERROR: invalid user name in option --user=>>>%s<<<' % db_user)

	db_host = _cfg.get(option = '--host', source_order = [('cli', 'return')])
	if db_host is None:
		_log.debug('option --host not set, using <UNIX domain socket> on <localhost>')
	elif db_host is True:
		exit_with_message('ERROR: host name missing in option --host=')
	elif db_host.strip() == '':
		_log.debug('option --host set to "", using <UNIX domain socket> on <localhost>')
		db_host = None

	db_port = _cfg.get(option = '--port', source_order = [('cli', 'return')])
	if db_port is None:
		_log.debug('option --port not set, using <UNIX domain socket> on <localhost>')
	elif db_port is True:
		exit_with_message('ERROR: port value missing in option --port=')
	elif db_port.strip() == '':
		_log.debug('option --port set to "", using <UNIX domain socket> on <localhost>')
		db_port = None
	else:
		converted, db_port = gmTools.input2int(initial = db_port, minval = 1024, maxval = 65535)
		if not converted:
			exit_with_message('ERROR: invalid port in option --port=%s (must be 1024...65535)' % db_port)

	gmPG2.log_auth_environment()
	dsn = gmPG2.make_psycopg2_dsn (
		database = gmPG2.default_database,
		host = db_host,
		port = db_port,
		user = db_user,
		password = None			# None = force not-required (TRUST/IDENT/PEER) or use-.pgpass-or-$PGPASSFILE
	)
	gmPG2._default_dsn = dsn

	return datatype, file2import
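A minimal sketch of how a calling script might use process_options() above, assuming logging is already set up; only the two return values documented by the function are used.

# Hypothetical driver code in the importing script.
data_type, fname = process_options()
_log.info('importing data of type [%s] from [%s]', data_type, fname)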
Example #10
0
def load_data_packs_list():

    dbcfg = gmCfg.cCfgSQL()
    dpl_url = dbcfg.get2(
        option=dpl_url_option,
        workplace=gmPraxis.gmCurrentPraxisBranch().active_workplace,
        bias='workplace',
        default=default_dpl_url)

    items = []
    data = []

    dpl_fname = gmNetworkTools.download_data_packs_list(dpl_url)
    if dpl_fname is None:
        return (items, data)
    try:
        _cfg.add_file_source(source='data-packs', file=dpl_fname)
    except UnicodeDecodeError:
        _log.exception("cannot read data pack list from [%s]", dpl_fname)
        return (items, data)

    packs = _cfg.get('data packs',
                     'data packs',
                     source_order=[('data-packs', 'return')])
    if packs is None:
        _log.info('no data packs listed in data packs list file')
        _cfg.remove_source('data-packs')
        return (items, data)

    for pack in packs:
        _log.debug('reading pack [%s] metadata', pack)
        pack_group = u'pack %s' % pack
        name = _cfg.get(pack_group,
                        u'name',
                        source_order=[('data-packs', 'return')])
        pack_url = _cfg.get(pack_group,
                            u'URL',
                            source_order=[('data-packs', 'return')])
        md5_url = pack_url + u'.md5'
        db_min = _cfg.get(pack_group,
                          u'minimum database version',
                          source_order=[('data-packs', 'return')])
        converted, db_min = gmTools.input2int(
            db_min,
            # here we introduced data packs:
            #16,
            0,
            # no use looking at data packs requiring a database > the current database:
            _cfg.get(option='database_version'))
        if not converted:
            _log.error('cannot convert minimum database version [%s]', db_min)
            continue

        db_max = _cfg.get(pack_group,
                          u'maximum database version',
                          source_order=[('data-packs', 'return')])
        if db_max is None:
            db_max = sys.maxint
        converted, db_max = gmTools.input2int(
            db_max,
            db_min  # max version must be at least db_min
        )
        if not converted:
            _log.error('cannot convert maximum database version [%s]', db_max)
            continue

        if _cfg.get(option='database_version') < db_min:
            _log.error(
                'ignoring data pack: current database version (%s) < minimum required database version (%s)',
                _cfg.get(option='database_version'), db_min)
            continue

        if _cfg.get(option='database_version') > db_max:
            _log.error(
                'ignoring data pack: current database version (%s) > maximum allowable database version (%s)',
                _cfg.get(option='database_version'), db_max)
            continue

        items.append([name, u'v%s' % db_min, u'v%s' % db_max, pack_url])
        data.append({
            'name': name,
            'pack_url': pack_url,
            'md5_url': md5_url,
            'db_min': db_min,
            'db_max': db_max
        })

    _cfg.remove_source('data-packs')
    return (items, data)
Example #11
0
def process_options():

    if _cfg.get(option='-h', source_order=[('cli', 'return')]):
        show_usage()
        sys.exit(0)

    if _cfg.get(option='-?', source_order=[('cli', 'return')]):
        show_usage()
        sys.exit(0)

    if _cfg.get(option='--help', source_order=[('cli', 'return')]):
        show_usage()
        sys.exit(0)

    file2import = _cfg.get(option='--file2import',
                           source_order=[('cli', 'return')])
    if file2import is None:
        exit_with_message('ERROR: option --file2import missing')
    if file2import is True:
        exit_with_message('ERROR: data file missing in option --file2import=')
    try:
        open(file2import).close()
    except IOError:
        _log.exception('cannot open data file')
        exit_with_message(
            'ERROR: cannot open data file in option --file2import=%s' %
            file2import)

    datatype = _cfg.get(option='--data-type', source_order=[('cli', 'return')])
    if datatype is None:
        exit_with_message('ERROR: option --data-type missing')
    if datatype is True:
        exit_with_message('ERROR: data type missing in option --data-type=')
    if datatype.strip() == '':
        exit_with_message(
            'ERROR: invalid data type in option --data-type=>>>%s<<<' %
            datatype)

    db_user = _cfg.get(option='--user', source_order=[('cli', 'return')])
    if db_user is None:
        exit_with_message('ERROR: option --user missing')
    if db_user is True:
        exit_with_message('ERROR: user name missing in option --user=')
    if db_user.strip() == '':
        exit_with_message(
            'ERROR: invalid user name in option --user=>>>%s<<<' % db_user)

    db_host = _cfg.get(option='--host', source_order=[('cli', 'return')])
    if db_host is None:
        _log.debug(
            'option --host not set, using <UNIX domain socket> on <localhost>')
    elif db_host is True:
        exit_with_message('ERROR: host name missing in option --host=')
    elif db_host.strip() == '':
        _log.debug(
            'option --host set to "", using <UNIX domain socket> on <localhost>'
        )
        db_host = None

    db_port = _cfg.get(option='--port', source_order=[('cli', 'return')])
    if db_port is None:
        _log.debug(
            'option --port not set, using <UNIX domain socket> on <localhost>')
    elif db_port is True:
        exit_with_message('ERROR: port value missing in option --port=')
    elif db_port.strip() == '':
        _log.debug(
            'option --port set to "", using <UNIX domain socket> on <localhost>'
        )
        db_port = None
    else:
        converted, db_port = gmTools.input2int(initial=db_port,
                                               minval=1024,
                                               maxval=65535)
        if not converted:
            exit_with_message(
                'ERROR: invalid port in option --port=%s (must be 1024...65535)'
                % db_port)

    return datatype, file2import