Example #1
def setup_module():
    """Create a fresh db (once) for all tests in this module"""
    configure_logging(verbosity=2, logfile='unittest.log')
    c = Config()
    if c.get('general', 'in_production'):  # pragma: no cover
        raise RuntimeError("DO NOT run destructive test on production system")
    create_tables(enable_delete=True, **db_params(CONFIG_SECTION))

    # create a "test_region" and a couple bogus facilities
    f1 = Facility(county='KING',
                  npi=10987,
                  zip='12345',
                  organization_name='Reason Medical Center',
                  local_code='RMC')
    f2 = Facility(county='POND',
                  npi=65432,
                  zip='67890',
                  organization_name='No-Reason Medical Center',
                  local_code='NMC')
    conn = db_connection(CONFIG_SECTION)
    conn.session.add(f1)
    conn.session.add(f2)
    conn.session.commit()
    rr1 = ReportableRegion(region_name='test_region', dim_facility_pk=10987)
    conn.session.add(rr1)
    conn.session.commit()
    conn.disconnect()
Example #2
    def transfer_file(self, compress_with=None):
        """Initiate transfer via HTTPS

        :param compress_with: if document isn't already compressed and
          this is set, compress the file before transferring.

        """
        filename = self.document['filename']

        config = Config()
        upload_url = config.get('distribute', 'upload_url')
        user = config.get('distribute', 'username')
        pw = config.get('distribute', 'password')
        payload = {'siteShortName': self.document['reportable_region']}

        content = self.extract_content(compress_with)
        files = {'userfile': content}

        if inProduction():
            logging.info("POST %s to %s" % (filename, upload_url))
            r = requests.post(upload_url, auth=(user, pw), files=files,
                              data=payload)
            # We only get redirected if successful!
            if r.status_code != 302:
                logging.error("failed distrbute POST")
                #Can't rely on status_code, as distribute returns 200
                #with problems in the body.
                logging.error("".join(list(r.iter_content())))
                r.raise_for_status()
            else:
                self.record_transfer()
        else:
            logging.warning("inProduction() check failed, not POSTing "
                            "file '%s' to '%s'", filename, upload_url)
Example #3
 def __init__(self):
     config = Config()
     self.db = config.get('phinms', 'database')
     self.username = config.get('phinms', 'user')
     self.passwd = config.get('phinms', 'password')
     self.workerqueue = config.get('phinms', 'workerqueue')
     self.feedertable = self.workerqueue + '_feeder'
Example #6
def setup_module():
    """Create a fresh db (once) for all tests in this module"""
    configure_logging(verbosity=2, logfile='unittest.log')
    c = Config()
    if c.get('general', 'in_production'):  # pragma: no cover
        raise RuntimeError("DO NOT run destructive test on production system")

    create_tables(enable_delete=True, **db_params(CONFIG_SECTION))
Example #7
 def setUp(self):
     c = Config()
     cfg_value = lambda v: c.get('warehouse', v)
     self.alchemy = AlchemyAccess(database=cfg_value('database'),
                                  host='localhost',
                                  user=cfg_value('database_user'),
                                  password=cfg_value('database_password'))
     self.session = self.alchemy.session
Example #8
    def __init__(self):
        self.config = Config()

        # obtain list of files to process
        path = os.path.abspath(
            os.path.join(os.path.dirname(__file__),
                         "../../../test_hl7_batchfiles"))
        self.filenames = [
            os.path.join(path, file) for file in os.listdir(path)
        ]
Example #9
def inProduction():
    """Simple state check to avoid uploading files to thrid party
    servers and what not when not 'in production'.
    """
    config = Config()
    try:
        return config.get('general', 'in_production')
    except (ConfigParser.NoSectionError, ConfigParser.NoOptionError):  # pragma: no cover
        raise ValueError("Config file doesn't specify "
                         "`[general]in_production` "
                         "unsafe to guess, can't continue")
Example #10
 def setUp(self):
     c = Config()
     cfg_value = lambda v: c.get("longitudinal", v)
     self.alchemy = AlchemyAccess(
         database=cfg_value("database"),
         host="localhost",
         user=cfg_value("database_user"),
         password=cfg_value("database_password"),
     )
     self.session = self.alchemy.session
     self.remove_after_test = []
Example #12
def setup_module():
    """Create a fresh db (once) for all tests in this module"""
    c = Config()
    if c.get('general', 'in_production'):  # pragma: no cover
        raise RuntimeError("DO NOT run destructive test on production system")

    cfg_value = lambda v: c.get('warehouse', v)
    create_tables(cfg_value('create_table_user'),
                  cfg_value('create_table_password'),
                  cfg_value('database'),
                  enable_delete=True)
def setup_module():
    """Create a fresh db (once) for all tests in this module"""
    configure_logging(verbosity=2, logfile='unittest.log')
    c = Config()
    if c.get('general', 'in_production'):  # pragma: no cover
        raise RuntimeError("DO NOT run destructive test on production system")
    create_tables(enable_delete=True, **db_params(CONFIG_SECTION))

    # Load in all the static data in anonymized form
    static_data_file = open(
        os.path.join(os.path.dirname(os.path.abspath(__file__)),
                     'anon_static_db_data.yaml'), 'r')
    load_file(static_data_file)
Example #15
 def test_override(self):
     "same key in second file should override"
     section = 'SECTION'
     key = 'unittest'
     for i in range(2):
         cp = ConfigParser.RawConfigParser()
         cp.add_section(section)
         cp.set(section, key, i)
         with open(self.config_files[i], 'w') as f:
             cp.write(f)
     c = Config(self.config_files)
     self.assertEquals(1, c.get(section, key))
Example #17
 def test_tilde(self):
     "support tilde in directory paths"
     section = 'SECTION'
     key = 'unittest'
     value = "~/tempfile"
     cp = ConfigParser.RawConfigParser()
     cp.add_section(section)
     cp.set(section, key, value)
     with open(self.config_files[0], 'w') as f:
         cp.write(f)
     c = Config(self.config_files)
     self.assertEquals(os.path.expanduser("~/tempfile"),
                       c.get(section, key))
Example #20
def db_params(section):
    """Return dict of database connection values from named config section

    Returned dict includes:
    - user
    - password
    - database  (name of database)

    """
    config = Config()
    database = config.get(section, 'database')
    user = config.get(section, 'database_user')
    password = config.get(section, 'database_password')
    return {'user': user, 'password': password, 'database': database}
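Callers typically splat the returned dict into a connection or table-creation helper, as the setup_module examples above do:

# from setup_module() above; CONFIG_SECTION names a section such as 'warehouse'
create_tables(enable_delete=True, **db_params(CONFIG_SECTION))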
Example #21
 def test_float(self):
     "Looks like a float, should be one"
     section = 'SECTION'
     key = 'unittest'
     values = ['0.01', '42.0', '-67.3']
     cp = ConfigParser.RawConfigParser()
     cp.add_section(section)
     for i in range(len(values)):
         cp.set(section, key + str(i), values[i])
     with open(self.config_files[0], 'w') as f:
         cp.write(f)
     c = Config(self.config_files)
     for i in range(len(values)):
         self.assertEquals(float(values[i]), c.get(section, key + str(i)))
Example #22
def main():  # pragma: no cover
    """Entry point to (re)create the table using config settings"""
    config = Config()
    database = config.get('longitudinal', 'database')
    print "destroy and recreate database %s ? "\
        "('destroy' to continue): " % database,
    answer = sys.stdin.readline().rstrip()
    if answer != 'destroy':
        print "aborting..."
        sys.exit(1)

    user = config.get('longitudinal', 'database_user')
    password = config.get('longitudinal', 'database_password')
    create_tables(user, password, database)
Example #23
 def test_falseness(self):
     "false values should be case insensitive"
     section = 'SECTION'
     key = 'unittest'
     values = ['FALSE', 'false', 'False', ' f ']
     cp = ConfigParser.RawConfigParser()
     cp.add_section(section)
     for i in range(len(values)):
         cp.set(section, key + str(i), values[i])
     with open(self.config_files[0], 'w') as f:
         cp.write(f)
     c = Config(self.config_files)
     for i in range(len(values)):
         self.assertEquals(False, c.get(section, key + str(i)))
Example #26
 def test_int(self):
     "Looks like an int, should be one (including 0,1)"
     section = 'SECTION'
     key = 'unittest'
     values = ['0', '1', '-67', 42, -1]
     cp = ConfigParser.RawConfigParser()
     cp.add_section(section)
     for i in range(len(values)):
         cp.set(section, key + str(i), values[i])
     with open(self.config_files[0], 'w') as f:
         cp.write(f)
     c = Config(self.config_files)
     for i in range(len(values)):
         self.assertEquals(int(values[i]), c.get(section, key + str(i)))
Example #27
 def test_truthiness(self):
     "truth value should be case insensitive"
     section = 'SECTION'
     key = 'unittest'
     values = ['TRUE', 'true', 'True', ' t ']
     cp = ConfigParser.RawConfigParser()
     cp.add_section(section)
     for i in range(len(values)):
         cp.set(section, key+str(i), values[i])
     with open(self.config_files[0], 'w') as f:
         cp.write(f)
     c = Config(self.config_files)
     for i in range(len(values)):
         self.assertEquals(True, c.get(section, key+str(i)))
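Taken together, the Config tests above pin down the coercion Config.get applies to raw values: ints and floats are converted, true/false strings (any case, surrounding whitespace, or a bare 't'/'f') become booleans, and '~' paths are expanded. A minimal sketch of that coercion, written only from what the tests assert (Config's real implementation may differ):

import os

def coerce_value(raw):
    # Sketch of the coercion implied by the tests; not Config's actual code.
    value = raw.strip() if isinstance(raw, str) else raw
    if isinstance(value, str):
        if value.lower() in ('true', 't'):
            return True
        if value.lower() in ('false', 'f'):
            return False
        try:
            return int(value)
        except ValueError:
            pass
        try:
            return float(value)
        except ValueError:
            pass
        if value.startswith('~'):
            return os.path.expanduser(value)
    return value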
Example #30
def main():  # pragma: no cover
    """Entry point to (re)create the table using config settings"""
    config = Config()
    dbname = config.get('warehouse', 'database')
    print "destroy and recreate database %s ? "\
        "('destroy' to continue): " % dbname,
    answer = sys.stdin.readline().rstrip()
    if answer != 'destroy':
        print "aborting..."
        sys.exit(1)

    user = config.get('warehouse', 'create_table_user')
    print "password for PostgreSQL user:", user
    password = getpass.getpass()
    create_tables(user, password, dbname)
Example #33
    def testPhinmsTransfer(self):
        # need a document in the db
        self.create_test_file(compression='gzip',
                              report_type='longitudinal')

        # fake a transfer of this object
        context = PHINMS_Transfer(testing.DummyRequest())
        context.request.fs = self.fs
        context.request.document_store = self.document_store
        context = context[str(self.oid)]
        self.assertFalse(inProduction())  # avoid accidental transfers!
        context.transfer_file()

        self.assertEqual(self.report_type, 'longitudinal')
        config = Config()
        path = config.get('phinms', self.report_type)
        self.assertEqual(context.outbound_dir, path)
Example #34
    def _generate_output_filename(self, start_date=None, end_date=None):
        start_date = self.criteria.start_date if start_date is None\
            else start_date
        end_date = self.criteria.end_date if end_date is None else end_date

        datestr = end_date.strftime('%Y%m%d')
        if start_date != end_date:
            datestr = '-'.join(
                (start_date.strftime('%Y%m%d'), end_date.strftime('%Y%m%d')))

        filename = self.criteria.report_method + '-' + datestr + '.txt'

        config = Config()
        tmp_dir = config.get('general', 'tmp_dir', default='/tmp')

        filepath = os.path.join(tmp_dir, filename)
        return filepath
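With a hypothetical report_method of 'essence' covering 2010-01-01 through 2010-01-07, this yields '/tmp/essence-20100101-20100107.txt'; a single-day range collapses to one date stamp. A quick worked check of the naming logic (report_method and tmp_dir values assumed):

from datetime import date
import os

# mirrors the logic above for a hypothetical single-day range
start = end = date(2010, 1, 7)
datestr = end.strftime('%Y%m%d')
filename = 'essence' + '-' + datestr + '.txt'
assert os.path.join('/tmp', filename) == '/tmp/essence-20100107.txt'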
Example #35
def test_configure_logging():
    logfile = configure_logging(verbosity=2,
                                logfile='unittest.log',
                                append=False)
    logging.debug("just testing")
    # Thanks to nose working so hard to capture logging, it's quite
    # difficult to test - hand verified.
    assert (logfile == os.path.join(Config().get('general', 'log_dir'),
                                    'unittest.log'))
def transform_channels():
    """Apply default transform to PHEME channels"""

    doc = """
    Mirth channels can be easily exported in XML format.  This utility
    provides a mechanism to alter an export for subsequent import.
    Useful for altering details such as database name and user
    authentication.

    NB - values defined in the project configuration file will be used
    unless provided as optional arguments.  See
    `pheme.util.config.Config`
    """
    config = Config()
    ap = argparse.ArgumentParser(description=doc)
    ap.add_argument("-d", "--database", dest="db",
                    default=config.get('warehouse', 'database'),
                    help="name of database (overrides "
                    "[warehouse]database)")
    ap.add_argument("-u", "--user", dest="user",
                    default=config.get('warehouse', 'database_user'),
                    help="database user (overrides "
                    "[warehouse]database_user)")
    ap.add_argument("-p", "--password", dest="password",
                    default=config.get('warehouse', 'database_password'),
                    help="database password (overrides [warehouse]"
                    "database_password)")
    ap.add_argument("--input_dir", dest="input_dir",
                    default=config.get('warehouse', 'input_dir'),
                    help="filesystem directory for channel to poll "
                    "(overrides [warehouse]input_dir)")
    ap.add_argument("--output_dir", dest="output_dir",
                    default=config.get('warehouse', 'output_dir'),
                    help="filesystem directory for channel output "
                    "(overrides [warehouse]output_dir)")
    ap.add_argument("--error_dir", dest="error_dir",
                    default=config.get('warehouse', 'error_dir'),
                    help="filesystem directory for channel errors "
                    "(overrides [warehouse]error_dir)")
    ap.add_argument("source_directory",
                    help="directory containing source channel "
                    "definition files")
    ap.add_argument("target_directory",
                    help="directory to write transformed channel "
                    "definition files")
    args = ap.parse_args()
    source_dir = os.path.realpath(args.source_directory)
    target_dir = os.path.realpath(args.target_directory)

    transformer = TransformManager(src=None,
                                   target_dir=target_dir,
                                   options=args)
    for c in CHANNELS:
        transformer.src = os.path.join(source_dir, '%s.xml' % c)
        transformer()
    # no transformation on codetemplates at this time - but the
    # importer expects the codetemplates.xml file to be in the same
    # directory, so copy it over.
    shutil.copy(os.path.join(source_dir, 'codetemplates.xml'), target_dir)
Example #39
def setup_module():
    """Populate database with test data for module tests"""

    c = Config()
    if c.get('general', 'in_production'):  # pragma: no cover
        raise RuntimeError("DO NOT run destructive test on production system")

    "Pull in the filesystem dump from a previous mirth run"
    mi = MirthInteraction()
    mi.restore_database()

    "Run a quick sanity check, whole module requires a populated db"
    connection = db_connection('warehouse')
    count = connection.session.query(HL7_Msh).count()
    connection.disconnect()

    if count < 4000:
        err = "Minimal expected count of records not present. "\
            "Be sure to run 'process_testfiles_via_mirth' as a prerequisite"
        raise RuntimeError(err)
Example #40
    def __init__(self, verbosity=0, source_db=None):
        self.verbosity = verbosity
        self.source_db = source_db
        config = Config()
        self.phinms_receiving_dir = config.get('phinms', 'receiving_dir')
        self.phinms_archive_dir = config.get('phinms', 'archive_dir')

        self.source_dir = self.phinms_receiving_dir

        # Confirm the required directories are present
        if not os.path.isdir(self.phinms_receiving_dir):
            raise ValueError("Can't find required directory %s" %
                             self.phinms_receiving_dir)

        UPLOAD_PORT = config.get('pheme_http_receiver', 'port')
        UPLOAD_HOST = config.get('pheme_http_receiver', 'host')
        self.http_pool = HTTPConnectionPool(host=UPLOAD_HOST,
                                            port=UPLOAD_PORT,
                                            timeout=20)
        self._copy_tempdir = None
Example #43
def transfer_document(document_id, transfer_agent,
                      compress_with=None):
    """Web API client call to request transfer of given document ID

    :param document_id: the document ID to transfer, likely returned
      from a document_store() call on the same Web API

    :param transfer_agent: such as 'phin-ms' or 'distribute'.

    :param compress_with: if additional compression is desired, this
      may be set to 'zip' or 'gzip', to be performed before sending.

    """
    query_params = dict()
    if compress_with is not None:
        query_params['compress_with'] = compress_with

    config = Config()
    parts = dict()
    parts['doc'] = document_id
    parts['host'] = config.get("WebAPI", "host")
    parts['port'] = config.get("WebAPI", "port")
    parts['agent'] = transfer_agent

    url = 'http://%(host)s:%(port)s/%(agent)s/%(doc)s' % parts
    if query_params:
        url = url + '?' +\
            '&'.join([k+'='+v for k, v in query_params.items()])

    # Initiate request, wait on response
    r = requests.post(url)
    if r.status_code != 200:  # pragma: no cover
        failure = "Failed POST (%d) for transfer request: %s" %\
            (r.status_code, url)
        logging.error(failure)
        logging.error(r.text)
        raise RuntimeError(failure)
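A hypothetical client call, assuming a document ID previously returned by document_store() on the same Web API:

# 'abc123' is a placeholder document ID; agent and compression per docstring
transfer_document('abc123', 'distribute', compress_with='gzip')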
Example #44
def url_builder(predicate=None, resource=None, view=None, query_params={}):
    """Build webAPI url from config and passed values

    :param predicate: desired action or type of document
    :param resource: filename or object identifier
    :param view: specialized view, such as metadata
    :param query_params: dictionary of key, values to append

    returns URL ready for request, post, etc.

    """
    config = Config()
    url = 'http://%s:%s' % (config.get("WebAPI", "host"),
                            config.get("WebAPI", "port"))
    if predicate:
        url = '/'.join((url, predicate))
    if resource:
        url = '/'.join((url, resource))
    if view:
        url = '/'.join((url, '@@' + view))
    if query_params:
        url = '?'.join((url,
                        '&'.join([k+'='+v for k, v in query_params.items()])))
    return url
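Assuming [WebAPI]host and port of 'localhost' and '6543' (values invented for illustration), the pieces combine as follows; note that query_params values must already be strings, since they are concatenated directly:

# hypothetical config: host='localhost', port='6543'
url = url_builder(predicate='documents', resource='report.txt',
                  view='metadata', query_params={'limit': '10'})
assert url == 'http://localhost:6543/documents/report.txt/@@metadata?limit=10'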
Example #46
def create_tables(user, password, dbname, enable_delete=False):
    """Create the warehouse database tables.

    NB the config [warehouse]database_user is granted SELECT and
    INSERT permissions (plus DELETE if enable_delete is set).

    :param user: database user with table creation grants
    :param password: the database password
    :param dbname: the database name to populate
    :param enable_delete: testing hook, override for testing needs

    """
    engine = create_engine("postgresql://%s:%s@localhost/%s" %\
                               (user, password, dbname))
    metadata.drop_all(bind=engine)
    metadata.create_all(bind=engine)

    def bless_user(user):
        engine.execute("""BEGIN; GRANT SELECT, INSERT, UPDATE %(delete)s ON
                       hl7_raw_message,
                       hl7_msh,
                       hl7_visit,
                       hl7_dx,
                       hl7_obr,
                       hl7_obx,
                       hl7_nte,
                       hl7_spm TO %(user)s;
                       COMMIT;""" % {
            'delete': ", DELETE" if enable_delete else '',
            'user': user
        })
        # Sequences also require UPDATE
        engine.execute("""BEGIN; GRANT SELECT, UPDATE ON
                       hl7_dx_hl7_dx_id_seq,
                       hl7_msh_hl7_msh_id_seq,
                       hl7_obr_hl7_obr_id_seq,
                       hl7_obx_hl7_obx_id_seq,
                       hl7_nte_hl7_nte_id_seq,
                       hl7_spm_hl7_spm_id_seq,
                       hl7_raw_message_hl7_raw_message_id_seq,
                       hl7_visit_hl7_visit_id_seq TO %(user)s; COMMIT;""" %
                       {'user': user})

    # Bless the mirth user with the minimal set of privileges
    # Mirth only SELECTs and INSERTs at this time

    bless_user(Config().get('warehouse', 'database_user'))
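Example #30 above is the interactive entry point for this function; called directly it looks like the following (credentials invented for illustration):

# table-owner credentials would normally come from Config() / getpass
create_tables('table_owner', 's3cret', 'warehouse_test', enable_delete=True)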
Example #47
 def test_default(self):
     "Asking for missing value with a default"
     c = Config(self.config_files)
     self.assertEquals(42, c.get('Lifes', 'Answer', 42))
Example #49
class MirthInteraction(object):
    """Abstraction to interact with Mirth Connect for testing"""
    WAIT_INTERVAL = 15
    TIMEOUT = 300

    def __init__(self):
        self.config = Config()

        # obtain list of files to process
        path = os.path.abspath(
            os.path.join(os.path.dirname(__file__),
                         "../../../test_hl7_batchfiles"))
        self.filenames = [
            os.path.join(path, file) for file in os.listdir(path)
        ]

    def prepare_filesystem(self):
        # create clean database (includes non-production sanity check)
        setup_module()

        # wipe previous run(s) files
        mirth_user = self.config.get('mirth', 'mirth_system_user')
        for dir in ('input_dir', 'output_dir', 'error_dir'):
            wipe_dir_contents(self.config.get('warehouse', dir), mirth_user)

    def process_batchfiles(self):
        """Feed the testfiles to mirth - block till done"""
        self.prepare_filesystem()
        require_mirth()
        for batchfile in self.filenames:
            copy_file_to_dir(batchfile,
                             self.config.get('warehouse', 'input_dir'),
                             self.config.get('mirth', 'mirth_system_user'))

        # wait for all files to appear in error or output dirs,
        # providing occasional output and raising if we appear hung
        last_count = 0
        last_time = time.time()
        output_dir = self.config.get('warehouse', 'output_dir')
        error_dir = self.config.get('warehouse', 'error_dir')
        while last_count < len(self.filenames):
            time.sleep(self.WAIT_INTERVAL)
            count = len(os.listdir(output_dir)) + len(os.listdir(error_dir))
            if count > last_count:
                last_count = count
                last_time = time.time()
            if time.time() - self.TIMEOUT > last_time:
                raise RuntimeError("TIMEOUT exceeded waiting on Mirth")
            print "Waiting on mirth to process files...(%d of %d)" %\
                (last_count, len(self.filenames))

    def persist_database(self):
        """Write the database contents to disk"""
        fsp = FilesystemPersistence(\
            database=self.config.get('warehouse', 'database'),
            user=self.config.get('warehouse', 'database_user'),
            password=self.config.get('warehouse', 'database_password'))
        fsp.persist()

    def restore_database(self):
        """Pull previously persisted data into database"""
        fsp = FilesystemPersistence(\
            database=self.config.get('warehouse', 'database'),
            user=self.config.get('warehouse', 'database_user'),
            password=self.config.get('warehouse', 'database_password'))
        fsp.restore()
Example #50
 def _set_report_type(self, report_type, patient_class=None):
     if report_type == 'essence' and patient_class:
         report_type += '_pc' + patient_class
     config = Config()
     self._outbound_dir = config.get('phinms', report_type)
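The key lookup composes the report type and patient class, so an 'essence' report for patient class 'E' (values hypothetical) reads its outbound directory from the [phinms]essence_pcE key:

# report_type='essence', patient_class='E'  ->  report_type 'essence_pcE'
# outbound dir then comes from config.get('phinms', 'essence_pcE')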
class MirthShell(object):
    """Sets up and executes common tasks via the mirth shell

    We have a few common patterns, such as deploying test and
    production versions of the mirth channels.  This manages pulling
    in configuration details and setting up the necessary paths to
    make it scriptable.

    """
    def __init__(self):
        self.config = Config()
        self.mirth_home = self.config.get('mirth', 'mirth_home')
        self.mirth_system_user = self.config.get('mirth', 'mirth_system_user')

    def write_script(
        self,
        script_file,
        imports=[],
        exports=[],
    ):
        """Generate script for mirth shell to run

        :param script_file: An open file handle to a writable file
        :param imports: The mirth channel(s) XML export to import
        :param exports: The list of (channel name, output path) to
          export.

        Writes the necessary mirth shell instructions to the
        script_file so that the file can be executed via the mirth
        shell.  NB - this takes the liberty of relaxing the 0600 mode
        settings hardcoded in NamedTemporaryFile so mirth shell will
        have read access if running as a different user.

        Codetemplates are also imported or exported (if imports or
        exports are defined) into the same directory as the first named
        import / export in a file named "codetemplates.xml".

        """
        for channel in imports:
            assert (os.path.exists(channel))
            script_file.write("import %s force\n" % channel)
        for channel, output in exports:
            script_file.write("export %s %s\n" % (channel, output))
        if imports:
            codetemplates = os.path.join(os.path.dirname(imports[0]),
                                         "codetemplates.xml")
            script_file.write("importcodetemplates %s\n" % codetemplates)
            script_file.write("deploy\n")
            script_file.write("status\n")
        if exports:
            codetemplates = os.path.join(os.path.dirname(exports[0][1]),
                                         "codetemplates.xml")
            script_file.write("exportcodetemplates %s\n" % codetemplates)
        script_file.flush()
        script_file.seek(0)
        os.chmod(script_file.name, 0644)

    def execute_script(self, script):
        "Execute the given script via the mirth shell"
        orig_dir = os.getcwd()
        try:
            os.chdir(self.mirth_home)
            sudo = ''
            if getpass.getuser() != self.mirth_system_user:
                sudo = 'sudo -H -u %s' % self.mirth_system_user
            command = "%s ./mccommand -s %s" % (sudo, script.name)
            print command
            os.system(command)
        finally:
            os.chdir(orig_dir)
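A sketch of driving MirthShell end to end, assuming a pair of exported channel definitions on disk (paths hypothetical); NamedTemporaryFile matches the docstring's note about relaxing file modes:

import tempfile

shell = MirthShell()
with tempfile.NamedTemporaryFile(mode='w+') as script:
    # write_script asserts these (hypothetical) paths exist before importing
    shell.write_script(script, imports=['/tmp/channels/channel_a.xml',
                                        '/tmp/channels/channel_b.xml'])
    shell.execute_script(script)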
Example #54
 def test_absent(self):
     "no config files present shouldn't raise"
     c = Config(self.config_files)
     self.assertTrue(c)
Example #56
class GenerateReport(object):
    """ Process options and generate the requested report.  Optionally
    persists the file to the filesystem, and uploads to the DOH sftp
    server.

    """
    __version__ = '0.2'
    config = Config()
    IGNORE_SITE = config.get('longitudinal', 'ignore_site', default='')

    # Order matters, create a tuple of paired values (reportColumn,
    # essenceColumn) - NB, the Diagnosis column is being bastardized.
    # Previously there was an SQL function to do the subselect, but it
    # ran way too slow.  Now contains the foreign key to join w/ the
    # diagnosis for the respective visit.

    diagnosis_column_index = 7
    patient_class_column_index = 11
    columns = (
        ('Hosp', 'hospital'),
        ('Reg Date', 'visit_date'),
        ('Time', 'visit_time'),
        ('Sex', 'gender'),
        ('Age', 'age'),
        ('Reason For Visit', 'chief_complaint'),
        ('Zip Code', 'zip'),
        ('Diagnosis', 'visit_pk'),
        ('Admit Status', 'gipse_disposition'),
        ('Medical Record No.', 'patient_id'),
        ('Visit Record No.', 'visit_id'),
        ('Service Area', 'patient_class'),
    )
    assert (columns[diagnosis_column_index][1] == 'visit_pk')
    assert (columns[patient_class_column_index][1] == 'patient_class')

    def __init__(self,
                 user=None,
                 password=None,
                 report_criteria=None,
                 datefile=None):
        """Initialize report generation.

        :param user: database user
        :param password: database password
        :param report_criteria: ReportCriteria defining specifics
        :param datefile: useful for persistent walks through time

        """
        self.user = user
        self.password = password
        self.criteria = report_criteria
        self.database = self.criteria.database
        if datefile:
            assert ((self.criteria.start_date,
                     self.criteria.end_date) == datefile.get_date_range())
            self.datePersistence = datefile

        self._diags = {}
        self._prepare_output_file()
        self._prepare_columns()
        self._set_transport()

    def _prepare_columns(self):
        # Don't include the patient_class column if splitting out by
        # patient_class
        if self.criteria.patient_class:
            len_b4 = len(self.columns)
            self.columns =\
                self.columns[:self.patient_class_column_index] \
                + self.columns[self.patient_class_column_index + 1:]
            assert (len(self.columns) + 1 == len_b4)

    def _set_transport(self):
        """Plug in the appropriate transport mechanism"""
        # Transport strategies differ for the different reports
        if self.criteria.reportable_region:
            self._transport = Distribute_client(zip_first=True)
        else:
            self._transport = PHINMS_client(zip_first=True)

    def _generate_output_filename(self, start_date=None, end_date=None):
        start_date = self.criteria.start_date if start_date is None\
            else start_date
        end_date = self.criteria.end_date if end_date is None else end_date

        datestr = end_date.strftime('%Y%m%d')
        if start_date != end_date:
            datestr = '-'.join(
                (start_date.strftime('%Y%m%d'), end_date.strftime('%Y%m%d')))

        filename = self.criteria.report_method + '-' + datestr + '.txt'

        config = Config()
        tmp_dir = config.get('general', 'tmp_dir', default='/tmp')

        filepath = os.path.join(tmp_dir, filename)
        return filepath

    def _prepare_output_file(self):
        """Open the local filesystem file for output"""
        filepath = self._generate_output_filename(
            start_date=self.criteria.start_date,
            end_date=self.criteria.end_date)

        # watch for oversight errors; notify if like report exists -
        # unless it's size zero (from a previous failed run)
        if os.path.exists(filepath) and os.path.getsize(filepath):
            logging.warning("Found requested report file already "\
                             "exists - overwriting: '%s'"\
                             % filepath)

        self.output = open(filepath, 'w')
        self._output_filename = self.output.name

    @property
    def output_filename(self):
        if not hasattr(self, '_output_filename'):
            raise RuntimeError("prerequisite call to "
                               "_prepare_output_file() "
                               "didn't happen!")
        return self._output_filename

    def _header(self):
        if self.criteria.include_vitals:
            columns = [c[0] for c in self.columns]
            columns += ('Measured Temperature', 'O2 Saturation',
                        'Self-Reported Influenza Vaccine',
                        'Self-Reported H1N1 Vaccine')
            return '|'.join(columns)

        else:
            return '|'.join([c[0] for c in self.columns])

    def _build_join_tables(self):
        """ Scope continues to grow, build all join tables necessary
        for the query.  Some are only necessary with certain features
        on.
        """
        # Always need the list of reportable visits
        self._build_visit_join_table()

        if self.criteria.include_vitals:
            self._build_vitals_join_table()

    def _build_visit_join_table(self):
        """ Helper in selection of visits for the report - this method
        builds a temporary table and populates it with the visit_pks
        that belong in the report.  This should include all visit_pks
        with the matching admit_datetime as well as any that have
        received updates since the last like report was produced.

        """
        # If include_vitals is on, we also need the visit_id to keep
        # the joins manageable.  Vitals don't have a patient class, so
        # you can't join on the same values.

        sql = "CREATE TEMPORARY TABLE reportable_pks (pk "\
            "integer not null unique)"
        selectCols = "fact_visit.pk"

        self._getConn()
        self.access.raw_query(sql)

        # If we're only selecting those facilities in a region, the SQL
        # is more complicated - build up the respective clauses.
        joinClause = regionClause = ""
        if self.criteria.reportable_region:
            joinClause = "JOIN internal_reportable_region ON "\
                "internal_reportable_region.dim_facility_pk = "\
                "fact_visit.dim_facility_pk"
            regionClause = "AND region_name = '%s'" %\
                self.criteria.reportable_region

        # Another HACK!  One site is not even wanted by the state DOH,
        # as it's being duplicated from another source, and ESSENCE
        # can't help but count them twice.  Remove this one site
        # regardless
        else:
            joinClause = "JOIN internal_reportable_region ON "\
                "internal_reportable_region.dim_facility_pk = "\
                "fact_visit.dim_facility_pk"
            regionClause = "AND region_name = '%s'" % self.IGNORE_SITE

        # Limit by patient_class if requested.  Note we may still end
        # up with visit ids that have changed patient classes, so more
        # pruning later is necessary.
        pc_limit = ""
        if self.criteria.patient_class:
            pc_limit = "AND patient_class = '%c'" %\
                self.criteria.patient_class

        # Start with all visits for the requested date range
        sql = "INSERT INTO reportable_pks SELECT %s FROM "\
              "fact_visit %s WHERE admit_datetime BETWEEN '%s' AND "\
              "'%s' %s %s" %\
              (selectCols, joinClause, self.criteria.start_date,
               self.criteria.end_date + timedelta(days=1),
               pc_limit, regionClause)

        self.access.raw_query(sql)

        if self.criteria.include_updates:
            # In this case, add all visits with updates since the
            # last run, but no newer than the requested date (in case
            # we're building reports forward from historical data)
            sql = "SELECT max(processed_datetime) FROM internal_report "\
                  "WHERE report_method = '%s'" % self.criteria.report_method

            cursor = self.access.raw_query(sql)
            last_report_generated = cursor.fetchall()[0][0]
            if last_report_generated is None:
                last_report_generated = '2009-01-01'  # our epoch
            logging.debug("including updates, last_report_generated: "\
                              "%s", last_report_generated)
            sql = "INSERT INTO reportable_pks SELECT %(sel_cols)s FROM "\
                  "fact_visit %(join_clause)s LEFT JOIN reportable_pks ON "\
                  "reportable_pks.pk = fact_visit.pk WHERE "\
                  "last_updated > '%(last_report)s' AND admit_datetime "\
                  "< '%(date)s' AND reportable_pks.pk IS NULL "\
                  "%(pc_limit)s %(region_clause)s" %\
                  {'sel_cols': selectCols,
                   'last_report': last_report_generated,
                   'date': self.criteria.end_date + timedelta(days=1),
                   'pc_limit': pc_limit,
                   'join_clause': joinClause,
                   'region_clause': regionClause}
            self.access.raw_query(sql)

        cursor = self.access.raw_query(
            "SELECT COUNT(*) FROM reportable_pks")
        logging.debug("%d visits to report on", cursor.fetchall()[0][0])

    def _build_vitals_join_table(self):
        """When report is to include vitals - we use an additional
        temporary table (visit_loinc_data) to hold the data for more
        timely queries.

        Like the rest of the report, the list of interesting visits is
        limited to the rows in reportable_pks - see
        _build_visit_join_table() for details.

        """
        raise ValueError('not ported yet')
        sql = """
          CREATE TEMPORARY TABLE visit_loinc_data (
            visit_id VARCHAR(255) not null,
            patient_class CHAR(1) default null,
            observation_id VARCHAR(255) not null,
            observation_result VARCHAR(255) not null)
          """
        self._getConn()
        self.access.raw_query(sql)

        sql = """
          INSERT INTO visit_loinc_data (visit_id, patient_class,
          observation_id, observation_result) SELECT visit.visit_id,
          visit.patient_class, observation_id,
          observation_result FROM visit JOIN hl7_visit ON
          visit.visit_id = hl7_visit.visit_id JOIN hl7_obx ON
          hl7_visit.hl7_msh_id = hl7_obx.hl7_msh_id JOIN
          reportable_pks ON reportable_pks.visit_id = visit.visit_id
          AND reportable_pks.patient_class = visit.patient_class
          WHERE
          observation_id in ('8310-5', '20564-1', '46077-4',
          '29544-4')
          """
        self.access.raw_query(sql)

    def _select_from_essence_view(self):
        """Build up the SQL select statement to be used in gathering
        the data for this report.

        """
        stmt = """SELECT %s FROM essence e JOIN reportable_pks ri
        ON e.visit_pk = ri.pk""" %\
            (','.join(['e.' + c[1] for c in self.columns]))
        return stmt

    def _select_diagnosis(self):
        """ Need to pull in all the diagnosis data for this report.
        This is saved in an instance dictionary for use in
        self._diagnosis to generate the list of diagnoses for each
        respective visit.

        A list of unique diagnoses ordered by rank is required.
        """
        # We order descending on dx_datetime as the most recent should
        # be best.  Add any others as the persistence mechanism only
        # saves a unique icd9 dx that has changed status.
        stmt = "SELECT fact_visit_pk, rank, icd9 "\
               "FROM assoc_visit_dx JOIN "\
               "dim_dx ON dim_dx_pk = dim_dx.pk JOIN "\
               "reportable_pks ON "\
               "assoc_visit_dx.fact_visit_pk = reportable_pks.pk "\
               "ORDER BY dx_datetime DESC"
        cursor = self.access.raw_query(stmt)
        for row in cursor.fetchall():
            visit_pk = row[0]
            if visit_pk in self._diags:
                self._diags[visit_pk].add(row[0], row[1], row[2])
            else:
                self._diags[visit_pk] = \
                    SortedDiagnosis(row[0], row[1], row[2])

    def _diagnosis(self, visit_pk):
        if visit_pk in self._diags:
            return [self._diags[visit_pk].__repr__()]
        else:
            return ['']

    def _select_vitals(self):
        """ Need to pull in all the vitals data for this report.
        This is saved in an instance dictionary for use in
        self._vitals_for_visit to generate the list of vitals for each
        respective visit.

        This is an effective NOP when self.criteria.include_vitals = False

        """
        if not self.criteria.include_vitals:
            return None

        self._vitals = {}
        stmt = """SELECT reportable_pks.visit_pk,
          observation_id, observation_result
          FROM visit_loinc_data JOIN reportable_pks ON
          reportable_pks.visit_id = visit_loinc_data.visit_id"""

        cursor = self.access.raw_query(stmt)
        for row in cursor.fetchall():
            visit_pk = row[0]
            if visit_pk in self._vitals:
                self._vitals[visit_pk].add(row[1], row[2])
            else:
                self._vitals[visit_pk] = \
                    Vitals(row[1], row[2])
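
    # The accumulate-or-create branch above could be collapsed with
    # dict.setdefault -- a sketch, assuming Vitals() with no arguments
    # is valid (as _vitals_for_visit below implies) and Vitals(a, b)
    # is equivalent to Vitals() followed by add(a, b):
    #
    #   for row in cursor.fetchall():
    #       self._vitals.setdefault(row[0], Vitals()).add(row[1], row[2])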

    def _vitals_for_visit(self, visit_pk):
        """Returns the list of vitals for the visit in question.

        This is an effective NOP when self.criteria.include_vitals = False

        """
        if not self.criteria.include_vitals:
            return []

        if visit_pk in self._vitals:
            return self._vitals[visit_pk].__repr__()
        else:
            return Vitals().__repr__()

    def _write_report(self, save_report=False):
        """ Write out and potentially store the results.

        Generate results via database queries and write the results to
        self.output.

        :param save_report: If set, persist the document and related
          metadata to the mbds archive.

        :returns: the document ID (the mbds archive key), if saved

        """
        out = self.output
        print >> out, self._header()
        self._build_join_tables()
        self._select_diagnosis()
        self._select_vitals()
        cursor = self.access.raw_query(self._select_from_essence_view())
        for row in cursor.fetchall():
            # Each row is the columns up to the diagnosis + the
            # comma-separated diagnosis + the rest of the columns,
            # finally followed by vitals if so configured
            visit_pk = row[self.diagnosis_column_index]  # yuck, but true
            print >> out,\
                '|'.join([strSansNone(column) for column in
                          row[:self.diagnosis_column_index]] +
                         self._diagnosis(visit_pk) +
                         [strSansNone(column) for column in
                          row[self.diagnosis_column_index + 1:]] +
                         self._vitals_for_visit(visit_pk))

        # Close the file and persist to the document archive if
        # requested
        self.output.close()
        if save_report:
            metadata = {
                k: v
                for k, v in self.criteria._crit.items() if v is not None
            }

            # At this point, all documents are of 'essence' type
            return document_store(document=self.output.name,
                                  allow_duplicate_filename=True,
                                  document_type='essence',
                                  **metadata)
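
    # A worked sketch of the row assembly in _write_report, assuming
    # diagnosis_column_index == 2 and hypothetical values:
    #
    #   row = ('RMC', 'E', 41, '2012-01-15')    # 41 is the visit_pk
    #   # row[:2]             -> ['RMC', 'E']
    #   # self._diagnosis(41) -> ['780.6,487.1']
    #   # row[3:]             -> ['2012-01-15']
    #   # vitals (if configured) are appended, then '|'.join gives:
    #   #   RMC|E|780.6,487.1|2012-01-15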

    def _record_report(self, report_oid):
        """Record the details from this report generation in the db"""
        if not report_oid:
            return
        report = Report(processed_datetime=datetime.now(),
                        file_path=report_oid,
                        report_method=self.criteria.report_method)

        alchemy = AlchemyAccess(database=self.database)
        alchemy.session.add(report)
        alchemy.session.commit()
        alchemy.disconnect()

    def _transmit_report(self, report):
        """Transmit report using self._transport()"""
        logging.info("initiate upload of %s", report)
        self._transport.transfer_file(report)

    def _transmit_differences(self, report):
        """Compute differences from yesterday's like report; transport"""

        # This option really only makes sense on date range reports,
        # as updates hit older data than just 'yesterday'.
        if self.criteria.start_date == self.criteria.end_date:
            raise ValueError("difference calculation not supported on "\
                             "single day reports")
        # See if we can find a similar report in the archive from
        # yesterday
        search_criteria = {
            'report_method': self.criteria.report_method,
            'start_date': self.criteria.start_date - timedelta(days=1),
            'end_date': self.criteria.end_date - timedelta(days=1)
        }
        old_doc = document_find(search_criteria, limit=1)
        if old_doc is None:
            logging.info("No comparable report found for difference "\
                         "generation")
            self._transmit_report(report)
        else:
            target_filename = self._generate_output_filename(
                start_date=self.criteria.start_date,
                end_date=self.criteria.end_date)
            # RemoveDuplicates not yet ported!!
            raise ValueError("RemoveDuplicates not ported")
            #from pheme.essence.remove_duplicates import RemoveDuplicates
            #rd = RemoveDuplicates(new_report=report,
            #                      old_report=old_doc,
            #                      out=target_filename)
            #rd.generate_report()
            #logging.info("initiate upload of difference %s", target_filename)
            #self._transport.transfer_file(target_filename)
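
    # Until RemoveDuplicates is ported, a minimal line-difference
    # sketch (an assumption, not the ported implementation): emit only
    # the lines of today's report absent from yesterday's.
    #
    #   def remove_duplicates(new_report, old_report, out):
    #       with open(old_report) as f:
    #           seen = set(f)
    #       with open(new_report) as f, open(out, 'w') as dest:
    #           for line in f:
    #               if line not in seen:
    #                   dest.write(line)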

    def _getConn(self):
        """ Local wrapper to get database connection
        """
        if hasattr(self, 'access'):
            return
        self.access = DirectAccess(database=self.database,
                                   user=self.user,
                                   password=self.password)

    def _closeConn(self):
        """ Local wrapper to close database connection
        """
        if hasattr(self, 'access'):
            self.access.close()

    def tearDown(self):
        "Public interface to clean up internals"
        self._closeConn()

    def execute(self,
                save_report=False,
                transmit_report=False,
                transmit_differences=False):
        """Execute the report generation
        """
        logging.info("Initiate ESSENCE report generation [%s-%s] for %s",
                     self.criteria.start_date, self.criteria.end_date,
                     self.criteria.report_method)

        self._getConn()
        report_oid = self._write_report(save_report)
        self._record_report(report_oid)
        if transmit_report:
            self._transmit_report(report_oid)
        if transmit_differences:
            self._transmit_differences(report_oid)
        self._closeConn()
        if hasattr(self, 'datePersistence'):
            self.datePersistence.bump_date()

        logging.info("Completed ESSENCE report generation [%s-%s] for %s",
                     self.criteria.start_date, self.criteria.end_date,
                     self.criteria.report_method)
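
    # A sketch of the typical driver flow, assuming the owning class
    # (its name isn't shown in this snippet) also defines
    # process_args() as in the examples below:
    #
    #   generator = EssenceReport()   # hypothetical class name
    #   generator.process_args()
    #   generator.execute(save_report=generator.save_report,
    #                     transmit_report=generator.transmit_report,
    #                     transmit_differences=generator.transmit_differences)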
Example #57
0
def create_tables(user=None, password=None, database=None,
                  enable_delete=False):
    """Create the longitudinal database tables.

    NB - the config [longitudinal]database_user is granted SELECT,
    INSERT and UPDATE permissions (plus DELETE if enable_delete is set).

    :param user: database user with table creation grants
    :param password: the database password
    :param database: the database name to populate
    :param enable_delete: testing hook, override for testing needs

    """
    engine = create_engine("postgresql://%s:%s@localhost/%s" %
                           (user, password, database))

    # Can't find a way to code the cascade to a view;
    # drop it manually, as metadata.create_all will otherwise fail.
    try:
        engine.execute(text("DROP VIEW IF EXISTS essence"))
    except OperationalError:  # pragma: no cover
        # Assume it's a new database.  Otherwise subsequent failures
        # will catch any real trouble.
        pass

    metadata.drop_all(bind=engine)
    metadata.create_all(bind=engine)

    def bless_user(user):
        engine.execute("""BEGIN; GRANT SELECT, INSERT, UPDATE %(delete)s ON
                       assoc_visit_dx,
                       assoc_visit_lab,
                       dim_admission_o2sat,
                       dim_admission_source,
                       dim_admission_temp,
                       dim_assigned_location,
                       dim_cc,
                       dim_disposition,
                       dim_dx,
                       dim_facility,
                       dim_flu_vaccine,
                       dim_h1n1_vaccine,
                       dim_lab_flag,
                       dim_lab_result,
                       dim_location,
                       dim_note,
                       dim_order_number,
                       dim_performing_lab,
                       dim_pregnancy,
                       dim_race,
                       dim_ref_range,
                       dim_service_area,
                       dim_specimen_source,
                       fact_visit,
                       internal_export_delta,
                       internal_message_processed,
                       internal_report,
                       internal_reportable_region
                       TO %(user)s; COMMIT;""" %
                       {'delete': ", DELETE" if enable_delete else '',
                        'user': user})

        # Sequences also require UPDATE
        engine.execute("""BEGIN; GRANT SELECT, UPDATE ON
                       dim_admission_o2sat_pk_seq,
                       dim_admission_temp_pk_seq,
                       dim_assigned_location_pk_seq,
                       dim_cc_pk_seq,
                       dim_dx_pk_seq,
                       dim_flu_vaccine_pk_seq,
                       dim_h1n1_vaccine_pk_seq,
                       dim_lab_flag_pk_seq,
                       dim_lab_result_pk_seq,
                       dim_location_pk_seq,
                       dim_note_pk_seq,
                       dim_order_number_pk_seq,
                       dim_performing_lab_pk_seq,
                       dim_pregnancy_pk_seq,
                       dim_race_pk_seq,
                       dim_ref_range_pk_seq,
                       dim_service_area_pk_seq,
                       dim_specimen_source_pk_seq,
                       fact_visit_pk_seq,
                       internal_export_delta_pk_seq,
                       internal_report_pk_seq
                       TO %(user)s; COMMIT;""" % {'user': user})

    # Provide configured user necessary privileges
    bless_user(Config().get('longitudinal', 'database_user'))

    # Add any views
    create_essence_view(engine)
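
# A minimal usage sketch, assuming a role with CREATE privileges on a
# scratch (non-production) database; the names are hypothetical:
#
#   create_tables(user='pheme_admin', password='s3cret',
#                 database='longitudinal_test', enable_delete=True)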
Example #58
0
    def process_args(self):
        """Process any optional arguments and positional parameters

        Using the values provided, assemble ReportCriteria and
        Datefile instances to control report generation.

        """
        parser = OptionParser(usage=usage)
        # Provide the ReportCriteria instance an error callback so any
        # command line errors provoke the standard graceful exit with
        # warning text.
        self.criteria.error_callback = parser.error

        parser.add_option("-u", "--user", dest="user",
                          default=self.user, help="database user")
        parser.add_option("-p", "--password", dest="password",
                          default=self.password,
                          help="database password, or file containing "\
                              "just the password")
        parser.add_option("-c", "--countdown", dest="countdown",
                          default=None,
                          help="count {down,up} the start and end dates "\
                              "set to 'forwards' or 'backwards' "\
                              "if desired")
        parser.add_option("-i", "--include-updates",
                          action='store_true', dest="includeUpdates",
                          default=False, help="include "\
                              "visits updated since last similar report")
        parser.add_option("--include-vitals",
                          action='store_true', dest="includeVitals",
                          default=False, help="include "\
                              "vitals (measured temperature, O2 "\
                              "saturation, influenza and H1N1 vaccine "\
                              "data) as additional columns in the "\
                              "report")
        parser.add_option("-k", "--patient-class",
                          dest="patient_class",
                          default=None, help="use "\
                          "to filter report on a specific patient "\
                          "class [E,I,O]")
        parser.add_option("-r", "--region", dest="region",
                          default=None,
                          help="reportable region defining limited set "\
                              "of facilities to include, by default "\
                              "all  facilities are included")
        parser.add_option("-s", "--save-and-upload",
                          action='store_true', dest="save_upload",
                          default=False, help="save file and upload to "\
                              "DOH")
        parser.add_option("-x", "--save-without-upload",
                          action='store_true', dest="save_only",
                          default=False, help="save file but don't upload")
        parser.add_option("-d", "--upload-diff",
                          action='store_true', dest="upload_diff",
                          default=False, help="upload differences only "\
                              "(from yesterdays like report) to DOH")
        parser.add_option("-t", "--thirty-days",
                          action='store_true', dest="thirty_days",
                          default=False, help="include 30 days up to "\
                              "requested date ")
        parser.add_option("-v", "--verbose", dest="verbosity",
                          action="count", default=self.verbosity,
                          help="increase output verbosity")

        (options, args) = parser.parse_args()
        if len(args) != 2:
            parser.error("incorrect number of arguments")

        # Database to query
        self.criteria.database = args[0]
        self.user = options.user
        self.password = options.password
        self.criteria.credentials(user=self.user,
                                  password=self.password)

        # Potential region restriction
        self.criteria.reportable_region = options.region

        # Potential patient class restriction
        self.criteria.patient_class = options.patient_class

        # Potential to include vitals (not tied to gipse format)
        self.criteria.include_vitals = options.includeVitals

        # Potential inclusion of updates
        self.criteria.include_updates = options.includeUpdates

        # Report date(s) and potential step direction.
        # NB - several options affect report_method and must be set
        # first!

        initial_date = parseDate(args[1])
        config = Config()
        ps_file = os.path.join(config.get('general', 'tmp_dir',
                                          default='/tmp'),
                               self.criteria.report_method)
        step = 30 if options.thirty_days else None
        direction = options.countdown
        self.datefile = Datefile(initial_date=initial_date,
                                 persistence_file=ps_file,
                                 direction=direction,
                                 step=step)
        self.criteria.start_date, self.criteria.end_date =\
            self.datefile.get_date_range()

        # What to do once report is completed.  Complicated, protect
        # user from themselves!
        self.save_report = options.save_upload or \
            options.save_only or options.upload_diff
        self.transmit_report = options.save_upload
        self.transmit_differences = options.upload_diff

        if options.save_only and options.save_upload:
            parser.error("save-without-upload and save-and-upload "\
                         "are mutually exclusive")
        if options.save_only and options.upload_diff:
            parser.error("save-without-upload and upload-diff "\
                         "are mutually exclusive")
        if options.upload_diff and options.save_upload:
            parser.error("upload-diff and save-and-upload"\
                         "are mutually exclusive")

        # Can't transmit w/o saving
        if options.save_upload or options.upload_diff:
            assert(self.save_report)
        # Sanity check
        if options.save_only:
            assert(self.save_report and not self.transmit_report and
                   not self.transmit_differences)

        # How verbosely to log
        self.verbosity = options.verbosity
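
    # A hypothetical invocation, assuming this module is installed as
    # an 'essence_report' script taking <database> and <date>
    # positional arguments:
    #
    #   essence_report -s -t -r test_region warehouse 2012-01-15
    #
    # i.e. save and upload a thirty-day report ending 2012-01-15,
    # limited to facilities in the 'test_region' reportable region.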