コード例 #1
0
ファイル: resources.py プロジェクト: pbugni/pheme.webAPI
    def transfer_file(self, compress_with=None):
        """Initiate transfer via HTTPS

        Reads the upload endpoint and credentials from the
        ``[distribute]`` config section and POSTs the document content.
        The transfer only happens when ``inProduction()`` is true;
        otherwise a warning is logged and nothing is sent.

        :param compress_with: if document isn't already compressed and
          this is set, compress the file before transferring.

        """
        filename = self.document['filename']

        config = Config()
        upload_url = config.get('distribute', 'upload_url')
        user = config.get('distribute', 'username')
        pw = config.get('distribute', 'password')
        payload = {'siteShortName': self.document['reportable_region']}

        content = self.extract_content(compress_with)
        files = {'userfile': content}

        if inProduction():
            # lazy %-args: defer string interpolation to the logger
            logging.info("POST %s to %s", filename, upload_url)
            r = requests.post(upload_url, auth=(user, pw), files=files,
                              data=payload)
            # We only get redirected if successful!
            if r.status_code != 302:
                logging.error("failed distribute POST")
                # Can't rely on status_code, as distribute returns 200
                # with problems in the body.
                logging.error("".join(list(r.iter_content())))
                r.raise_for_status()
            else:
                self.record_transfer()
        else:
            logging.warn("inProduction() check failed, not POSTing "
                         "file '%s' to '%s'", filename, upload_url)
コード例 #2
0
 def __init__(self):
     """Load PHIN-MS connection settings from the [phinms] config section."""
     cfg = Config()
     phinms = lambda option: cfg.get('phinms', option)
     self.db = phinms('database')
     self.username = phinms('user')
     self.passwd = phinms('password')
     self.workerqueue = phinms('workerqueue')
     # the feeder table name is derived from the worker queue name
     self.feedertable = '%s_feeder' % self.workerqueue
コード例 #3
0
def setup_module():
    """Build a clean database once, before any test in this module runs"""
    config = Config()
    # Guard: never drop/recreate tables against a production deployment.
    if config.get('general', 'in_production'):  # pragma: no cover
        raise RuntimeError("DO NOT run destructive test on production system")

    def warehouse_value(option):
        return config.get('warehouse', option)

    create_tables(warehouse_value('create_table_user'),
                  warehouse_value('create_table_password'),
                  warehouse_value('database'),
                  enable_delete=True)
コード例 #4
0
def setup_module():
    """Recreate the database once for every test in this module"""
    conf = Config()
    if conf.get('general', 'in_production'):  # pragma: no cover
        raise RuntimeError("DO NOT run destructive test on production system")

    # pull the three creation parameters from [warehouse] in one pass
    user, password, database = (
        conf.get('warehouse', option) for option in
        ('create_table_user', 'create_table_password', 'database'))
    create_tables(user, password, database, enable_delete=True)
コード例 #5
0
ファイル: pg_access.py プロジェクト: pbugni/pheme.util
def db_params(section):
    """Return dict of database connection values from named config section

    Returned dict includes:
    - user
    - password
    - database  (name of database)

    """
    cfg = Config()
    return {'user': cfg.get(section, 'database_user'),
            'password': cfg.get(section, 'database_password'),
            'database': cfg.get(section, 'database')}
コード例 #6
0
ファイル: pg_access.py プロジェクト: pbugni/pheme.util
def db_params(section):
    """Return dict of database connection values from named config section

    Returned dict includes:
    - user
    - password
    - database  (name of database)

    """
    config = Config()
    # map the returned key names to their config option names
    option_for = {'database': 'database',
                  'user': 'database_user',
                  'password': 'database_password'}
    return dict((name, config.get(section, option))
                for name, option in option_for.items())
コード例 #7
0
def main():  # pragma: no cover
    """Entry point to (re)create the table using config settings"""
    config = Config()
    database = config.get('longitudinal', 'database')
    print "destroy and recreate database %s ? "\
        "('destroy' to continue): " % database,
    answer = sys.stdin.readline().rstrip()
    if answer != 'destroy':
        print "aborting..."
        sys.exit(1)

    user = config.get('longitudinal', 'database_user')
    password = config.get('longitudinal', 'database_password')
    create_tables(user, password, database)
コード例 #8
0
ファイル: tables.py プロジェクト: pbugni/pheme.warehouse
def main():  # pragma: no cover
    """Entry point to (re)create the table using config settings"""
    config = Config()
    dbname = config.get('warehouse', 'database')
    print "destroy and recreate database %s ? "\
        "('destroy' to continue): " % dbname,
    answer = sys.stdin.readline().rstrip()
    if answer != 'destroy':
        print "aborting..."
        sys.exit(1)

    user = config.get('warehouse', 'create_table_user')
    print "password for PostgreSQL user:", user
    password = getpass.getpass()
    create_tables(user, password, dbname)
コード例 #9
0
ファイル: tables.py プロジェクト: pbugni/pheme.warehouse
def main():  # pragma: no cover
    """Entry point to (re)create the table using config settings"""
    config = Config()
    dbname = config.get('warehouse', 'database')
    print "destroy and recreate database %s ? "\
        "('destroy' to continue): " % dbname,
    answer = sys.stdin.readline().rstrip()
    if answer != 'destroy':
        print "aborting..."
        sys.exit(1)

    user = config.get('warehouse', 'create_table_user')
    print "password for PostgreSQL user:", user
    password = getpass.getpass()
    create_tables(user, password, dbname)
コード例 #10
0
def setup_module():
    """Create a fresh db (once) for all tests in this module"""
    configure_logging(verbosity=2, logfile='unittest.log')
    config = Config()
    if config.get('general', 'in_production'):  # pragma: no cover
        raise RuntimeError("DO NOT run destructive test on production system")
    create_tables(enable_delete=True, **db_params(CONFIG_SECTION))

    # create a "test_region" and a couple bogus facilities
    facilities = (
        Facility(county='KING', npi=10987, zip='12345',
                 organization_name='Reason Medical Center',
                 local_code='RMC'),
        Facility(county='POND', npi=65432, zip='67890',
                 organization_name='No-Reason Medical Center',
                 local_code='NMC'),
    )
    conn = db_connection(CONFIG_SECTION)
    for facility in facilities:
        conn.session.add(facility)
    conn.session.commit()
    # the region references the first facility's npi as its pk
    conn.session.add(ReportableRegion(region_name='test_region',
                                      dim_facility_pk=10987))
    conn.session.commit()
    conn.disconnect()
コード例 #11
0
 def setUp(self):
     """Open a SQLAlchemy session against the warehouse database."""
     config = Config()
     warehouse = lambda option: config.get('warehouse', option)
     self.alchemy = AlchemyAccess(database=warehouse('database'),
                                  host='localhost',
                                  user=warehouse('database_user'),
                                  password=warehouse('database_password'))
     self.session = self.alchemy.session
コード例 #12
0
def setup_module():
    """Create a fresh db (once) for all tests in this module"""
    configure_logging(verbosity=2, logfile='unittest.log')
    config = Config()
    production = config.get('general', 'in_production')
    if production:  # pragma: no cover
        raise RuntimeError("DO NOT run destructive test on production system")

    # wipe and rebuild the schema configured for this test module
    params = db_params(CONFIG_SECTION)
    create_tables(enable_delete=True, **params)
コード例 #13
0
 def setUp(self):
     """Connect to the warehouse database before each test."""
     cfg = Config()

     def value(option):
         return cfg.get('warehouse', option)

     self.alchemy = AlchemyAccess(database=value('database'),
                                  host='localhost',
                                  user=value('database_user'),
                                  password=value('database_password'))
     self.session = self.alchemy.session
コード例 #14
0
def setup_module():
    """Create a fresh db (once) for all tests in this module"""
    configure_logging(verbosity=2, logfile='unittest.log')
    cfg = Config()
    # refuse to run destructive setup against production
    if cfg.get('general', 'in_production'):  # pragma: no cover
        raise RuntimeError("DO NOT run destructive test on production system")

    create_tables(enable_delete=True,
                  **db_params(CONFIG_SECTION))
コード例 #15
0
 def setUp(self):
     """Open a db session on the longitudinal database for each test."""
     config = Config()
     option = lambda name: config.get("longitudinal", name)
     self.alchemy = AlchemyAccess(
         host="localhost",
         database=option("database"),
         user=option("database_user"),
         password=option("database_password"),
     )
     self.session = self.alchemy.session
     # objects queued here are deleted during tearDown
     self.remove_after_test = []
コード例 #16
0
ファイル: upload.py プロジェクト: pbugni/pheme.phinms
    def __init__(self, verbosity=0, source_db=None):
        """Prepare the uploader from [phinms] and [pheme_http_receiver] config

        :param verbosity: logging verbosity level, default quiet
        :param source_db: optional source database identifier
        :raises ValueError: when the configured PHIN-MS receiving
          directory does not exist on the filesystem

        """
        self.verbosity = verbosity
        self.source_db = source_db
        config = Config()
        self.phinms_receiving_dir = config.get('phinms', 'receiving_dir')
        self.phinms_archive_dir = config.get('phinms', 'archive_dir')

        self.source_dir = self.phinms_receiving_dir

        # Confirm the required directories are present
        if not os.path.isdir(self.phinms_receiving_dir):
            raise ValueError("Can't find required directory %s" %
                             self.phinms_receiving_dir)

        # lowercase names: these are plain locals, not module constants
        upload_port = config.get('pheme_http_receiver', 'port')
        upload_host = config.get('pheme_http_receiver', 'host')
        self.http_pool = HTTPConnectionPool(host=upload_host,
                                            port=upload_port,
                                            timeout=20)
        self._copy_tempdir = None
コード例 #17
0
ファイル: upload.py プロジェクト: pbugni/pheme.phinms
    def __init__(self, verbosity=0, source_db=None):
        """Configure upload paths and the HTTP pool used for delivery."""
        self.verbosity = verbosity
        self.source_db = source_db
        cfg = Config()
        self.phinms_receiving_dir = cfg.get('phinms', 'receiving_dir')
        self.phinms_archive_dir = cfg.get('phinms', 'archive_dir')

        self.source_dir = self.phinms_receiving_dir

        # Fail fast if the receiving directory is missing
        if not os.path.isdir(self.phinms_receiving_dir):
            raise ValueError("Can't find required directory %s" %
                             self.phinms_receiving_dir)

        UPLOAD_PORT = cfg.get('pheme_http_receiver', 'port')
        UPLOAD_HOST = cfg.get('pheme_http_receiver', 'host')
        self.http_pool = HTTPConnectionPool(
            host=UPLOAD_HOST, port=UPLOAD_PORT, timeout=20)
        self._copy_tempdir = None
コード例 #18
0
ファイル: util.py プロジェクト: pbugni/pheme.util
def inProduction():
    """Simple state check to avoid uploading files to third party
    servers and what not when not 'in production'.

    Returns the ``[general]in_production`` config value.  Raises
    ValueError if the option is absent - guessing either way is unsafe.
    """
    config = Config()
    try:
        return config.get('general', 'in_production')
    except (ConfigParser.NoSectionError, ConfigParser.NoOptionError):  # pragma: no cover
        raise ValueError("Config file doesn't specify "
                         "`[general]in_production` "
                         "unsafe to guess, can't continue")
コード例 #19
0
ファイル: test_config.py プロジェクト: pbugni/pheme.util
 def test_override(self):
     "same key in second file should override"
     section = 'SECTION'
     key = 'unittest'
     # write the same option into two config files, values 0 then 1
     for value in range(2):
         parser = ConfigParser.RawConfigParser()
         parser.add_section(section)
         parser.set(section, key, value)
         with open(self.config_files[value], 'w') as config_file:
             parser.write(config_file)
     config = Config(self.config_files)
     # the value from the later file wins
     self.assertEquals(1, config.get(section, key))
コード例 #20
0
def setup_module():
    """Create a fresh db (once) for all tests in this module"""
    configure_logging(verbosity=2, logfile='unittest.log')
    config = Config()
    if config.get('general', 'in_production'):  # pragma: no cover
        raise RuntimeError("DO NOT run destructive test on production system")
    create_tables(enable_delete=True, **db_params(CONFIG_SECTION))

    # Load in all the static data in anonymized form
    here = os.path.dirname(os.path.abspath(__file__))
    static_data_file = open(os.path.join(here, 'anon_static_db_data.yaml'),
                            'r')
    load_file(static_data_file)
コード例 #21
0
ファイル: util.py プロジェクト: pbugni/pheme.util
def inProduction():
    """Simple state check to avoid uploading files to third party
    servers and what not when not 'in production'.

    Returns the ``[general]in_production`` config value.  Raises
    ValueError if the option is absent - guessing either way is unsafe.
    """
    config = Config()
    try:
        return config.get('general', 'in_production')
    except (ConfigParser.NoSectionError,
            ConfigParser.NoOptionError):  # pragma: no cover
        raise ValueError("Config file doesn't specify "
                         "`[general]in_production` "
                         "unsafe to guess, can't continue")
コード例 #22
0
 def test_override(self):
     "same key in second file should override"
     section = 'SECTION'
     key = 'unittest'
     for index, path in enumerate(self.config_files[:2]):
         writer = ConfigParser.RawConfigParser()
         writer.add_section(section)
         writer.set(section, key, index)
         with open(path, 'w') as handle:
             writer.write(handle)
     merged = Config(self.config_files)
     # second file's value (1) must shadow the first file's (0)
     self.assertEquals(1, merged.get(section, key))
コード例 #23
0
def transform_channels():
    """Apply default transform to PHEME channels

    Parses command line arguments (defaults taken from the project
    config), then runs TransformManager over each channel definition
    found in the source directory, writing results to the target.
    """

    doc = """
    Mirth channels can be easily exported in XML format.  This utility
    provides a mechanism to alter an export for subsequent import.
    Useful for altering details such as database name and user
    authentication.

    NB - values defined in the project configuration file will be used
    unless provided as optional arguments.  See
    `pheme.util.config.Config`
    """
    config = Config()
    ap = argparse.ArgumentParser(description=doc)
    # Every optional argument defaults to the matching [warehouse]
    # config value; flags are only needed to override the config file.
    ap.add_argument("-d", "--database", dest="db",
                    default=config.get('warehouse', 'database'),
                    help="name of database (overrides "
                    "[warehouse]database)")
    ap.add_argument("-u", "--user", dest="user",
                    default=config.get('warehouse', 'database_user'),
                    help="database user (overrides "
                    "[warehouse]database_user)")
    ap.add_argument("-p", "--password", dest="password",
                    default=config.get('warehouse', 'database_password'),
                    help="database password (overrides [warehouse]"
                    "database_password)")
    ap.add_argument("--input_dir", dest="input_dir",
                    default=config.get('warehouse', 'input_dir'),
                    help="filesystem directory for channel to poll "
                    "(overrides [warehouse]input_dir)")
    ap.add_argument("--output_dir", dest="output_dir",
                    default=config.get('warehouse', 'output_dir'),
                    help="filesystem directory for channel output "
                    "(overrides [warehouse]output_dir)")
    ap.add_argument("--error_dir", dest="error_dir",
                    default=config.get('warehouse', 'error_dir'),
                    help="filesystem directory for channel errors "
                    "(overrides [warehouse]error_dir)")
    ap.add_argument("source_directory",
                    help="directory containing source channel "
                    "definition files")
    ap.add_argument("target_directory",
                    help="directory to write transformed channel "
                    "definition files")
    args = ap.parse_args()
    source_dir = os.path.realpath(args.source_directory)
    target_dir = os.path.realpath(args.target_directory)

    transformer = TransformManager(src=None,
                                   target_dir=target_dir,
                                   options=args)
    for c in CHANNELS:
        transformer.src = os.path.join(source_dir, '%s.xml' % c)
        transformer()
    # no transformation on codetemplates at this time - but the
    # importer expects the codetemplates.xml file to be in the same
    # directory, so copy it over.
    shutil.copy(os.path.join(source_dir, 'codetemplates.xml'), target_dir)
コード例 #24
0
ファイル: test_config.py プロジェクト: pbugni/pheme.util
 def test_tilde(self):
     "support tilde in directory paths"
     section = 'SECTION'
     key = 'unittest'
     raw_value = "~/tempfile"
     parser = ConfigParser.RawConfigParser()
     parser.add_section(section)
     parser.set(section, key, raw_value)
     with open(self.config_files[0], 'w') as config_file:
         parser.write(config_file)
     config = Config(self.config_files)
     # Config should hand back the user-expanded path
     expanded = os.path.expanduser(raw_value)
     self.assertEquals(expanded, config.get(section, key))
コード例 #25
0
 def test_tilde(self):
     "support tilde in directory paths"
     section = 'SECTION'
     key = 'unittest'
     writer = ConfigParser.RawConfigParser()
     writer.add_section(section)
     writer.set(section, key, "~/tempfile")
     with open(self.config_files[0], 'w') as handle:
         writer.write(handle)
     cfg = Config(self.config_files)
     self.assertEquals(os.path.expanduser("~/tempfile"),
                       cfg.get(section, key))
コード例 #26
0
def setup_module():
    """Create a fresh db (once) for all tests in this module"""
    configure_logging(verbosity=2, logfile='unittest.log')
    cfg = Config()
    if cfg.get('general', 'in_production'):  # pragma: no cover
        raise RuntimeError("DO NOT run destructive test on production system")
    create_tables(enable_delete=True, **db_params(CONFIG_SECTION))

    # Load in all the static data in anonymized form
    data_path = os.path.join(os.path.dirname(os.path.abspath(__file__)),
                             'anon_static_db_data.yaml')
    static_data_file = open(data_path, 'r')
    load_file(static_data_file)
コード例 #27
0
ファイル: test_config.py プロジェクト: pbugni/pheme.util
 def test_truthiness(self):
     "truth value should be case insensitive"
     section = 'SECTION'
     key = 'unittest'
     values = ['TRUE', 'true', 'True', ' t ']
     parser = ConfigParser.RawConfigParser()
     parser.add_section(section)
     # one option per spelling: unittest0 .. unittest3
     for index, spelling in enumerate(values):
         parser.set(section, key + str(index), spelling)
     with open(self.config_files[0], 'w') as config_file:
         parser.write(config_file)
     config = Config(self.config_files)
     for index in range(len(values)):
         self.assertEquals(True, config.get(section, key + str(index)))
コード例 #28
0
ファイル: test_config.py プロジェクト: pbugni/pheme.util
 def test_falseness(self):
     "false values should be case insensitive"
     section = 'SECTION'
     key = 'unittest'
     values = ['FALSE', 'false', 'False', ' f ']
     parser = ConfigParser.RawConfigParser()
     parser.add_section(section)
     for index, spelling in enumerate(values):
         parser.set(section, key + str(index), spelling)
     with open(self.config_files[0], 'w') as config_file:
         parser.write(config_file)
     config = Config(self.config_files)
     # each spelling must coerce to the boolean False
     for index in range(len(values)):
         self.assertEquals(False, config.get(section, key + str(index)))
コード例 #29
0
 def test_int(self):
     "Looks like an int, should be one (including 0,1)"
     section = 'SECTION'
     key = 'unittest'
     values = ['0', '1', '-67', 42, -1]
     writer = ConfigParser.RawConfigParser()
     writer.add_section(section)
     for i, raw in enumerate(values):
         writer.set(section, key + str(i), raw)
     with open(self.config_files[0], 'w') as handle:
         writer.write(handle)
     cfg = Config(self.config_files)
     # every option comes back as a genuine int
     for i, raw in enumerate(values):
         self.assertEquals(int(raw), cfg.get(section, key + str(i)))
コード例 #30
0
 def test_falseness(self):
     "false values should be case insensitive"
     section = 'SECTION'
     key = 'unittest'
     spellings = ['FALSE', 'false', 'False', ' f ']
     writer = ConfigParser.RawConfigParser()
     writer.add_section(section)
     for position, spelling in enumerate(spellings):
         writer.set(section, key + str(position), spelling)
     with open(self.config_files[0], 'w') as handle:
         writer.write(handle)
     cfg = Config(self.config_files)
     for position in range(len(spellings)):
         self.assertEquals(False, cfg.get(section, key + str(position)))
コード例 #31
0
 def test_float(self):
     "Looks like a float, should be one"
     section = 'SECTION'
     key = 'unittest'
     values = ['0.01', '42.0', '-67.3']
     parser = ConfigParser.RawConfigParser()
     parser.add_section(section)
     for index, raw in enumerate(values):
         parser.set(section, key + str(index), raw)
     with open(self.config_files[0], 'w') as config_file:
         parser.write(config_file)
     config = Config(self.config_files)
     # every option comes back as a genuine float
     for index, raw in enumerate(values):
         self.assertEquals(float(raw), config.get(section, key + str(index)))
コード例 #32
0
 def test_truthiness(self):
     "truth value should be case insensitive"
     section = 'SECTION'
     key = 'unittest'
     spellings = ['TRUE', 'true', 'True', ' t ']
     writer = ConfigParser.RawConfigParser()
     writer.add_section(section)
     for position, spelling in enumerate(spellings):
         writer.set(section, key + str(position), spelling)
     with open(self.config_files[0], 'w') as handle:
         writer.write(handle)
     cfg = Config(self.config_files)
     for position in range(len(spellings)):
         self.assertEquals(True, cfg.get(section, key + str(position)))
コード例 #33
0
ファイル: test_config.py プロジェクト: pbugni/pheme.util
 def test_int(self):
     "Looks like an int, should be one (including 0,1)"
     section = 'SECTION'
     key = 'unittest'
     values = ['0', '1', '-67', 42, -1]
     parser = ConfigParser.RawConfigParser()
     parser.add_section(section)
     for index, raw in enumerate(values):
         parser.set(section, key + str(index), raw)
     with open(self.config_files[0], 'w') as config_file:
         parser.write(config_file)
     config = Config(self.config_files)
     for index, raw in enumerate(values):
         self.assertEquals(int(raw),
                           config.get(section, key + str(index)))
コード例 #34
0
ファイル: test_config.py プロジェクト: pbugni/pheme.util
 def test_float(self):
     "Looks like a float, should be one"
     section = 'SECTION'
     key = 'unittest'
     samples = ['0.01', '42.0', '-67.3']
     writer = ConfigParser.RawConfigParser()
     writer.add_section(section)
     for position, sample in enumerate(samples):
         writer.set(section, key + str(position), sample)
     with open(self.config_files[0], 'w') as handle:
         writer.write(handle)
     cfg = Config(self.config_files)
     for position, sample in enumerate(samples):
         self.assertEquals(float(sample),
                           cfg.get(section, key + str(position)))
コード例 #35
0
ファイル: tests.py プロジェクト: pbugni/pheme.webAPI
    def testPhinmsTransfer(self):
        # need a document in the db
        self.create_test_file(compression='gzip',
                              report_type='longitudinal')

        # fake a transfer of this object
        context = PHINMS_Transfer(testing.DummyRequest())
        context.request.fs = self.fs
        context.request.document_store = self.document_store
        context = context[str(self.oid)]
        self.assertFalse(inProduction())  # avoid accidental transfers!
        context.transfer_file()

        self.assertEqual(self.report_type, 'longitudinal')
        # outbound dir should match the configured [phinms] path
        expected_path = Config().get('phinms', self.report_type)
        self.assertEqual(context.outbound_dir, expected_path)
コード例 #36
0
    def _generate_output_filename(self, start_date=None, end_date=None):
        """Return a tmp-dir path named for the report method and date range."""
        if start_date is None:
            start_date = self.criteria.start_date
        if end_date is None:
            end_date = self.criteria.end_date

        date_format = '%Y%m%d'
        datestr = end_date.strftime(date_format)
        if start_date != end_date:
            # ranges render as YYYYMMDD-YYYYMMDD
            datestr = (start_date.strftime(date_format) + '-' +
                       end_date.strftime(date_format))

        filename = self.criteria.report_method + '-' + datestr + '.txt'

        config = Config()
        tmp_dir = config.get('general', 'tmp_dir', default='/tmp')

        return os.path.join(tmp_dir, filename)
コード例 #37
0
    def _generate_output_filename(self, start_date=None,
                                  end_date=None):
        """Build the output file path under the configured tmp directory."""
        start_date = start_date if start_date is not None\
            else self.criteria.start_date
        end_date = end_date if end_date is not None\
            else self.criteria.end_date

        # single-day reports use one date; ranges join start-end
        if start_date != end_date:
            datestr = '-'.join((start_date.strftime('%Y%m%d'),
                                end_date.strftime('%Y%m%d')))
        else:
            datestr = end_date.strftime('%Y%m%d')

        filename = self.criteria.report_method + '-' + datestr + '.txt'

        tmp_dir = Config().get('general', 'tmp_dir', default='/tmp')
        filepath = os.path.join(tmp_dir, filename)
        return filepath
コード例 #38
0
def setup_module():
    """Populate database with test data for module tests"""

    config = Config()
    if config.get('general', 'in_production'):  # pragma: no cover
        raise RuntimeError("DO NOT run destructive test on production system")

    # Pull in the filesystem dump from a previous mirth run
    MirthInteraction().restore_database()

    # Run a quick sanity check, whole module requires a populated db
    connection = db_connection('warehouse')
    msh_count = connection.session.query(HL7_Msh).count()
    connection.disconnect()

    if msh_count < 4000:
        raise RuntimeError(
            "Minimal expected count of records not present. "
            "Be sure to run 'process_testfiles_via_mirth' as a prerequisite")
コード例 #39
0
def setup_module():
    """Populate database with test data for module tests"""

    cfg = Config()
    if cfg.get('general', 'in_production'):  # pragma: no cover
        raise RuntimeError("DO NOT run destructive test on production system")

    # Pull in the filesystem dump from a previous mirth run
    interaction = MirthInteraction()
    interaction.restore_database()

    # Sanity check: the whole module requires a populated db
    conn = db_connection('warehouse')
    record_count = conn.session.query(HL7_Msh).count()
    conn.disconnect()

    minimum_expected = 4000
    if record_count < minimum_expected:
        message = ("Minimal expected count of records not present. "
                   "Be sure to run 'process_testfiles_via_mirth' "
                   "as a prerequisite")
        raise RuntimeError(message)
コード例 #40
0
ファイル: transfer.py プロジェクト: pbugni/pheme.webAPIclient
def transfer_document(document_id, transfer_agent,
                      compress_with=None):
    """Web API client call to request transfer of given document ID

    :param document_id: the document ID to transfer, likely returned
      from a document_store() call on the same Web API

    :param transfer_agent: such as 'phin-ms' or 'distribute'.

    :param compress_with: if additional compression is desired, this
      may be set to 'zip' or 'gzip', to be performed before sending.

    """
    config = Config()
    url = 'http://%(host)s:%(port)s/%(agent)s/%(doc)s' % {
        'host': config.get("WebAPI", "host"),
        'port': config.get("WebAPI", "port"),
        'agent': transfer_agent,
        'doc': document_id,
    }
    # only one optional query parameter is supported today
    if compress_with is not None:
        url = url + '?' + 'compress_with=' + compress_with

    # Initiate request, wait on response
    response = requests.post(url)
    if response.status_code != 200:  # pragma no cover
        failure = "Failed POST (%d) for transfer request: %s" %\
            (response.status_code, url)
        logging.error(failure)
        logging.error(response.text)
        raise RuntimeError(failure)
コード例 #41
0
def setup_module():
    """Create a fresh db (once) for all tests in this module"""
    configure_logging(verbosity=2, logfile='unittest.log')
    cfg = Config()
    if cfg.get('general', 'in_production'):  # pragma: no cover
        raise RuntimeError("DO NOT run destructive test on production system")
    create_tables(enable_delete=True, **db_params(CONFIG_SECTION))

    # create a "test_region" and a couple bogus facilities
    reason = Facility(organization_name='Reason Medical Center',
                      county='KING', npi=10987, zip='12345',
                      local_code='RMC')
    no_reason = Facility(organization_name='No-Reason Medical Center',
                         county='POND', npi=65432, zip='67890',
                         local_code='NMC')
    conn = db_connection(CONFIG_SECTION)
    conn.session.add(reason)
    conn.session.add(no_reason)
    conn.session.commit()
    # region keyed to the first facility's npi
    region = ReportableRegion(region_name='test_region',
                              dim_facility_pk=10987)
    conn.session.add(region)
    conn.session.commit()
    conn.disconnect()
コード例 #42
0
ファイル: archive.py プロジェクト: pbugni/pheme.webAPIclient
def url_builder(predicate=None, resource=None, view=None, query_params=None):
    """Build webAPI url from config and passed values

    :param predicate: desired action or type of document
    :param resource: filename or object identifier
    :param view: specialized view, such as metadata
    :param query_params: dictionary of key, values to append

    returns URL ready for request, post, etc.

    """
    # default of None (not {}) avoids the shared mutable-default pitfall
    query_params = query_params or {}
    config = Config()
    url = 'http://%s:%s' % (config.get("WebAPI", "host"),
                            config.get("WebAPI", "port"))
    if predicate:
        url = '/'.join((url, predicate))
    if resource:
        url = '/'.join((url, resource))
    if view:
        # views are addressed with the @@ prefix
        url = '/'.join((url, '@@' + view))
    if query_params:
        url = '?'.join((url,
                        '&'.join([k+'='+v for k, v in query_params.items()])))
    return url
コード例 #43
0
ファイル: field_map.py プロジェクト: pbugni/pheme.anonymize
        associated dictionary.

        """
        segment = self.segment_key(key)
        if segment:
            return dict.__contains__(self, segment)

        segment, element, component = self.assert_triplekey(key)
        if dict.__contains__(self, segment):
            d = dict.__getitem__(self, segment)
            return element in d and component in d[element]
        return False


config = Config()
# number of days used to randomly shift dates during anonymization,
# read from [anonymize]dayshift in the project config
days = config.get("anonymize", "dayshift")

"""Define functions with parameters needed to anonymize fields"""
# Pre-built anonymizer callables; each mimics the format of the field
# it replaces (presumably HL7 field shapes - confirm against callers).
dotted_sequence = fixed_length_digits(30, (1, 7))
short_string = fixed_length_string(10)
site_string = fixed_length_string(12, prefix="Site ")
# date anonymizers shift within +/- `days` and render in these formats
yyyymm = random_date_delta(datetime.timedelta(days=days), "%Y%m")
ymdhms = random_date_delta(datetime.timedelta(days=days), "%Y%m%d%H%M%S")
two_digits = fixed_length_digits(2)
five_digits = fixed_length_digits(5)
six_digits = fixed_length_digits(6)
nine_digits = fixed_length_digits(9)
ten_digits = fixed_length_digits(10)

def ten_digits_starting_w_1(initial):
    """specialized anon function for NPI like numbers
コード例 #44
0
    def process_args(self):
        """Process any optional arguments and positional parameters

        Using the values provided, assemble ReportCriteria and
        Datefile instances to control report generation.

        """
        parser = OptionParser(usage=usage)
        # Provide the ReportCriteria instance an error callback so any
        # command line errors provoke the standard graceful exit with
        # warning text.
        self.criteria.error_callback = parser.error

        parser.add_option("-u", "--user", dest="user",
                          default=self.user, help="database user")
        parser.add_option("-p", "--password", dest="password",
                          default=self.password,
                          help="database password, or file containing "\
                              "just the password")
        parser.add_option("-c", "--countdown", dest="countdown",
                          default=None,
                          help="count {down,up} the start and end dates "\
                              "set to 'forwards' or 'backwards' "\
                              "if desired")
        parser.add_option("-i", "--include-updates",
                          action='store_true', dest="includeUpdates",
                          default=False, help="include "\
                              "visits updated since last similar report")
        parser.add_option("--include-vitals",
                          action='store_true', dest="includeVitals",
                          default=False, help="include "\
                              "vitals (measured temperature, O2 "\
                              "saturation, influenza and H1N1 vaccine "\
                              "data) as additional columns in the "\
                              "report")
        parser.add_option("-k", "--patient-class",
                          dest="patient_class",
                          default=None, help="use "\
                          "to filter report on a specific patient "\
                          "class [E,I,O]")
        parser.add_option("-r", "--region", dest="region",
                          default=None,
                          help="reportable region defining limited set "\
                              "of facilities to include, by default "\
                              "all  facilities are included")
        parser.add_option("-s", "--save-and-upload",
                          action='store_true', dest="save_upload",
                          default=False, help="save file and upload to "\
                              "DOH")
        parser.add_option("-x", "--save-without-upload",
                          action='store_true', dest="save_only",
                          default=False, help="save file but don't upload")
        parser.add_option("-d", "--upload-diff",
                          action='store_true', dest="upload_diff",
                          default=False, help="upload differences only "\
                              "(from yesterdays like report) to DOH")
        parser.add_option("-t", "--thirty-days",
                          action='store_true', dest="thirty_days",
                          default=False, help="include 30 days up to "\
                              "requested date ")
        parser.add_option("-v", "--verbose", dest="verbosity",
                          action="count", default=self.verbosity,
                          help="increase output verbosity")

        (options, args) = parser.parse_args()
        if len(args) != 2:
            parser.error("incorrect number of arguments")

        # Database to query
        self.criteria.database = args[0]
        self.user = options.user
        self.password = options.password
        self.criteria.credentials(user=self.user,
                                  password=self.password)

        # Potential region restriction
        self.criteria.reportable_region = options.region

        # Potential patient class restriction
        self.criteria.patient_class = options.patient_class

        # Potential to include vitals (not tied to gipse format)
        self.criteria.include_vitals = options.includeVitals

        # Potential inclusion of updates
        self.criteria.include_updates = options.includeUpdates

        # Report date(s) and potential step direction.
        # NB - several options affect report_method and must be set
        # first!

        initial_date = parseDate(args[1])
        config = Config()
        ps_file = os.path.join(config.get('general', 'tmp_dir',
                                default='/tmp'),
                                self.criteria.report_method)
        step = options.thirty_days and 30 or None
        direction = options.countdown
        self.datefile = Datefile(initial_date=initial_date,
                                 persistence_file=ps_file,
                                 direction=direction,
                                 step=step)
        self.criteria.start_date, self.criteria.end_date =\
            self.datefile.get_date_range()

        # What to do once report is completed.  Complicated, protect
        # user from themselves!
        self.save_report = options.save_upload or \
            options.save_only or options.upload_diff
        self.transmit_report = options.save_upload
        self.transmit_differences = options.upload_diff

        if options.save_only and options.save_upload:
            parser.error("save-without-upload and save-and-upload "\
                         "are mutually exclusive")
        if options.save_only and options.upload_diff:
            parser.error("save-without-upload and upload-diff "\
                         "are mutually exclusive")
        # NB - fixed missing space: message previously rendered as
        # "...save-and-uploadare mutually exclusive"
        if options.upload_diff and options.save_upload:
            parser.error("upload-diff and save-and-upload "\
                         "are mutually exclusive")

        # Can't transmit w/o saving
        if options.save_upload or options.upload_diff:
            assert(self.save_report)
        # Sanity check
        if options.save_only:
            assert(self.save_report and not self.transmit_report and
                   not self.transmit_differences)

        # How verbosely to log
        self.verbosity = options.verbosity
コード例 #45
0
class MirthInteraction(object):
    """Abstraction to interact with Mirth Connect for testing"""
    # seconds to sleep between polls for processed files
    WAIT_INTERVAL = 15
    # seconds allowed without progress before declaring Mirth hung
    TIMEOUT = 300

    def __init__(self):
        self.config = Config()

        # obtain list of files to process
        path = os.path.abspath(
            os.path.join(os.path.dirname(__file__),
                         "../../../test_hl7_batchfiles"))
        self.filenames = [os.path.join(path, file) for file in 
                          os.listdir(path)]


    def prepare_filesystem(self):
        """Reset database and channel directories for a clean run"""
        # create clean database (includes non production sanity check)
        setup_module()

        # wipe previous run(s) files
        mirth_user = self.config.get('mirth', 'mirth_system_user')
        for dir in ('input_dir', 'output_dir', 'error_dir'):
            wipe_dir_contents(self.config.get('warehouse', dir),
                              mirth_user)


    def process_batchfiles(self):
        """Feed the testfiles to mirth - block till done"""
        self.prepare_filesystem()
        require_mirth()
        for batchfile in self.filenames:
            copy_file_to_dir(batchfile, 
                             self.config.get('warehouse', 'input_dir'),
                             self.config.get('mirth', 'mirth_system_user'))

        # wait for all files to appear in error or output dirs
        # providing occasional output and raising if we appear hung
        last_count = 0
        last_time = time.time()
        output_dir = self.config.get('warehouse', 'output_dir')
        error_dir = self.config.get('warehouse', 'error_dir')
        while last_count < len(self.filenames):
            time.sleep(self.WAIT_INTERVAL)
            # each processed batchfile lands in output_dir or error_dir
            count = len(os.listdir(output_dir)) + len(os.listdir(error_dir))
            if count > last_count:
                # progress observed - reset the stall timer
                last_count = count
                last_time = time.time()
            # raise only when no NEW file arrived within TIMEOUT seconds
            if time.time() - self.TIMEOUT > last_time:
                raise RuntimeError("TIMEOUT exceeded waiting on Mirth")
            print "Waiting on mirth to process files...(%d of %d)" %\
                (last_count, len(self.filenames))

    def persist_database(self):
        """Write the database contents to disk"""
        fsp = FilesystemPersistence(\
            database=self.config.get('warehouse', 'database'),
            user=self.config.get('warehouse', 'database_user'),
            password=self.config.get('warehouse', 'database_password'))
        fsp.persist()

    def restore_database(self):
        """Pull previously persisted data into database"""
        fsp = FilesystemPersistence(\
            database=self.config.get('warehouse', 'database'),
            user=self.config.get('warehouse', 'database_user'),
            password=self.config.get('warehouse', 'database_password'))
        fsp.restore()
コード例 #46
0
ファイル: resources.py プロジェクト: pbugni/pheme.webAPI
 def _set_report_type(self, report_type, patient_class=None):
     """Resolve the phinms outbound directory for this report type.

     An 'essence' report restricted to a patient class looks up a
     dedicated config key, e.g. 'essence_pcE'.
     """
     key = report_type
     if patient_class and key == 'essence':
         key = '%s_pc%s' % (key, patient_class)
     self._outbound_dir = Config().get('phinms', key)
コード例 #47
0
 def test_default(self):
     """Asking for missing value with a default"""
     c = Config(self.config_files)
     # assertEqual: assertEquals is a deprecated alias (removed in 3.12)
     self.assertEqual(42, c.get('Lifes', 'Answer', 42))
コード例 #48
0
    def process_args(self):
        """Process any optional arguments and positional parameters

        Using the values provided, assemble ReportCriteria and
        Datefile instances to control report generation.

        """
        parser = OptionParser(usage=usage)
        # Provide the ReportCriteria instance an error callback so any
        # command line errors provoke the standard graceful exit with
        # warning text.
        self.criteria.error_callback = parser.error

        parser.add_option("-u",
                          "--user",
                          dest="user",
                          default=self.user,
                          help="database user")
        parser.add_option("-p", "--password", dest="password",
                          default=self.password,
                          help="database password, or file containing "\
                              "just the password")
        parser.add_option("-c", "--countdown", dest="countdown",
                          default=None,
                          help="count {down,up} the start and end dates "\
                              "set to 'forwards' or 'backwards' "\
                              "if desired")
        parser.add_option("-i", "--include-updates",
                          action='store_true', dest="includeUpdates",
                          default=False, help="include "\
                              "visits updated since last similar report")
        parser.add_option("--include-vitals",
                          action='store_true', dest="includeVitals",
                          default=False, help="include "\
                              "vitals (measured temperature, O2 "\
                              "saturation, influenza and H1N1 vaccine "\
                              "data) as additional columns in the "\
                              "report")
        parser.add_option("-k", "--patient-class",
                          dest="patient_class",
                          default=None, help="use "\
                          "to filter report on a specific patient "\
                          "class [E,I,O]")
        parser.add_option("-r", "--region", dest="region",
                          default=None,
                          help="reportable region defining limited set "\
                              "of facilities to include, by default "\
                              "all  facilities are included")
        parser.add_option("-s", "--save-and-upload",
                          action='store_true', dest="save_upload",
                          default=False, help="save file and upload to "\
                              "DOH")
        parser.add_option("-x",
                          "--save-without-upload",
                          action='store_true',
                          dest="save_only",
                          default=False,
                          help="save file but don't upload")
        parser.add_option("-d", "--upload-diff",
                          action='store_true', dest="upload_diff",
                          default=False, help="upload differences only "\
                              "(from yesterdays like report) to DOH")
        parser.add_option("-t", "--thirty-days",
                          action='store_true', dest="thirty_days",
                          default=False, help="include 30 days up to "\
                              "requested date ")
        parser.add_option("-v",
                          "--verbose",
                          dest="verbosity",
                          action="count",
                          default=self.verbosity,
                          help="increase output verbosity")

        (options, args) = parser.parse_args()
        if len(args) != 2:
            parser.error("incorrect number of arguments")

        # Database to query
        self.criteria.database = args[0]
        self.user = options.user
        self.password = options.password
        self.criteria.credentials(user=self.user, password=self.password)

        # Potential region restriction
        self.criteria.reportable_region = options.region

        # Potential patient class restriction
        self.criteria.patient_class = options.patient_class

        # Potential to include vitals (not tied to gipse format)
        self.criteria.include_vitals = options.includeVitals

        # Potential inclusion of updates
        self.criteria.include_updates = options.includeUpdates

        # Report date(s) and potential step direction.
        # NB - several options affect report_method and must be set
        # first!

        initial_date = parseDate(args[1])
        config = Config()
        ps_file = os.path.join(
            config.get('general', 'tmp_dir', default='/tmp'),
            self.criteria.report_method)
        step = options.thirty_days and 30 or None
        direction = options.countdown
        self.datefile = Datefile(initial_date=initial_date,
                                 persistence_file=ps_file,
                                 direction=direction,
                                 step=step)
        self.criteria.start_date, self.criteria.end_date =\
            self.datefile.get_date_range()

        # What to do once report is completed.  Complicated, protect
        # user from themselves!
        self.save_report = options.save_upload or \
            options.save_only or options.upload_diff
        self.transmit_report = options.save_upload
        self.transmit_differences = options.upload_diff

        if options.save_only and options.save_upload:
            parser.error("save-without-upload and save-and-upload "\
                         "are mutually exclusive")
        if options.save_only and options.upload_diff:
            parser.error("save-without-upload and upload-diff "\
                         "are mutually exclusive")
        # NB - fixed missing space: message previously rendered as
        # "...save-and-uploadare mutually exclusive"
        if options.upload_diff and options.save_upload:
            parser.error("upload-diff and save-and-upload "\
                         "are mutually exclusive")

        # Can't transmit w/o saving
        if options.save_upload or options.upload_diff:
            assert (self.save_report)
        # Sanity check
        if options.save_only:
            assert (self.save_report and not self.transmit_report
                    and not self.transmit_differences)

        # How verbosely to log
        self.verbosity = options.verbosity
コード例 #49
0
ファイル: test_config.py プロジェクト: pbugni/pheme.util
 def test_default(self):
     """Asking for missing value with a default"""
     c = Config(self.config_files)
     # assertEqual: assertEquals is a deprecated alias (removed in 3.12)
     self.assertEqual(42, c.get('Lifes', 'Answer', 42))
コード例 #50
0
class MirthInteraction(object):
    """Abstraction to interact with Mirth Connect for testing"""
    # seconds to sleep between polls for processed files
    WAIT_INTERVAL = 15
    # seconds allowed without progress before declaring Mirth hung
    TIMEOUT = 300

    def __init__(self):
        self.config = Config()

        # obtain list of files to process
        path = os.path.abspath(
            os.path.join(os.path.dirname(__file__),
                         "../../../test_hl7_batchfiles"))
        self.filenames = [
            os.path.join(path, file) for file in os.listdir(path)
        ]

    def prepare_filesystem(self):
        """Reset database and channel directories for a clean run"""
        # create clean database (includes non production sanity check)
        setup_module()

        # wipe previous run(s) files
        mirth_user = self.config.get('mirth', 'mirth_system_user')
        for dir in ('input_dir', 'output_dir', 'error_dir'):
            wipe_dir_contents(self.config.get('warehouse', dir), mirth_user)

    def process_batchfiles(self):
        """Feed the testfiles to mirth - block till done"""
        self.prepare_filesystem()
        require_mirth()
        for batchfile in self.filenames:
            copy_file_to_dir(batchfile,
                             self.config.get('warehouse', 'input_dir'),
                             self.config.get('mirth', 'mirth_system_user'))

        # wait for all files to appear in error or output dirs
        # providing occasional output and raising if we appear hung
        last_count = 0
        last_time = time.time()
        output_dir = self.config.get('warehouse', 'output_dir')
        error_dir = self.config.get('warehouse', 'error_dir')
        while last_count < len(self.filenames):
            time.sleep(self.WAIT_INTERVAL)
            # each processed batchfile lands in output_dir or error_dir
            count = len(os.listdir(output_dir)) + len(os.listdir(error_dir))
            if count > last_count:
                # progress observed - reset the stall timer
                last_count = count
                last_time = time.time()
            # raise only when no NEW file arrived within TIMEOUT seconds
            if time.time() - self.TIMEOUT > last_time:
                raise RuntimeError("TIMEOUT exceeded waiting on Mirth")
            print "Waiting on mirth to process files...(%d of %d)" %\
                (last_count, len(self.filenames))

    def persist_database(self):
        """Write the database contents to disk"""
        fsp = FilesystemPersistence(\
            database=self.config.get('warehouse', 'database'),
            user=self.config.get('warehouse', 'database_user'),
            password=self.config.get('warehouse', 'database_password'))
        fsp.persist()

    def restore_database(self):
        """Pull previously persisted data into database"""
        fsp = FilesystemPersistence(\
            database=self.config.get('warehouse', 'database'),
            user=self.config.get('warehouse', 'database_user'),
            password=self.config.get('warehouse', 'database_password'))
        fsp.restore()
コード例 #51
0
def transform_channels():
    """Apply default transform to PHEME channels"""

    description = """
    Mirth channels can be easily exported in XML format.  This utility
    provides a mechanims to alter an export for subsequent import.
    Useful for altering details such as database name and user
    authentication.

    NB - values defined in the project configuration file will be used
    unless provided as optional arguments.  See
    `pheme.util.config.Config`
    """
    config = Config()
    # Every option defaults to the matching [warehouse] config value;
    # command line arguments override.
    parser = argparse.ArgumentParser(description=description)
    parser.add_argument(
        "-d", "--database", dest="db",
        default=config.get('warehouse', 'database'),
        help="name of database (overrides "
             "[warehouse]database)")
    parser.add_argument(
        "-u", "--user", dest="user",
        default=config.get('warehouse', 'database_user'),
        help="database user (overrides "
             "[warehouse]database_user)")
    parser.add_argument(
        "-p", "--password", dest="password",
        default=config.get('warehouse', 'database_password'),
        help="database password (overrides [warehouse]"
             "database_password)")
    parser.add_argument(
        "--input_dir", dest="input_dir",
        default=config.get('warehouse', 'input_dir'),
        help="filesystem directory for channel to poll "
             "(overrides [warehouse]input_dir)")
    parser.add_argument(
        "--output_dir", dest="output_dir",
        default=config.get('warehouse', 'output_dir'),
        help="filesystem directory for channel output "
             "(overrides [warehouse]output_dir)")
    parser.add_argument(
        "--error_dir", dest="error_dir",
        default=config.get('warehouse', 'error_dir'),
        help="filesystem directory for channel errors "
             "(overrides [warehouse]error_dir)")
    parser.add_argument(
        "source_directory",
        help="directory containing source channel "
             "definition files")
    parser.add_argument(
        "target_directory",
        help="directory to write transformed channel "
             "definition files")
    opts = parser.parse_args()
    src_root = os.path.realpath(opts.source_directory)
    dest_root = os.path.realpath(opts.target_directory)

    # Transform each known channel export in turn, reusing one manager.
    transformer = TransformManager(src=None,
                                   target_dir=dest_root,
                                   options=opts)
    for channel in CHANNELS:
        transformer.src = os.path.join(src_root, '%s.xml' % channel)
        transformer()
    # no transformation on codetemplates at this time - but the
    # importer expects the codetemplates.xml file to be in the same
    # directory, so copy it over.
    shutil.copy(os.path.join(src_root, 'codetemplates.xml'), dest_root)
コード例 #52
0
class MirthShell(object):
    """Sets up and executes common tasks via the mirth shell

    We have a few common patterns, such as deploying test and
    production versions of the mirth channels.  This manages pulling
    in configuration details and setting up the necessary paths to
    make it scriptable.

    """
    def __init__(self):
        self.config = Config()
        self.mirth_home = self.config.get('mirth', 'mirth_home')
        self.mirth_system_user = self.config.get('mirth', 'mirth_system_user')

    def write_script(self, script_file, imports=None, exports=None):
        """Generate script for mirth shell to run

        :param script_file: An open file handle to a writable file
        :param imports: The mirth channel(s) XML export to import
        :param exports: The list of (channel name, output path) to
          export.

        Writes the necessary mirth shell instructions to the
        script_file so that the file can be executed via the mirth
        shell.  NB - this takes the liberty of relaxing the 0600 mode
        settings hardcoded in NamedTemporaryFile so mirth shell will
        have read access if running as a different user.

        Codetemplates are also imported or exported (if imports or
        exports are defined) into the same directory as the first named
        import / export in a file named "codetemplates.xml".

        """
        # None sentinels replace the former mutable default arguments
        # ([]); behavior is unchanged for all callers.
        imports = imports or []
        exports = exports or []
        for channel in imports:
            assert(os.path.exists(channel))
            script_file.write("import %s force\n" % channel)
        for channel, output in exports:
            script_file.write("export %s %s\n" % (channel, output))
        if imports:
            codetemplates = os.path.join(os.path.dirname(imports[0]),
                                         "codetemplates.xml")
            script_file.write("importcodetemplates %s\n" % codetemplates)
            script_file.write("deploy\n")
            script_file.write("status\n")
        if exports:
            codetemplates = os.path.join(os.path.dirname(exports[0][1]),
                                         "codetemplates.xml")
            script_file.write("exportcodetemplates %s\n" % codetemplates)
        # rewind so the caller (or mirth shell) can read from the top
        script_file.flush()
        script_file.seek(0)
        # relax NamedTemporaryFile's 0600 so mirth shell can read it
        os.chmod(script_file.name, 0o644)

    def execute_script(self, script):
        """Execute the given script via the mirth shell

        :param script: open file (with a ``name``) produced by
          write_script

        """
        # capture cwd BEFORE entering try, so a failed getcwd can't
        # trigger a restore of an undefined orig_dir
        orig_dir = os.getcwd()
        try:
            os.chdir(self.mirth_home)
            sudo = ''
            if getpass.getuser() != self.mirth_system_user:
                sudo = 'sudo -H -u %s' % self.mirth_system_user
            # NOTE(review): shell command built from config values -
            # trusted input assumed; consider subprocess with an argv
            # list if config ever becomes user-supplied.
            command = "%s ./mccommand -s %s" %\
                (sudo, script.name)
            print(command)
            os.system(command)
        finally:
            # former `except: raise` was redundant; finally suffices
            os.chdir(orig_dir)