def setup_module():
    """Build a clean test database shared by every test in this module."""
    configure_logging(verbosity=2, logfile='unittest.log')
    config = Config()
    # Guard clause: never run this destructive setup against production.
    if config.get('general', 'in_production'):  # pragma: no cover
        raise RuntimeError("DO NOT run destructive test on production system")
    create_tables(enable_delete=True, **db_params(CONFIG_SECTION))

    # Seed two bogus facilities, then a "test_region" tied to the first.
    king_facility = Facility(
        county='KING', npi=10987, zip='12345',
        organization_name='Reason Medical Center', local_code='RMC')
    pond_facility = Facility(
        county='POND', npi=65432, zip='67890',
        organization_name='No-Reason Medical Center', local_code='NMC')
    connection = db_connection(CONFIG_SECTION)
    connection.session.add(king_facility)
    connection.session.add(pond_facility)
    connection.session.commit()
    region = ReportableRegion(region_name='test_region',
                              dim_facility_pk=10987)
    connection.session.add(region)
    connection.session.commit()
    connection.disconnect()
Ejemplo n.º 2
0
def setup_module():
    """Recreate the database tables once so this module's tests start fresh."""
    configure_logging(verbosity=2, logfile='unittest.log')
    config = Config()
    # Refuse to run a destructive table rebuild on a production system.
    if config.get('general', 'in_production'):  # pragma: no cover
        raise RuntimeError("DO NOT run destructive test on production system")

    create_tables(enable_delete=True, **db_params(CONFIG_SECTION))
Ejemplo n.º 3
0
def setup_module():
    """Drop and recreate all tables, giving this module's tests a fresh db."""
    configure_logging(verbosity=2, logfile='unittest.log')
    cfg = Config()
    in_production = cfg.get('general', 'in_production')
    # Destructive rebuild — bail out hard if pointed at production.
    if in_production:  # pragma: no cover
        raise RuntimeError("DO NOT run destructive test on production system")

    create_tables(enable_delete=True, **db_params(CONFIG_SECTION))
Ejemplo n.º 4
0
    def execute(self):
        """Kick off report generation using the collected settings."""
        logfile_name = "%s.log" % self.criteria.report_method
        configure_logging(verbosity=self.verbosity, logfile=logfile_name)

        report = GenerateReport(user=self.user,
                                password=self.password,
                                report_criteria=self.criteria,
                                datefile=self.datefile)
        report.execute(save_report=self.save_report,
                       transmit_report=self.transmit_report,
                       transmit_differences=self.transmit_differences)
    def execute(self):
        """Launch the report run configured on this instance."""
        configure_logging(
            verbosity=self.verbosity,
            logfile="%s.log" % self.criteria.report_method)

        generator = GenerateReport(
            user=self.user,
            password=self.password,
            report_criteria=self.criteria,
            datefile=self.datefile)
        generator.execute(
            save_report=self.save_report,
            transmit_report=self.transmit_report,
            transmit_differences=self.transmit_differences)
def setup_module():
    """Create a fresh db (once) for all tests in this module.

    Rebuilds the tables, then loads the anonymized static data fixture.
    """
    configure_logging(verbosity=2, logfile='unittest.log')
    c = Config()
    if c.get('general', 'in_production'):  # pragma: no cover
        raise RuntimeError("DO NOT run destructive test on production system")
    create_tables(enable_delete=True, **db_params(CONFIG_SECTION))

    # Load in all the static data in anonymized form.  The original code
    # opened the file without ever closing it; a context manager closes
    # the handle even if load_file() raises.
    fixture_path = os.path.join(os.path.dirname(os.path.abspath(__file__)),
                                'anon_static_db_data.yaml')
    with open(fixture_path, 'r') as static_data_file:
        load_file(static_data_file)
def setup_module():
    """Create a fresh db (once) for all tests in this module.

    Recreates all tables and loads the anonymized static fixture data.
    """
    configure_logging(verbosity=2, logfile='unittest.log')
    c = Config()
    if c.get('general', 'in_production'):  # pragma: no cover
        raise RuntimeError("DO NOT run destructive test on production system")
    create_tables(enable_delete=True, **db_params(CONFIG_SECTION))

    # Load in all the static data in anonymized form.  Use `with` so the
    # file handle is always closed (the original leaked the open handle).
    fixture = os.path.join(os.path.dirname(os.path.abspath(__file__)),
                           'anon_static_db_data.yaml')
    with open(fixture, 'r') as static_data_file:
        load_file(static_data_file)
Ejemplo n.º 8
0
    def processArgs(self):
        """Process any optional arguments and positional parameters.

        Parses the command line; when -f/--file is supplied, daemon mode
        is switched off and the positional arguments are taken as the
        list of files to process.  Finishes by configuring logging and
        delegating to self.execute().
        """
        parser = OptionParser(usage=usage)
        parser.add_option("-v",
                          "--verbose",
                          dest="verbosity",
                          action="count",
                          default=self.verbosity,
                          help="increase output verbosity")
        parser.add_option("-b",
                          "--backwards",
                          dest="progression",
                          default=self.progression,
                          help="Progress backwards in time through "
                          "available batch files (default is forwards)")
        parser.add_option("-f",
                          "--file",
                          dest="namedfiles",
                          default=self.files,
                          action='store_true',
                          help="only process named file(s)")
        parser.add_option("--copy-to-tempdir",
                          dest="tempdir",
                          default=None,
                          action='store',
                          help="Don't upload or track, just copy files "
                          "to named directory")

        (options, args) = parser.parse_args()
        if not parser.values.namedfiles:
            if len(args) != 0:
                parser.error("incorrect number of arguments")
        else:
            # User says they're feeding us files.  Gobble up the list
            self.daemon_mode = False
            if not args:
                # Fixed: the implicit string concatenation was missing a
                # space, producing "...filename withthe files(-f) option".
                parser.error("must supply at least one filename with "
                             "the files(-f) option")
            self.files = list(args)

        self.copy_tempdir = parser.values.tempdir
        self.verbosity = parser.values.verbosity
        configure_logging(verbosity=self.verbosity, logfile='stderr')
        self.execute()
Ejemplo n.º 9
0
def test_configure_logging():
    """configure_logging should return the full path of the new log file."""
    expected = os.path.join(Config().get('general', 'log_dir'),
                            'unittest.log')
    logfile = configure_logging(verbosity=2, logfile='unittest.log',
                                append=False)
    logging.debug("just testing")
    # Thanks to nose working so hard to capture logging, it's quite
    # difficult to test - hand verified.
    assert logfile == expected
Ejemplo n.º 10
0
def test_configure_logging():
    """Check the path configure_logging reports for its log file."""
    returned_path = configure_logging(verbosity=2,
                                      logfile='unittest.log',
                                      append=False)
    logging.debug("just testing")
    # nose captures logging aggressively, so the log output itself was
    # verified by hand; here we only assert on the returned path.
    log_dir = Config().get('general', 'log_dir')
    assert returned_path == os.path.join(log_dir, 'unittest.log')
Ejemplo n.º 11
0
    def processArgs(self):
        """Process any optional arguments and positional parameters.

        Parses the command line; -f/--file disables daemon mode and
        treats the positional arguments as the files to process.  Ends
        by configuring logging and calling self.execute().
        """
        parser = OptionParser(usage=usage)
        parser.add_option("-v", "--verbose", dest="verbosity",
                          action="count", default=self.verbosity,
                          help="increase output verbosity")
        parser.add_option("-b", "--backwards", dest="progression",
                          default=self.progression,
                          help="Progress backwards in time through "
                          "available batch files (default is forwards)")
        parser.add_option("-f", "--file", dest="namedfiles",
                          default=self.files, action='store_true',
                          help="only process named file(s)")
        parser.add_option("--copy-to-tempdir", dest="tempdir",
                          default=None, action='store',
                          help="Don't upload or track, just copy files "
                          "to named directory")

        (options, args) = parser.parse_args()
        if not parser.values.namedfiles:
            if len(args) != 0:
                parser.error("incorrect number of arguments")
        else:
            # User says they're feeding us files.  Gobble up the list
            self.daemon_mode = False
            if not args:
                # Fixed: the implicit string concatenation was missing a
                # space, producing "...filename withthe files(-f) option".
                parser.error("must supply at least one filename with "
                             "the files(-f) option")
            self.files = list(args)

        self.copy_tempdir = parser.values.tempdir
        self.verbosity = parser.values.verbosity
        configure_logging(verbosity=self.verbosity, logfile='stderr')
        self.execute()
def setup_module():
    """Create a fresh db (once) for all tests in this module"""
    configure_logging(verbosity=2, logfile='unittest.log')
    settings = Config()
    # Destructive setup — refuse to touch a production database.
    if settings.get('general', 'in_production'):  # pragma: no cover
        raise RuntimeError("DO NOT run destructive test on production system")
    create_tables(enable_delete=True, **db_params(CONFIG_SECTION))

    # create a "test_region" and a couple bogus facilities
    bogus_facilities = (
        Facility(county='KING', npi=10987, zip='12345',
                 organization_name='Reason Medical Center',
                 local_code='RMC'),
        Facility(county='POND', npi=65432, zip='67890',
                 organization_name='No-Reason Medical Center',
                 local_code='NMC'),
    )
    connection = db_connection(CONFIG_SECTION)
    for facility in bogus_facilities:
        connection.session.add(facility)
    connection.session.commit()
    connection.session.add(ReportableRegion(region_name='test_region',
                                            dim_facility_pk=10987))
    connection.session.commit()
    connection.disconnect()
    def execute(self):
        """Run one deduplication pass over the warehouse.

        Takes an exclusive lock so only one manager runs at a time,
        collects the visits needing work, fans them out to NUM_PROCS
        LongitudinalWorker processes via self.queue, and blocks until
        the queue drains.
        """
        # Initialize logging now (verbosity is now set regardless of
        # invocation method)
        configure_logging(verbosity=self.verbosity, logfile="longitudinal-manager.log")

        # `x and x or y` is exactly `x or y`; simplified.
        logging.info("Initiate deduplication for %s", self.reportDate or "whole database")
        # Only allow one instance of the manager to run at a time.
        if self.lock.is_locked():
            # logging.warn() is a deprecated alias; use warning().
            logging.warning("Can't continue, %s is locked ", LOCKFILE)
            return

        if systemUnderLoad():
            logging.warning("system under load - continue anyhow")

        try:
            self.lock.acquire()

            self.access = DirectAccess(
                database=self.data_warehouse,
                port=self.warehouse_port,
                user=self.database_user,
                password=self.database_password,
            )
            self.data_warehouse_access = AlchemyAccess(
                database=self.data_warehouse,
                port=self.warehouse_port,
                user=self.database_user,
                password=self.database_password,
            )
            self.data_mart_access = AlchemyAccess(
                database=self.data_mart, port=self.mart_port, user=self.database_user, password=self.database_password
            )

            startTime = time.time()
            if not self.skip_prep:
                self._prepDeduplicateTables()
            visits_to_process = self._visitsToProcess()

            # Now done with db access needs at the manager level
            # free up resources:
            self.data_mart_access.disconnect()
            self.data_warehouse_access.disconnect()
            self.access.close()

            # Set of locks used, one for each table needing protection
            # from asynchronous inserts.  Names should match table
            # minus 'dim_' prefix, plus '_lock' suffix
            # i.e. dim_location -> 'location_lock'
            table_locks = {
                "admission_source_lock": Lock(),
                "admission_o2sat_lock": Lock(),
                "admission_temp_lock": Lock(),
                "assigned_location_lock": Lock(),
                "admit_reason_lock": Lock(),
                "chief_complaint_lock": Lock(),
                "diagnosis_lock": Lock(),
                "disposition_lock": Lock(),
                "flu_vaccine_lock": Lock(),
                "h1n1_vaccine_lock": Lock(),
                "lab_flag_lock": Lock(),
                "lab_result_lock": Lock(),
                "location_lock": Lock(),
                "note_lock": Lock(),
                "order_number_lock": Lock(),
                "performing_lab_lock": Lock(),
                "pregnancy_lock": Lock(),
                "race_lock": Lock(),
                "reference_range_lock": Lock(),
                "service_area_lock": Lock(),
                "specimen_source_lock": Lock(),
            }

            # If we have visits to process, fire up the workers...
            # NOTE(review): `> 1` skips the run when exactly one visit is
            # pending — confirm this wasn't meant to be `> 0`.
            if len(visits_to_process) > 1:
                for i in range(self.NUM_PROCS):
                    dw = Process(
                        target=LongitudinalWorker,
                        kwargs={
                            "queue": self.queue,
                            "procNumber": i,
                            "data_warehouse": self.data_warehouse,
                            "warehouse_port": self.warehouse_port,
                            "data_mart": self.data_mart,
                            "mart_port": self.mart_port,
                            "dbUser": self.database_user,
                            "dbPass": self.database_password,
                            "table_locks": table_locks,
                            "verbosity": self.verbosity,
                        },
                    )
                    # daemonize so workers die with the manager
                    dw.daemon = True
                    dw.start()

                # Populate the queue
                for v in visits_to_process:
                    self.queue.put(v)

                # Wait on the queue until empty
                self.queue.join()

            # Common cleanup
            self.tearDown()
            self.datePersistence.bump_date()
            logging.info("Queue is empty - done in %s", time.time() - startTime)
        finally:
            self.lock.release()
Ejemplo n.º 14
0
    def execute(self):
        """Run one deduplication pass over the warehouse.

        Serializes managers via a lock file, gathers visits to process,
        then feeds them through self.queue to NUM_PROCS worker processes
        and waits for the queue to drain.
        """
        # Initialize logging now (verbosity is now set regardless of
        # invocation method)
        configure_logging(verbosity=self.verbosity,
                          logfile="longitudinal-manager.log")

        # `x and x or y` is exactly `x or y`; simplified.
        logging.info("Initiate deduplication for %s",
                     self.reportDate or "whole database")
        # Only allow one instance of the manager to run at a time.
        if self.lock.is_locked():
            # logging.warn() is a deprecated alias; use warning().
            logging.warning("Can't continue, %s is locked ", LOCKFILE)
            return

        if systemUnderLoad():
            logging.warning("system under load - continue anyhow")

        try:
            self.lock.acquire()

            self.access = DirectAccess(database=self.data_warehouse,
                                       port=self.warehouse_port,
                                       user=self.database_user,
                                       password=self.database_password)
            self.data_warehouse_access = AlchemyAccess(
                database=self.data_warehouse,
                port=self.warehouse_port,
                user=self.database_user, password=self.database_password)
            self.data_mart_access = AlchemyAccess(
                database=self.data_mart, port=self.mart_port,
                user=self.database_user, password=self.database_password)

            startTime = time.time()
            if not self.skip_prep:
                self._prepDeduplicateTables()
            visits_to_process = self._visitsToProcess()

            # Now done with db access needs at the manager level
            # free up resources:
            self.data_mart_access.disconnect()
            self.data_warehouse_access.disconnect()
            self.access.close()

            # Set of locks used, one for each table needing protection
            # from asynchronous inserts.  Names should match table
            # minus 'dim_' prefix, plus '_lock' suffix
            # i.e. dim_location -> 'location_lock'
            table_locks = {'admission_source_lock': Lock(),
                           'admission_o2sat_lock': Lock(),
                           'admission_temp_lock': Lock(),
                           'assigned_location_lock': Lock(),
                           'admit_reason_lock': Lock(),
                           'chief_complaint_lock': Lock(),
                           'diagnosis_lock': Lock(),
                           'disposition_lock': Lock(),
                           'flu_vaccine_lock': Lock(),
                           'h1n1_vaccine_lock': Lock(),
                           'lab_flag_lock': Lock(),
                           'lab_result_lock': Lock(),
                           'location_lock': Lock(),
                           'note_lock': Lock(),
                           'order_number_lock': Lock(),
                           'performing_lab_lock': Lock(),
                           'pregnancy_lock': Lock(),
                           'race_lock': Lock(),
                           'reference_range_lock': Lock(),
                           'service_area_lock': Lock(),
                           'specimen_source_lock': Lock(),
                           }

            # If we have visits to process, fire up the workers...
            # NOTE(review): `> 1` skips the run when exactly one visit is
            # pending — confirm this wasn't meant to be `> 0`.
            if len(visits_to_process) > 1:
                for i in range(self.NUM_PROCS):
                    dw = Process(target=LongitudinalWorker,
                                 kwargs={'queue': self.queue,
                                         'procNumber': i,
                                         'data_warehouse': self.data_warehouse,
                                         'warehouse_port': self.warehouse_port,
                                         'data_mart': self.data_mart,
                                         'mart_port': self.mart_port,
                                         'dbUser': self.database_user,
                                         'dbPass': self.database_password,
                                         'table_locks': table_locks,
                                         'verbosity': self.verbosity})
                    # daemonize so workers die with the manager
                    dw.daemon = True
                    dw.start()

                # Populate the queue
                for v in visits_to_process:
                    self.queue.put(v)

                # Wait on the queue until empty
                self.queue.join()

            # Common cleanup
            self.tearDown()
            self.datePersistence.bump_date()
            logging.info("Queue is empty - done in %s", time.time() -
                         startTime)
        finally:
            self.lock.release()