Example #1
def find_missing_draw(days_till_expiration, days_till_urgent, code):
    current_time = datetime.date.today()
    results_expiration = current_time - datetime.timedelta(
        days=days_till_expiration)

    warn_missing = current_time - datetime.timedelta(days=days_till_urgent)
    critically_missing = warn_missing - datetime.timedelta(days=2)

    missing_draw = Session.query(
        models.Result).filter(models.Result.site_code == code).filter(
            models.Result.draw_date == None).filter(
                models.Result.test_date > warn_missing).all()

    warn_missing_draw = Session.query(
        models.Result).filter(models.Result.site_code == code).filter(
            models.Result.draw_date == None).filter(
                models.Result.test_date > critically_missing).filter(
                    models.Result.test_date <= warn_missing).all()

    critically_missing_draw = Session.query(
        models.Result).filter(models.Result.site_code == code).filter(
            models.Result.draw_date == None).filter(
                models.Result.test_date <= critically_missing).filter(
                    models.Result.test_date > results_expiration).all()

    return missing_draw, warn_missing_draw, critically_missing_draw
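A minimal usage sketch of the three-bucket query above. The window sizes are assumptions (the tests in Example #12 and Example #19 imply a 180-day expiration); each bucket holds results with no draw date, split by how old their test_date is.

missing, warn, critical = find_missing_draw(
    days_till_expiration=180,  # assumed: results expire after 180 days
    days_till_urgent=14,       # hypothetical urgency threshold
    code='999X')

# missing:  test_date within the last 14 days
# warn:     test_date between 14 and 16 days old
# critical: test_date between 16 and 180 days old
flagged = len(missing) + len(warn) + len(critical)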
Example #2
    def setup(self):
        Session.remove()
        self.tempdir = tempfile.mkdtemp()
        self.configfile = os.path.join(self.tempdir, 'config')
        self.datafile = os.path.join(self.tempdir, 'data')
        self.homedir = tempfile.mkdtemp(dir=self.tempdir)

        config = configparser.SafeConfigParser()
        config.add_section('settings')
        config.set('settings', 'dir.raw', os.path.join(self.tempdir, 'raw'))
        config.set('settings', 'dir.web', os.path.join(self.tempdir, 'web'))
        config.set('settings', 'sqlalchemy.url',
                   'sqlite:///' + os.path.join(self.tempdir, 'db'))
        config.set('settings', 'gpg.home', self.homedir)
        config.set('settings', 'gpg.passphrase', 'secret')
        config.set('settings', 'gpg.binary', 'gpg')
        config.set('settings', 'mail.on', 'True')
        config.set('settings', 'mail.message.author', 'proc@localhost')
        config.set('settings', 'notify.error', 'foo@localhost')
        config.set('settings', 'notify.999X.dhiv', 'foo@localhost')
        config.set('settings', 'notify.999X.dhcv', 'foo@localhost')
        config.set('settings', 'notify.999X.dhbv', 'foo@localhost')
        config.set('settings', 'notify.diagnostic', 'foo@localhost')

        self.config = config

        with open(self.configfile, 'w+') as fp:
            config.write(fp)

        sys.argv = ['', '-c', self.configfile]
        scripts.initdb.main()

        self.passphrase = 'secret'

        gpg = gnupg.GPG(gnupghome=self.homedir)
        self.gpg = gpg

        # persist the keys so the function can properly reload them
        with tempfile.NamedTemporaryFile(dir=self.homedir) as fp:
            key = gpg.gen_key(gpg.gen_key_input())
            fp.write(gpg.export_keys(str(key)))
            fp.write(gpg.export_keys(str(key), True))

        self.key = key

        # The database will be reconfigured with the command, so make sure
        # it is not bound to a database
        Session.remove()
Example #3
    def test_DrawDateSync_ResultHasNoCorrespondingDrawDateInDrawTable_ResultDrawDateIsNone(
            self):
        """
        Result draw dates should be None if there is no corresponding draw date in the Draw table.
        Sync code is in parser.py
        """

        site_code = '999X'
        reference_number = '12345'

        gpg = self.gpg
        datafile = os.path.join(self.tempdir, 'data')

        io = StringIO()
        print >> io, 'Has 1 result'
        print >> io, '999X 12345                                      P               N  N  N'  # NOQA
        io.flush()
        io.seek(0)
        gpg.encrypt_file(io,
                         str(self.key),
                         passphrase=self.passphrase,
                         output=datafile)
        io.close()

        sys.argv = ['', '-c', self.configfile, datafile]
        scripts.parse.main()

        result = Session.query(models.Result)\
            .filter(models.Result.site_code == site_code)\
            .filter(models.Result.reference_number == reference_number).first()

        # Is there a draw date?
        assert not result.draw_date
Example #4
    def test_dry(self):
        """
        Ensure the --dry option doesn't affect the file system or the database
        """
        config = self.config
        gpg = self.gpg
        datafile = os.path.join(self.tempdir, 'data')

        io = StringIO()
        print >> io, 'Has 1 result'
        print >> io, '076C 12345                                      P'
        io.flush()
        io.seek(0)
        gpg.encrypt_file(io,
                         str(self.key),
                         passphrase=self.passphrase,
                         output=datafile)
        io.close()

        sys.argv = ['', '-c', self.configfile, '--dry', datafile]
        scripts.parse.main()

        eq_(0, Session.query(models.Result).count())
        assert not os.path.exists(
            os.path.join(config.get('settings', 'dir.web'), '76C.html'))
        assert not os.path.exists(
            os.path.join(config.get('settings', 'dir.web'), '76C.xls'))
Example #5
    def test_duplicate(self):
        """
        Ensure we get notified if a duplicate entry is attempted
        """
        gpg = self.gpg
        datafile = os.path.join(self.tempdir, 'data')

        io = StringIO()
        print >> io, 'Has 1 result'
        print >> io, '076C 12345                                      P'
        io.flush()
        io.seek(0)
        gpg.encrypt_file(io,
                         str(self.key),
                         passphrase=self.passphrase,
                         output=datafile)
        io.close()

        sys.argv = ['', '-c', self.configfile, datafile]
        scripts.parse.main()
        Session.remove()

        io = StringIO()
        print >> io, 'Has 1 result'
        print >> io, '076C 12345                                      N'
        io.flush()
        io.seek(0)
        gpg.encrypt_file(io,
                         str(self.key),
                         passphrase=self.passphrase,
                         output=datafile)
        io.close()

        try:
            sys.argv = ['', '-c', self.configfile, datafile]
            scripts.parse.main()
            Session.remove()
        except:
            # the command will re-raise the exception, suppress it so we
            # can read the email
            pass

        emails = turbomail.interface.manager.transport.get_sent_mails()
        eq_(1, len(emails))
        email_content = emails.pop()
        expected = 'Already exists: 76C12345'
        assert expected in email_content
Example #6
def find_missing_draw(days_till_expiration, code):
    current_time = datetime.date.today()
    results_expiration = current_time - datetime.timedelta(
        days=days_till_expiration)
    missing_draw = Session.query(
        models.Result).filter(models.Result.site_code == code).filter(
            models.Result.draw_date == None).filter(
                models.Result.test_date > results_expiration).all()
    return missing_draw
Example #7
    def test_DrawDateSync_ResultHasCorrespondingDrawDateInDrawTable_ResultDrawDateIsSynced(
            self):
        """
        Should sync draw dates from Draw table to results in Result table
        """

        site_code = '999X'
        reference_number = '12345'

        # Add a draw date to the Draw table first
        Session.add(
            models.Draw(site_code=site_code,
                        reference_number=reference_number,
                        draw_date=datetime.date.today()))
        transaction.commit()

        gpg = self.gpg
        datafile = os.path.join(self.tempdir, 'data')

        io = StringIO()
        print >> io, 'Has 1 result'
        print >> io, '999X 12345                                      P               N  N  N'  # NOQA
        io.flush()
        io.seek(0)
        gpg.encrypt_file(io,
                         str(self.key),
                         passphrase=self.passphrase,
                         output=datafile)
        io.close()

        sys.argv = ['', '-c', self.configfile, datafile]
        scripts.parse.main()

        result = Session.query(models.Result)\
            .filter(models.Result.site_code == site_code)\
            .filter(models.Result.reference_number == reference_number).first()

        # Is there a draw date?
        assert result.draw_date
Example #8
def test_parsefp_duplicate_result():
    """
    Ensure we can detect duplicates
    """
    site_code = 'XXXX'
    reference_number = 'YYYYY'

    # Create a pre-existing result
    Session.add(
        models.Result(site_code=site_code,
                      reference_number=reference_number,
                      nat='N',
                      file='oldfile.txt'))
    Session.flush()

    fp = StringIO()
    print >> fp, 'Line 1'
    print >> fp, (site_code + ' ' + reference_number).ljust(80)
    fp.seek(0)
    results, duplicates = parser.parsefp(fp)
    eq_(len(results), 0)
    eq_(len(duplicates), 1)
Example #9
def from_file(file):
    """
    Custom argument parser type for configuration files.
    Reads the configuration file and initializes dependent utilities.
    """
    config = configparser.SafeConfigParser()
    config.read(file)

    try:
        # setup logging (this is optional)
        logging.config.fileConfig(file)
    except configparser.NoSectionError:
        pass

    settings = dict(config.items('settings'))

    # Database
    Session.configure(bind=engine_from_config(settings))

    # Email
    turbomail.interface.start(settings)

    return settings
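The docstring above describes from_file as a custom argparse type. Below is a minimal sketch of how it might be wired into the command-line parser; the option names are assumptions chosen to match the sys.argv values used in the test examples.

import argparse

cli = argparse.ArgumentParser(description='The Early Test result processing')
cli.add_argument('-c', dest='settings', type=from_file, metavar='CONFIG',
                 help='configuration file, parsed into a settings dict')
cli.add_argument('--dry', action='store_true',
                 help='parse only; do not touch the file system or database')
cli.add_argument('srcfile', nargs='?',
                 help='encrypted data file to process')

args = cli.parse_args()  # e.g. sys.argv = ['', '-c', configfile, '--dry', datafile]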
Example #10
    def test_main(self):
        gpg = self.gpg
        datafile = os.path.join(self.tempdir, 'data')

        io = StringIO()
        print >> io, 'Has 1 result'
        print >> io, '076CX 12345                                     P'
        io.flush()
        io.seek(0)
        gpg.encrypt_file(io,
                         str(self.key),
                         passphrase=self.passphrase,
                         output=datafile)
        io.close()

        sys.argv = ['', '-c', self.configfile, datafile]
        scripts.parse.main()

        eq_(1, Session.query(models.Result).count())
Example #11
def bucketize_results():
    """ Returns a dictonary of buckets which has site_codes as keys
      and list of results as values"""

    buckets = {}
    try:
        all_results = Session.query(models.Result)

        site_codes = []
        for result in all_results:
            try:
                if result.site_code not in site_codes:
                    site_codes.append(result.site_code)
                    buckets[result.site_code] = []

                buckets[result.site_code].append(
                    str(result.site_code) + str(result.reference_number))
            except:
                pass
    except Exception as e:
        log.critical(traceback.format_exc())

    return buckets
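For clarity, a hypothetical illustration of the structure bucketize_results() returns: each site code maps to a list of concatenated site_code + reference_number strings (the values below are made up; log is the same module-level logger used in the example above).

buckets = bucketize_results()
# e.g. {'076C': ['076C12345', '076C67890'], '999X': ['999X11111']}
for site_code, rc_ids in buckets.items():
    log.info('%s has %d result(s) pending sync' % (site_code, len(rc_ids)))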
Example #12
    def test_FindMissingDraw_AllResultsMoreThan6MonthsOldAndAllMissingDrawDates_ReturnEmptyListOfResults(
            self):
        """
        Should return an empty list of results because all results' test dates are older
        than 180 days. All results are missing draw dates.
        """

        missing_draw = []

        current_time = datetime.date.today()
        date1 = current_time - datetime.timedelta(days=190)
        date2 = current_time - datetime.timedelta(days=200)
        date3 = current_time - datetime.timedelta(days=250)
        date4 = current_time - datetime.timedelta(days=300)
        date5 = current_time - datetime.timedelta(days=350)

        # Add 5 unique results, all missing their draw date and all more than
        # 6 months old
        site_code = '99X'
        reference_number1 = '11111'
        reference_number2 = '22222'
        reference_number3 = '33333'
        reference_number4 = '44444'
        reference_number5 = '55555'

        Session.add(
            models.Result(site_code=site_code,
                          reference_number=reference_number1,
                          test_date=date1,
                          nat='P',
                          dhiv='N',
                          file='results.txt'))

        Session.add(
            models.Result(site_code=site_code,
                          reference_number=reference_number2,
                          test_date=date2,
                          nat='P',
                          dhiv='N',
                          file='results.txt'))

        Session.add(
            models.Result(site_code=site_code,
                          reference_number=reference_number3,
                          test_date=date3,
                          nat='P',
                          dhiv='N',
                          file='results.txt'))

        Session.add(
            models.Result(site_code=site_code,
                          reference_number=reference_number4,
                          test_date=date4,
                          nat='P',
                          dhiv='N',
                          file='results.txt'))

        Session.add(
            models.Result(site_code=site_code,
                          reference_number=reference_number5,
                          test_date=date5,
                          nat='P',
                          dhiv='N',
                          file='results.txt'))

        transaction.commit()

        missing_draw = scripts.parse.find_missing_draw()

        # Correct number of missing draw date cases?
        assert len(missing_draw) == 0
Example #13
def sync_sql_result(buckets, settings):
    """ 
      Parameters: 
            
            buckets: Dict of site codes and list of Results for that site as key, value pairs
            
            settings: A dictionary of settings specified in the configuration file            
      Returns None
  """
    try:
        log.info("Synchronization Routine Started")
        # Only two attributes could have been updated: Draw Date or Location.
        # Results should not be modified in RedCAP directly.

        rcs = json.loads(open(settings['redcap_json'], 'r').read())

        # Days after which results become invalid
        days_till_expiration = int(settings['days.tillexpiration'])

        # Days after which results can be published to patients
        days_till_notify = int(settings['days.tillnotify'])

        # Days after which the tester should act ASAP on CRFs or RedCAP entries
        days_till_urgent = int(settings['days.tillurgent'])

        pk_sites = settings.get('pk_site').split()
        redcap = RCProject(buckets.keys(), rcs)

        for key, value in buckets.iteritems():

            ctsrecords = []
            # Malformed draw dates need to be handled offline; this script only
            # reports such entries
            malformed_draw = []

            # Defensive check; this is most likely dead code.
            if key not in redcap.project.keys():
                continue
            print 'key'
            print key
            #fields = ['visit_date','test_site']

            # we pass the 'label' flag to get the location value as a string instead of numeric values
            records = []
            if redcap.project[key].is_longitudinal() == True or key in pk_sites:
                print 'is longitudinal'
                print key
                l_records = redcap.project[key].export_records()
                records = list(x for x in l_records if x['rc_id'] in value)
            else:
                records = redcap.project[key].export_records(
                    records=value,
                    fields=redcap.nat_fields,
                    raw_or_label='label')

            ctsrecord = redcap.project['CTS'].export_records(
                records=value, fields='rec_status')

            for each in ctsrecord:
                new_ctsrecord = {}
                new_ctsrecord['rc_id'] = each['rc_id']
                new_ctsrecord['rec_status'] = 1

                ctsrecords.append(new_ctsrecord)

            # RCIDs for which we have new results will be in push records
            push_records = []
            for record in records:

                sql_row = Session.query(models.Result)\
                            .filter(models.Result.site_code == key)\
                            .filter(models.Result.reference_number == record['rc_id'][-5:]).first()

                # Visit/Draw date update in SQL DB
                if 'visit_date' in record.keys() and record['visit_date'] != '':
                    visit_date = dt.strptime(record['visit_date'],
                                             "%Y-%m-%d").date()
                    if sql_row.site_code == 'SDPT':
                        print 'dates not equal'
                        print sql_row.draw_date
                        print visit_date
                    if sql_row.draw_date != visit_date:

                        if sql_row.test_date >= visit_date:
                            print 'update visit date'
                            sql_row.draw_date = visit_date

                            min_visit_date = sql_row.test_date - datetime.timedelta(
                                days=int(settings['result_buffer']))
                            if visit_date < min_visit_date:
                                malformed_draw.append(record['rc_id'])

                        # The malformed draw dates are the ones that don't fall into the
                        # accepted window for results. Just report them, nothing more.
                        else:
                            malformed_draw.append(record['rc_id'])

                # Location update in SQL DB
                if 'test_site' in record.keys() and record['test_site'] != '':
                    rc_location = ''
                    if redcap.project[key].is_longitudinal() == True:
                        labeled_recs = redcap.project[key].export_records(
                            raw_or_label='label')

                        fil_rec = list(x for x in labeled_recs
                                       if x['rc_id'] == record['rc_id'])[0]
                        rc_location = fil_rec['test_site']
                    else:
                        rc_location = record['test_site']
                    if sql_row.location != rc_location:
                        sql_row.location = rc_location

                # Keep track for bulk update in RedCAP later
                if 'nat_result_date' in record.keys():
                    if record['nat_result_date'] == '':
                        push_records.append(
                            update_result_redcap(record, sql_row))
                    else:
                        if 'nat_test_complete' in record.keys() and \
                            record['nat_test_complete'] == "Incomplete":
                            new_record = {}
                            new_record['rc_id'] = record['rc_id']
                            new_record['nat_test_complete'] = 2
                            push_records.append(new_record)

                Session.commit()

            # Make the bulk update for every 'key' and 'site'
            value = redcap.project[key].import_records(push_records)
            redcap.project['CTS'].import_records(ctsrecords)

            # The following lines make up the notification email system; once
            # again we ask a human to follow up.

            malformed_draw_count = len(malformed_draw)
            # Get list of results with missing draw dates
            missing_draw, warn_draw, critical_draw = find_missing_draw(
                days_till_expiration, days_till_urgent, key)
            missing_draw_count = len(missing_draw)
            warn_draw_count = len(warn_draw)
            critical_draw_count = len(critical_draw)

            weekday = datetime.date.today().weekday()

            if warn_draw_count != 0 and time_in_range(
                    datetime.datetime.now().time()) and weekday == 5:
                #Special notifications when draw dates are missing for days_till_urgent
                # This notification is sent on Fridays(5)
                report_date = warn_draw
                report_date_count = warn_draw_count
                level = 1
                notify = settings.get('notify.%s.action' % key.lower()).split()
                turbomail.send(
                    turbomail.Message(
                        to=notify,
                        subject=
                        '[RedCAP Sync Update]: Prolonged Missing RedCAP Entries for (%s)'
                        % key,
                        plain=lookup.get_template('email/date.mako').render(
                            **{
                                'timestamp': datetime.datetime.now(),
                                'report_date': report_date,
                                'report_date_count': report_date_count,
                                'level': level,
                                'days_till_urgent': days_till_urgent,
                                'code': key
                            })))

            if critical_draw_count != 0:
                # Very critical draw date events are emailed every day,
                # between 8 and 9 am

                time_now = datetime.datetime.now().time()
                if time_in_range(time_now):

                    log.info(
                        "Some of the draw dates are missing for over %d days" %
                        (int(days_till_urgent) + 2))
                    report_date = critical_draw
                    report_date_count = critical_draw_count
                    level = 2
                    notify = settings.get('notify.%s.action' %
                                          key.lower()).split()
                    turbomail.send(
                        turbomail.Message(
                            to=notify,
                            subject=
                            '[RedCAP Sync]: Action Required in RedCAP Entries for (%s)!'
                            % key,
                            plain=lookup.get_template(
                                'email/date.mako').render(
                                    **{
                                        'timestamp': datetime.datetime.now(),
                                        'report_date': report_date,
                                        'report_date_count': report_date_count,
                                        'level': level,
                                        'days_till_urgent': days_till_urgent,
                                        'code': key
                                    })))

            # Get list of draw dates with missing Red Cross results that are more than 7 days old
            missing_results = find_missing_results(days_till_notify,
                                                   days_till_expiration,
                                                   redcap, key)
            missing_results_count = len(missing_results)

            shouldNotify = False
            # Notify recipients if there is anything to notify about, but only if it's a Monday
            if missing_results_count > 0 or missing_draw_count > 0 and datetime.date.today(
            ).weekday() == 1:
                shouldNotify = True

            if shouldNotify and time_in_range(datetime.datetime.now().time()):
                notify = settings.get('notify.%s.action' % key.lower()).split()

                # Notify appropriate people if notify is set to true
                turbomail.send(
                    turbomail.Message(
                        to=notify,
                        subject=
                        '[RedCAP Sync]: Synchronization Status Check (%s)' %
                        key,
                        plain=lookup.get_template('email/rcap.mako').render(
                            **{
                                'timestamp': datetime.datetime.now(),
                                'results_count': 0,
                                'missing_draw_count': missing_draw_count,
                                'missing_draw': missing_draw,
                                'missing_results_count': missing_results_count,
                                'missing_results': missing_results,
                                'days_till_notify': days_till_notify,
                                'days_till_urgent': days_till_urgent,
                                'code': key,
                                'malformed_draw_count': malformed_draw_count,
                                'malformed_draw': malformed_draw
                            })))

    except:
        turbomail.send(
            turbomail.Message(
                to=settings['notify.error'].split(),
                subject='[The Early Test]:Exception in matching visit dates!',
                plain=traceback.format_exc()))
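time_in_range() is called above but not shown in any of these examples. A minimal sketch consistent with the "between 8 and 9 am" comment; the exact window boundaries are an assumption.

import datetime

def time_in_range(now, start=datetime.time(8, 0), end=datetime.time(9, 0)):
    # Assumed notification window (8-9 am); return True if `now` falls inside it.
    return start <= now <= end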
Example #14
def setup_module():
    engine = create_engine('sqlite://')
    models.Base.metadata.create_all(bind=engine)
    Session.configure(bind=engine)
Example #15
def teardown_module():
    Session.remove()
Example #16
def teardown():
    Session.rollback()
Example #17
    def teardown(self):
        # remove the temporary test directory and reset mail/session state
        shutil.rmtree(self.tempdir)
        turbomail.interface.stop(force=True)
        turbomail.interface.config = {'mail.on': False}
        Session.remove()
Example #18
def main():
    args = cli.parse_args()
    settings = args.settings

    days_till_expiration = int(settings['days.tillexpiration'])
    days_till_notify = int(settings['days.tillnotify'])

    try:
        results = sync_redcap.get_cts_results(settings)
        if not args.dry:
            Session.add_all(results)
            Session.commit()

        else:
            log.info('Dry run, not committing changes')

        sync_site_codes = settings.get('site.codes').split()
        ucsd_site_codes = settings.get('ucsd.site.codes').split()
        emory_site_codes = settings.get('emory.site.codes').split()
        gwu_site_codes = settings.get('gwu.site.codes').split()

        rcs = json.loads(open(settings['redcap_json'], 'r').read())
        redcap = RCProject(sync_site_codes, rcs)
        # Refresh results
        for site_code in sync_site_codes:
            print 'site code'
            print site_code
            for type_ in models.TYPES:
                #notify = settings.get('notify.%s.%s' % (site_code.lower(), type_.lower()), '').split()
                notify = []
                print type_
                pnt = list(
                    r for r in results
                    if r.check(type_) is True and r.site_code == site_code)
                neg = [
                    r for r in results
                    if r.check(type_) is False and r.site_code == site_code
                ]
                odd = [
                    r for r in results
                    if r.check(type_) is None and r.site_code == site_code
                ]

                if not pnt:
                    continue
                if type_ == 'dhiv':
                    notify = get_receipients(redcap, 'hiv_pos', site_code,
                                             ucsd_site_codes, emory_site_codes,
                                             gwu_site_codes)
                    t_type = 'HIV'
                elif type_ == 'dhcv':
                    notify = get_receipients(redcap, 'hcv_pos', site_code,
                                             ucsd_site_codes, emory_site_codes,
                                             gwu_site_codes)
                    t_type = 'HCV'
                elif type_ == 'dhbv':
                    notify = get_receipients(redcap, 'hbv_pos', site_code,
                                             ucsd_site_codes, emory_site_codes,
                                             gwu_site_codes)
                    t_type = 'HBV'

                print notify
                if not notify:
                    continue

                turbomail.send(
                    turbomail.Message(
                        to=notify,
                        subject='New %s+ NAT' % t_type,
                        plain=lookup.get_template('email/parse.mako').render(
                            **{
                                'timestamp': datetime.datetime.now(),
                                'type_': t_type,
                                'site_code': site_code,
                                'pnt': pnt,
                                'neg': neg,
                                'odd': odd
                            })))

                log.info('Notified %s mailing lists of results for "%s"' %
                         (site_code, type_))

        for code in sync_site_codes:
            results_count = 0
            shouldNotify = False

            # Get number of site specific results in this upload
            for r in results:
                if r.site_code == code.upper():
                    results_count += 1

            # Get list of results with missing draw dates
            missing_draw = find_missing_draw(days_till_expiration, code)
            missing_draw_count = len(missing_draw)

            # Get list of draw dates with missing Red Cross results that are more than 7 days old
            missing_results = find_missing_results(days_till_notify,
                                                   days_till_expiration,
                                                   redcap, code)
            missing_results_count = len(missing_results)

            # Notify recipients if there is anything to notify about
            if results_count > 0 or missing_results_count > 0 or missing_draw_count > 0:
                shouldNotify = True

            if shouldNotify:
                #notify = settings.get('notify.%s.sync' % code.lower()).split()
                notify = get_receipients(redcap, 'date_missing', code,
                                         ucsd_site_codes, emory_site_codes,
                                         gwu_site_codes)
                # Notify appropriate people about missing draw dates and Red Cross results
                turbomail.send(
                    turbomail.Message(
                        to=notify,
                        subject=
                        '[The Early Test]: Red Cross Synchronize Report (%s)' %
                        code,
                        plain=lookup.get_template('email/sync.mako').render(
                            **{
                                'timestamp': datetime.datetime.now(),
                                'results_count': results_count,
                                'missing_draw_count': missing_draw_count,
                                'missing_draw': missing_draw,
                                'missing_results_count': missing_results_count,
                                'missing_results': missing_results,
                                'days_till_notify': days_till_notify,
                                'code': code
                            })))

                log.info(
                    'Notified mailing lists of %s for missing draw dates' %
                    (code))
                log.info(
                    'Notified mailing lists of %s diagnostic for missing Red Cross results'
                    % (code))

    except:
        # If an unexpected error occurs, let the developers know
        turbomail.send(
            turbomail.Message(
                to=settings['notify.error'].split(),
                subject='[The Early Test]: Parser failed execution',
                plain=traceback.format_exc()))
        raise
Example #19
    def test_FindMissingDraw_MixedResultsMoreAndLessThan6MonthsOldAndAllMissingDrawDates_ReturnListOfValidResults(
            self):
        """
        Should return 3 results out of 5 in a list. These 3 results' test dates are less
        than 180 days old. All results are missing draw dates.
        """

        missing_draw = []
        current_time = datetime.date.today()
        date1 = current_time - datetime.timedelta(days=30)
        date2 = current_time - datetime.timedelta(days=60)
        date3 = current_time - datetime.timedelta(days=90)
        # Older than 180 days
        date4 = current_time - datetime.timedelta(days=200)
        date5 = current_time - datetime.timedelta(days=300)

        # Add 5 unique results, all missing their draw date; 3 are less than
        # 6 months old and the other 2 are more than 6 months old.
        site_code = '99X'
        reference_number1 = '11111'
        reference_number2 = '22222'
        reference_number3 = '33333'
        reference_number4 = '44444'
        reference_number5 = '55555'

        Session.add(
            models.Result(site_code=site_code,
                          reference_number=reference_number1,
                          test_date=date1,
                          nat='P',
                          dhiv='N',
                          file='results.txt'))

        Session.add(
            models.Result(site_code=site_code,
                          reference_number=reference_number2,
                          test_date=date2,
                          nat='P',
                          dhiv='N',
                          file='results.txt'))

        Session.add(
            models.Result(site_code=site_code,
                          reference_number=reference_number3,
                          test_date=date3,
                          nat='P',
                          dhiv='N',
                          file='results.txt'))

        Session.add(
            models.Result(site_code=site_code,
                          reference_number=reference_number4,
                          test_date=date4,
                          nat='P',
                          dhiv='N',
                          file='results.txt'))

        Session.add(
            models.Result(site_code=site_code,
                          reference_number=reference_number5,
                          test_date=date5,
                          nat='P',
                          dhiv='N',
                          file='results.txt'))

        transaction.commit()

        missing_draw = scripts.parse.find_missing_draw()

        # Correct number of missing draw date cases?
        assert len(missing_draw) == 3

        # Are they really missing draw dates?
        for x in missing_draw:
            assert not x.draw_date

        results_expiration = current_time - datetime.timedelta(days=180)

        # Are their test dates under 180 days old?
        for x in missing_draw:
            assert x.test_date > results_expiration
Example #20
def main():
    args = cli.parse_args()
    settings = args.settings

    days_till_expiration = int(settings['days.tillexpiration'])
    days_till_notify = int(settings['days.tillnotify'])

    try:
        log.info('Called on %s' % args.srcfile)
        results, duplicates = parser.parse(args.srcfile, settings)

        if not args.dry:
            if duplicates:
                raise Exception('\n'.join(
                    ['Already exists: %s%s' % (r.site_code, r.reference_number)
                     for r in duplicates]))

            # Archive processed file
            shutil.move(args.srcfile, settings['dir.raw'])
            log.info('Moved encrypted file to %s' % settings['dir.raw'])

            # Commit all changes now that we've successfully processed the file
            map(lambda r: setattr(r, 'file', os.path.basename(args.srcfile)), results)
            Session.add_all(results)
            Session.commit()

        else:
            log.info('Dry run, not committing changes')

        
        sync_site_codes = settings.get('site.codes').split()
        rcs = json.loads(open(settings['redcap_json'], 'r').read())
        redcap = RCProject(sync_site_codes, rcs)
        # Refresh results
        for site_code in sync_site_codes:

            for type_ in models.TYPES:
                notify = settings.get('notify.%s.%s' % (site_code.lower(), type_.lower()), '').split()

                if not notify:
                    continue

                pnt = [r for r in results if r.check(type_) is True and r.site_code == site_code]
                neg = [r for r in results if r.check(type_) is False and r.site_code == site_code]
                odd = [r for r in results if r.check(type_) is None and r.site_code == site_code]

                if not (pnt or odd):
                    continue

                turbomail.send(turbomail.Message(
                    to=notify,
                    subject='[The Early Test]: New Records Notification (%s)' % type_,
                    plain=lookup.get_template('email/parse.mako').render(**{
                        'timestamp': datetime.datetime.now(),
                        'type_': type_,
                        'site_code': site_code,
                        'pnt': pnt,
                        'neg': neg,
                        'odd': odd})))

                log.info('Notified %s mailing lists of results for "%s"' % (site_code, type_))

        
        for code in sync_site_codes:
            results_count = 0
            shouldNotify = False

            # Get number of site specific results in this upload
            for r in results:
                if r.site_code == code.upper():
                    results_count += 1

            # Get list of results with missing draw dates
            missing_draw = find_missing_draw(days_till_expiration, code)
            missing_draw_count = len(missing_draw)

            # Get list of draw dates with missing Red Cross results that are more than 7 days old
            missing_results = find_missing_results(days_till_notify, days_till_expiration, redcap, code)
            missing_results_count = len(missing_results)
            
            # Notify recipients if there is anything to notify about
            if results_count > 0 or missing_results_count > 0 or missing_draw_count > 0:
                shouldNotify = True

            if shouldNotify:
                notify = settings.get('notify.%s.sync' % code.lower()).split()
                
                # Notify appropriate people about missing draw dates and Red Cross results
                turbomail.send(turbomail.Message(
                    to=notify,
                    subject='[The Early Test]: Red Cross Synchronize Report (%s)' % code,
                    plain=lookup.get_template('email/sync.mako').render(**{
                        'timestamp': datetime.datetime.now(),
                        'results_count': results_count,
                        'missing_draw_count': missing_draw_count,
                        'missing_draw': missing_draw,
                        'missing_results_count': missing_results_count,
                        'missing_results': missing_results,
                        'days_till_notify': days_till_notify,
                        'code': code})))

                log.info('Notified mailing lists of %s for missing draw dates' % (code))
                log.info('Notified mailing lists of %s diagnostic for missing Red Cross results' % (code))

    except:
        # If an unexpected error occurs, let the developers know
        turbomail.send(turbomail.Message(
            to=settings['notify.error'].split(),
            subject='[The Early Test]: Parser failed execution',
            plain=traceback.format_exc()))
        raise