def get_old_encounters(driver, bucket_name, today):
    id_source = driver.id_source
    encounters = []
    stored_data = s3_tasks.retrieve_stored_engine_data(bucket_name, id_source, 'unapproved_data')
    unapproved_data = rebuild_event(stored_data)
    # collect unapproved CommSupp services more than 90 days old
    unapproved_commsupp = [x for x in unapproved_data if
                           x['Service Type'] == 'CommSupp' and (today - x['Service Date']).days > 90]
    # round-trip through POSIX timestamps to deduplicate the service dates
    old_commsupp_dates = set(
        x['Service Date'].timestamp() for x in unapproved_commsupp)
    # run one advanced search per distinct service date
    for encounter_date in [datetime.fromtimestamp(x) for x in old_commsupp_dates]:
        encounter_search_data = {
            'clientvisit_id': 1,
            'service_type': 1,
            'non_billable': 3,
            'consumer_name': 1,
            'staff_name': 1,
            'client_int_id': 1,
            'emp_int_id': 1,
            'visittype': 1,
            'orig_rate_amount': 1,
            'timein': 1,
            'data_dict_ids': [3, 4, 6, 70, 74, 83, 86, 87, 218, 641]
        }
        search_args = ('ClientVisit', encounter_search_data, encounter_date, encounter_date)
        same_day_encounters = driver.process_advanced_search(*search_args)
        encounters.extend(same_day_encounters)
    return encounters
Example #2
def handler(event, context):
    event = rebuild_event(event)
    logging.info(
        f'received a call to push an object to persistence: {event}/{context}')
    config_variables = [
        'INDEX_TABLE_NAME', 'GRAPH_DB_ENDPOINT', 'GRAPH_DB_READER_ENDPOINT',
        'LEECH_BUCKET', 'SENSITIVES_TABLE_NAME'
    ]
    _load_config(config_variables)
    work_queue = Queue()
    results = deque()
    push_type = event['push_type']
    leech_results = event['aio']
    push_kwargs = event.get('push_kwargs', {})
    workers = []
    num_workers = event.get('num_workers', 5)
    # start the worker threads before queueing the work entries
    for _ in range(num_workers):
        worker = Thread(target=_run_handler, args=(work_queue, results))
        worker.start()
        workers.append(worker)
    for entry in leech_results:
        work_queue.put({
            'leech_result': entry,
            'push_type': push_type,
            'push_kwargs': push_kwargs
        })
    # one None sentinel per worker signals shutdown
    for _ in workers:
        work_queue.put(None)
    for worker in workers:
        worker.join()
    return {'push_type': push_type, 'results': list(results)}
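The worker target _run_handler is not shown in this example. A minimal sketch that fits the sentinel protocol above (one None entry per worker signals shutdown) could look like the following; the _push_leech_result helper is hypothetical and stands in for whatever operation push_type selects:

def _run_handler(work_queue, results):
    # consume queued entries until the None sentinel arrives, then exit
    # so the join() in the handler can return
    while True:
        task = work_queue.get()
        if task is None:
            return
        # _push_leech_result is a placeholder; the real push operation
        # is not part of this example
        results.append(_push_leech_result(**task))

Appending to a collections.deque is thread-safe, which is presumably why results is a deque rather than a plain list.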
def fire_hose_documentation(**kwargs):
    extracted_data = kwargs['extracted_data']
    stream_name = os.environ['FIRE_HOSE_NAME']
    # extracted_data may arrive serialized; normalize to an object first
    if isinstance(extracted_data, str):
        extracted_data = rapidjson.loads(extracted_data)
    extracted_data = rebuild_event(extracted_data)
    _send_to_fire_hose(stream_name, [extracted_data])
Example #4
def _generate_base_report(id_source, bucket_name):
    report_data = retrieve_stored_engine_data(bucket_name, id_source,
                                              'built_reports')
    report_data = rebuild_event(report_data)
    # rebuild each stored report into a ReportData object, keyed by report name
    return {
        report_name: ReportData.from_stored_data(report_name, stored_report)
        for report_name, stored_report in report_data.items()
    }
Example #5
def handler(event, context):
    logging.info(f'started a call for a leech task: {event}/{context}')
    _load_config(ENVIRON_VARIABLES)
    event = rebuild_event(event)
    task_name = event['task_name']
    task_kwargs = event['task_kwargs']
    task_fn = getattr(tasks, task_name)
    results = task_fn(**task_kwargs)
    logging.info(f'completed a call for a leech task: {event}/{results}')
    return results
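For illustration, an event for this dispatcher carries only the task name and its keyword arguments; the task and arguments shown here are invented:

event = {
    'task_name': 'get_client_data',  # hypothetical task in the tasks module
    'task_kwargs': {'id_source': 'Algernon'}  # hypothetical arguments
}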
Example #6
def handle_direct_invocation(event):
    path = event['path']
    path = path.replace('/soot', '')
    path_pieces = path.split('/')
    root = path_pieces[1]
    # getattr raises AttributeError without a default, so pass None
    # explicitly to make the guard below reachable
    fn = getattr(task_defs, root, None)
    if fn is None:
        raise UnknownInvocationException(root)
    payload = event.get('payload', {})
    rebuilt_payload = rebuild_event(payload)
    results = fn(**rebuilt_payload)
    return results
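As a worked example, an event shaped like the one below resolves root to 'get_task' (the leading '/' left after the replace makes path_pieces[0] an empty string, so the task name sits at index 1) and ends up calling task_defs.get_task(**payload); the task name and payload are invented:

event = {
    'path': '/soot/get_task',
    'payload': {'id_value': 1001}  # hypothetical keyword arguments
}
results = handle_direct_invocation(event)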
def parse_batch_encounters(event, context):
    _load_config(ENVIRON_VARIABLES)
    migration_table_name = os.environ['PROGRESS_TABLE_NAME']
    # events carrying 'Records' arrived through a queue trigger and
    # take the queued path
    if 'Records' in event:
        return _process_queued_parse_task(event, context)
    event = rebuild_event(event)
    parsed_events = _parse_batch_event(event)
    _fire_hose_parsed_events(parsed_events)
    for parsed_event in parsed_events:
        _mark_parsing_complete(parsed_event, migration_table_name)
    return ajson.dumps(
        StoredData.from_object(uuid.uuid4(), parsed_events, full_unpack=True))
Example #8
def es_documentation(encounter,
                     documentation_text,
                     es_driver: ElasticDriver = None):
    if not es_driver:
        es_driver = _build_es_driver()
    documentation_text = rebuild_event(rapidjson.loads(documentation_text))
    logging.debug(
        f'after rebuilding, documentation_text is {documentation_text}')
    encounter_properties = encounter['vertex_properties']['local_properties']
    patient_id_value = _find_encounter_property('patient_id',
                                                encounter_properties)
    provider_id_value = _find_encounter_property('provider_id',
                                                 encounter_properties)
    identifier = encounter['identifier']['property_value']
    logging.debug(
        'going to resolve the provider and patient internal_id values')
    provider_internal_id, patient_internal_id = _resolve_internal_ids(
        identifier, provider_id_value, patient_id_value)
    logging.debug(
        f'resolved values are provider: {provider_internal_id}, patient: {patient_internal_id}'
    )
    entry_kwargs = {
        'encounter_internal_id': encounter['internal_id'],
        'encounter_type': _find_encounter_property('encounter_type', encounter_properties),
        'id_source': _find_encounter_property('id_source', encounter_properties),
        'documentation_text': documentation_text['extracted_data']['source']['documentation_text'],
        'provider_internal_id': provider_internal_id,
        'patient_internal_id': patient_internal_id,
        'patient_id_value': patient_id_value,
        'provider_id_value': provider_id_value,
        'encounter_id_value': int(encounter['id_value']['property_value'])
    }
    text_entry = DocumentationTextEntry(**entry_kwargs)
    logging.debug(
        f'going to push the created documentation entry: {entry_kwargs} to the elastic search cluster'
    )
    index_name = 'documentation'
    es_driver.index_document(index_name, '_doc',
                             encounter['internal_id'],
                             text_entry.for_insertion)
    logging.debug(
        'successfully pushed the documentation to the elastic search cluster')
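The _find_encounter_property helper is not included in this example. Judging from how it is called against the vertex's local_properties, a plausible sketch is a linear scan for a named property; the property_name and property_value key names are assumptions:

def _find_encounter_property(property_name, encounter_properties):
    # hypothetical helper: the real property layout is not shown in the
    # source, so the key names here are guesses
    for entry in encounter_properties:
        if entry.get('property_name') == property_name:
            return entry['property_value']
    return None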
Example #9
def builder_handler(event, context):
    logging.info(f'received a call to the report_handler: {event}/{context}')
    id_source = event['id_source']
    bucket_name = os.environ['LEECH_BUCKET']
    daily_data = s3_tasks.retrieve_stored_engine_data(bucket_name, id_source,
                                                      'daily_data')
    daily_data = rebuild_event(daily_data)
    client_data = daily_data['client_data']
    old_encounters = s3_tasks.retrieve_stored_engine_data(
        bucket_name, id_source, 'old_encounters')
    old_encounters = rebuild_event(old_encounters)
    teams = _build_teams(id_source, daily_data)
    caseloads = _build_caseloads(teams, daily_data)
    encounters = _build_encounters(daily_data)
    unapproved = _build_unapproved_data(daily_data)
    tx_plans = _build_tx_plan_data(daily_data)
    diagnostics = _build_da_data(daily_data)
    productivity = _build_productivity_report(caseloads, encounters,
                                              unapproved)
    tx_report = building_tasks.build_expiration_report(caseloads, tx_plans,
                                                       client_data, 180)
    da_report = building_tasks.build_expiration_report(caseloads, diagnostics,
                                                       client_data, 365)
    thirty_sixty_ninety = _build_30_60_90_report(caseloads, encounters)
    unassigned_report = building_tasks.build_unassigned_report(caseloads)
    audit_results = _build_audit_results(id_source, teams, caseloads,
                                         daily_data, old_encounters)
    built_report = {
        'tx_report': tx_report,
        'da_report': da_report,
        '30_60_90': thirty_sixty_ninety,
        'unassigned': unassigned_report,
        'audit': audit_results,
        'productivity': productivity
    }
    _store_final_product(bucket_name, id_source, built_report)
Example #10
def handler(event, context):
    event = rebuild_event(event)
    logging.info(
        f'received a call to run a query_s3_csv command: event/context: {event}/{context}'
    )
    function_payload = {
        'bucket_name': event['bucket_name'],
        'file_key': event['file_key'],
        'expression': event['expression']
    }
    results = query_s3_csv(**function_payload)
    logging.info(
        f'completed a call to run a query_s3_csv command: results: {results}')
    return ajson.dumps(results)
def finalize_documentation(identifier, id_value, start_time, parser_results):
    parser_results = rebuild_event(rapidjson.loads(parser_results))
    session = boto3.session.Session()
    table = session.resource('dynamodb').Table(os.environ['PROGRESS_TABLE_NAME'])
    table.update_item(
        Key={'identifier': identifier, 'id_value': int(id_value)},
        UpdateExpression='SET post_process = :p',
        ExpressionAttributeValues={
            ':p': {
                'completed_at': start_time,
                'stage_results': parser_results
            }
        }
    )
Example #12
def handler(event, context):
    logging.info(f'received a call to run a parser: {event}/{context}')
    variable_names = [
        'GRAPH_GQL_ENDPOINT', 'FIRE_HOSE_NAME', 'ALGERNON_BUCKET_NAME',
        'RDS_SECRET_ARN', 'RDS_CLUSTER_ARN', 'RDS_DB_NAME', 'INDEX_TABLE_NAME',
        'PROGRESS_TABLE_NAME', 'ELASTIC_HOST'
    ]
    event = rebuild_event(event)
    _load_config(variable_names)
    parse_type = event['parse_type']
    parse_kwargs = event['parse_kwargs']
    operation = getattr(tasks, f'{parse_type}_documentation')
    results = operation(**parse_kwargs)
    logging.info(f'completed a call to the parser: {event}/{results}')
    return results
def test_put_documentation(self, test_documentation):
    os.environ['ELASTIC_HOST_NAME'] = 'vpc-algernon-test-ankmhqkcdnx2izwfkwys67wmiq.us-east-1.es.amazonaws.com'
    os.environ['RDS_DB_NAME'] = 'algernon'
    os.environ['ALGERNON_BUCKET_NAME'] = 'algernonsolutions-leech-dev'
    os.environ['GRAPH_GQL_ENDPOINT'] = 'jlgmowxwofe33pdekndakyzx4i.appsync-api.us-east-1.amazonaws.com'
    os.environ['INDEX_TABLE_NAME'] = 'Indexes'
    documentation_text = rapidjson.loads(
        test_documentation['parse_kwargs']['documentation_text'])
    documentation_text = rebuild_event(documentation_text)
    encounter = test_documentation['parse_kwargs']['encounter']
    results = es_documentation(encounter, rapidjson.dumps(documentation_text))
    assert results is None
def extract_credible_object_h(event, context):
    logging.info(
        f'started a call for an extract_credible_object task, event: {event}, context: {context}'
    )
    _load_config(ENVIRON_VARIABLES)
    event = rebuild_event(event['payload'])
    extracted_data = event['extracted_data']
    id_source = event['id_source']
    driver = tasks.build_driver(id_source)
    results = tasks.get_credible_object(event['object_type'],
                                        event['id_value'], extracted_data,
                                        driver)
    logging.info(
        f'completed a call for an extract_credible_object task, event: {event}, results: {results}'
    )
    return _generate_stored_results(results)
def query_object_range_h(event, context):
    logging.info(
        f'started a call for a query_object_range task, event: {event}, context: {context}'
    )
    _load_config(ENVIRON_VARIABLES)
    event = rebuild_event(event['payload'])
    id_source = event['id_source']
    driver = tasks.build_driver(id_source)
    max_entries = event.get('max_entries', 1000)
    results = tasks.get_credible_object_range(event['object_type'],
                                              event['local_max'], max_entries,
                                              driver)
    logging.info(
        f'completed a call for query_object_range task, event: {event}, results: {results}'
    )
    id_values = [str(x['id_value']) for x in results]
    return {
        'results': _generate_stored_results(results),
        'iterator': {
            'count': len(results)
        },
        'id_values': id_values
    }
def test_parse_batch(self, parse_batch_event, mock_context):
    event = rebuild_event(parse_batch_event)
    results = parse_batch_encounters(event, mock_context)
    assert results
Example #17
def test_aio(self, aio_event):
    os.environ['INDEX_TABLE_NAME'] = 'Indexes'
    aio_event = rebuild_event(aio_event)
    results = leech(**aio_event)
    assert results