# Example 1
def main():
    """CLI entry point: parse arguments and dispatch CSV generation.

    For file_type 'csv', generates a CSV of car or people records via
    ``csv_generator.generate_csv``; any other file type is not implemented
    yet and exits with status 2.
    """
    filename, file_type, records, size, cars, people, thread, process = parse_arg(
    )
    if file_type == 'csv':
        filename += '.csv'
        # The two branches differed only in the header constant — pick it
        # once instead of duplicating the whole generate_csv call.
        if cars:
            headers = CARS_HEADERS
        elif people:
            headers = PEOPLE_HEADERS
        else:
            headers = None  # neither flag set: nothing to generate (as before)
        if headers is not None:
            csv_generator.generate_csv(headers,
                                       filename,
                                       records=records,
                                       size=size,
                                       cars=cars,
                                       people=people,
                                       thread=thread,
                                       process=process)
    else:
        print('in future')
        sys.exit(2)
# Example 2
def _swimmer_state_row(robot):
    """Snapshot the robot's pose and joint state as one CSV row."""
    return [
        robot.x, robot.y, robot.theta,
        float(robot.a1),
        float(robot.a2), robot.a1dot, robot.a2dot
    ]


def policy_rollout(agent, path, timesteps=200):
    """Roll out the agent's policy on a fresh SwimmingRobot and record it.

    Runs up to ``timesteps`` steps (stopping early if the robot dynamics
    raise ZeroDivisionError), then writes trajectory plots and a CSV of the
    per-step robot state under ``path``.

    :param agent: policy object exposing ``choose_action(state)``.
    :param path: output directory for the graphs and policy_rollout.csv.
    :param timesteps: maximum number of rollout steps (default 200).
    """
    # Single rollout; the loop is kept so running several rollouts stays a
    # one-character change.
    for j in range(1):
        robot = SwimmingRobot(a1=0, a2=0, t_interval=1)
        xs = [robot.x]
        ys = [robot.y]
        thetas = [robot.theta]
        a1s = [robot.a1]
        a2s = [robot.a2]
        steps = [0]
        # robot.randomize_state(enforce_opposite_angle_signs=True)
        # Previously the 7-element snapshot list was built inline in two
        # places; it is now centralized in _swimmer_state_row.
        robot_params = [_swimmer_state_row(robot)]
        print('Beginning', j + 1, 'th Policy Rollout')
        try:
            for i in range(timesteps):
                # rollout
                state = robot.state
                print('In', i + 1, 'th iteration the initial state is: ',
                      state)
                old_x = robot.x
                action = agent.choose_action(state)
                print('In', i + 1, 'th iteration the chosen action is: ',
                      action)
                robot.move(action=action)
                new_x = robot.x
                print('In', i + 1, 'th iteration, the robot moved ',
                      new_x - old_x, ' in x direction')

                # add values to lists
                xs.append(robot.x)
                ys.append(robot.y)
                thetas.append(robot.theta)
                a1s.append(robot.a1)
                a2s.append(robot.a2)
                steps.append(i + 1)
                robot_params.append(_swimmer_state_row(robot))

        except ZeroDivisionError as e:
            # Degenerate joint configurations abort the rollout early; the
            # partial trajectory collected so far is still plotted/saved.
            print(str(e), 'occured at ', j + 1, 'th policy rollout')

        # plotting
        make_rollout_graphs(xs, ys, thetas, a1s, a2s, steps, path=path)
        generate_csv(robot_params, path + "/policy_rollout.csv")
# Example 3
def Report(qStrList, name):
    """Generate a CSV report from the given query strings and email it.

    Reads sender, recipient and region from environment variables
    (SENDEREMAIL / RECIVEREMAIL / REGION — env var spelling kept as
    deployed), writes the generated CSV to /tmp/<name>.csv, and sends it
    with ``sendReport``.

    :param qStrList: query-string list passed to the CSV generator.
    :param name: base name (no extension) for the report file.
    :return: the ``sendReport`` response.
    """
    sender_email = os.environ["SENDEREMAIL"]
    receiver_email = os.environ["RECIVEREMAIL"]
    region = os.environ["REGION"]

    attach_file = csv_generator.generate_csv(qStrList)

    # Context manager guarantees the handle is closed even if the write
    # fails (the original leaked the handle and shadowed the `file` builtin).
    with open("/tmp/%s.csv" % name, "w") as report_file:
        report_file.write(str(attach_file))

    response = sendReport(region, 'ri recommendations', sender_email,
                          receiver_email, '%s.csv' % name,
                          'This is a set of RI recommendations for %s' % name)
    return response
def _format_dob(beneficiary_details, as_csv):
    """Format a beneficiary's date of birth for the response.

    JSON responses get the raw ``date_to_string`` result (``{}`` when no
    date_of_birth is recorded); CSV export gets a "dd/mm/yyyy" string
    ("" if formatting fails).
    """
    if not as_csv:
        return custom_functions.date_to_string(
            beneficiary_details['date_of_birth'],
            False) if 'date_of_birth' in beneficiary_details else {}
    try:
        parts = custom_functions.date_to_string(
            beneficiary_details['date_of_birth'],
            False) if 'date_of_birth' in beneficiary_details else {
                'dd': "0",
                'mm': "0",
                'yyyy': "0000"
            }
        return str(parts['dd']) + "/" + str(parts['mm']) + "/" + str(
            parts['yyyy'])
    except Exception:
        # Any malformed date collapses to an empty CSV cell, as before.
        return ""


def detailed_immunization_register():
    """Return a detailed immunization register for the requested stations.

    Expects a JSON body with 'station' (list of station ids), 'date_from',
    'date_to' and 'csv' ("y" to export a CSV file instead of JSON data).
    Aggregates one record per beneficiary with the list of vaccine doses
    given in the range; every request/response pair is logged to Mongo.
    """
    # Initialized BEFORE the try so the logging at the bottom can never
    # NameError — previously a failure while parsing the request left
    # `log` undefined and the 500 path itself crashed.
    log = {}
    try:
        args = request.json
        log['request'] = args
        station_ids = args['station']
        csv = args['csv'] == "y"
        from_str = custom_functions.obj_to_date(
            args['date_from']).split(" ")[0]
        to_str = custom_functions.obj_to_date(args['date_to'],
                                              extra_day=1).split(" ")[0]
        # Loop-invariant query template, hoisted out of the station loop.
        # NOTE(review): values are interpolated via str.format — prefer bound
        # CQL parameters to avoid injection; left as-is to preserve behavior.
        sub_select_query = "SELECT beneficiary_id,vaccine_details,beneficiary_details FROM kba.immunisation_log WHERE \
            station_id='{}' AND current_date >= '{}' AND current_date <= '{}' allow filtering;"
        data = {}
        for station_id in station_ids:
            sub_logs = cassandra_client.session.execute(
                sub_select_query.format(station_id, from_str, to_str))
            for sub_log in sub_logs:
                if sub_log.beneficiary_id in data:
                    # Already seen: just accumulate the dose name.
                    data[sub_log.beneficiary_id]['vaccines_given'].append(
                        sub_log.vaccine_details['vaccine_dose_name'])
                elif check_filter(args, sub_log):
                    details = sub_log.beneficiary_details
                    data[sub_log.beneficiary_id] = {
                        'child_name': details['child_name'],
                        'father_name': details['father_name'],
                        'mother_name': details['mother_name'],
                        'address': details['address'],
                        'sex': details['sex'],
                        'date_of_birth': _format_dob(details, csv),
                        'vaccines_given':
                        [sub_log.vaccine_details['vaccine_dose_name']],
                    }
        detailed_data = data.values()
        if csv:
            file_name = generate_csv(list(detailed_data))
            record = {
                'status_code': 200,
                'status_msg': "Posted successfully",
                "file_name": file_name
            }
        else:
            list_data = list(detailed_data)
            if list_data:
                record = {'status_code': 200, 'data': list_data}
            else:
                record = {
                    'status_code': 404,
                    "status_msg": "No data found.",
                    'data': list_data
                }
    except Exception as e:
        error_email.send_email({'error': str(e), 'url': request.url})
        record = {'status_code': 500, "status_msg": str(e)}
    log['response'] = record
    ml_detailed_immunization_register.insert_one(log)
    return jsonify(record)
def _format_admin_date(datestr, as_csv):
    """Format an administration date: "dd/mm/yyyy" string for CSV export
    ("" if formatting fails), raw ``date_to_string`` result otherwise."""
    if not as_csv:
        return custom_functions.date_to_string(datestr, False)
    try:
        parts = custom_functions.date_to_string(datestr, False)
        return str(parts['dd']) + "/" + str(parts['mm']) + "/" + str(
            parts['yyyy'])
    except Exception:
        return ""


def _dose_record(sub_log, datestr, as_csv):
    """Build one output record for a batch_immunisation_log row."""
    return {
        'batch_id': sub_log.package_id,
        'vaccine_name': sub_log.name,
        'date_of_administration': _format_admin_date(datestr, as_csv),
        'no_of_doses_administered': sub_log.vaccine_no,
        'no_of_beneficiaries_administered': sub_log.beneficiary_no,
    }


def station_vaccination_analytics():
    """Per-station vaccination analytics over a date range.

    Expects a JSON body with 'station' (list), 'date_from', 'date_to',
    'csv' ("y" for CSV export) and optional 'vaccine_name' / 'batch_id'
    filters. Emits one record per matching batch-log row; every
    request/response pair is logged to Mongo.

    Bug fixed: two of the three original date-formatting branches had the
    condition inverted (``if csv:`` where the third branch and the sibling
    register view use ``if not csv:``), so CSV exports could carry raw
    dicts and JSON responses "dd/mm/yyyy" strings. All rows now format
    through the same helper.
    """
    args = request.json
    log = {}
    log['request'] = args
    csv = args['csv'] == "y"
    from_str = custom_functions.obj_to_date(args['date_from']).split(" ")[0]
    to_str = custom_functions.obj_to_date(args['date_to'],
                                          extra_day=1).split(" ")[0]
    station_ids = args['station']
    vaccine_name = args['vaccine_name'] if 'vaccine_name' in args else None
    batch_id = args['batch_id'] if 'batch_id' in args else None
    return_obj = {}
    main_obj = []
    # Optional filter clauses appended to the base query.
    csv_query = "AND name='{}' " if vaccine_name else ""
    csv_query = csv_query + ("AND package_id='{}' " if batch_id else "")
    # NOTE(review): values are interpolated via str.format — prefer bound
    # CQL parameters to avoid injection; left as-is to preserve behavior.
    sub_select_query = "SELECT package_id,name,date_rec,beneficiary_no,vaccine_no,station_other FROM kba.batch_immunisation_log WHERE \
         station_id='{}' AND date_rec >= '{}' AND date_rec <= '{}' " + csv_query + "allow filtering;"
    try:
        for station in station_ids:
            if vaccine_name:
                sub_logs = cassandra_client.session.execute(
                    sub_select_query.format(str(station), from_str, to_str,
                                            vaccine_name, batch_id))
            else:
                sub_logs = cassandra_client.session.execute(
                    sub_select_query.format(str(station), from_str, to_str,
                                            batch_id))
            for sub_log in sub_logs:
                datestr = str(sub_log.date_rec)
                # The original kept a (batch -> date -> []) map purely as
                # seen-tracking, yet appended one record per row in every
                # branch — so a setdefault preserves both the map and the
                # output while collapsing the triplicated record building.
                return_obj.setdefault(sub_log.package_id,
                                      {}).setdefault(datestr, [])
                main_obj.append(_dose_record(sub_log, datestr, csv))
        if main_obj:
            if csv:
                file_name = generate_csv(main_obj)
                record = {
                    'status_code': 200,
                    'status_msg': "Posted successfully",
                    "file_name": file_name
                }
            else:
                record = {
                    'status_code': 200,
                    'status_msg': "Posted successfully",
                    "data": main_obj
                }
        else:
            record = {
                'status_code': 404,
                'status_msg': "No records found",
                "data": []
            }
    except Exception as e:
        error_email.send_email({'error': str(e), 'url': request.url})
        record = {'status_code': 500, "status_msg": str(e)}
    log['response'] = record
    ml_station_vaccination_analytics.insert_one(log)
    return jsonify(record)
# Example 6
def parse_html_to_postgres(input_folder,
                           output_html,
                           merge_folder,
                           output_words,
                           output_equations,
                           db_connect_str,
                           strip_tags,
                           ignored_file_when_link,
                           csv_file,
                           corenlp_fd,
                           store_into_postgres=True):
    """
    Helper function for database ingestion.
    :param input_folder: Location of input folder containing source HTML files.
    :param output_html: Intermediate HTML files which will be consumed by the Fonduer parser.
    :param merge_folder: Location of folder containing the merged HTML files.
    :param output_words: Location of folder containing word coordinate json files.
    :param output_equations: Location of folder containing equation coordinate json files.
    :param db_connect_str: Database connection string.
    :param strip_tags: Tags to be flatten.
    :param ignored_file_when_link: Files to be ignored when linking.
    :param csv_file: Location of the output csv file
    :param corenlp_fd: Location of the CoreNLP java file.
    :param store_into_postgres: Flag for whether to ingest data into Postgres.
    """
    assert os.path.isabs(input_folder)
    assert os.path.isabs(output_html)
    assert os.path.isabs(merge_folder)
    assert os.path.isabs(output_words)
    # NOTE(review): output_equations is rmtree'd below but never checked with
    # isabs like its siblings — confirm callers always pass an absolute path.
    """
    # 1. group files by file name
    merge.stamp: pagemerger.py
        rm -r -f $(merge_folder)
        mkdir -p $(merge_folder)
        python pagemerger.py --rawfolder $(input_folder) --outputfolder $(merge_folder)
        @touch merge.stamp
    """
    if os.path.exists(merge_folder):
        shutil.rmtree(merge_folder)
    os.makedirs(merge_folder, exist_ok=True)
    pagemerger(input_folder, merge_folder)
    """
    # 2. preprocess the input html and store intermediate json and html in the output folder declared above.
    preprocess.stamp: preprocess.py merge.stamp
        rm -r -f $(output_html)
        rm -r -f $(output_words)
        mkdir -p $(output_html)
        mkdir -p $(output_words)
        @$(foreach file,$(all_inputs),\
        python preprocess.py --input $(merge_folder)$(file) --output_words $(output_words)$(file).json --output_html $(output_html)$(file);)
        @touch preprocess.stamp
    """
    # Start each output folder from a clean slate.
    if os.path.exists(output_html):
        shutil.rmtree(output_html)
    if os.path.exists(output_words):
        shutil.rmtree(output_words)
    if os.path.exists(output_equations):
        shutil.rmtree(output_equations)

    os.makedirs(output_html, exist_ok=True)
    os.makedirs(output_words, exist_ok=True)
    os.makedirs(output_equations, exist_ok=True)

    # os.listdir already returns a fresh list; the original wrapped it in a
    # pointless identity comprehension.
    all_inputs = os.listdir(merge_folder)
    for html_file in all_inputs:
        preprocess(
            os.path.join(merge_folder, html_file),
            "%s.json" % (os.path.join(output_words, html_file)),
            os.path.join(output_html, html_file),
            "%s.json" % (os.path.join(output_equations, html_file)),
            "%s.json" % (os.path.join(output_words, 'path_info_' + html_file)),
            strip_tags)

    if store_into_postgres:
        """
        # 3. run the fonduer parser on the generated html file. This will fill in the postgres dabase with everything
        # fonduer can understand except the coordinate information.
        parse.stamp: preprocess.stamp parse.py
        python parse.py --html_location $(output_html) --database $(db_connect_str)
        @touch parse.stamp
        """
        parse(output_html, db_connect_str)
        """
        # 4. run the link file to insert coordinate information into fonduer based on the information from the json output folder (aka. hocr)
        link.stamp: parse.stamp link.py
            python link.py --words_location $(output_words) --database $(db_connect_str)
            @touch link.stamp
        """
        link(output_words, db_connect_str, ignored_file_when_link)

        insert_equation_tuple(db_connect_str, output_equations)

        var_in_text(db_connect_str)

        build_table_X(db_connect_str, corenlp_fd)

        generate_csv(db_connect_str, csv_file)
# Example 7
def resolved_ticket(request):
    """Render the resolved-tickets dashboard.

    Builds a ticket queryset scoped by the requesting user's permissions,
    optionally narrows it by a POSTed date range, and can return a CSV
    download instead of the HTML page.

    NOTE: this view uses Python 2 syntax (print statements,
    ``except Exception, e``) — it targets a Python 2 Django codebase.
    """
    try:
        # priority = ['low', 'medium', 'high']
        # user_list = User.objects.all()
        # engineer_list = User.objects.filter(groups__name='engineer')
        # status_list = TicketStatus.objects.all()
        # Look up the id of the "resolved" status once; raises
        # TicketStatus.DoesNotExist if the row is missing (caught nowhere
        # inside this try except the broad inner handler below the POST).
        status_required = TicketStatus.objects.get(name="resolved").id
        ticket_list = ""

        # Permission-scoped base queryset, newest first.
        if request.user.has_perm('ticket_tracker.can_assign_ticket'):
            # Managers/assigners: every resolved ticket.
            ticket_list = TicketRegister.objects.filter(
                status_id=status_required).order_by('-created_on')
        elif request.user.has_perm('ticket_tracker.self_assign_ticket'):
            # ticket_list = TicketRegister.objects.filter(Q(status_id=status_required), (
            # Q(assigned_to_id=int(request.user.id)) | Q(resolved_by_id=int(request.user.id)) | Q(
            # created_by=request.user))).order_by('-created_on')
            # NOTE(review): currently identical to the branch above — the
            # per-engineer restriction is commented out; confirm intended.
            ticket_list = TicketRegister.objects.filter(
                status_id=status_required).order_by('-created_on')
        elif request.user.has_perm('ticket_tracker.view_ticket_tracker'):
            # Plain users: only tickets they created.
            ticket_list = TicketRegister.objects.filter(
                Q(created_by=request.user),
                Q(status_id=status_required)).order_by('-created_on')

        # ###--->>ajax filters
        if request.method == "POST":
            try:
                # assigned_to_me = request.POST.get('assigned_to_me', '')
                # select_engineer = request.POST.get('select_an_engineer', '')
                # select_user = request.POST.get('select_a_user', '')
                start_date = request.POST.get('start_date', '')
                end_date = request.POST.get('end_date', '')
                download_csv = request.POST.get('download', '')
                # if assigned_to_me != '':
                # ticket_list = TicketRegister.objects.filter(Q(assigned_to_id=int(request.user.id)),
                # Q(status_id=status_required)).order_by(
                # '-created_on')
                # if select_engineer != '':
                # ticket_list = TicketRegister.objects.filter(Q(assigned_to_id=int(select_engineer)),
                # Q(status_id=status_required)).order_by(
                # '-created_on')
                # if select_user != '':
                # if request.user.has_perm('ticket_tracker.can_assign_ticket'):
                # ticket_list = TicketRegister.objects.filter(Q(created_by=select_user),
                # Q(status_id=status_required)).order_by(
                # '-created_on')
                # elif request.user.has_perm('ticket_tracker.self_assign_ticket'):
                # ticket_list = TicketRegister.objects.filter(Q(created_by=select_user),
                # Q(status_id=status_required),
                # Q(assigned_to_id=int(request.user.id))).order_by(
                # '-created_on')
                # Re-filter by creation date range, with the same
                # permission scoping as the base queryset above.
                if start_date != '' and end_date != '':
                    if request.user.has_perm(
                            'ticket_tracker.can_assign_ticket'):
                        ticket_list = TicketRegister.objects.filter(
                            status_id=status_required,
                            created_on__range=(
                                start_date, end_date)).order_by('-created_on')
                    elif request.user.has_perm(
                            'ticket_tracker.self_assign_ticket'):
                        ticket_list = TicketRegister.objects.filter(
                            Q(created_on__range=(start_date, end_date)),
                            Q(status_id=status_required),
                            Q(assigned_to_id=int(request.user.id))).order_by(
                                '-created_on')
                    elif request.user.has_perm(
                            'ticket_tracker.view_ticket_tracker'):
                        ticket_list = TicketRegister.objects.filter(
                            Q(created_on__range=(start_date, end_date)),
                            Q(status_id=status_required),
                            Q(created_by=request.user)).order_by('-created_on')
                # NOTE(review): the CSV download ignores both the date-range
                # filter and the permission scoping — it re-queries ALL
                # resolved tickets. Confirm this is intended.
                if download_csv != '':
                    print 'in'
                    ticket_list = TicketRegister.objects.filter(
                        status_id=status_required)
                    response = generate_csv(ticket_list)
                    return response
            except Exception, e:
                # Best-effort: filter errors fall through to rendering the
                # unfiltered list.
                print 'Error on line {}'.format(sys.exc_info()[-1].tb_lineno)
                print e.args
        # ###--->>
        # paginator = Paginator(ticket_list, 10)
        # page_no = request.GET.get('page')
        # try:
        # resolved_issues = paginator.page(page_no)
        # except PageNotAnInteger:
        # resolved_issues = paginator.page(1)
        # except EmptyPage:
        # resolved_issues = paginator.page(paginator.num_pages)
        # ticket_list = resolved_issues
        return render(
            request,
            'resolved_dashboard.html',
            {
                # 'priority': priority,
                # 'user_list': user_list,
                # 'engineer_list': engineer_list,
                'ticket_list': ticket_list,
                # 'status_list': status_list,
            })