Example #1
def export(history_id, status_id):
    history = UploadHistoryModel.find_by_id(history_id)
    if history.user.id != current_user.id and current_user.has_role('Vendor'):
        return render_template('errors/404.html'), 404

    # 1 - Validated
    # 2 - Validation-err
    # 3 - Unvalidated
    status = ""

    if status_id.startswith('1'):
        status = 'validated'
    elif status_id.startswith('2'):
        status = 'validation-error'
    elif status_id.startswith('3'):
        status = 'unvalidated'

    file = s3.get_object(Bucket=current_app.config['S3_BUCKET'],
                         Key='{}/{}.csv'.format(status, history_id))

    return Response(
        file['Body'].read(),
        mimetype='application/csv',
        headers={
            "Content-Disposition":
            "attachment;filename=export_raw_data_{}.csv".format(history_id)
        })
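
file['Body'].read() above loads the whole CSV into memory before responding. For large exports, a streamed variant can be built on the same get_object call; this is a minimal sketch, not taken from the project: iter_chunks() comes from botocore's StreamingBody, and everything else mirrors the example above.

def export_streamed(history_id, status):
    # Same S3 key layout as above: <status>/<history_id>.csv
    s3_object = s3.get_object(Bucket=current_app.config['S3_BUCKET'],
                              Key='{}/{}.csv'.format(status, history_id))

    return Response(
        # Stream the object in 1 MB pieces instead of reading it all at once.
        s3_object['Body'].iter_chunks(chunk_size=1024 * 1024),
        mimetype='text/csv',
        headers={
            "Content-Disposition":
            "attachment;filename=export_raw_data_{}.csv".format(history_id)
        })
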
Example #2
def result():
    id = request.args.get('id', type=int)
    history = UploadHistoryModel.find_by_id(id)
    if history.user.id != current_user.id and current_user.has_role('Vendor'):
        return render_template('errors/404.html'), 404
    # stdout = ""
    # stderr = ""
    # base_folder = current_app.config['UPLOAD_FOLDER']
    # folder = "{}/{}_vendor/{}/".format(base_folder, current_user.id, id)
    # if not os.path.exists(os.path.realpath(os.path.dirname(folder))):
    #     folder = "{}/{}_{}/{}/".format(base_folder, current_user.id, current_user.short_name, id)
    # file_dir = os.path.realpath(os.path.dirname(folder))
    # print(file_dir)
    # print(os.path.join(file_dir, "stdout"))
    # if os.path.isfile(os.path.join(file_dir, "stdout")):
    #     with open(os.path.join(file_dir, "stdout"), 'r') as file1:
    #         stdout = file1.read()
    #         file1.close()
    #     with open(os.path.join(file_dir, "stderr"), 'r') as file2:
    #         stderr = file2.read()
    #         stderr = stderr.replace("%", "")
    #         stderr = stderr.replace('\n', "<br/>")
    #         file2.close()

    return render_template('result.html', title='Job Result', history=history)
Example #3
def get_histories():
    page = request.args.get('page', 1, type=int)
    per_page = min(request.args.get('per_page', 20, type=int), 100)
    data = UploadHistoryModel.to_all_collection_dict(
        UploadHistoryModel.query.filter_by(user_id=current_user.id).order_by(
            UploadHistoryModel.id.desc()), page, per_page, 'ID')
    return jsonify(data)
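
The example above relies on a to_all_collection_dict helper that is not shown in these snippets. Below is a minimal sketch of how such a pagination helper is often written with Flask-SQLAlchemy's paginate(); the field names, the to_dict() call, and the meaning of the last argument are assumptions, not the project's actual implementation.

class PaginatedCollectionMixin:
    @staticmethod
    def to_all_collection_dict(query, page, per_page, order_label):
        # Paginate the query and return a JSON-serialisable dict of items plus metadata.
        resources = query.paginate(page=page, per_page=per_page, error_out=False)
        return {
            'items': [item.to_dict() for item in resources.items],
            '_meta': {
                'page': page,
                'per_page': per_page,
                'total_pages': resources.pages,
                'total_items': resources.total,
                'order': order_label,
            },
        }
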
Example #4
def is_duplicate_upload(form):
    past_uploads = UploadHistoryModel.get_all_user_id(current_user.id)
    upload_file = form.file.data
    # Return the first past upload with the same file name; otherwise None.
    for upload in past_uploads:
        if upload_file.filename == upload.file_name:
            return upload
    return None
Example #5
def check_email_notify(self):
    try:
        from app.email import notify_job_result_to_user
        from app.data.models.history import UploadHistoryModel
        # Status type 4 marks a finished job; notify the user if their company opted in.
        if self.status_type == 4:
            history = UploadHistoryModel.find_by_id(self.history_id)
            if history.user.company.job_notify_email:
                notify_job_result_to_user(history)
    except Exception:
        # Best-effort notification: an email failure must not break the job-log update.
        pass
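
notify_job_result_to_user is imported lazily above but never shown. Here is a minimal sketch of what such a helper could look like with Flask-Mail; the subject line, the template name, and the mail object are assumptions.

from flask import render_template
from flask_mail import Message

from app import mail  # assumed Flask-Mail instance


def notify_job_result_to_user(history):
    # Email the job result to the user who uploaded the file.
    msg = Message(subject='Your upload has finished processing',
                  recipients=[history.user.email])
    msg.html = render_template('email/job_result.html', history=history)
    mail.send(msg)
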
Example #6
def validate(file, form):
    if file and allowed_file(file.filename):
        pass
        # if file.mimetype.startswith('text/plain'):
        #     formats = FileFormatModel.find_all()
        #     line_number = 0
        #     lines = file.readlines()
        #     if len(lines) == 1:
        #         lines = lines[0].split(b'\r')
        #     for line in lines:
        #         line_number += 1
        #         if line_number > 100:
        #             break
        #         try:
        #             cols = line.decode('windows-1252').strip().split('\t')
        #             if len(cols) >= len(formats):
        #                 for idx, input_format in enumerate(formats):
        #                     obj = str
        #                     if input_format.col_type.lower().startswith("int"):
        #                         obj = int
        #                     elif input_format.col_type.lower().startswith("float"):
        #                         obj = float
        #                     if not isinstance(cols[idx], obj):
        #                         return {'message': "Type error on the line #{}".format(line_number)}, 400
        #             else:
        #                 return {'message': "Columns must be at least {}".format(len(formats))}, 400
        #         except:
        #             return {'message': "Type error on the line #{}".format(line_number)}, 400
    else:
        return {"message": "Invalid file format!"}, 400

    try:
        file.seek(0, os.SEEK_END)
        file_length = file.tell()
        file_size = size(file_length, system=alternative)
        history = UploadHistoryModel(current_user.id,
                                     secure_filename(file.filename), file_size)
        history.type = form.type.data
        history.purchasability = form.purchasability.data
        history.natural_products = form.natural_products.data
        history.save_to_db()
        result = save_file(file, history.file_name, False, history.id)
        file_info = "File Uploaded! File Size:{}. ".format(file_size)
        if result is None:
            return {"message": file_info}, 200
        elif result[1] == 200:
            return {"message": file_info + result[0]["message"]}, 200
        else:
            return result
    except:
        print(sys.exc_info())
        return {"message": "An error occured inserting the file."}, 500

    return {"message": "File Uploaded! File Size:{}".format(file_size)}, 200
Example #7
def welcome():
    user = UserModel.find_by_email(current_user.email)
    if current_user.has_role("Admin"):
        return redirect(url_for('admin_views.upload_report'))
    else:
        user_upload_count = get_user_job_count()
        latest_history = UploadHistoryModel.get_last_by_user_id(
            user_id=user.id)
        if latest_history:
            catalog_type = CATALOG_TYPE.get(latest_history.catalog_type)
            status = JOB_STATUS.get(latest_history.status_id)
            return render_template('welcome.html',
                                   user=user,
                                   title='Welcome',
                                   latest_history=latest_history,
                                   catalog_type=catalog_type,
                                   status=status,
                                   user_upload_count=user_upload_count)
        else:
            return render_template('welcome.html', user=user, title='Welcome')
Example #8
def get_job_status_count():
    status_count = {}
    this_month_histories = UploadHistoryModel.get_this_month_upload()
    job_count = 0
    attention = failed = finished = running = 0
    for history in this_month_histories:
        job_count += 1
        if history.status_id in (3, 6, 9):
            failed += 1
            status_count.update({'Failed': failed})
        elif history.status_id == 11:
            finished += 1
            status_count.update({'Finished': finished})
        elif history.status_id == 15:
            attention += 1
            status_count.update({'Attention': attention})
        else:
            running += 1
            print(history)
            status_count.update({'Running': running})

    return status_count, job_count
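
A minimal sketch of how get_job_status_count() might feed a dashboard endpoint. The blueprint name and route are hypothetical; only the helper's return shape (a status-to-count dict plus a total) comes from the example above.

from flask import Blueprint, jsonify
from flask_login import login_required

user_views = Blueprint('user_views', __name__)  # hypothetical blueprint for this sketch


@user_views.route('/dashboard/job-status')
@login_required
def job_status_summary():
    # Reuse the helper above: a {'Failed': n, ...} dict plus the month's total job count.
    status_count, job_count = get_job_status_count()
    return jsonify({'total_jobs': job_count, 'by_status': status_count})
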
Example #9
def last_result():
    history = UploadHistoryModel.get_last_by_user_id(current_user.id)
    return render_template('result.html', title='Job Result', history=history)
Example #10
def validate(file, form):
    if file and allowed_file(file.filename):
        pass
        # if file.mimetype.startswith('text/plain'):
        #     formats = FileFormatModel.find_all()
        #     line_number = 0
        #     lines = file.readlines()
        #     if len(lines) == 1:
        #         lines = lines[0].split(b'\r')
        #     for line in lines:
        #         line_number += 1
        #         if line_number > 100:
        #             break
        #         try:
        #             cols = line.decode('windows-1252').strip().split('\t')
        #             if len(cols) >= len(formats):
        #                 for idx, input_format in enumerate(formats):
        #                     obj = str
        #                     if input_format.col_type.lower().startswith("int"):
        #                         obj = int
        #                     elif input_format.col_type.lower().startswith("float"):
        #                         obj = float
        #                     if not isinstance(cols[idx], obj):
        #                         return {'message': "Type error on the line #{}".format(line_number)}, 400
        #             else:
        #                 return {'message': "Columns must be at least {}".format(len(formats))}, 400
        #         except:
        #             return {'message': "Type error on the line #{}".format(line_number)}, 400
    else:
        return {"message": "Invalid file format!"}, 400

    try:
        duplicate_upload = is_duplicate_upload(form)
        if duplicate_upload is None:
            file.seek(0, os.SEEK_END)
            file_length = file.tell()
            file_size = size(file_length, system=alternative)
            history = UploadHistoryModel(current_user.id,
                                         secure_filename(file.filename),
                                         file_size)
            history.catalog_type = form.catalog_type.data
            if form.catalog_type.data in ('bio', 'np'):
                history.availability = 'stock'
            else:
                history.availability = form.availability.data
            history.upload_type = form.upload_type.data
            history.save_to_db()
            if current_user.has_role('Admin'):
                print("Saving additional information specified by admin ... ")
                info_dict = {}
                short_name = form.short_name.data
                if form.price_fiel.data:
                    if form.availability.data == 'demand':
                        short_name = short_name.split('-')[0]
                        econ = short_name + "e-v"
                        std = short_name + "-v"
                        prem = short_name + "p-v"
                    else:
                        econ = short_name + "e"
                        std = short_name
                        prem = short_name + "p"
                    shortname_list = [econ, std, prem]
                    info_dict.update({'short_name': shortname_list})
                else:
                    info_dict.update({'short_name': short_name})

            result = save_file(file, history, history.file_name, False,
                               history.id)
            file_info = "File Uploaded! File Size:{}. ".format(file_size)
            if result is None:
                history.delete_from_db()
                return {"message": file_info}, 200
            elif result[1] == 200:
                return {"message": file_info + result[0]["message"]}, 200
            else:
                return result
        else:
            return {
                "message":
                "File {} had been uploaded before on {}".format(
                    duplicate_upload.file_name,
                    duplicate_upload.date_uploaded.replace(
                        tzinfo=timezone.utc).astimezone(
                            tz=None).strftime("%B %d %Y at %I:%M %p"))
            }, 500

    except:
        print(sys.exc_info())
        return {"message": "An error occured inserting the file."}, 500

    return {"message": "File Uploaded! File Size:{}".format(file_size)}, 200
Example #11
def last_result():
    history = UploadHistoryModel.get_last_by_user_id(current_user.id)
    last_updated = utc_to_local(history.last_updated)
    status = StatusModel.query.filter_by(status_id=history.status_id).first()
    statuses_dict = StatusModel.to_dict()
    return render_template('result.html',
                           title='Job Result',
                           history=history,
                           status=status.status,
                           last_updated=last_updated,
                           statuses_dict=statuses_dict,
                           status_id=status.status_id)
Example #12
def excel_validation(request):
    start_time_whole = time.time()
    warning_msg = []
    error_msg = []

    start_time_readsql = time.time()
    mandatory_fields = [
        mand.field_name.lower() for mand in FieldModel.find_by_mandatory(True)
    ]
    mandatory_field_ids = []
    optional_fields = [
        mand.field_name.lower() for mand in FieldModel.find_by_mandatory(False)
    ]
    validation_row_limit = int(current_app.config['FILE_VALIDATION_LIMIT'])
    end_readsql = time.time()
    elapsed_readsql = end_readsql - start_time_readsql
    print("read SQL spent {} seconds".format(elapsed_readsql))

    start_time_readasarray = time.time()
    dict_value = request.get_array(field_name='file')
    end_readasarray = time.time()
    elapsed_readasarray = end_readasarray - start_time_readasarray
    print("Read as ARRAY  spent {} seconds".format(elapsed_readasarray))
    if len(dict_value) <= 1:
        return {"message": "No data error!"}, 400
    headers = [h.lower() for h in dict_value[0]]
    print("headers")
    print(headers)
    duplicated_fields = set([x for x in headers if headers.count(x) > 1])
    if len(duplicated_fields) > 0:
        error_msg.append([
            0, "Field duplication error: {} \n".format(list(duplicated_fields))
        ])

    if set(mandatory_fields).issubset(set(headers)):
        for m_field in mandatory_fields:
            mandatory_field_ids.append(headers.index(m_field))
        for index, item in enumerate(dict_value[1:validation_row_limit]):
            for m_field_id in mandatory_field_ids:
                if not item[m_field_id]:
                    error_msg.append([
                        "Line {}: ".format(index + 1),
                        "Mandatory field [{}] has no value".format(
                            headers[m_field_id])
                    ])
    else:
        error_msg.append([
            "", "Mandatory field missing {}".format(
                set(mandatory_fields) - set(headers))
        ])

    file = request.files['file']
    file.seek(0, os.SEEK_END)
    file_length = file.tell()
    file_size = size(file_length, system=alternative)
    history = UploadHistoryModel(current_user.id,
                                 secure_filename(file.filename), file_size)
    # No need to add milliseconds to this file name because it is saved to S3 under the history id
    history.file_name = secure_filename(file.filename)
    # history.data_array = str(request.get_array(field_name='file'))
    history.save_to_db()

    decimal_fields = FieldDecimalModel.find_all()
    for dec_field in decimal_fields:
        # headers.index raises if the field is missing; stop checking the remaining fields in that case
        try:
            field_index = headers.index(dec_field.field.field_name.lower())
        except:
            break

        if field_index is not None:  # 0 is a valid column index
            for row, item_list in enumerate(
                    dict_value[1:validation_row_limit]):
                if isinstance(item_list[field_index], numbers.Real):
                    if float(item_list[field_index]) < dec_field.min_val:
                        error_msg.append([
                            "Line {}: ".format(row + 1),
                            "[{}] field value must be "
                            "greater than {}".format(headers[field_index],
                                                     dec_field.min_val)
                        ])
                        # error_msg += "Line{}: [{}] field value must be greater " \
                        #              "than {} \n".format(row + 1, headers[ field_index], dec_field.min_val)
                    if float(item_list[field_index]) > dec_field.max_val:
                        warning_msg.append([
                            "Line {}: ".format(row + 1),
                            "[{}] field value is greater "
                            "than max value: {}".format(
                                headers[field_index], dec_field.max_val)
                        ])
                        # warning_msg += "Line{}: [{}] field value was greater " \
                        #              "than max value: {}.\n".format(row + 1, headers[field_index], dec_field.max_val)
                    dict_value[row + 1][field_index] = "{0:.2f}".format(
                        item_list[field_index])
                else:
                    error_msg.append([
                        "Line {}: ".format(row + 1),
                        "[{}] field has invalid data".format(
                            headers[field_index])
                    ])
                    # error_msg += "Line{}: [{}] field has invalid data \n".format(row + 1, headers[field_index])

    string_fields = FieldAllowedValueModel.find_all()
    for str_field in string_fields:
        # headers.index raises if the field is missing; stop checking the remaining fields in that case
        try:
            field_index = headers.index(str_field.field.field_name.lower())
        except:
            break

        if field_index is not None:  # 0 is a valid column index
            for row, item_list in enumerate(
                    dict_value[1:validation_row_limit]):
                try:
                    if str(item_list[field_index]).lower() not in \
                            [str(x.strip().lower()) for x in str_field.allowed_values.split(',')]:
                        error_msg.append([
                            "Line {}: ".format(row + 1),
                            "[{}] field value is not allowed".format(
                                headers[field_index])
                        ])
                        # error_msg += "Line{}: [{}] field value is not allowed".format(row + 1, headers[field_index])
                except:
                    error_msg.append([
                        "Line {}: ".format(row + 1),
                        "[{}] field allowed values has an error. "
                        "Please contact admin to fix this issue.".format(
                            headers[field_index])
                    ])
                    # error_msg += "Line{}: [{}] field allowed values has an error. " \
                    #              "Please contact admin to fix this issue.".format(row + 1, headers[field_index])

    # error_msg_set = set(["{} {}".format(x[0], x[1]) for x in error_msg if error_msg.count(x) == 1])
    # warning_msg_set = set(["{} {}".format(x[0], x[1]) for x in warning_msg if warning_msg.count(x) == 1])

    error_msg_set = set([
        "{} {}".format(min([y[0] for y in error_msg if y[1] == x[1]]), x[1])
        for x in error_msg if len([y for y in error_msg if y[1] == x[1]]) == 1
    ])
    warning_msg_set = set([
        "{} {}".format(min([y[0] for y in warning_msg if y[1] == x[1]]), x[1])
        for x in warning_msg
        if len([y for y in warning_msg if y[1] == x[1]]) == 1
    ])

    error_msg_set.update(
        set([
            "{} {}  (same errors occurred {} other lines)".format(
                [y[0] for y in error_msg if y[1] == x[1]][0], x[1],
                len([y for y in error_msg if y[1] == x[1]])) for x in error_msg
            if len([y for y in error_msg if y[1] == x[1]]) > 1
        ]))
    warning_msg_set.update(
        set([
            "{} {}  (same errors occurred {} other lines)".format(
                [y[0] for y in warning_msg if y[1] == x[1]][0], x[1],
                len([y for y in warning_msg if y[1] == x[1]]))
            for x in warning_msg
            if len([y for y in warning_msg if y[1] == x[1]]) > 1
        ]))

    # catalog_objs = []
    # catalog_dict = []
    str_data = ""
    # print(mandatory_fields)
    for item_list in dict_value:
        data_dict = {}
        for index, value in enumerate(item_list):
            if headers[index] in mandatory_fields:
                data_dict[mandatory_fields.index(headers[index])] = value
                # catalog_objs.append(CatalogModel(headers[index], 'mandatory', value, history.id))
                # catalog_dict.append(
                #     dict(field_name=headers[index], type='mandatory', value=value, history_id=history.id))
            if headers[index] in optional_fields:
                # use len(mandatory_fields) + index so optional fields are placed after the mandatory ones
                # print(len(mandatory_fields)+index)
                data_dict[len(mandatory_fields) + index] = value
            # catalog_objs.append(CatalogModel(headers[index], 'optional', value, history.id))
            # catalog_dict.append(
            #     dict(field_name=headers[index], type='optional', value=value, history_id=history.id))

        str_line = ','.join(
            str(data_dict[key]) for key in sorted(data_dict.keys()))
        str_data = str_data + str_line + '\n'

    s3_dir = "validated"
    history.status = 1
    if error_msg:
        s3_dir = "validation-error"
        history.status = 2

    # Unvalidated
    # s3_dir = "unvalidated"
    # history.status = 3
    history.save_to_db()

    start_time_s3 = time.time()
    file.stream.seek(0)
    # s3_result = upload_file_to_s3(file, history.id, s3_dir)
    upload_data_to_s3(str_data, history.id, s3_dir)
    end_s3 = time.time()
    elapsed_s3 = end_s3 - start_time_s3
    print("S3 upload spent {} seconds".format(elapsed_s3))

    if warning_msg:
        job_log = JobLogModel()
        job_log.status = '<br>'.join(str(s) for s in warning_msg_set)
        job_log.status_type = 2
        job_log.history_id = history.id
        job_log.save_to_db()

    if error_msg:
        job_log = JobLogModel()
        job_log.status = '<br>'.join(str(s) for s in error_msg_set)
        job_log.status_type = 3
        job_log.history_id = history.id
        job_log.save_to_db()
        job_log = JobLogModel()
        job_log.status = "Finished"
        job_log.status_type = 4
        job_log.history_id = history.id
        job_log.save_to_db()
        return {"message": '<br>'.join(str(s) for s in error_msg_set)}, 400

    job_log = JobLogModel()
    job_log.status = "Finished"
    job_log.status_type = 4
    job_log.history_id = history.id
    job_log.save_to_db()

    end_whole = time.time()
    elapsed_whole = end_whole - start_time_whole
    print("Whole process spent {} seconds without file uploading".format(
        elapsed_whole))
    return {"message": "Your excel file has been submitted!"}, 200