def upload_file():
    """Accept a CSV upload of media sources and create/update them in bulk.

    Expects a multipart-form field named 'file'. The CSV is parsed into
    sources to create and sources to update, both batches are applied via
    _create_or_update_sources, an audit email is optionally sent (when the
    'smtp'/'enabled' setting is '1'), and the combined results are returned
    as JSON under 'results'.

    Returns a JSON error response for a missing/empty/invalid file, or when
    more than 300 new sources are requested in one batch.
    """
    time_start = time.time()
    # grab and verify the file
    if 'file' not in request.files:
        return json_error_response('No file part')
    uploaded_file = request.files['file']
    if uploaded_file.filename == '':
        return json_error_response('No selected file')
    if not (uploaded_file and allowed_file(uploaded_file.filename)):
        return json_error_response('Invalid file')
    filepath = os.path.join(app.config['UPLOAD_FOLDER'],
                            secure_filename(uploaded_file.filename))
    # have to save b/c otherwise we can't locate the file path (security restriction)... can delete afterwards
    uploaded_file.save(filepath)
    time_file_saved = time.time()
    # parse all the source data out of the file
    sources_to_update, sources_to_create = _parse_sources_from_csv_upload(filepath)
    if len(sources_to_create) > 300:
        return jsonify({
            'status': 'Error',
            'message': 'Too many sources to upload. The limit is 300.'
        })
    all_results = []
    all_errors = []
    audit = []
    if sources_to_create:
        audit_results, successful, errors = _create_or_update_sources(
            sources_to_create, True)
        all_results += successful
        audit += audit_results
        all_errors += errors
    if sources_to_update:
        audit_results, successful, errors = _create_or_update_sources(
            sources_to_update, False)
        all_results += successful
        audit += audit_results
        all_errors += errors
    # optionally email an audit of what changed
    if settings.has_option('smtp', 'enabled'):
        mail_enabled = settings.get('smtp', 'enabled')
        # bug fix: original used `mail_enabled is '1'` — an identity check
        # that is not guaranteed True even for equal strings; use equality.
        if mail_enabled == '1':
            _email_batch_source_update_results(audit)
    for media in all_results:
        if 'media_id' in media:
            # make sure they are ints so no-dupes logic works on front end
            media['media_id'] = int(media['media_id'])
    time_end = time.time()
    logger.debug("upload_file: {}".format(time_end - time_start))
    logger.debug("  save file: {}".format(time_file_saved - time_start))
    logger.debug("  processing: {}".format(time_end - time_file_saved))
    return jsonify({'results': all_results})
def upload_file():
    """Handle a CSV upload of sources: validate, parse, create/update, report.

    Reads the multipart 'file' field, saves it so it can be parsed, splits
    the rows into creations and updates, applies both batches, optionally
    emails an audit (when SMTP_ENABLED config is '1'), and responds with the
    combined results plus a 'Success' status.
    """
    t0 = time.time()
    # validate that a usable file actually arrived with the request
    if 'file' not in request.files:
        return json_error_response('No file part')
    csv_file = request.files['file']
    if csv_file.filename == '':
        return json_error_response('No selected file')
    if not (csv_file and allowed_file(csv_file.filename)):
        return json_error_response('Invalid file')
    filepath = os.path.join(app.config['UPLOAD_FOLDER'],
                            secure_filename(csv_file.filename))
    # have to save b/c otherwise we can't locate the file path (security restriction)... can delete afterwards
    csv_file.save(filepath)
    t_saved = time.time()
    # parse all the source data out of the file
    try:
        sources_to_update, sources_to_create = _parse_sources_from_csv_upload(filepath)
    except Exception as e:
        logger.error("Couldn't process a CSV row: " + str(e))
        return jsonify({'status': 'Error', 'message': str(e)})

    all_results = []
    all_errors = []
    if len(sources_to_create) > 300:
        return jsonify({'status': 'Error', 'message': 'Too many sources to upload. The limit is 300.'})
    audit = []
    # creations first, then updates, accumulating results/audit/errors
    for batch, is_create in ((sources_to_create, True), (sources_to_update, False)):
        if len(batch) > 0:
            audit_results, successful, errors = _create_or_update_sources(batch, is_create)
            all_results += successful
            audit += audit_results
            all_errors += errors
    try:
        if config.get('SMTP_ENABLED') == u'1':
            _email_batch_source_update_results(audit)
    except ConfigException:
        logger.debug("Skipping collection file upload confirmation email")
    # make sure they are ints so no-dupes logic works on front end
    for media in all_results:
        if 'media_id' in media:
            media['media_id'] = int(media['media_id'])
    t_end = time.time()
    logger.debug("upload_file: {}".format(t_end - t0))
    logger.debug("  save file: {}".format(t_saved - t0))
    logger.debug("  processing: {}".format(t_end - t_saved))
    return jsonify({'results': all_results, 'status': "Success"})
# 예제 #3 (Example #3) — scraped-sample separator, not executable code
def platform_generic_upload_csv(topics_id):
    """
    Handle an uploaded CSV file by saving it into a temp dir and returning the temp dir to the client.
    That filename will then be relayed back to the server to support preview operations.
    :param topics_id:
    :return:
    """
    if 'file' not in request.files:
        return json_error_response('No file uploaded')
    csv_upload = request.files['file']
    if csv_upload.filename == '':
        return json_error_response('No file found in uploads')
    if not (csv_upload and allowed_file(csv_upload.filename)):
        return json_error_response('Invalid file')
    # prefix with the topic id and a timestamp so concurrent uploads don't collide
    timestamp = dt.datetime.now().strftime("%Y%m%d%H%M%S")
    safe_name = secure_filename(csv_upload.filename)
    filename = "{}-{}-{}".format(topics_id, timestamp, safe_name)
    # have to save b/c otherwise we can't locate the file path (security restriction)... can delete afterwards
    csv_upload.save(os.path.join(app.config['UPLOAD_FOLDER'], filename))
    return jsonify({'status': 'Success', 'filename': filename})