Example #1
def handle_multiqc_data(user, *args, **kwargs):
    data = request.get_json().get('data')
    success, msg = handle_report_data(user, data)
    response = jsonify({'success': success, 'message': msg})
    if not success:
        response.status_code = 400
    return response
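
A note on Example #1: the handler expects a JSON body with a top-level "data" key and replies with a JSON success/message pair, returning HTTP 400 when handle_report_data() reports failure. A minimal client-side sketch of calling such an endpoint follows; the route path and the auth header are placeholders, not taken from the example.

import requests

# Both the URL and the header name below are assumptions for illustration;
# adjust them to however handle_multiqc_data() is registered and authenticated.
resp = requests.post(
    "http://localhost:5000/api/upload_data",
    json={"data": "<contents of multiqc_data.json>"},  # the handler reads the 'data' key
    headers={"access_token": "my-api-token"},
)
print(resp.status_code)  # 400 if handle_report_data() reported a failure
print(resp.json())       # {'success': ..., 'message': ...}
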
Example #2
def upload_reports_job():
    with scheduler.app.app_context():
        queued_uploads = db.session.query(Upload).filter(Upload.status == "NOT TREATED").all()
        for row in queued_uploads:
            print "dealing with {0}".format(row.upload_id)
            row.status = "IN TREATMENT"
            db.session.add(row)
            db.session.commit()
            print "updated status"
            user = db.session.query(User).filter(User.user_id == row.user_id).one()
            with open(row.path, 'r') as fh:
            	data = json.load(fh)
            print "loaded data"
            try:
                ret = handle_report_data(user, data)
            except Exception as e:
                ret=(False, str(e))
            print "handled"
            if ret[0]:
                row.status = "TREATED"
                row.message = "The document has been uploaded successfully"
            else:
                row.status = "FAILED"
                row.message = "The document has not been uploaded : {0}".format(ret[1])
            row.modified_at = datetime.datetime.utcnow()
            db.session.add(row)
            db.session.commit()
Example #3
def upload_reports_job():
    with scheduler.app.app_context():
        queued_uploads = (db.session.query(Upload).filter(
            Upload.status == "NOT TREATED").all())
        for row in queued_uploads:
            user = db.session.query(User).filter(
                User.user_id == row.user_id).one()
            current_app.logger.info(
                "Beginning process of upload #{} from {}".format(
                    row.upload_id, user.email))
            row.status = "IN TREATMENT"
            db.session.add(row)
            db.session.commit()
            # Check if we have a gzipped file
            gzipped = False
            with open(row.path, "rb") as fh:
                file_start = fh.read(3)
                if file_start == b"\x1f\x8b\x08":
                    gzipped = True
            try:
                if gzipped:
                    with io.BufferedReader(gzip.open(row.path, "rb")) as fh:
                        raw_data = fh.read().decode("utf-8")
                else:
                    with io.open(row.path, "rb") as fh:
                        raw_data = fh.read().decode("utf-8")
                data = json.loads(raw_data)
                # Now save the parsed JSON data to the database
                ret = handle_report_data(user, data)
            except Exception:
                ret = (
                    False,
                    "<pre><code>{}</code></pre>".format(
                        traceback.format_exc()),
                )
                current_app.logger.error(
                    "Error processing upload {}: {}".format(
                        row.upload_id, traceback.format_exc()))
            if ret[0]:
                row.status = "TREATED"
                row.message = "The document has been uploaded successfully"
                os.remove(row.path)
            else:
                if ret[1] == "Report already processed":
                    current_app.logger.info(
                        "Upload {} already being processed by another worker, skipping"
                        .format(row.upload_id))
                    continue
                row.status = "FAILED"
                row.message = "The document has not been uploaded : {0}".format(
                    ret[1])
            row.modified_at = datetime.datetime.utcnow()
            current_app.logger.info(
                "Finished processing upload #{} to state {}".format(
                    row.upload_id, row.status))
            db.session.add(row)
            db.session.commit()
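
Example #3 decides how to open the upload by sniffing its first three bytes (Example #4 below does the same): gzip output always starts with the magic bytes 0x1f 0x8b, followed by 0x08 for the DEFLATE compression method. A standalone sketch of that check pulled out into a helper follows; the function name is illustrative, not taken from the examples.

import gzip
import io


def read_report_text(path):
    # Peek at the first three bytes to see whether the file is gzip-compressed.
    with open(path, "rb") as fh:
        gzipped = fh.read(3) == b"\x1f\x8b\x08"
    if gzipped:
        with io.BufferedReader(gzip.open(path, "rb")) as fh:
            return fh.read().decode("utf-8")
    with io.open(path, "rb") as fh:
        return fh.read().decode("utf-8")
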
Example #4
def upload_reports_job():
    with scheduler.app.app_context():
        queued_uploads = db.session.query(Upload).filter(
            Upload.status == "NOT TREATED").all()
        for row in queued_uploads:
            row.status = "IN TREATMENT"
            db.session.add(row)
            db.session.commit()
            user = db.session.query(User).filter(
                User.user_id == row.user_id).one()
            # Check if we have a gzipped file
            gzipped = False
            with open(row.path, 'rb') as fh:
                file_start = fh.read(3)
                if file_start == b'\x1f\x8b\x08':
                    gzipped = True
            try:
                if gzipped:
                    with io.BufferedReader(gzip.open(row.path, 'rb')) as fh:
                        raw_data = fh.read().decode('utf-8')
                else:
                    with io.open(row.path, 'rb') as fh:
                        raw_data = fh.read().decode('utf-8')
                data = json.loads(raw_data)
                # Now save the parsed JSON data to the database
                ret = handle_report_data(user, data)
            except Exception:
                ret = (False, '<pre><code>{}</code></pre>'.format(
                    traceback.format_exc()))
            if ret[0]:
                row.status = "TREATED"
                row.message = "The document has been uploaded successfully"
                os.remove(row.path)
            else:
                row.status = "FAILED"
                row.message = "The document has not been uploaded : {0}".format(
                    ret[1])
            row.modified_at = datetime.datetime.utcnow()
            db.session.add(row)
            db.session.commit()
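
The scheduler.app.app_context() call in Examples #2 through #4 suggests the job runs under Flask-APScheduler. A minimal sketch of how upload_reports_job() could be registered, assuming that library; the job id and the 30-second polling interval are illustrative, not taken from the examples.

from flask import Flask
from flask_apscheduler import APScheduler

app = Flask(__name__)
scheduler = APScheduler()
scheduler.init_app(app)
scheduler.start()

# Poll the Upload table for "NOT TREATED" rows on a fixed interval.
scheduler.add_job(
    id="upload_reports_job",
    func=upload_reports_job,  # the function defined in Examples #2 through #4
    trigger="interval",
    seconds=30,
)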