    def process_request(cls, params_dict):
        metadata_file = params_dict[cls._file_param][0]
        application = params_dict[cls._app_param][0]
        json_response = {FILENAME: metadata_file.filename}
        http_status_code = 200
        file_uuid = str(uuid4())
        path = os.path.join(TMP_PATH, file_uuid)
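        # Save the upload to a unique temp path, verify it is tab- or
        # comma-delimited, then hand it to update_db(); non-unique probe IDs
        # are rejected with a 403 below.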

        try:
            metadata_file.save(path)
            metadata_file.close()
            dialect = get_dialect(path)
            if dialect:
                probe_ids = cls._DB_CONNECTOR.distinct(
                    PROBE_METADATA_COLLECTION, PROBE_ID)
                ids_are_unique = cls.update_db(dialect, path, probe_ids,
                                               application)
                if not ids_are_unique:
                    http_status_code = 403
            else:
                http_status_code = 415
                json_response[ERROR] = "Invalid file format - file must " \
                    "be either tab or comma delimited."
        except IOError:
            APP_LOGGER.exception(traceback.format_exc())
            http_status_code = 415
            json_response[ERROR] = str(sys.exc_info()[1])
        except:
            APP_LOGGER.exception(traceback.format_exc())
            http_status_code = 500
            json_response[ERROR] = str(sys.exc_info()[1])
        finally:
            silently_remove_file(path)

        return make_clean_response(json_response, http_status_code)
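# get_dialect() is not shown in these examples; judging from the 415 error
# message it detects whether the upload is tab- or comma-delimited. A minimal
# sketch using csv.Sniffer, assuming that contract (the project's real helper
# may differ):
import csv

def get_dialect(path, delimiters='\t,'):
    """Return a csv.Dialect for the file, or None if it cannot be sniffed."""
    try:
        with open(path, 'r') as handle:
            return csv.Sniffer().sniff(handle.read(4096), delimiters=delimiters)
    except csv.Error:
        return None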
 def process_callback(future):
     try:
         _ = future.result()
         update = { "$set": {
                              STATUS: JOB_STATUS.succeeded, # @UndefinedVariable
                              RESULT: outfile_path,
                              FINISH_DATESTAMP: datetime.today(),
                              URL: get_results_url(outfile_path),
                            }
                 }
         # If job has been deleted, then delete result and don't update DB.
         if len(db_connector.find(PA_CONVERT_IMAGES_COLLECTION, query, {})) > 0:
             db_connector.update(PA_CONVERT_IMAGES_COLLECTION, query, update)
         else:
             silently_remove_file(outfile_path)
     except:
         APP_LOGGER.exception(traceback.format_exc())
         error_msg = str(sys.exc_info()[1])
         update    = { "$set": {STATUS: JOB_STATUS.failed, # @UndefinedVariable
                                RESULT: None,
                                FINISH_DATESTAMP: datetime.today(),
                                ERROR: error_msg}}
         # If job has been deleted, then delete result and don't update DB.
         if len(db_connector.find(PA_CONVERT_IMAGES_COLLECTION, query, {})) > 0:
             db_connector.update(PA_CONVERT_IMAGES_COLLECTION, query, update)
         else:
             silently_remove_file(outfile_path)
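# process_callback() is meant to be attached to a concurrent.futures-style
# future: future.result() re-raises any exception from the background job,
# which is what routes the callback into its except branch. Hypothetical
# wiring (the real service uses its own execution manager):
from concurrent.futures import ThreadPoolExecutor

def _demo_job():
    return 'done'   # stand-in for the real image-conversion job

with ThreadPoolExecutor(max_workers=1) as executor:
    future = executor.submit(_demo_job)
    future.add_done_callback(lambda finished: print(finished.result()))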
 def process_callback(future):
     try:
         _ = future.result()
         update = { "$set": { 
                              STATUS: JOB_STATUS.succeeded, # @UndefinedVariable
                              RESULT: outfile_path,
                              CONFIG: config_path,
                              FINISH_DATESTAMP: datetime.today(),
                              URL: "http://%s/results/%s/%s" % (HOSTNAME, PORT, uuid),
                              CONFIG_URL: "http://%s/results/%s/%s.cfg" % (HOSTNAME, PORT, uuid),
                            }
                 }
         # If job has been deleted, then delete result and don't update DB.
         if len(db_connector.find(PA_PROCESS_COLLECTION, query, {})) > 0:
             db_connector.update(PA_PROCESS_COLLECTION, query, update)
         else:
             silently_remove_file(outfile_path)
             silently_remove_file(config_path)
     except:
         error_msg = str(sys.exc_info()[1])
         update    = { "$set": {STATUS: JOB_STATUS.failed, # @UndefinedVariable
                                RESULT: None, 
                                FINISH_DATESTAMP: datetime.today(),
                                ERROR: error_msg}}
         # If job has been deleted, then delete result and don't update DB.
         if len(db_connector.find(PA_PROCESS_COLLECTION, query, {})) > 0:
             db_connector.update(PA_PROCESS_COLLECTION, query, update)
         else:
             silently_remove_file(outfile_path)
             silently_remove_file(config_path)
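# The free variables used by the callbacks above (outfile_path, config_path,
# db_connector, query) come from an enclosing factory; a later example shows
# it invoked as make_process_callback(uuid, outfile_path, config_path,
# db_connector). A minimal sketch of that closure pattern, with the query
# shape assumed:
def make_process_callback(uuid, outfile_path, config_path, db_connector):
    query = {'uuid': uuid}   # assumed: job records are keyed by their UUID

    def process_callback(future):
        # Body as in the example above: inspect future.result(), then update
        # the job record through db_connector or remove the orphaned files.
        ...

    return process_callback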
    def process_request(cls, params_dict):
        exp_file = params_dict[cls._file_param][0]
        sample_id = params_dict[cls._sid_param][0]
        run_id = params_dict[cls._rid_param][0]
        run_date = params_dict[cls._date_param][0]

        json_response = {
            FILENAME: exp_file.filename,
            RUN_ID: run_id,
            DATE: run_date,
            SAMPLE_ID: sample_id,
        }
        http_status_code = 200
        file_uuid = str(uuid4())

        path = os.path.join(TMP_PATH, file_uuid)
        run_ids = cls._DB_CONNECTOR.distinct(PROBE_EXPERIMENTS_COLLECTION,
                                             RUN_ID)
        if run_id in run_ids:
            http_status_code = 403
        else:
            try:
                exp_file.save(path)
                exp_file.close()
                dialect = get_dialect(path)
                if dialect:
                    cls.update_db(dialect, path, sample_id, run_id, run_date)
                else:
                    http_status_code = 415
                    json_response[ERROR] = "Invalid file format - file must " \
                        "be either tab or comma delimited."
            except IOError:
                http_status_code = 415
                json_response[ERROR] = str(sys.exc_info()[1])
            except:
                http_status_code = 500
                json_response[ERROR] = str(sys.exc_info()[1])
            finally:
                silently_remove_file(path)

        return make_clean_response(json_response, http_status_code)
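# make_clean_response() is not shown; every example hands it a dict plus an
# HTTP status code after deleting Mongo's ID field, so it plausibly produces
# a JSON-safe (body, status) pair. A hypothetical sketch of that contract:
import json

def make_clean_response(payload, http_status_code):
    # Stringify values (datetimes, ObjectIds, ...) that json.dumps cannot
    # serialise directly.
    clean = {key: value if isinstance(value, (str, int, float, bool,
                                              list, dict, type(None)))
             else str(value)
             for key, value in payload.items()}
    return json.dumps(clean), http_status_code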
Example #5
    def process_callback(future):
        try:
            _ = future.result()

            update = { "$set": {
                                 STATUS: JOB_STATUS.succeeded, # @UndefinedVariable
                                 RESULT: outfile_path,
                                 URL: get_results_url(os.path.join(dirname, uuid)),
                                 PNG: os.path.join(dirname, scatter_ind_pdf_fn),
                                 PNG_URL: get_results_url(os.path.join(dirname, scatter_ind_pdf_fn)),
                                 PNG_SUM: os.path.join(dirname, scatter_png_fn),
                                 PNG_SUM_URL: get_results_url(os.path.join(dirname, scatter_png_fn)),
                                 KDE_PNG: os.path.join(dirname, kde_ind_pdf_fn),
                                 KDE_PNG_URL: get_results_url(os.path.join(dirname, kde_ind_pdf_fn)),
                                 KDE_PNG_SUM: os.path.join(dirname, kde_png_fn),
                                 KDE_PNG_SUM_URL: get_results_url(os.path.join(dirname, kde_png_fn)),
                                 FINISH_DATESTAMP: datetime.today(),
                               }
                    }
        except:
            APP_LOGGER.exception("Error in Exploratory post request process callback.")
            error_msg = str(sys.exc_info()[1])
            update    = { "$set": {STATUS: JOB_STATUS.failed, # @UndefinedVariable
                                   RESULT: None,
                                   PDF: None,
                                   PNG: None,
                                   PNG_SUM: None,
                                   FINISH_DATESTAMP: datetime.today(),
                                   ERROR: error_msg}}
        finally:
            # If job has been deleted, then delete result and don't update DB.
            if len(db_connector.find(SA_EXPLORATORY_COLLECTION, query, {})) > 0:
                db_connector.update(SA_EXPLORATORY_COLLECTION, query, update)
            else:
                silently_remove_file(outfile_path)
                silently_remove_file(os.path.join(dirname, scatter_png_fn))
                silently_remove_file(os.path.join(dirname, scatter_ind_pdf_fn))
                silently_remove_file(os.path.join(dirname, kde_png_fn))
                silently_remove_file(os.path.join(dirname, kde_ind_pdf_fn))
Example #6
 def process_callback(future):
     try:
         _ = future.result()
         update = {
             '$set': {
                 STATUS: JOB_STATUS.succeeded,  # @UndefinedVariable
                 RESULT: outfile_path,
                 URL: get_results_url(outfile_path),
                 SCATTER_PLOT: scatter_plot_path,
                 SCATTER_PLOT_URL: get_results_url(scatter_plot_path),
                 DYES_SCATTER_PLOT: dyes_scatter_plot_path,
                 DYES_SCATTER_PLOT_URL: get_results_url(dyes_scatter_plot_path),
                 FINISH_DATESTAMP: datetime.today(),
             }
         }
         # If job has been deleted, then delete result and don't update DB.
         if len(db_connector.find(SA_ASSAY_CALLER_COLLECTION, query,
                                  {})) > 0:
             db_connector.update(SA_ASSAY_CALLER_COLLECTION, query, update)
         else:
             silently_remove_file(outfile_path)
             silently_remove_file(scatter_plot_path)
             silently_remove_file(dyes_scatter_plot_path)
     except:
         APP_LOGGER.exception(traceback.format_exc())
         error_msg = str(sys.exc_info()[1])
         update = {
             '$set': {
                 STATUS: JOB_STATUS.failed,  # @UndefinedVariable
                 RESULT: None,
                 FINISH_DATESTAMP: datetime.today(),
                 ERROR: error_msg
             }
         }
         # If job has been deleted, then delete result and don't update DB.
         if len(db_connector.find(SA_ASSAY_CALLER_COLLECTION, query,
                                  {})) > 0:
             db_connector.update(SA_ASSAY_CALLER_COLLECTION, query, update)
         else:
             silently_remove_file(outfile_path)
             silently_remove_file(scatter_plot_path)
             silently_remove_file(dyes_scatter_plot_path)
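# get_results_url() is used throughout; other examples build the same URL
# inline as "http://%s/results/%s/%s" % (HOSTNAME, PORT, basename), with the
# port as a path segment. A matching sketch (HOSTNAME/PORT are placeholders):
import os

HOSTNAME = 'localhost'   # placeholder
PORT = 8080              # placeholder

def get_results_url(path):
    return 'http://%s/results/%s/%s' % (HOSTNAME, PORT, os.path.basename(path))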
Example #7
    def process_callback(future):
        try:
            _ = future.result()

            dirname = os.path.dirname(outfile_path)
            vcf_fn = os.path.basename(outfile_path)
            basename = os.path.splitext(vcf_fn)[0]
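            # Filenames of the plots/PDFs that generate_plots() (called
            # below) writes alongside the VCF output.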
            pdf_fn = '%s.%s' % (basename, PDF)
            scatter_png_fn = '%s_scatter.%s' % (basename, PNG)
            scatter_ind_pdf_fn = '%s_scatter_ind.%s' % (basename, PDF)
            kde_png_fn = '%s_kde.%s' % (basename, PNG)
            kde_ind_pdf_fn = '%s_kde_ind.%s' % (basename, PDF)

            generate_plots(exp_def_name,
                           ac_result_path,
                           os.path.splitext(outfile_path)[0],
                           ignored_dyes=ignored_dyes,
                           data_set_name=cur_job_name)

            update = {
                "$set": {
                    STATUS: JOB_STATUS.succeeded,  # @UndefinedVariable
                    RESULT: outfile_path,
                    URL: get_results_url(os.path.join(dirname, vcf_fn)),
                    PDF: os.path.join(dirname, pdf_fn),
                    PDF_URL: get_results_url(os.path.join(dirname, pdf_fn)),
                    PNG: os.path.join(dirname, scatter_ind_pdf_fn),
                    PNG_URL: get_results_url(os.path.join(dirname, scatter_ind_pdf_fn)),
                    PNG_SUM: os.path.join(dirname, scatter_png_fn),
                    PNG_SUM_URL: get_results_url(os.path.join(dirname, scatter_png_fn)),
                    KDE_PNG: os.path.join(dirname, kde_ind_pdf_fn),
                    KDE_PNG_URL: get_results_url(os.path.join(dirname, kde_ind_pdf_fn)),
                    KDE_PNG_SUM: os.path.join(dirname, kde_png_fn),
                    KDE_PNG_SUM_URL: get_results_url(os.path.join(dirname, kde_png_fn)),
                    FINISH_DATESTAMP: datetime.today(),
                }
            }
            # If job has been deleted, then delete result and don't update DB.
            if len(db_connector.find(SA_GENOTYPER_COLLECTION, query, {})) > 0:
                db_connector.update(SA_GENOTYPER_COLLECTION, query, update)
            else:
                silently_remove_file(outfile_path)
                silently_remove_file(os.path.join(dirname, pdf_fn))
                silently_remove_file(os.path.join(dirname, scatter_png_fn))
                silently_remove_file(os.path.join(dirname, scatter_ind_pdf_fn))
                silently_remove_file(os.path.join(dirname, kde_png_fn))
                silently_remove_file(os.path.join(dirname, kde_ind_pdf_fn))
        except:
            APP_LOGGER.exception(
                "Error in Genotyper post request process callback.")
            error_msg = str(sys.exc_info()[1])
            update = {
                "$set": {
                    STATUS: JOB_STATUS.failed,  # @UndefinedVariable
                    RESULT: None,
                    PDF: None,
                    PNG: None,
                    PNG_SUM: None,
                    FINISH_DATESTAMP: datetime.today(),
                    ERROR: error_msg
                }
            }
            # If job has been deleted, then delete result and don't update DB.
            if len(db_connector.find(SA_GENOTYPER_COLLECTION, query, {})) > 0:
                db_connector.update(SA_GENOTYPER_COLLECTION, query, update)
            else:
                silently_remove_file(outfile_path)
                silently_remove_file(os.path.join(dirname, pdf_fn))
                silently_remove_file(os.path.join(dirname, scatter_png_fn))
                silently_remove_file(os.path.join(dirname, scatter_ind_pdf_fn))
                silently_remove_file(os.path.join(dirname, kde_png_fn))
                silently_remove_file(os.path.join(dirname, kde_ind_pdf_fn))
Example #8
    def process_callback(future):
        try:
            _ = future.result()
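            # Collect any error strings flagged in the identity report; they
            # are attached to the job record below.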
            report_errors = check_report_for_errors(report_path)
            update_data = { STATUS: JOB_STATUS.succeeded,
                            RESULT: outfile_path,
                            URL: get_results_url(outfile_path),
                            PLOT: plot_path,
                            REPORT: report_path,
                            PLOT_URL: get_results_url(plot_path),
                            REPORT_URL: get_results_url(report_path),
                            PLATE_PLOT_URL: get_results_url(plate_plot_path),
                            TEMPORAL_PLOT_URL: get_results_url(temporal_plot_path),
                            DROP_COUNT_PLOT_URL: get_results_url(drop_count_plot_path),
                            FINISH_DATESTAMP: datetime.today()}
            if report_errors:
                update_data[ERROR] = ' '.join(report_errors)

            update = {"$set": update_data}
            # If job has been deleted, then delete result and don't update DB.
            if len(db_connector.find(SA_IDENTITY_COLLECTION, query, {})) > 0:
                db_connector.update(SA_IDENTITY_COLLECTION, query, update)
            else:
                silently_remove_file(report_path)
                silently_remove_file(outfile_path)
                silently_remove_file(plot_path)
        except:
            APP_LOGGER.exception(traceback.format_exc())
            error_msg = str(sys.exc_info()[1])

            update    = { "$set": {STATUS: JOB_STATUS.failed, # @UndefinedVariable
                                   RESULT: None,
                                   FINISH_DATESTAMP: datetime.today(),
                                   ERROR: error_msg}}
            if os.path.isfile(report_path):
                # A partially generated report may still be useful, so keep
                # a link to it on the failed job record.
                update['$set'][REPORT_URL] = get_results_url(report_path)
            # If job has been deleted, then delete result and don't update DB.
            if len(db_connector.find(SA_IDENTITY_COLLECTION, query, {})) > 0:
                db_connector.update(SA_IDENTITY_COLLECTION, query, update)
            else:
                silently_remove_file(report_path)
                silently_remove_file(outfile_path)
                silently_remove_file(plot_path)
Example #9
    def process_request(cls, params_dict):
        image_stack_tgz = params_dict[cls._file_param][0]
        stack_type = params_dict[cls._stack_type_param][0]
        img_stack_name = params_dict[cls._name_param][0]
        short_desc = params_dict[cls._short_desc_param][0]
        http_status_code = 200
        uuid = str(uuid4())
        tmp_archive_path = os.path.join(TMP_PATH, uuid + '.tar.gz')
        archive_path = os.path.join(RESULTS_PATH, uuid + '.tar.gz')
        json_response = {
            FILENAME: image_stack_tgz.filename,
            UUID: uuid,
            DATESTAMP: datetime.today(),
        }

        try:
            # check tar file
            image_stack_tgz.save(tmp_archive_path)
            image_stack_tgz.close()

            tar_error, nimgs = check_mon_tar_structure(tmp_archive_path,
                                                       stack_type)

            # check for existing image stacks
            existing_stacks = cls._DB_CONNECTOR.find(IMAGES_COLLECTION, {
                NAME: img_stack_name,
                STACK_TYPE: stack_type
            }, [NAME])
            if existing_stacks:
                http_status_code = 403
                json_response[ERROR] = 'Image stack with given name already ' \
                            'exists.'
            elif tar_error:
                APP_LOGGER.error(tar_error)
                http_status_code = 415
                json_response[ERROR] = tar_error
            else:
                url = 'http://%s/results/%s/%s' % (
                    HOSTNAME, PORT, os.path.basename(archive_path))
                shutil.copy(tmp_archive_path, archive_path)
                json_response[RESULT] = archive_path
                json_response[URL] = url
                json_response[NAME] = img_stack_name
                json_response[DESCRIPTION] = short_desc
                json_response[NUM_IMAGES] = nimgs
                json_response[STACK_TYPE] = stack_type
                cls._DB_CONNECTOR.insert(IMAGES_COLLECTION, [json_response])
        except IOError:
            APP_LOGGER.exception(traceback.format_exc())
            http_status_code = 415
            json_response[ERROR] = str(sys.exc_info()[1])
        except:
            APP_LOGGER.exception(traceback.format_exc())
            http_status_code = 500
            json_response[ERROR] = str(sys.exc_info()[1])
        finally:
            if ID in json_response:
                del json_response[ID]
            silently_remove_file(tmp_archive_path)

        return make_clean_response(json_response, http_status_code)
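# check_mon_tar_structure() / check_ham_tar_structure() return a
# (tar_error, nimgs) pair: an error message (or None) plus the number of
# images found. A hypothetical sketch of that contract using tarfile; the
# real helpers also validate the layout expected for the given stack type:
import tarfile

def check_mon_tar_structure(archive_path, stack_type):
    # stack_type is unused in this sketch; the real helper checks it.
    try:
        with tarfile.open(archive_path, 'r:gz') as tar:
            files = [member.name for member in tar.getmembers()
                     if member.isfile()]
    except (tarfile.TarError, IOError) as err:
        return str(err), 0
    if not files:
        return 'Archive contains no image files.', 0
    return None, len(files)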
    def process_request(cls, params_dict):
        users            = params_dict[cls._users_param]
        date             = params_dict[cls._date_param][0]
        archive_name     = params_dict[cls._archive_param][0]
        beta             = params_dict[cls._beta_param][0]
        device           = params_dict[cls._device_param][0]
        dye_prof_metrics = params_dict[cls._dye_profile_metrics_param]
        surfactant       = params_dict[cls._surfactant_param][0]
        
        json_response = {}
        
        # Ensure archive directory is valid
        try:
            archives = get_archive_dirs(archive_name)
        except:
            APP_LOGGER.exception(traceback.format_exc())
            json_response[ERROR] = str(sys.exc_info()[1])
            return make_clean_response(json_response, 500)
        
        # Ensure only one valid archive is found
        if len(archives) != 1:
            APP_LOGGER.warning("Expected 1 archive, found %d" % len(archives))
            return make_clean_response(json_response, 404)

        response = {
                    USERS: users,
                    DATE: date,
                    ARCHIVE: archives[0],
                    BETA: beta,
                    DEVICE: device,
                    DYE_PROFILE_METRICS: dye_prof_metrics,
                    SURFACTANT: surfactant,
                    STATUS: JOB_STATUS.submitted,                # @UndefinedVariable
                    JOB_TYPE_NAME: JOB_TYPE.dye_profile_images,  # @UndefinedVariable
                    SUBMIT_DATESTAMP: datetime.today(),
                   }
        status_code = 200

        try:
            # The job-submission code below is commented out, so 'pass' keeps
            # this try block syntactically valid.
            pass

#             # Create helper functions
#             callable = PaProcessCallable(archive, dyes, device,
#                                              major, minor,
#                                              offset, use_iid, 
#                                              outfile_path, 
#                                              config_path,
#                                              response[UUID], 
#                                              cls._DB_CONNECTOR)
#             callback = make_process_callback(response[UUID], 
#                                              outfile_path, 
#                                              config_path,
#                                              cls._DB_CONNECTOR)
# 
#             # Add to queue and update DB
#             cls._DB_CONNECTOR.insert(PA_PROCESS_COLLECTION, [response])
#             cls._EXECUTION_MANAGER.add_job(response[UUID], 
#                                            abs_callable, callback)
        except:
            APP_LOGGER.exception(traceback.format_exc())
            response[ERROR]  = str(sys.exc_info()[1])
            status_code = 500
        finally:
            if ID in response:
                del response[ID]

        # Assumed ending, mirroring the other process_request examples:
        return make_clean_response(response, status_code)


    def process_request(cls, params_dict):
        # Reconstructed parameter extraction, assumed to match the
        # near-identical example that follows:
        image_stack_tgz = params_dict[cls._file_param][0]
        exp_def_name = params_dict[cls._exp_defs_param][0]
        img_stack_name = params_dict[cls._name_param][0]
        short_desc = params_dict[cls._short_desc_param][0]
        http_status_code = 200
        uuid             = str(uuid4())
        tmp_archive_path = os.path.join(TMP_PATH, uuid + ".tar.gz")
        archive_path     = os.path.join(RESULTS_PATH, uuid + ".tar.gz")
        json_response    = { 
                            FILENAME: image_stack_tgz.filename,
                            UUID: uuid,
                            DATESTAMP: datetime.today(),
                           }

        try:
            # check tar file
            image_stack_tgz.save(tmp_archive_path)
            image_stack_tgz.close()
            tar_error, nimgs = check_ham_tar_structure(tmp_archive_path, HAM)

            # check for existing image stacks
            existing_stacks = cls._DB_CONNECTOR.find(IMAGES_COLLECTION,
                                                     {NAME: img_stack_name, STACK_TYPE: HAM},
                                                     [NAME])

            # check for exp def
            exp_defs     = ExperimentDefinitions()
            exp_def_uuid = exp_defs.get_experiment_uuid(exp_def_name)

            if existing_stacks:
                http_status_code = 403
                json_response[ERROR] = "Image stack with given name already " \
                            "exists."
            elif not exp_def_uuid:
                http_status_code = 404
                json_response[ERROR] = "Couldn't locate UUID for " \
                    "experiment definition."
            elif tar_error:
                APP_LOGGER.error(tar_error)
                http_status_code = 415
                json_response[ERROR] = tar_error
            else:
                url = "http://%s/results/%s/%s" % (HOSTNAME, PORT,
                                                   os.path.basename(archive_path))
                shutil.copy(tmp_archive_path, archive_path)
                json_response[RESULT]       = archive_path
                json_response[URL]          = url
                json_response[NAME]         = img_stack_name
                json_response[DESCRIPTION]  = short_desc
                json_response[EXP_DEF_NAME] = exp_def_name
                json_response[EXP_DEF_UUID] = exp_def_uuid
                json_response[NUM_IMAGES]   = nimgs
                json_response[STACK_TYPE]  = HAM
                cls._DB_CONNECTOR.insert(IMAGES_COLLECTION,
                                         [json_response])
        except IOError:
            APP_LOGGER.exception(traceback.format_exc())
            http_status_code     = 415
            json_response[ERROR] = str(sys.exc_info()[1])
        except:
            APP_LOGGER.exception(traceback.format_exc())
            http_status_code     = 500
            json_response[ERROR] = str(sys.exc_info()[1])
        finally:
            if ID in json_response:
                del json_response[ID]
            silently_remove_file(tmp_archive_path)
        
        return make_clean_response(json_response, http_status_code)
Example #11
    def process_request(cls, params_dict):
        image_stack_tgz = params_dict[cls._file_param][0]
        exp_def_name = params_dict[cls._exp_defs_param][0]
        img_stack_name = params_dict[cls._name_param][0]
        short_desc = params_dict[cls._short_desc_param][0]
        http_status_code = 200
        uuid = str(uuid4())
        tmp_archive_path = os.path.join(TMP_PATH, uuid + ".tar.gz")
        archive_path = os.path.join(RESULTS_PATH, uuid + ".tar.gz")
        json_response = {
            FILENAME: image_stack_tgz.filename,
            UUID: uuid,
            DATESTAMP: datetime.today(),
        }

        try:
            # check tar file
            image_stack_tgz.save(tmp_archive_path)
            image_stack_tgz.close()
            tar_error, nimgs = check_ham_tar_structure(tmp_archive_path, HAM)

            # check for existing image stacks
            existing_stacks = cls._DB_CONNECTOR.find(IMAGES_COLLECTION, {
                NAME: img_stack_name,
                STACK_TYPE: HAM
            }, [NAME])

            # check for exp def
            exp_def_fetcher = ExpDefHandler()
            exp_def_uuid = exp_def_fetcher.get_experiment_uuid(exp_def_name)

            if existing_stacks:
                http_status_code = 403
                json_response[ERROR] = "Image stack with given name already " \
                            "exists."
            elif not exp_def_uuid:
                http_status_code = 404
                json_response[ERROR] = "Couldn't locate UUID for " \
                    "experiment definition."
            elif tar_error:
                APP_LOGGER.error(tar_error)
                http_status_code = 415
                json_response[ERROR] = tar_error
            else:
                url = "http://%s/results/%s/%s" % (
                    HOSTNAME, PORT, os.path.basename(archive_path))
                shutil.copy(tmp_archive_path, archive_path)
                json_response[RESULT] = archive_path
                json_response[URL] = url
                json_response[NAME] = img_stack_name
                json_response[DESCRIPTION] = short_desc
                json_response[EXP_DEF_NAME] = exp_def_name
                json_response[EXP_DEF_UUID] = exp_def_uuid
                json_response[NUM_IMAGES] = nimgs
                json_response[STACK_TYPE] = HAM
                cls._DB_CONNECTOR.insert(IMAGES_COLLECTION, [json_response])
        except IOError:
            APP_LOGGER.exception(traceback.format_exc())
            http_status_code = 415
            json_response[ERROR] = str(sys.exc_info()[1])
        except:
            APP_LOGGER.exception(traceback.format_exc())
            http_status_code = 500
            json_response[ERROR] = str(sys.exc_info()[1])
        finally:
            if ID in json_response:
                del json_response[ID]
            silently_remove_file(tmp_archive_path)

        return make_clean_response(json_response, http_status_code)
Example #12
    def process_request(cls, params_dict):
        plate_file = params_dict[ParameterFactory.file(
            "Raw plate reader data file.")][0]
        probe_sequence = params_dict[ParameterFactory.cs_string(
            "probe_sequence", "Probe sequence.")][0]
        probe_tm = params_dict[ParameterFactory.float(
            "probe_tm", "Probe melting temperature (degrees celsius).")][0]
        probe_length = params_dict[ParameterFactory.integer(
            "probe_length", "Probe length.")][0]
        target_sequence = params_dict[ParameterFactory.cs_string(
            "target_sequence", "Target sequence.")][0]
        variant_location = params_dict[ParameterFactory.cs_string(
            "variant_location", "Variant location.")][0]
        variant_allele = params_dict[ParameterFactory.cs_string(
            "variant_allele", "Variant allele.")][0]
        reference_allele = params_dict[ParameterFactory.cs_string(
            "reference_allele", "Reference allele.")][0]
        incubation_temp = params_dict[ParameterFactory.float(
            "incubation_temp", "Incubation temperature (degrees celsius).")][0]
        incubation_time = params_dict[ParameterFactory.float(
            "incubation_time", "Incubation time (minutes).")][0]

        json_response = {
            FILENAME: plate_file.filename,
            "probe_sequence": probe_sequence,
            "probe_tm": probe_tm,
            "probe_length": probe_length,
            "target_sequence": target_sequence,
            "variant_location": variant_location,
            "variant_allele": variant_allele,
            "reference_allele": reference_allele,
            "incubation_temp": incubation_temp,
            "incubation_time": incubation_time,
        }
        http_status_code = 200
        file_uuid = str(uuid4())

        path = os.path.join(PLATES_UPLOAD_PATH, file_uuid)
        existing_filenames = cls._DB_CONNECTOR.distinct(
            PLATES_COLLECTION, FILENAME)
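        # Reject the upload (403) if this temp path already exists or a plate
        # file with the same name has already been registered.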
        if os.path.exists(path) or plate_file.filename in existing_filenames:
            http_status_code = 403
        else:
            try:
                plate_file.save(path)
                plate_file.close()
                json_response[URL] = "http://%s/uploads/%s/plates/%s" % (
                    HOSTNAME, PORT, file_uuid)
                json_response[FILEPATH] = path
                json_response[UUID] = file_uuid
                json_response[DATESTAMP] = datetime.today()
                json_response[TYPE] = "plate"

                cls._DB_CONNECTOR.insert(PLATES_COLLECTION, [json_response])
            except:
                APP_LOGGER.exception(traceback.format_exc())
                json_response[ERROR] = str(sys.exc_info()[1])
                http_status_code = 500
            finally:
                if ID in json_response:
                    del json_response[ID]
                silently_remove_file(path)

        return make_clean_response(json_response, http_status_code)