# Ejemplo n.º 1
def validate_bubbleplot_input(_id, feature_table_artifact_path, taxonomy_artifact_path, metadata_path=None, fill_variable=None):
	"""
	Pre-validate bubbleplot inputs to reduce the chance of the job failing later.

	Input:
		- feature_table_artifact_path: feature table in QIIME2 artifact format (either path or FileStorage object)
		- taxonomy_artifact_path: taxonomy in QIIME2 artifact format (either path or FileStorage object)
		- metadata_path: optional metadata file
		- fill_variable: optional metadata column that must exist if metadata is given
	"""
	# Persist the uploads inside the docker container first
	feature_table_uploaded_path = save_uploaded_file(_id, feature_table_artifact_path)
	taxonomy_uploaded_path = save_uploaded_file(_id, taxonomy_artifact_path)
	metadata_uploaded_path = save_uploaded_file(_id, metadata_path) if metadata_path is not None else None

	def validate_artifact(feature_table_uploaded_path, taxonomy_uploaded_path):
		# Verify both artifacts have the expected QIIME2 semantic types
		try:
			check_artifact_type(feature_table_uploaded_path, "feature_table")
			check_artifact_type(taxonomy_uploaded_path, "taxonomy")
		except AXIOME3PipelineError as err:
			return 400, str(err)

		return 200, "Imported data good!"

	def validate_metadata(metadata_uploaded_path, fill_variable):
		# Loading through the QIIME2 metadata API also validates the file format
		try:
			metadata_df = load_metadata(metadata_uploaded_path)
		except MetadataFileError as err:
			return 400, str(err)

		# A user-specified column must actually exist in the metadata file
		if fill_variable is not None:
			try:
				check_column_exists(metadata_df, fill_variable)
			except AXIOME3PipelineError as err:
				return 400, str(err)

		return 200, "Ok"

	responseIfError(validate_artifact, feature_table_uploaded_path=feature_table_uploaded_path, taxonomy_uploaded_path=taxonomy_uploaded_path)
	if metadata_uploaded_path is not None:
		responseIfError(validate_metadata, metadata_uploaded_path=metadata_uploaded_path, fill_variable=fill_variable)

	return feature_table_uploaded_path, taxonomy_uploaded_path, metadata_uploaded_path
# Ejemplo n.º 2
def validate_triplot_input(_id, feature_table_artifact_path, taxonomy_artifact_path, metadata_path, environmental_metadata_path, fill_variable):
	"""
	Do prechecks as to decrease the chance of job failing.

	Input:
		- feature_table_artifact_path: feature table in QIIME2 artifact format (either path or FileStorage object)
		- taxonomy_artifact_path: taxonomy in QIIME2 artifact format (either path or FileStorage object)
		- metadata_path: sample metadata file
		- environmental_metadata_path: environmental metadata file
		- fill_variable: metadata column that must exist in the sample metadata
	"""
	# Save uploaded files in the docker container
	feature_table_uploaded_path = save_uploaded_file(_id, feature_table_artifact_path)
	taxonomy_uploaded_path = save_uploaded_file(_id, taxonomy_artifact_path)
	metadata_uploaded_path = save_uploaded_file(_id, metadata_path)
	environmental_metadata_uploaded_path = save_uploaded_file(_id, environmental_metadata_path)

	# fill_variable is passed explicitly (instead of relying on the enclosing
	# scope) for consistency with validate_bubbleplot_input.
	def validate_metadata(metadata_path, environmental_metadata_path, fill_variable):
		# Load metadata via QIIME2 metadata API
		# It will verify metadata validity as well
		try:
			metadata_df = load_metadata(metadata_path)
		except MetadataFileError as err:
			return 400, str(err)

		# Environmental metadata only needs to load cleanly; its columns are not checked here
		try:
			load_metadata(environmental_metadata_path)
		except MetadataFileError as err:
			return 400, str(err)

		# Check user-specified column actually exists in the sample metadata file
		try:
			check_column_exists(metadata_df, fill_variable)
		except AXIOME3PipelineError as err:
			return 400, str(err)

		return 200, "Ok"

	def validate_artifact(feature_table_uploaded_path, taxonomy_uploaded_path):
		# Check Artifact types
		try:
			check_artifact_type(feature_table_uploaded_path, "feature_table")
			check_artifact_type(taxonomy_uploaded_path, "taxonomy")
		except AXIOME3PipelineError as err:
			return 400, str(err)

		return 200, "Imported data good!"

	responseIfError(validate_metadata, metadata_path=metadata_uploaded_path, environmental_metadata_path=environmental_metadata_uploaded_path, fill_variable=fill_variable)
	responseIfError(validate_artifact, feature_table_uploaded_path=feature_table_uploaded_path, taxonomy_uploaded_path=taxonomy_uploaded_path)

	return feature_table_uploaded_path, taxonomy_uploaded_path, metadata_uploaded_path, environmental_metadata_uploaded_path
# Ejemplo n.º 3
def analysis_precheck(_id, feature_table, rep_seqs, taxonomy, metadata):
    """
    Do prechecks as to decrease the chance of job failing.

    Input:
        - feature_table: QIIME2 artifact of type FeatureTable[Frequency]
        - rep_seqs: QIIME2 artifact of type FeatureData[Sequence]
        - taxonomy: QIIME2 artifact of type FeatureData[Taxonomy]
        - metadata: sample metadata file (saved but not validated here)
    """
    # Save uploaded files in the docker container
    feature_table_path = save_uploaded_file(_id, feature_table)
    rep_seqs_path = save_uploaded_file(_id, rep_seqs)
    taxonomy_path = save_uploaded_file(_id, taxonomy)
    metadata_path = save_uploaded_file(_id, metadata)

    def validate_analysis_input(feature_table, rep_seqs, taxonomy):
        """
        Precheck input artifacts prior to running the analysis step.

        Input:
            - feature_table: Path to QIIME2 artifact of type FeatureTable[Frequency]
            - rep_seqs: Path to QIIME2 artifact of type FeatureData[Sequence]
            - taxonomy: Path to QIIME2 artifact of type FeatureData[Taxonomy]
        """
        # Check Artifact types; a mismatch is reported back as a 400 response
        try:
            feature_table_artifact = Artifact.load(feature_table)
            rep_seqs_artifact = Artifact.load(rep_seqs)
            taxonomy_artifact = Artifact.load(taxonomy)

            if (str(feature_table_artifact.type) != "FeatureTable[Frequency]"):
                msg = "Input Feature Table is not of type 'FeatureTable[Frequency]'!"
                raise ValueError(msg)

            if (str(rep_seqs_artifact.type) != "FeatureData[Sequence]"):
                msg = "Input Representative Sequences is not of type 'FeatureData[Sequence]'!"
                raise ValueError(msg)

            # BUGFIX: taxonomy was previously accepted but never validated
            if (str(taxonomy_artifact.type) != "FeatureData[Taxonomy]"):
                msg = "Input Taxonomy is not of type 'FeatureData[Taxonomy]'!"
                raise ValueError(msg)

        except ValueError as err:
            message = str(err)

            return 400, message

        return 200, "Imported data good!"

    responseIfError(validate_analysis_input,
                    feature_table=feature_table_path,
                    rep_seqs=rep_seqs_path,
                    taxonomy=taxonomy_path)

    return feature_table_path, rep_seqs_path, taxonomy_path, metadata_path
# Ejemplo n.º 4
def taxonomic_classification_precheck(_id, feature_table, rep_seqs, classifier=None):
	"""
	Pre-validate inputs to reduce the chance of the classification job failing.

	Input:
		- feature_table: QIIME2 artifact of type FeatureTable[Frequency]
		- rep_seqs: QIIME2 artifact of type FeatureData[Sequence]
		- classifier: optional custom classifier upload; the app default is used when omitted
	"""
	feature_table_path = save_uploaded_file(_id, feature_table)
	rep_seqs_path = save_uploaded_file(_id, rep_seqs)

	# Default classifier shipped with the app (used when no custom classifier given)
	default_classifier_path = current_app.config["DEFAULT_CLASSIFIER_PATH"]
	if classifier is None:
		classifier_path = default_classifier_path
	else:
		classifier_path = save_uploaded_file(_id, classifier)

	def validate_taxonomic_classification_input(feature_table, rep_seqs):
		"""
		Precheck input artifacts prior to running the classification step.

		Input:
			- feature_table: Path to QIIME2 artifact of type FeatureTable[Frequency]
			- rep_seqs: Path to QIIME2 artifact of type FeatureData[Sequence]
		"""
		# Verify both artifacts carry the expected QIIME2 semantic types
		try:
			feature_table_type = str(Artifact.load(feature_table).type)
			rep_seqs_type = str(Artifact.load(rep_seqs).type)

			if feature_table_type != "FeatureTable[Frequency]":
				raise ValueError("Input Feature Table is not of type 'FeatureTable[Frequency]'!")

			if rep_seqs_type != "FeatureData[Sequence]":
				raise ValueError("Input Representative Sequences is not of type 'FeatureData[Sequence]'!")

		except ValueError as err:
			return 400, str(err)

		return 200, "Imported data good!"

	responseIfError(validate_taxonomic_classification_input, feature_table=feature_table_path, rep_seqs=rep_seqs_path)

	return feature_table_path, rep_seqs_path, classifier_path
# Ejemplo n.º 5
def validate_pcoa_input(_id, pcoa_artifact_path, metadata_path, target_primary, target_secondary=None):
	"""
	Pre-validate PCoA plot inputs: the metadata file, its target columns,
	and the PCoA artifact type.
	"""
	# Persist the uploads inside the docker container
	metadata_uploaded_path = save_uploaded_file(_id, metadata_path)
	# Save QIIME2 PCoA artifact
	pcoa_uploaded_path = save_uploaded_file(_id, pcoa_artifact_path)

	def validate_metadata(metadata_path, target_primary, target_secondary):
		# Loading through the QIIME2 metadata API also validates the file format
		try:
			metadata_df = load_metadata(metadata_path)
		except MetadataFileError as err:
			return 400, str(err)

		# The user-specified target columns must exist in the metadata file
		try:
			check_column_exists(metadata_df, target_primary, target_secondary)
		except AXIOME3PipelineError as err:
			return 400, str(err)

		return 200, "Ok"

	def validate_artifact(pcoa_artifact_path):
		# The uploaded artifact must be a QIIME2 PCoA result
		try:
			check_artifact_type(pcoa_artifact_path, "pcoa")
		except AXIOME3PipelineError as err:
			return 400, str(err)

		return 200, "OK"

	responseIfError(validate_metadata, metadata_path=metadata_uploaded_path, target_primary=target_primary, target_secondary=target_secondary)
	responseIfError(validate_artifact, pcoa_artifact_path=pcoa_uploaded_path)

	return pcoa_uploaded_path, metadata_uploaded_path
# Ejemplo n.º 6
def denoise_precheck(_id, sequence_data):
    """
    Pre-validate denoise input to reduce the chance of the job failing later.

    Input:
        - sequence_data: sequence data in QIIME2 artifact format
    """
    # Save the uploaded file in the docker container
    upload_path = save_uploaded_file(_id, sequence_data)

    def validate_denoise_input(sequence_data):
        """
        Verify the uploaded artifact is paired-end sequence data.

        Input:
            - sequence_data: path to sequence data in QIIME2 artifact format
        """
        expected_type = "SampleData[PairedEndSequencesWithQuality]"

        # Check Artifact type; a mismatch is reported back as a 400 response
        try:
            q2_artifact = Artifact.load(sequence_data)
            if str(q2_artifact.type) != expected_type:
                raise ValueError(
                    "Input QIIME2 Artifact is not of type 'SampleData[PairedEndSequencesWithQuality]'!"
                )
        except ValueError as err:
            return 400, str(err)

        return 200, "Imported data good!"

    responseIfError(validate_denoise_input, sequence_data=upload_path)

    return upload_path