Example #1
def update_db(project):
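	"""Load OncodriveCLUST results into the project database: excluded-gene
	causes and per-gene cluster coordinates, z-score, p-value and q-value."""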
	log = task.logger

	projects_out_port = task.ports("projects_out")

	project_id = project["id"]
	log.info("--- [{0}] --------------------------------------------".format(project_id))

	oclust = project["oncodriveclust"]
	del project["oncodriveclust"]

	if not os.path.exists(oclust["results"]):
		log.warn("No results have been found. Skipping it.")
		return

	log.info("Updating the project database ...")

	projdb = ProjectDb(project["db"])

	exc_path = os.path.join(project["temp_path"], "oncodriveclust-excluded-cause.tsv")

	log.info("  Excluded gene causes ...")
	log.debug("    > {0}".format(exc_path))

	count = 0
	with tsv.open(exc_path, "r") as exf:
		for gene, cause in tsv.lines(exf, (str, str), header=True):
			projdb.update_gene(Gene(id=gene, clust_exc_cause=cause))
			count += 1

	log.debug("    {0} genes excluded".format(count))

	log.info("  OncodriveCLUST results ...")

	with tsv.open(oclust["results"], "r") as f:
		types = (str, str, float, float, float)
		columns = ("GENE", "CLUST_COORDS", "ZSCORE", "PVALUE", "QVALUE")
		for gene, coords, zscore, pvalue, qvalue in tsv.lines(f, types, columns=columns, header=True, null_value="NA"):
			projdb.update_gene(Gene(id=gene, clust_coords=coords, clust_zscore=zscore, clust_pvalue=pvalue,
									clust_qvalue=qvalue, clust_exc_cause=ProjectDb.NO_GENE_EXC))

	projdb.commit()

	projdb.close()

	projects_out_port.send(project)
Example #2
def projects(project):
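	"""Compute project recurrences for variant-affected genes and, unless the
	run is variants-only, for genes and pathways as well."""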
	log = task.logger

	config = GlobalConfig(task.conf)

	projects_out_port = task.ports("projects_out")

	log.info("--- [{0}] --------------------------------------------".format(project["id"]))

	projdb = ProjectDb(project["db"])

	total_samples = projdb.get_total_affected_samples()

	if total_samples == 0:
		log.warn("There are no samples, recurrences cannot be calculated.")
		projdb.close()
		return

	log.info("Calculating project recurrences for variant genes ...")

	projdb.compute_affected_genes_recurrences(total_samples)

	if not config.variants_only:

		log.info("Calculating project recurrences for genes ...")

		projdb.compute_gene_recurrences(total_samples)

		log.info("Calculating project recurrences for pathways ...")

		projdb.compute_pathway_recurrences(total_samples)

	projdb.commit()
	projdb.close()

	projects_out_port.send(project)
Example #3
def scan_files(project):
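	"""Create the project variants database: load genes and pathways, download
	and parse the variant source files, and route the project by assembly
	(hg18 projects go through the liftover port)."""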
	log = task.logger
	conf = task.conf

	config = GlobalConfig(conf)
	paths = PathsConfig(config)

	projects_port, liftover_projects_port = task.ports("projects_out", "liftover_projects")

	project_id = project["id"]
	temp_path = project["temp_path"]
	project_path = project["path"]
	projdb_path = project["db"]
	assembly = project["assembly"]

	log.info("--- [{0}] --------------------------------------------".format(project_id))

	if assembly == "hg18":
		out_port = liftover_projects_port
	elif assembly == "hg19":
		out_port = projects_port
	else:
		raise Exception("Unexpected assembly: {0}".format(assembly))

	#if os.path.exists(projdb_path):
	#	log.warn("Variations database already created, skipping this step.")
	#	out_port.send(project)
	#	return

	if os.path.exists(projdb_path):
		os.remove(projdb_path)

	log.info("Creating variants database ...")

	projdb_tmp_path = make_temp_file(task, suffix=".db")

	log.debug(projdb_tmp_path)

	projdb = ProjectDb(projdb_tmp_path).create()

	data_path = config.data_path

	log.info("Loading genes ...")

	projdb.load_genes(paths.data_ensembl_genes_path())

	log.info("Loading pathways ...")

	projdb.load_pathways(
		paths.data_kegg_def_path(),
		paths.data_kegg_ensg_map_path())

	log.info("Parsing variants ...")

	for obj_name in project["storage_objects"]:
		log.info("Downloading {} ...".format(obj_name))
		dst_path = os.path.join(project_path, "sources", os.path.basename(obj_name))
		dst_dirname = os.path.dirname(dst_path)
		if not os.path.exists(dst_dirname):
			os.makedirs(dst_dirname)
		# TODO: do not copy the source file (do not specify dst_path)
		task.storage.get_object(obj_name).get_data(dst_path)

		for container_name, path, name, ext, f in archived_files(dst_path):
			fname = os.path.join(path, name + ext)
			if container_name is not None:
				source_name = "{0}:{1}".format(os.path.basename(container_name), fname)
			else:
				source_name = name + ext

			log.info("=> {0} ...".format(source_name))

			sample_id = os.path.basename(name)

			if ext.lower() in _SUPPORTED_EXTENSIONS:
				parser_type = ext[1:]
			else:
				parser_type = "tab"

			parser = create_variants_parser(parser_type, f, source_name, sample_id)

			source_id = projdb.add_source(source_name)

			var_ids = set()
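			# add each parsed variant and register the raw source lines read by the parser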
			for var in parser:
				for line_num, text in parser.read_lines():
					projdb.add_source_line(source_id, line_num, text)

				var_id = projdb.add_variant(var, source_id=source_id, line_num=parser.get_line_num())
				var_ids.add(var_id)

			for line_num, text in parser.read_lines():
				projdb.add_source_line(source_id, line_num, text)

			num_variants = len(var_ids)
			log.info("   {0} variants".format(num_variants))

			if num_variants == 0:
				raise Exception("No variants found in source '{}'. "
								"Please check the documentation for the expected input for '{}' format.".format(
								source_name, parser.name))

	projdb.commit()
	projdb.close()

	log.info("Copying variants database ...")

	log.debug("{0} -> {1}".format(projdb_tmp_path, projdb_path))

	shutil.copy(projdb_tmp_path, projdb_path)

	remove_temp(task, projdb_tmp_path)

	out_port.send(project)
Example #4
def end():
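	"""Update each project database with OncodriveFM results: sample-gene
	functional impacts, excluded-gene causes, and per-gene and per-pathway
	p-values and q-values."""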
	log = task.logger

	projects_out_port = task.ports("projects_out")

	log.info("Updating the projects database ...")

	for project_id, projects in task.context.items():

		log.info("[{0}]".format(project_id))

		for index, project in enumerate(projects):
			projdb = ProjectDb(project["db"])

			if index == 0:
				log.info("  Functional impact ...")

				projdb.delete_sample_gene_fimpact()

				with tsv.open(project["sample_gene_fi_data"], "r") as f:
					types = (int, str, float, float, int, float, float, int, float, float, int)
					for fields in tsv.lines(f, types, header=True, null_value="-"):
						projdb.add_sample_gene_fimpact(*fields)

			ofm = project["oncodrivefm"]
			del project["oncodrivefm"]

			exc_path = os.path.join(project["temp_path"], "oncodrivefm-excluded-cause.tsv")

			log.info("  Excluded gene causes ...")
			log.debug("    > {0}".format(exc_path))

			count = 0
			with tsv.open(exc_path, "r") as exf:
				for gene, cause in tsv.lines(exf, (str, str), header=True):
					projdb.update_gene(Gene(id=gene, fm_exc_cause=cause))
					count += 1

			log.debug("    {0} genes excluded".format(count))

			for feature, results_path in ofm:

				log.info("  {0} ...".format(feature))
				log.debug("    > {0}".format(results_path))

				if feature == "genes":
					with tsv.open(results_path, "r") as f:
						count = 0
						for gene, pvalue, qvalue in tsv.lines(f, (str, float, float), header=True):
							projdb.update_gene(Gene(id=gene, fm_pvalue=pvalue,
													fm_qvalue=qvalue, fm_exc_cause=ProjectDb.NO_GENE_EXC))
							count += 1
						log.info("    {0} genes".format(count))
				elif feature == "pathways":
					with tsv.open(results_path, "r") as f:
						count = 0
						for pathway, zscore, pvalue, qvalue in tsv.lines(f, (str, float, float, float), header=True):
							projdb.update_pathway(Pathway(id=pathway, fm_zscore=zscore,
														  fm_pvalue=pvalue, fm_qvalue=qvalue))
							count += 1
						log.info("    {0} pathways".format(count))

			projdb.commit()

			projdb.close()

		projects_out_port.send(projects[0])
Example #5
def liftover(project):
    log = task.logger
    conf = task.conf

    config = GlobalConfig(conf)

    lifted_project_port = task.ports("lifted_projects")

    log.info("--- [{0}] --------------------------------------------".format(project["id"]))

    log.info("Preparing liftOver files ...")

    in_path = make_temp_file(task, suffix=".bed")
    in_file = open(in_path, "w")
    out_path = make_temp_file(task, suffix=".bed")
    unmapped_path = os.path.join(project["temp_path"], "liftover_unmapped.bed")

    projdb = ProjectDb(project["db"])

    for var in projdb.variants(order_by="position"):
        in_file.write(tsv.line_text("chr" + var.chr, var.start, var.start + len(var.ref), var.id))

    in_file.close()

    log.info("Running liftOver ...")

    project["from_assembly"] = project["assembly"]
    project["assembly"] = "hg19"

    cmd = " ".join(
        [
            conf["liftover_bin"],
            in_path,
            os.path.join(conf["liftover_chain_path"], "hg18ToHg19.over.chain"),
            out_path,
            unmapped_path,
        ]
    )

    log.debug(cmd)

    subprocess.call(cmd, shell=True)

    log.info("Annotating unmapped variants ...")

    count = 0
    with open(unmapped_path, "r") as f:
        for line in f:
            if line.lstrip().startswith("#"):
                continue
            fields = line.rstrip().split("\t")
            var_id = int(fields[3])
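            # annotate the unmapped variant by clearing its start position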
            projdb.update_variant_start(var_id, start=None)
            count += 1

    log.info("  {0} unmapped variants annotated".format(count))

    log.info("Updating variants ...")

    count = 0
    with open(out_path, "r") as f:
        for line in f:
            fields = line.rstrip().split("\t")
            chr, start, end, var_id = fields
            projdb.update_variant_start(int(var_id), start=int(start))
            count += 1

    log.info("  {0} variants".format(count))

    remove_temp(task, in_path, out_path)

    projdb.commit()
    projdb.close()

    lifted_project_port.send(project)
Example #6
def update_db(project):
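    """Update the project database with VEP consequences, external variant
    references, transcript and variant-gene functional impacts, and per-gene
    external references."""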
    log = task.logger

    config = GlobalConfig(task.conf)

    projects_port = task.ports("projects_out")

    log.info("--- [{0}] --------------------------------------------".format(project["id"]))

    partitions = project["partitions"]

    if not os.path.exists(config.vardb_path):
        log.warn("Database for variation external references not found")
        log.debug("> {0}".format(conf["vardb_path"]))

    varxdb = VarXrefsDb(config.vardb_path)
    varxdb.open()

    projdb = ProjectDb(project["db"])

    updated_variants = set()

    plen = len(partitions)

    gene_xrefs = defaultdict(set)

    for part in partitions:
        log.info("Updating database with partition data ({0} out of {1}) ...".format(part["index"] + 1, plen))

        log.info("  VEP results ...")

        ctype = lambda v: v.split(",")

        with open(part["vep_path"], "r") as vf:
            for fields in tsv.lines(vf, (int, str, str, ctype, str, str, str, float, float), null_value="-"):
                (
                    var_id,
                    gene,
                    transcript,
                    consequences,
                    protein_pos,
                    aa_change,
                    protein,
                    sift_score,
                    pph2_score,
                ) = fields

                var = projdb.get_variant(var_id)

                xrefs = varxdb.get_xrefs(var.chr, var.start, var.ref, var.alt, var.strand)

                if xrefs is not None:
                    xrefs = ["{0}:{1}".format(source, xref) for source, xref in xrefs]
                    gene_xrefs[gene].update(xrefs)

                    if len(xrefs) == 0:
                        xrefs = None

                projdb.update_variant(Variant(id=var_id, xrefs=xrefs))

                projdb.add_consequence(
                    Consequence(
                        var=Variant(id=var_id),
                        transcript=transcript,
                        gene=gene,
                        ctypes=consequences,
                        protein_pos=protein_pos,
                        aa_change=aa_change,
                        protein=protein,
                    )
                )

        log.info("  Transcript functional impacts ...")

        with open(part["tfi_path"], "r") as f:
            types = (int, str, str, int, float, float, int, float, float, int, float, float, int)
            columns = [0, 1, 3, 6, 7, 8, 9, 11, 12, 13, 15, 16, 17]
            for fields in tsv.lines(f, types, columns=columns, null_value="-"):
                (
                    var_id,
                    transcript,
                    uniprot,
                    impact,
                    sift_score,
                    sift_tfic,
                    sift_class,
                    pph2_score,
                    pph2_tfic,
                    pph2_class,
                    ma_score,
                    ma_tfic,
                    ma_class,
                ) = fields
                log.debug(fields)

                projdb.update_consequence(
                    Consequence(
                        var=Variant(id=var_id),
                        transcript=transcript,
                        uniprot=uniprot,
                        sift_score=sift_score,
                        sift_tfic=sift_tfic,
                        sift_tfic_class=sift_class,
                        pph2_score=pph2_score,
                        pph2_tfic=pph2_tfic,
                        pph2_tfic_class=pph2_class,
                        ma_score=ma_score,
                        ma_tfic=ma_tfic,
                        ma_tfic_class=ma_class,
                        impact=impact,
                    )
                )

    log.info("Updating variant-gene functional impacts ...")

    with open(project["gfi_path"], "r") as f:
        types = (int, str, float, int, str)
        for var_id, gene, impact, coding_region, prot_changes in tsv.lines(f, types, null_value="-"):
            projdb.add_affected_gene(
                AffectedGene(
                    var=Variant(id=var_id),
                    gene_id=gene,
                    impact=impact,
                    coding_region=coding_region,
                    prot_changes=prot_changes,
                )
            )

    log.info("Updating database with gene external variant references ...")

    for gene, xrefs in gene_xrefs.items():
        projdb.update_gene(Gene(id=gene, xrefs=xrefs))

    projdb.commit()
    projdb.close()

    varxdb.close()

    del project["partitions"]

    projects_port.send(project)