Example #1
def load_analysis(ctx, data_directory, id, projects, reload):
    es_host = ctx.obj['host']
    es_port = ctx.obj['port']

    assert reload or not _is_loaded(
        id, es_host, es_port
    ), f'Dashboard with ID {id} already loaded. To reload, add --reload to command'

    nonexistent_projects = [
        project for project in projects
        if not _is_project_exist(project, es_host, es_port)
    ]

    assert len(nonexistent_projects) == 0, \
        f'Projects do not exist: {nonexistent_projects}'

    if reload:
        _clean_analysis(id, host=es_host, port=es_port)

    hmmcopy_data = _get_scgenome_colossus_tantalus_data(data_directory)
    analysis_record = _get_colossus_tantalus_analysis_object(
        data_directory, id)

    _load_analysis(id, hmmcopy_data, analysis_record, projects, data_directory,
                   es_host, es_port)
Example #2
def load_analysis_from_dirs(ctx, alignment_dir, hmmcopy_dir, annotation_dir,
                            id, projects, reload):
    # Likely for IGO (a.k.a. Isabl) data rather than Tantalus or MSK.
    # Andrew's function; kept for loading the one-off data sets he sends over.
    es_host = ctx.obj['host']
    es_port = ctx.obj['port']

    assert reload or not _is_loaded(
        id, es_host, es_port
    ), f'Dashboard with ID {id} already loaded. To reload, add --reload to command'

    nonexistent_projects = [
        project for project in projects
        if not _is_project_exist(project, es_host, es_port)
    ]

    assert len(nonexistent_projects) == 0, \
        f'Projects do not exist: {nonexistent_projects}'

    if reload:
        _clean_analysis(id, host=es_host, port=es_port)

    alhena.alhena_loader.load_analysis_from_dirs(id, projects, es_host,
                                                 es_port, alignment_dir,
                                                 hmmcopy_dir, annotation_dir)
Example #3
def load_merged_analysis_bccrc(ctx, data_directory, id, projects, reload):
    es_host = ctx.obj['host']
    es_port = ctx.obj['port']
    # TODO: read metadata.json, sc-test.json, and <id>.json from the data
    # directory and check that the referenced libraries exist, ideally in a
    # small new helper called bccrc_verify_libraries().

    assert reload or not _is_loaded(
        id, es_host, es_port
    ), f'Dashboard with ID {id} already loaded. To reload, add --reload to command'

    nonexistent_projects = [
        project for project in projects
        if not _is_project_exist(project, es_host, es_port)
    ]

    assert len(nonexistent_projects) == 0, \
        f'Projects do not exist: {nonexistent_projects}'

    if reload:
        _clean_analysis(id, host=es_host, port=es_port)

    _download_libraries_for_merged(id, data_directory)

    _load_merged_analysis(id, projects, data_directory, es_host, es_port)
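
The TODO in Example #3 mentions a bccrc_verify_libraries() helper that does not exist yet. A minimal sketch of what it could look like, assuming only that the three metadata files sit directly in the data directory; their internal structure, and therefore the real library-existence check, is not shown in the snippet:

import json
import os


def bccrc_verify_libraries(dashboard_id, data_directory):
    # Hypothetical helper: confirm the expected metadata files are present and
    # parse as valid JSON before loading a merged BCCRC analysis.
    expected_files = ['metadata.json', 'sc-test.json', f'{dashboard_id}.json']
    missing = [
        filename for filename in expected_files
        if not os.path.exists(os.path.join(data_directory, filename))
    ]
    assert not missing, f'Missing metadata files in {data_directory}: {missing}'

    for filename in expected_files:
        with open(os.path.join(data_directory, filename)) as handle:
            json.load(handle)
    # Verifying that the listed libraries actually exist (e.g. in Elasticsearch)
    # would depend on the structure of these files, which is not shown here.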
Example #4
def load_analysis_msk(ctx, id, projects, reload):
    es_host = ctx.obj['host']
    es_port = ctx.obj['port']

    annotation_pk = _get_scgenome_isabl_annotation_pk(id)
    hmmcopy_data = _get_scgenome_isabl_data(id)
    print(f'{id} is now {annotation_pk}')

    id = str(annotation_pk)

    assert reload or not _is_loaded(
        id, es_host, es_port
    ), f'Dashboard with ID {id} already loaded. To reload, add --reload to command'

    nonexistent_projects = [
        project for project in projects
        if not _is_project_exist(project, es_host, es_port)
    ]

    assert len(nonexistent_projects) == 0, \
        f'Projects do not exist: {nonexistent_projects}'

    if reload:
        _clean_analysis(id, host=es_host, port=es_port)

    analysis_record = _get_isabl_analysis_object(id)

    _load_analysis(id, hmmcopy_data, analysis_record, projects, es_host,
                   es_port)
Example #5
def load_analysis(ctx, data_directory, id, reload):
    es_host = ctx.obj['host']
    es_port = ctx.obj['port']

    if reload:
        _clean_analysis(id, host=es_host, port=es_port)

    _load_analysis(id, data_directory, es_host, es_port)
Example #6
def load_analysis_shah(ctx, data_directory, id, sample_id, library_id,
                       description, download, reload):
    es_host = ctx.obj['host']
    es_port = ctx.obj['port']

    if download:
        data_directory = _download_analysis(id, data_directory, sample_id,
                                            library_id, description)

    if reload:
        _clean_analysis(id, host=es_host, port=es_port)

    _load_analysis(id, data_directory, es_host, es_port)
Example #7
def load_analysis(ctx, data_directory, id, projects, reload):
    es_host = ctx.obj['host']
    es_port = ctx.obj['port']

    assert reload or not _is_loaded(
        id, es_host, es_port
    ), f'Dashboard with ID {id} already loaded. To reload, add --reload to command'

    nonexistent_projects = [
        project for project in projects
        if not _is_project_exist(project, es_host, es_port)
    ]

    assert len(nonexistent_projects) == 0, \
        f'Projects do not exist: {nonexistent_projects}'

    if reload:
        _clean_analysis(id, host=es_host, port=es_port)

    _load_analysis(id, projects, data_directory, es_host, es_port)
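
Examples #1 through #4 and #7 all repeat the same two-step guard before loading. A minimal consolidation sketch, reusing the _is_loaded and _is_project_exist helpers these snippets already call (the wrapper name is an assumption, not part of the original code):

def _verify_load_preconditions(dashboard_id, projects, es_host, es_port, reload):
    # Refuse to continue if the dashboard is already loaded and --reload was not given.
    assert reload or not _is_loaded(dashboard_id, es_host, es_port), \
        f'Dashboard with ID {dashboard_id} already loaded. To reload, add --reload to command'

    # Every requested project must already exist before the analysis is loaded.
    nonexistent_projects = [
        project for project in projects
        if not _is_project_exist(project, es_host, es_port)
    ]
    assert not nonexistent_projects, f'Projects do not exist: {nonexistent_projects}'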
Example #8
def load_dashboard(ctx, data_directory, id, projects, download, reload):
    es_host = ctx.obj['host']
    es_port = ctx.obj['port']

    nonexistent_projects = [
        project for project in projects
        if not _is_project_exist(project, es_host, es_port)
    ]

    assert len(nonexistent_projects) == 0, \
        f'Projects do not exist: {nonexistent_projects}'

    download_type = "merged" if os.path.exists(
        os.path.join(data_directory, constants.MERGED_DIRECTORYNAME,
                     f"{id}.json")) else "single"

    if download:
        if download_type == "merged":
            _download_libraries_for_merged(id, data_directory)
        elif download_type == "single":
            data_directory = _download_analysis(id, data_directory)

    if reload:
        _clean_analysis(id, host=es_host, port=es_port, projects=projects)

    if _is_loaded(id, es_host, es_port):
        _add_dashboard_to_projects(id, projects, es_host, es_port)
    else:
        if download_type == "merged":
            _load_merged_analysis(id, projects, data_directory, es_host,
                                  es_port)
        elif download_type == "single":
            hmmcopy_data = _get_scgenome_colossus_tantalus_data(data_directory)
            analysis_record = _get_colossus_tantalus_analysis_object(
                data_directory, id)
            _load_analysis(id, hmmcopy_data, analysis_record, projects,
                           data_directory, es_host, es_port)
def clean_analysis(ctx, dashboard_id):
    _clean_analysis(dashboard_id, host=ctx.obj['host'], port=ctx.obj['port'])
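
Every command above reads the Elasticsearch connection details from ctx.obj. A minimal sketch of the click group that would populate it (the group name, option names, and defaults are assumptions; the real Alhena CLI may differ):

import click


@click.group()
@click.option('--host', default='localhost', help='Elasticsearch host')
@click.option('--port', default=9200, type=int, help='Elasticsearch port')
@click.pass_context
def cli(ctx, host, port):
    # Make ctx.obj a dict so subcommands can read ctx.obj['host'] and ctx.obj['port'].
    ctx.ensure_object(dict)
    ctx.obj['host'] = host
    ctx.obj['port'] = port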