Example #1
def test_run_component_new_then_delete(tmp_path: P):
    """
    Create a new component, then immediately delete it.
    """
    targetyml = setup_directory(tmp_path)

    component_name = "test-component"
    exit_status = call(
        f"commodore -vvv component new {component_name} --lib --pp", shell=True
    )
    assert exit_status == 0

    exit_status = call(
        f"commodore -vvv component delete --force {component_name}", shell=True
    )
    assert exit_status == 0

    # Ensure the dependencies folder is gone.
    assert not P("dependencies", component_name).exists()

    # Links in the inventory should be gone too.
    for f in [
        P("inventory", "classes", "components", f"{component_name}.yml"),
        P("inventory", "classes", "defaults", f"{component_name}.yml"),
        P("dependencies", "lib", f"{component_name}.libsonnet"),
        P("vendor", component_name),
    ]:
        assert not f.exists()

    with open(targetyml) as file:
        target = yaml.safe_load(file)
        classes = target["classes"]
        assert f"defaults.{component_name}" not in classes
        assert f"components.{component_name}" not in classes
Example #2
def fit(filename, fitax, difax):
    _, data = read(filename)
    # The field label is encoded in the file name after "data_".
    field = str(P(filename).stem).split("data_")[-1]
    if TESTING:
        # Quick fit: default initial guess, silent solver.
        popt, pcov = cf(double_debye,
                        data[:, 0],
                        data[:, 1],
                        method='trf',
                        verbose=0)
        print(field)
    else:
        # Full fit: explicit initial guess, verbose solver output.
        popt, pcov = cf(double_debye,
                        data[:, 0],
                        data[:, 1],
                        method='trf',
                        verbose=2,
                        p0=[50, 10, 200, 10])
    fitax.plot(data[:, 0],
               double_debye(data[:, 0], *popt),
               label=f'fit: {field}')
    fitax.plot(data[:, 0], data[:, 1], label=f'raw: {field}')
    fitax.legend()

    difax.plot(data[:, 0],
               data[:, 1] - double_debye(data[:, 0], *popt),
               label=f'{field}')
    difax.legend()

    return popt
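Here cf is presumably scipy.optimize.curve_fit, and double_debye is a project-specific model that is not shown. A purely illustrative sketch of a two-term Debye-style model with the four parameters implied by p0=[50, 10, 200, 10]:

from scipy.optimize import curve_fit as cf

def double_debye(x, a1, t1, a2, t2):
    # Hypothetical two-term Debye-style relaxation: amplitudes a1/a2 and
    # characteristic scales t1/t2. The project's actual model may differ.
    return a1 / (1.0 + (x / t1) ** 2) + a2 / (1.0 + (x / t2) ** 2)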
Example #3
 def test_thirdparty_package_lists_cs(self):
     r = PackageResolver(P('r')/'g1')
     p = r.resolve('g1+p5', {})
     assert('g1+p5'                          == p.name)
     assert([]                               == p.dependencies())
     assert([str(P('r')/'g1'/'g1+p5')]       == list(p.includes()))
     assert([str(P('r')/'g1'/'g1+p5'/'a.c')] == list(p.sources()))
Example #4
def create_component(config, name, lib, pp):
    component_dir = P('dependencies', name)
    if component_dir.exists():
        raise click.ClickException(
            f"Unable to add component {name}: {component_dir} already exists.")
    click.secho(f"Adding component {name}...", bold=True)
    cookiecutter_args = {
        'component': name,
        'add_lib': 'y' if lib else 'n',
        'add_pp': 'y' if pp else 'n',
    }
    cookiecutter('component-template', no_input=True,
                 output_dir='dependencies',
                 extra_context=cookiecutter_args)

    repo = git.create_repository(component_dir)
    git.add_remote(repo, 'origin', f"{config.global_git_base}/commodore-components/{name}.git")
    index = repo.index
    index.add('*')
    git.commit(repo, 'Initial commit')

    click.echo(' > Installing component')
    create_component_symlinks(name)

    targetfile = P('inventory', 'targets', 'cluster.yml')
    target = yaml_load(targetfile)
    target['classes'].append(f"components.{name}")
    target['classes'].insert(0, f"defaults.{name}")
    yaml_dump(target, targetfile)

    click.secho(f"Component {name} successfully added 🎉", bold=True)
Example #5
def process(targ):
    if P(targ).is_file():
        targ = P(targ).parent
    regex = re.compile(r"_Scan(\d+).dat")
    experiments = list(set([P("_".join(str(ii).split("_")[:-1]))
                            for ii in P(targ).iterdir() if regex.search(ii.name)]))

    for ind, exp in enumerate(experiments):
        files = [ii for ii in P(targ).iterdir() if ii.name.startswith(
            exp.name) and regex.search(ii.name)]

        for idx, f in enumerate(files):
            header, data = read(f, flipX=False)

            if idx == 0:
                outdata = np.zeros_like(data)
                # Extract the numeric wavelength from the header, keeping digits and dots.
                for row in header.split("\n"):
                    if 'Wavelength:' in row:
                        wv = int(
                            float("".join([ii for ii in row if ii.isdigit() or ii == "."])))
            outdata += data
        outdata /= len(files)
        os.makedirs(str(exp.parent) + '/processed_data/', exist_ok=True)
        fileout = exp.parent.joinpath(
            "processed_data/" + exp.name + f"_{wv}.csv")
        outstr = ""

        for row in outdata:
            if row[0] > 0:
                outstr += f"{row[0]}, {row[1]}\n"
        fileout.write_text(outstr)
        statusBar((ind + 1) / len(experiments) * 100)
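read and statusBar are helpers from the surrounding project and are not shown. A plausible sketch of the progress indicator, assuming percent runs from 0 to 100:

import sys

def statusBar(percent):
    # Hypothetical progress helper: redraw a single status line in place.
    sys.stdout.write(f"\r{percent:3.0f}% complete")
    sys.stdout.flush()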
Example #6
 def test_thirdparty_package_ignores_non_c_non_cpp(self):
     r = PackageResolver(P('r')/'g1')
     p = r.resolve('g1+p6', {})
     assert('g1+p6'                    == p.name)
     assert([]                         == p.dependencies())
     assert([str(P('r')/'g1'/'g1+p6')] == list(p.includes()))
     assert([]                         == list(p.sources()))
Example #7
 def test_thirdparty_package_with_header(self):
     r = PackageResolver(P('r')/'g1')
     p = r.resolve('g1+p7', {})
     assert('g1+p7'                          == p.name)
     assert([]                               == p.dependencies())
     assert([str(P('r')/'g1'/'g1+p7')]       == list(p.includes()))
     assert([str(P('r')/'g1'/'g1+p7'/'a.h')] == list(p.headers()))
Example #8
    def test_input_files_created(self, tmpdir):

        run_dir = P(tmpdir) / "run"
        out_dir = P(tmpdir) / "out"

        fn_raw = PATH / "fake" / "fake.raw"
        fn_mqp = PATH / "maxquant" / "tmt11" / "mqpar" / "mqpar.xml"
        fn_faa = PATH / "fasta" / "minimal.faa"

        mq = MaxquantRunner(
            fasta_file=fn_faa,
            mqpar_file=fn_mqp,
            run_dir=run_dir,
            out_dir=out_dir,
            add_uuid_to_rundir=False,
            add_raw_name_to_outdir=False,
            maxquantcmd="lrg_fake_maxquant.sh",
        )

        mq.run(fn_raw, run=False)

        print(glob(str(run_dir / "*")))

        files_generated = [
            (run_dir / "run.sbatch").is_file(),
            (run_dir / "fake.raw").is_file(),
            (run_dir / "mqpar.xml").is_file(),
        ]

        assert all(files_generated), files_generated
Example #9
    def test_log_files_created(self, tmpdir):

        run_dir = P(tmpdir) / "run"
        out_dir = P(tmpdir) / "out"

        fn_raw = PATH / "fake" / "fake.raw"
        fn_mqp = PATH / "maxquant" / "tmt11" / "mqpar" / "mqpar.xml"
        fn_faa = PATH / "fasta" / "minimal.faa"

        mq = MaxquantRunner(
            fasta_file=fn_faa,
            mqpar_file=fn_mqp,
            run_dir=run_dir,
            out_dir=out_dir,
            add_uuid_to_rundir=False,
            add_raw_name_to_outdir=False,
            cleanup=False,
            verbose=True,
            maxquantcmd="lrg_fake_maxquant.sh",
        )

        mq.run(fn_raw, run=True)

        files_generated = [
            (out_dir / "maxquant.err").is_file(),
            (out_dir / "maxquant.out").is_file(),
            (out_dir / "time.txt").is_file(),
            (run_dir / "combined").is_dir(),
        ]

        assert all(files_generated), files_generated
Example #10
def test__cli_run_creates_output_files(tmpdir):

    fn_raw = PATH / "fake" / "fake.raw"
    fn_mqp = PATH / "maxquant" / "tmt11" / "mqpar" / "mqpar.xml"
    fn_faa = PATH / "fasta" / "minimal.faa"
    run_dir = P(tmpdir) / "run"
    out_dir = P(tmpdir) / "out"
    cmd = (
        f"lrg_run_maxquant.py --fasta {fn_faa} --raw {fn_raw} --mqpar {fn_mqp}"
        f" --run-dir {run_dir} --out-dir {out_dir}"
        " --maxquantcmd lrg_fake_maxquant.sh --verbose --add-raw-name-to-outdir --add-uuid-to-rundir"
    )

    print(cmd)

    return_value = os.system(cmd)

    assert return_value == 0, f"Could not run: {cmd}"

    files_generated = [
        (out_dir / "maxquant.err").is_file(),
        (out_dir / "maxquant.out").is_file(),
        (out_dir / "time.txt").is_file(),
        (run_dir / "combined").is_dir(),
    ]

    assert all(files_generated), files_generated
Example #11
    def delete(self):
        component = Component(
            name=self.slug,
            repo=None,
            repo_url="",
        )

        if component.target_directory.exists():

            if not self.config.force:
                click.confirm(
                    "Are you sure you want to delete component "
                    f"{self.slug}? This action cannot be undone",
                    abort=True,
                )
            delete_component_symlinks(self.config, component)
            rmtree(component.target_directory)

            targetfile = P("inventory", "targets", "cluster.yml")
            remove_from_inventory_targets_cluster(targetfile, self.slug)
            remove_from_jsonnetfile(P("jsonnetfile.json"),
                                    component.target_directory)
            # Fetch jsonnet libs after removing component from jsonnetfile to
            # remove symlink to removed component in vendor/
            fetch_jsonnet_libraries()

            click.secho(f"Component {self.slug} successfully deleted 🎉",
                        bold=True)
        else:
            raise click.BadParameter("Cannot find component with slug "
                                     f"'{self.slug}'.")
Example #12
def test_run_component_new_command(tmp_path: P):
    """
    Run the component new command
    """

    targetyml = setup_directory(tmp_path)

    component_name = 'test-component'
    exit_status = os.system(
        f"commodore -vvv component new {component_name} --lib --pp")
    assert exit_status == 0
    for file in [
            P('README.md'),
            P('class', f"{component_name}.yml"),
            P('component', 'main.jsonnet'),
            P('component', 'app.jsonnet'),
            P('lib', f"{component_name}.libsonnet"),
            P('postprocess', 'filters.yml'),
    ]:
        assert os.path.exists(P('dependencies', component_name, file))
    for file in [
            P('inventory', 'classes', 'components', f"{component_name}.yml"),
            P('inventory', 'classes', 'defaults', f"{component_name}.yml"),
            P('dependencies', 'lib', f"{component_name}.libsonnet")
    ]:
        assert file.is_symlink()
    with open(targetyml) as file:
        target = yaml.safe_load(file)
        assert target['classes'][0] == f"defaults.{component_name}"
        assert target['classes'][-1] == f"components.{component_name}"
Example #13
def maxquant_qc(txt_path):
    '''
    Runs all MaxQuant quality control functions 
    and returns a concatenated pandas.Series() 
    object including meta data.
    Args:
        txt_path: path with MaxQuant txt output.
    '''
    txt_path = P(abspath(txt_path))
    meta_json = txt_path / P('meta.json')
    assert isdir(txt_path), f'Path does not exist: {txt_path}'
    dfs = []
    if isfile(meta_json):
        meta = pd.read_json(meta_json, typ='series')
        dfs.append(meta)
    try:
        for df in [
                maxquant_qc_summary(txt_path),
                maxquant_qc_protein_groups(txt_path),
                maxquant_qc_peptides(txt_path),
                maxquant_qc_msmScans(txt_path),
                maxquant_qc_evidence(txt_path)
        ]:
            dfs.append(df)
    except Exception:
        # A failing QC step aborts the remaining ones; results collected
        # before the failure are still concatenated below.
        pass
    if len(dfs) == 0:
        return None
    df = pd.concat(dfs, sort=False).to_frame().T
    df['RUNDIR'] = str(txt_path)
    return df
Example #14
 def test_empty_package(self):
     r = PackageResolver(P('r')/'g1')
     p = r.resolve('g1p1', {})
     assert('g1p1'                    == p.name)
     assert([]                        == p.dependencies())
     assert([str(P('r')/'g1'/'g1p1')] == list(p.includes()))
     assert([]                        == list(p.sources()))
Example #15
def _import_cb(basedir, rel):
    # Add current working dir to search path for Jsonnet import callback
    search_path = [
        P(".").resolve(),
        __install_dir__.resolve(),
        P("./dependencies").resolve(),
    ]
    return _import_callback_with_searchpath(search_path, basedir, rel)
Example #16
def construct_test_files(base_dir, contents, suffix=None):
    P(base_dir).mkdir(parents=True, exist_ok=True)
    suffix = '' if suffix is None else '.%s' % suffix
    # A bare string would otherwise be iterated character by character.
    if isinstance(contents, str) or not isinstance(contents, Iterable):
        contents = [contents]
    for content in contents:
        with open(P(base_dir) / (content + suffix), 'w') as f:
            f.write(content)
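Hypothetical usage: each entry becomes one file whose content is its own name.

# Creates tmp/pkg/a.cpp and tmp/pkg/b.cpp containing "a" and "b" respectively.
construct_test_files('tmp/pkg', ['a', 'b'], suffix='cpp')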
Example #17
 def get_a_list(fn, wdir):
     if fn is None:
         raise PreventUpdate
     ms_dir = T.get_ms_dirname(wdir)
     fn_new = P(ms_dir) / P(fn).name
     shutil.move(fn, fn_new)
     logging.info(f'Move {fn} to {fn_new}')
     return dbc.Alert('Upload finished', color='success')
Example #18
 def test_one_non_driver_component(self):
     r = PackageResolver(P('r')/'g1')
     p = r.resolve('g1p2', {})
     assert('g1p2'                                  == p.name)
     assert([]                                      == p.dependencies())
     assert([str(P('r')/'g1'/'g1p2')]               == list(p.includes()))
     assert([str(P('r')/'g1'/'g1p2'/'g1p2_c1.cpp')] == list(p.sources()))
     assert([]                                      == list(p.drivers()))
Example #19
def make_dirs():
    tmpdir = tempfile.gettempdir()
    tmpdir = os.path.join(tmpdir, 'MINT')
    tmpdir = os.getenv('MINT_DATA_DIR', default=tmpdir)
    cachedir = os.path.join(tmpdir, '.cache')
    os.makedirs(tmpdir, exist_ok=True)
    os.makedirs(cachedir, exist_ok=True)
    print('MAKEDIRS:', tmpdir, cachedir)
    return P(tmpdir), P(cachedir)
Example #20
    def test_level_one_standalone_resolution_components(self):
        r = TargetResolver(self.config)

        p1 = r.resolve('p1', {})
        assert('p1' == p1.name)

        c1 = P('r')/'adapters'/'p1'/'p1c1.cpp'
        c2 = P('r')/'adapters'/'p1'/'p1c2.cpp'
        assert([str(c1), str(c2)] == list(sorted(p1.sources())))
Example #21
 def _copy_missing_language_files(self):
     for fn in [
             "schema-v1.0.json", "schema-v1.1.json",
             "schema-v1.2.0-dev1.json"
     ]:
         src_file = P(default_config_data_dir, fn)
         dst_file = P(self.cfg_path, fn)
         if not dst_file.exists():
             shutil.copy(src_file, dst_file)
Example #22
def load_davis_test_data():

    meanval = (104.00699, 116.66877, 122.67892)
    path_db_root = P(db_root_dir)
    path_sequences = path_db_root / 'ImageSets' / '480p'
    file_extension = '.txt'
    seq_name = None  #'blackswan'
    mode = 'test'
    mode_fname_mapping = {
        'train': 'train',
        'test': 'val',
    }
    if mode in mode_fname_mapping:  # mode 'train' or 'test' with no named sequence: fname is 'train' or 'val'
        if seq_name is None:
            fname = mode_fname_mapping[mode]
        else:
            fname = 'trainval'

    else:
        raise Exception(
            f"Mode {mode!r} does not exist. Must be one of ['train', 'test']."
        )

    sequences_file = path_sequences / (fname + file_extension)
    with open(str(sequences_file)) as f:
        sequences = f.readlines()
        sequences = [s.split() for s in sequences]
        img_list, labels = zip(*sequences)
        tmp_list = [i.split('/') for i in img_list]

        seq_list = [i[-2] for i in tmp_list]
        fname_list = [i[-1].split('.')[0] for i in tmp_list]
        img_list = [
            str(path_db_root.joinpath(*i.split('/'))) for i in img_list
        ]
        labels = [str(P(*l.split('/'))) for l in labels]

    if seq_name is not None:
        tmp = [(s, f, i, l)
               for s, f, i, l in zip(seq_list, fname_list, img_list, labels)
               if s == seq_name]
        tmp = [(s, f, i, l if index == 0 else None)
               for index, (s, f, i, l) in enumerate(tmp)]
        seq_list, fname_list, img_list, labels = list(zip(*tmp))
        if mode == 'train':
            seq_list = [seq_list[0]]
            fname_list = [fname_list[0]]
            img_list = [img_list[0]]
            labels = [labels[0]]

    print('seq_list:', seq_list)
    print('fname:', fname)
    print('img_list:', img_list)
    print('labels:', labels)
    assert (len(labels) == len(img_list))
Example #23
 def test_one_driver_component(self):
     r = PackageResolver(P('r') / 'g1')
     p = r.resolve('g1p3', {})
     assert('g1p3' == p.name)
     assert([] == p.dependencies())
     assert([str(P('r') / 'g1' / 'g1p3')] == list(p.includes()))
     assert([str(P('r') / 'g1' / 'g1p3' / 'g1p3_c1.h')] == list(p.headers()))
     assert([str(P('r') / 'g1' / 'g1p3' / 'g1p3_c1.cpp')] == list(p.sources()))
     assert([str(P('r') / 'g1' / 'g1p3' / 'g1p3_c1.t.cpp')] == list(p.drivers()))
Example #24
    def __init__(self, config_path):
        self.package_path = P(__file__).parent
        self.data_path = self.package_path / "data"
        self.config_path = P(config_path)

        self.ensure_config_path()
        self.ensure_config_file()

        self.defaults = toml.load(str(self.data_path / CONF_NAME))
        self.user_rc = toml.load(str(self.config_path / CONF_NAME))
        self.rc = A(dict_merge(self.defaults, self.user_rc))
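dict_merge and A (apparently an attribute-style dict wrapper) are project helpers that are not shown. A plausible recursive merge in which user settings override the shipped defaults:

def dict_merge(base, override):
    # Hypothetical sketch: override wins; nested dicts are merged key by key.
    merged = dict(base)
    for key, value in override.items():
        if isinstance(value, dict) and isinstance(merged.get(key), dict):
            merged[key] = dict_merge(merged[key], value)
        else:
            merged[key] = value
    return merged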
Example #25
def mzml2mzmlb(fn, fn_out=None, out_parent=None):
    if out_parent is None:
        out_parent = P(fn).parent
    if fn_out is None:
        fn_out = out_parent / P(fn).with_suffix(".mzMLb").name
    fn_out = P(fn_out)  # normalize so .is_file() also works for string input
    if fn_out.is_file():
        logging.warning(f"File exists {fn_out}")
    else:
        os.makedirs(out_parent, exist_ok=True)
        logging.info(f"{fn} --> {fn_out}")
        MzMLToMzMLb(fn, fn_out).write()
Example #26
    def test_thirdparty_package_lists_cpps(self):
        r = PackageResolver(P('r')/'g1')
        p = r.resolve('g1+p4', {})

        assert('g1+p4'                    == p.name)
        assert([]                         == p.dependencies())
        assert([str(P('r')/'g1'/'g1+p4')] == list(p.includes()))

        assert(2                                == len(list(p.sources())))
        assert(str(P('r')/'g1'/'g1+p4'/'a.cpp') in list(p.sources()))
        assert(str(P('r')/'g1'/'g1+p4'/'b.cpp') in list(p.sources()))
Example #27
def relsymlink(srcdir, srcname, destdir, destname=None):
    if destname is None:
        destname = srcname
    # pathlib's relative_to() isn't suitable for this use case, since it only
    # works for dropping a path's prefix according to the documentation. See
    # https://docs.python.org/3/library/pathlib.html#pathlib.PurePath.relative_to
    link_src = os.path.relpath(P(srcdir) / srcname, start=destdir)
    link_dst = P(destdir) / destname
    if link_dst.exists():
        os.remove(link_dst)
    os.symlink(link_src, link_dst)
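Hypothetical usage, mirroring how the commodore snippets above link component libraries into dependencies/lib:

# Creates dependencies/lib/test-component.libsonnet as a relative symlink
# pointing at ../test-component/lib/test-component.libsonnet.
relsymlink(P('dependencies', 'test-component', 'lib'),
           'test-component.libsonnet',
           P('dependencies', 'lib'))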
Example #28
def maybe_update_workpace_scheme(wdir):
    old_pkl_fn = P(wdir) / 'peaklist' / 'peaklist.csv'
    new_pkl_fn = P(get_targets_fn(wdir))
    new_path = new_pkl_fn.parent
    old_path = old_pkl_fn.parent
    if old_pkl_fn.is_file():
        logging.info(
            f'Moving targets file to new default location ({new_pkl_fn}).')
        if not new_path.is_dir():
            os.makedirs(new_path)
        os.rename(old_pkl_fn, new_pkl_fn)
        shutil.rmtree(old_path)
Example #29
 def _read_protein_groups(self):
     data = []
     for path, rawfile in tqdm(self._df_paths[["Path", "RawFile"]].values):
         fn = P(path) / "proteinGroups.txt"
         if not fn.is_file():
             logging.warning(f"FileNotFound: {fn}")
             continue
         df = self._reader.read(fn)
         df["RawFile"] = rawfile
         data.append(df)
     self.df_protein_groups = pd.concat(data).set_index(
         "RawFile").reset_index()
Example #30
def main(targ="./", makeAbs=True):
    # Operate on the containing directory when a file path is given.
    if P(targ).is_file():
        targ = str(P(targ).parent)

    if makeAbs:
        make(targ=targ,
             keyw='Light',
             file_suffix='rephased.dat',
             numerical_keyw=False,
             field=0)
    compare(targ=targ)