def hash(self):
    """Return a SHA-1 hash identifying this downsampled-recording extractor.

    The hash folds in the underlying recording's hash plus the
    downsampling parameters, so equal hashes imply equivalent extractors.
    """
    spec = {
        'name': 'downsampled-recording-extractor',
        'version': 2,
        'recording': self._recording.hash(),
        'ds_factor': self._ds_factor,
        'input_has_minmax': self._input_has_minmax,
    }
    return mt.sha1OfObject(spec)
def _samplehash(recording):
    """Return a SHA-1 hash of a recording built from its channel ids,
    frame count, and a data sample (via _samplehash_helper)."""
    from mountaintools import client as mt
    return mt.sha1OfObject(dict(
        channels=tuple(recording.get_channel_ids()),
        frames=recording.get_num_frames(),
        data=_samplehash_helper(recording),
    ))
Esempio n. 3
0
 def hash(self):
     """Return a SHA-1 hash combining the raw timeseries file, the
     geometry file, and the dataset parameters."""
     from mountainclient import client as mt
     return mt.sha1OfObject({
         'raw': mt.computeFileSha1(self._timeseries_path),
         'geom': mt.computeFileSha1(self._geom_fname),
         'params': self._dataset_params,
     })
def _samplehash(sorting):
    """Return a SHA-1 hash of a sorting built from its unit ids,
    sampling frequency, and a data sample (via _samplehash_helper)."""
    from mountaintools import client as mt
    return mt.sha1OfObject(dict(
        unit_ids=sorting.get_unit_ids(),
        sampling_frequency=sorting.get_sampling_frequency(),
        data=_samplehash_helper(sorting),
    ))
Esempio n. 5
0
 def hash(self):
     """Return a SHA-1 hash of this filter's parameters together with the
     wrapped recording's hash.

     Raises:
         Exception: if the subclass does not implement paramsForHash().
     """
     params = self.paramsForHash()  # pylint: disable=assignment-from-none
     if params is None:
         raise Exception('Cannot compute hash. Params for hash not implemented.')
     spec = {
         'name': 'FilterRecording',
         'params': params,
         'recording': self._recording.hash(),
     }
     return mt.sha1OfObject(spec)
Esempio n. 6
0
def install_waveclus(repo, commit):
    """Clone and install waveclus at a specific commit, idempotently.

    If the target directory already exists and its saved spikeforest.json
    matches the requested (repo, commit), the existing install is reused;
    otherwise the directory is removed and re-cloned.

    Parameters
    ----------
    repo : str
        Git repository URL of waveclus.
    commit : str
        Commit hash to check out.

    Returns
    -------
    str
        Path to the installed source directory.

    Raises
    ------
    Exception
        If the git clone/checkout script exits with a non-zero code.
    """
    spikeforest_alg_install_path = get_install_path()
    key = dict(
        alg='waveclus',
        repo=repo,
        commit=commit
    )
    source_path = spikeforest_alg_install_path + '/waveclus_' + commit
    if os.path.exists(source_path):
        # The dir hash method does not seem to be working for some reason here
        # hash0 = mt.computeDirHash(source_path)
        # if hash0 == mt.getValue(key=key):
        #     print('waveclus is already auto-installed.')
        #     return source_path

        # Fall back to comparing the install marker object saved on disk.
        a = mt.loadObject(path=source_path + '/spikeforest.json')
        if a:
            if mt.sha1OfObject(a) == mt.sha1OfObject(key):
                print('waveclus is already auto-installed.')
                return source_path

        # Stale or mismatched install: wipe it and re-clone below.
        print('Removing directory: {}'.format(source_path))
        shutil.rmtree(source_path)

    script = """
    #!/bin/bash
    set -e

    git clone {repo} {source_path}
    cd {source_path}
    git checkout {commit}
    """.format(repo=repo, commit=commit, source_path=source_path)
    ss = mlpr.ShellScript(script=script)
    ss.start()
    retcode = ss.wait()
    if retcode != 0:
        # Fixed typo: message previously ended with a stray '/'.
        raise Exception('Install script returned a non-zero exit code.')

    # The dir hash method does not seem to be working for some reason here
    # hash0 = mt.computeDirHash(source_path)
    # mt.setValue(key=key, value=hash0)
    # Record the install marker so future calls can detect a matching install.
    mt.saveObject(object=key, dest_path=source_path + '/spikeforest.json')

    return source_path
Esempio n. 7
0
 def hash(self):
     """Return a SHA-1 hash identifying this downsampled-recording extractor.

     The underlying recording's hash is computed lazily and cached in
     self._recording_hash. The recording may expose ``hash`` either as a
     precomputed string attribute or as a callable; if it has no ``hash``
     at all, fall back to sampling-based hashing via _samplehash().
     """
     if not self._recording_hash:
         if hasattr(self._recording, 'hash'):
             # Use isinstance rather than type(...) == str (idiomatic and
             # also accepts str subclasses).
             if isinstance(self._recording.hash, str):
                 self._recording_hash = self._recording.hash
             else:
                 self._recording_hash = self._recording.hash()
         else:
             self._recording_hash = _samplehash(self._recording)
     return mt.sha1OfObject(
         dict(name='downsampled-recording-extractor',
              version=2,
              recording=self._recording_hash,
              ds_factor=self._ds_factor,
              input_has_minmax=self._input_has_minmax))
Esempio n. 8
0
def install_jrclust(repo, commit):
    """Clone, install, and GPU-compile jrclust at a specific commit, idempotently.

    If the target directory already exists and its saved spikeforest.json
    matches the requested (repo, commit), the existing install is reused;
    otherwise the directory is removed, re-cloned, and `jrc compile` is run
    through MATLAB.

    Parameters
    ----------
    repo : str
        Git repository URL of jrclust.
    commit : str
        Commit hash to check out.

    Returns
    -------
    str
        Path to the installed source directory.

    Raises
    ------
    Exception
        If the git clone/checkout script or the MATLAB compile step exits
        with a non-zero code.
    """
    spikeforest_alg_install_path = get_install_path()
    key = dict(alg='jrclust', repo=repo, commit=commit)
    source_path = spikeforest_alg_install_path + '/jrclust_' + commit
    if os.path.exists(source_path):
        # The dir hash method does not seem to be working for some reason here
        # hash0 = mt.computeDirHash(source_path)
        # if hash0 == mt.getValue(key=key):
        #     print('jrclust is already auto-installed.')
        #     return source_path

        # Fall back to comparing the install marker object saved on disk.
        a = mt.loadObject(path=source_path + '/spikeforest.json')
        if a:
            if mt.sha1OfObject(a) == mt.sha1OfObject(key):
                print('jrclust is already auto-installed.')
                return source_path

        # Stale or mismatched install: wipe it and re-clone below.
        print('Removing directory: {}'.format(source_path))
        shutil.rmtree(source_path)

    script = """
    #!/bin/bash
    set -e

    git clone {repo} {source_path}
    cd {source_path}
    git checkout {commit}
    """.format(repo=repo, commit=commit, source_path=source_path)
    ss = mlpr.ShellScript(script=script)
    ss.start()
    retcode = ss.wait()
    if retcode != 0:
        # Fixed typo: message previously ended with a stray '/'.
        raise Exception('Install script returned a non-zero exit code.')

    # MATLAB wrapper that runs `jrc compile` and converts failures into a
    # non-zero exit code we can detect from the shell.
    compile_gpu = mlpr.ShellScript(script="""
    function compile_gpu

    try
        jrc compile
    catch
        disp('Problem running `jrc compile`');
        disp(lasterr());
        exit(-1)
    end;
    exit(0)
    """)
    compile_gpu.write(script_path=source_path + '/compile_gpu.m')

    script = """
    #!/bin/bash
    set -e

    cd {source_path}
    matlab -nodisplay -nosplash -r "compile_gpu"
    """.format(source_path=source_path)
    ss = mlpr.ShellScript(script=script)
    ss.start()
    retcode = ss.wait()
    if retcode != 0:
        raise Exception('Compute gpu script returned a non-zero exit code.')

    # The dir hash method does not seem to be working for some reason here
    # hash0 = mt.computeDirHash(source_path)
    # mt.setValue(key=key, value=hash0)
    # Record the install marker so future calls can detect a matching install.
    mt.saveObject(object=key, dest_path=source_path + '/spikeforest.json')

    return source_path
def install_kilosort2(repo, commit):
    """Clone, install, and GPU-compile Kilosort2 at a specific commit, idempotently.

    If the target directory already exists and its saved spikeforest.json
    matches the requested (repo, commit), the existing install is reused;
    otherwise the directory is removed, re-cloned, and the CUDA mex files
    are compiled through MATLAB (mexGPUall.m).

    Parameters
    ----------
    repo : str
        Git repository URL of Kilosort2.
    commit : str
        Commit hash to check out.

    Returns
    -------
    str
        Path to the installed source directory.

    Raises
    ------
    Exception
        If the git clone/checkout script or the MATLAB compile step exits
        with a non-zero code.
    """
    spikeforest_alg_install_path = get_install_path()
    key = dict(alg='kilosort2', repo=repo, commit=commit)
    source_path = spikeforest_alg_install_path + '/kilosort2_' + commit
    if os.path.exists(source_path):
        # The dir hash method does not seem to be working for some reason here
        # hash0 = mt.computeDirHash(source_path)
        # if hash0 == mt.getValue(key=key):
        #     print('Kilosort2 is already auto-installed.')
        #     return source_path

        # Fall back to comparing the install marker object saved on disk.
        a = mt.loadObject(path=source_path + '/spikeforest.json')
        if a:
            if mt.sha1OfObject(a) == mt.sha1OfObject(key):
                print('Kilosort2 is already auto-installed.')
                return source_path

        # Stale or mismatched install: wipe it and re-clone below.
        print('Removing directory: {}'.format(source_path))
        shutil.rmtree(source_path)

    script = """
    #!/bin/bash
    set -e

    git clone {repo} {source_path}
    cd {source_path}
    git checkout {commit}
    """.format(repo=repo, commit=commit, source_path=source_path)
    ss = mlpr.ShellScript(script=script)
    ss.start()
    retcode = ss.wait()
    if retcode != 0:
        # Fixed typo: message previously ended with a stray '/'.
        raise Exception('Install script returned a non-zero exit code.')

    # make sure module unload gcc/7.4.0
    # MATLAB wrapper: point MW_NVCC_PATH at the nvcc on PATH, then run the
    # CUDA mex build; failures become a non-zero exit code.
    compile_gpu = mlpr.ShellScript(script="""
    function compile_gpu

    try
        [~,path_nvcc_] = system('which nvcc');
        path_nvcc_ = strrep(path_nvcc_, 'nvcc', '');
        disp(['path_nvcc_: ', path_nvcc_]);
        setenv('MW_NVCC_PATH', path_nvcc_);
        run('mexGPUall.m');
    catch
        disp('Problem running mexGPUall.');
        disp(lasterr());
        exit(-1)
    end;
    exit(0)
    """)
    compile_gpu.write(script_path=source_path + '/CUDA/compile_gpu.m')

    script = """
    #!/bin/bash
    set -e

    cd {source_path}/CUDA
    matlab -nodisplay -nosplash -r "compile_gpu"
    """.format(source_path=source_path)
    ss = mlpr.ShellScript(script=script)
    ss.start()
    retcode = ss.wait()
    if retcode != 0:
        raise Exception('Compute gpu script returned a non-zero exit code.')

    # The dir hash method does not seem to be working for some reason here
    # hash0 = mt.computeDirHash(source_path)
    # mt.setValue(key=key, value=hash0)
    # Record the install marker so future calls can detect a matching install.
    mt.saveObject(object=key, dest_path=source_path + '/spikeforest.json')

    return source_path