def test_eFELFeature_string_settings():
    """ephys.efeatures: Testing eFELFeature string_settings"""
    recording_names = {'': 'square_pulse_step1.soma.v'}
    # Feature computed with the default voltage_base mode.
    efeature = efeatures.eFELFeature(
        name='test_eFELFeature_vb_default',
        efel_feature_name='voltage_base',
        recording_names=recording_names,
        stim_start=700,
        stim_end=2700)
    # Same feature, but voltage_base computed as a median via string_settings.
    efeature_median = efeatures.eFELFeature(
        name='test_eFELFeature_vb_median',
        efel_feature_name='voltage_base',
        recording_names=recording_names,
        stim_start=700,
        stim_end=2700,
        string_settings={'voltage_base_mode': "median"})
    response = TimeVoltageResponse('mock_response')
    testdata_dir = joinp(os.path.dirname(os.path.abspath(__file__)),
                         'testdata')
    response.read_csv(joinp(testdata_dir, 'TimeVoltageResponse.csv'))
    responses = {'square_pulse_step1.soma.v': response}
    vb_median = efeature_median.calculate_feature(responses,
                                                  raise_warnings=True)
    vb_default = efeature.calculate_feature(responses, raise_warnings=True)
    # Median and mean voltage_base must differ on this trace.
    assert vb_median != vb_default
def __init__():
    """Build the module's URL and directory namedtuples.

    Returns:
        tuple: ``(_url, _dir)`` where ``_url`` is a 'UrlList' namedtuple of
        service endpoints and ``_dir`` is a 'DirObject' namedtuple of local
        paths (current, database and static-file directories).
    """
    # Anchor paths to this file's directory when available (e.g. __file__ is
    # absent in some embedded/interactive contexts).
    if '__file__' in globals():
        cdir = os.path.dirname(os.path.realpath(__file__))
    else:
        cdir = os.getcwd()
    _dir = nt('DirObject', 'cur data static')(
        *[joinp(cdir, d) for d in ['', 'database/', 'static_files/']])
    # Create missing directories with a plain loop instead of the original
    # side-effect-only list comprehension (which needed a pylint disable).
    for p in _dir:
        if not path.exists(p):
            os.makedirs(p)
    main_url = 'http://tkm.ibb.gov.tr/'
    # File names for static files.
    fl_road = ['r{0:d}.txt'.format(x) for x in range(5)]
    fl_other = ['d{0:02d}.txt'.format(x) for x in range(1, 10)]
    static_files_url = 'YHarita/res/'
    # Dynamic data endpoints, served from data/IntensityMap/<name>.aspx.
    # (The original nested comprehension reused the loop name `url` at two
    # levels; flattened here for readability.)
    endpoint_names = ['TrafficIndex', 'TrafficDataNew', 'ParkingLotData',
                      'AnnouncementData', 'WeatherData']
    dynamic_urls = tuple(
        joinp(main_url, 'data/IntensityMap/' + name + '.aspx')
        for name in endpoint_names)
    road_urls = [joinp(main_url, static_files_url, fn) for fn in fl_road]
    other_urls = [joinp(main_url, static_files_url, fn) for fn in fl_other]
    _url = nt('UrlList', 'trafficindex trafficdata parkingdata '
              'announcements weatherdata road other')(
        *(dynamic_urls + (road_urls, other_urls)))
    return _url, _dir
def test_eFELFeature(): """ephys.efeatures: Testing eFELFeature creation""" recording_names = {'': 'square_pulse_step1.soma.v'} efeature = efeatures.eFELFeature(name='test_eFELFeature', efel_feature_name='voltage_base', recording_names=recording_names, stim_start=700, stim_end=2700, exp_mean=1, exp_std=1) response = TimeVoltageResponse('mock_response') testdata_dir = joinp(os.path.dirname(os.path.abspath(__file__)), 'testdata') response.read_csv(joinp(testdata_dir, 'TimeVoltageResponse.csv')) responses = { 'square_pulse_step1.soma.v': response, } ret = efeature.calculate_feature(responses, raise_warnings=True) nt.assert_almost_equal(ret, -72.0575843859) score = efeature.calculate_score(responses) nt.assert_almost_equal(score, 73.05758438592171) nt.eq_(efeature.name, 'test_eFELFeature') nt.ok_('voltage_base' in str(efeature))
def generate(dstdir):
    """Render TypeScript definition files for every parsed message and
    service spec into *dstdir*, creating output directories as needed."""
    msg_template = template_env.get_template("ts-definition.j2")
    for _, msgspec in __parsed_msgs.items():
        # String entries are unresolved/aliased specs; skip them.
        if isinstance(msgspec, str):
            continue
        base_type = msgspec.base_type
        out_path = joinp(dstdir, base_type.pkg_name, msgspec.reldir,
                         f"{msgspec.base_type.type}.ts")
        if not exists(dirname(out_path)):
            makedirs(dirname(out_path))
        print(f"Generating model {base_type}")
        msg_template.stream(msgspec=msgspec).dump(out_path)

    srv_template = template_env.get_template("srv-ts-definition.j2")
    for full_type, srvspec in __parsed_srvs.items():
        if isinstance(srvspec, str):
            continue
        out_path = joinp(dstdir, srvspec.pkg_name, srvspec.reldir,
                         f"{srvspec.srv_name}.ts")
        if not exists(dirname(out_path)):
            makedirs(dirname(out_path))
        print(f"Generating model {full_type}")
        srv_template.stream(srvspec=srvspec).dump(out_path)
def test_eFELFeature_max_score():
    """ephys.efeatures: Testing eFELFeature max_score option"""
    recording_names = {'': 'square_pulse_step1.soma.v'}
    response = TimeVoltageResponse('mock_response')
    testdata_dir = joinp(os.path.dirname(os.path.abspath(__file__)),
                         'testdata')
    response.read_csv(joinp(testdata_dir, 'TimeVoltageResponse.csv'))
    responses = {'square_pulse_step1.soma.v': response}

    def make_feature(**kwargs):
        # Both features are identical except for the max_score argument.
        return efeatures.eFELFeature(
            name='test_eFELFeature',
            efel_feature_name='AP_amplitude',
            recording_names=recording_names,
            stim_start=600,
            stim_end=700,
            exp_mean=1,
            exp_std=1,
            **kwargs)

    # Without max_score the score saturates at the default ceiling of 250.
    score_normal = make_feature().calculate_score(responses)
    nt.assert_almost_equal(score_normal, 250)
    # With max_score=150 the same trace is clipped to 150.
    score_150 = make_feature(max_score=150).calculate_score(responses)
    nt.assert_almost_equal(score_150, 150)
def __init__(self, user, branch, target, cache, master_branch='master', log=None):
    """Initialise build state for one paper build request."""
    self.user = user
    self.build_output = ''
    self.status = 'fail'
    self.branch = branch
    self.master_branch = master_branch
    self.build_status = 'Build started...'
    self.build_pdf_path = ''
    self.cache = cache
    self.master_repo_path = joinp(self.cache, 'scipy_proceedings')
    self.build_timestamp = time.strftime('%d/%m %H:%M')
    self.target_path = joinp(self.cache, f'{target!s}.pdf')
    self.build_path = None
    # LaTeX support files shipped with the package, copied into each build.
    self.data_files = [joinp(package_path, 'data', name)
                       for name in ('IEEEtran.cls',
                                    'draftwatermark.sty',
                                    'everypage.sty')]
def test_eFELFeature_int_settings():
    """ephys.efeatures: Testing eFELFeature int_settings"""
    recording_names = {'': 'square_pulse_step1.soma.v'}
    # Default Spikecount feature.
    efeature = efeatures.eFELFeature(
        name='test_eFELFeature',
        efel_feature_name='Spikecount',
        recording_names=recording_names,
        stim_start=1200,
        stim_end=2000,
        exp_mean=1,
        exp_std=1)
    # Same feature with strict_stiminterval enabled through int_settings.
    efeature_strict = efeatures.eFELFeature(
        name='test_eFELFeature_strict',
        efel_feature_name='Spikecount',
        recording_names=recording_names,
        stim_start=1200,
        stim_end=2000,
        exp_mean=1,
        exp_std=1,
        int_settings={'strict_stiminterval': True})
    response = TimeVoltageResponse('mock_response')
    testdata_dir = joinp(os.path.dirname(os.path.abspath(__file__)),
                         'testdata')
    response.read_csv(joinp(testdata_dir, 'TimeVoltageResponse.csv'))
    responses = {'square_pulse_step1.soma.v': response}
    spikecount = efeature.calculate_feature(responses)
    spikecount_strict = efeature_strict.calculate_feature(responses)
    nt.assert_true(spikecount_strict != spikecount)
def round_trip(path: str, labels: dict = None):
    """
    Compare the result of assembling path directly, or passing the output
    through the disassembler and back.

    Args:
        path (str): Relative path to assembly file.
        labels (dict): Mapping from ROM addresses to label names.

    Returns:
        bool: Whether the resulting binaries match.
    """
    if labels is None:
        labels = {}
    # Only the basename stem is needed to derive output file names.
    root = os.path.splitext(os.path.basename(path))[0]
    # Direct assembly of the original source.
    assemble(path, BASE_ADDRESS, debug=False)
    build_clean = joinp('build', f'{root}.bin')
    # Disassemble the clean build, write it back out, and reassemble it.
    with open(build_clean, 'rb') as f:
        dis = disasm(f, BASE_ADDRESS)
    asm_rt = joinp(TEST_DIR, f'{root}_rt.s')
    write_asm(dis, asm_rt, labels)
    assemble(asm_rt, BASE_ADDRESS, debug=False)
    build_rt = joinp('build', f'{root}_rt.bin')
    # The original wrapped this in `try/except Exception: raise`, which is a
    # no-op; as before, artifacts are cleaned up only when comparison
    # succeeds so a failing run leaves them around for inspection.
    eq = compare_binaries(build_clean, build_rt)
    artifacts = glob.glob(joinp('build', f'{root}*')) + [asm_rt]
    for artifact in artifacts:  # renamed: the original shadowed `path`
        sremove(artifact)
    return eq
def test_eFELFeature_force_max_score():
    """ephys.efeatures: Testing eFELFeature force_max_score option"""
    recording_names = {'': 'square_pulse_step1.soma.v'}
    response = TimeVoltageResponse('mock_response')
    testdata_dir = joinp(os.path.dirname(os.path.abspath(__file__)),
                         'testdata')
    response.read_csv(joinp(testdata_dir, 'TimeVoltageResponse.csv'))
    responses = {'square_pulse_step1.soma.v': response}

    def make_feature(**kwargs):
        # Features are identical except for the force_max_score argument.
        return efeatures.eFELFeature(
            name='test_eFELFeature',
            efel_feature_name='voltage_base',
            recording_names=recording_names,
            stim_start=700,
            stim_end=2700,
            exp_mean=1,
            exp_std=.001,
            **kwargs)

    # With a tiny exp_std the unclamped score blows past 250.
    score_normal = make_feature().calculate_score(responses)
    nt.assert_true(score_normal > 250)
    # force_max_score caps it at 250.
    score_force = make_feature(force_max_score=True).calculate_score(responses)
    nt.assert_almost_equal(score_force, 250)
def __init__():
    """Build the module's URL and directory namedtuples.

    Returns:
        tuple: ``(_url, _dir)`` — 'UrlList' namedtuple of service endpoints
        and 'DirObject' namedtuple of local paths.
    """
    # Anchor paths to this file's directory when available.
    if '__file__' in globals():
        cdir = os.path.dirname(os.path.realpath(__file__))
    else:
        cdir = os.getcwd()
    _dir = nt('DirObject', 'cur data static')(
        *[joinp(cdir, d) for d in ['', 'database/', 'static_files/']])
    # Create missing directories with a plain loop instead of a
    # side-effect-only list comprehension.
    for p in _dir:
        if not path.exists(p):
            os.makedirs(p)
    main_url = 'http://tkm.ibb.gov.tr/'
    # File names for static files.
    fl_road = ['r{0:d}.txt'.format(x) for x in range(5)]
    fl_other = ['d{0:02d}.txt'.format(x) for x in range(1, 10)]
    static_files_url = 'YHarita/res/'
    # Dynamic data endpoints under data/IntensityMap/<name>.aspx; the
    # original nested comprehension reused the loop name `url` twice.
    endpoint_names = ['TrafficIndex', 'TrafficDataNew', 'ParkingLotData',
                      'AnnouncementData', 'WeatherData']
    dynamic_urls = tuple(
        joinp(main_url, 'data/IntensityMap/' + name + '.aspx')
        for name in endpoint_names)
    road_urls = [joinp(main_url, static_files_url, fn) for fn in fl_road]
    other_urls = [joinp(main_url, static_files_url, fn) for fn in fl_other]
    _url = nt('UrlList', 'trafficindex trafficdata parkingdata '
              'announcements weatherdata road other')(
        *(dynamic_urls + (road_urls, other_urls)))
    return _url, _dir
def test_eFELFeature(): """ephys.efeatures: Testing eFELFeature creation""" recording_names = {"": "square_pulse_step1.soma.v"} efeature = efeatures.eFELFeature( name="test_eFELFeature", efel_feature_name="voltage_base", recording_names=recording_names, stim_start=700, stim_end=2700, exp_mean=1, exp_std=1, ) response = TimeVoltageResponse("mock_response") testdata_dir = joinp(os.path.dirname(os.path.abspath(__file__)), "testdata") response.read_csv(joinp(testdata_dir, "TimeVoltageResponse.csv")) responses = { "square_pulse_step1.soma.v": response, } ret = efeature.calculate_feature(responses, raise_warnings=True) nt.assert_almost_equal(ret, -72.0576124731685) score = efeature.calculate_score(responses) nt.assert_almost_equal(score, 73.05761247316) nt.eq_(efeature.name, "test_eFELFeature") nt.ok_("voltage_base" in str(efeature))
def test_eFELFeature_int_settings():
    """ephys.efeatures: Testing eFELFeature int_settings"""
    recording_names = {"": "square_pulse_step1.soma.v"}

    def make_feature(name, **kwargs):
        # The two features differ only in name and int_settings.
        return efeatures.eFELFeature(
            name=name,
            efel_feature_name="Spikecount",
            recording_names=recording_names,
            stim_start=1200,
            stim_end=2000,
            exp_mean=1,
            exp_std=1,
            **kwargs)

    efeature = make_feature("test_eFELFeature")
    efeature_strict = make_feature(
        "test_eFELFeature_strict",
        int_settings={"strict_stiminterval": True})
    response = TimeVoltageResponse("mock_response")
    testdata_dir = joinp(os.path.dirname(os.path.abspath(__file__)),
                         "testdata")
    response.read_csv(joinp(testdata_dir, "TimeVoltageResponse.csv"))
    responses = {"square_pulse_step1.soma.v": response}
    spikecount = efeature.calculate_feature(responses)
    spikecount_strict = efeature_strict.calculate_feature(responses)
    nt.assert_true(spikecount_strict != spikecount)
def test_eFELFeature_double_settings():
    """ephys.efeatures: Testing eFELFeature double_settings"""
    recording_names = {'': 'square_pulse_step1.soma.v'}
    # Feature with default voltage_base window.
    efeature = efeatures.eFELFeature(
        name='test_eFELFeature',
        efel_feature_name='voltage_base',
        recording_names=recording_names,
        stim_start=700,
        stim_end=2700,
        exp_mean=1,
        exp_std=1)
    # Same feature with a custom voltage_base start percentage.
    efeature_ds = efeatures.eFELFeature(
        name='test_eFELFeature_other_perc',
        efel_feature_name='voltage_base',
        recording_names=recording_names,
        stim_start=700,
        stim_end=2700,
        exp_mean=1,
        exp_std=1,
        double_settings={'voltage_base_start_perc': 0.01})
    response = TimeVoltageResponse('mock_response')
    testdata_dir = joinp(os.path.dirname(os.path.abspath(__file__)),
                         'testdata')
    response.read_csv(joinp(testdata_dir, 'TimeVoltageResponse.csv'))
    responses = {'square_pulse_step1.soma.v': response}
    vb_other_perc = efeature_ds.calculate_feature(responses,
                                                  raise_warnings=True)
    vb = efeature.calculate_feature(responses, raise_warnings=True)
    nt.assert_true(vb_other_perc != vb)
def test_eFELFeature_double_settings():
    """ephys.efeatures: Testing eFELFeature double_settings"""
    recording_names = {'': 'square_pulse_step1.soma.v'}

    def make_feature(name, **kwargs):
        # The two features differ only in name and double_settings.
        return efeatures.eFELFeature(
            name=name,
            efel_feature_name='voltage_base',
            recording_names=recording_names,
            stim_start=700,
            stim_end=2700,
            exp_mean=1,
            exp_std=1,
            **kwargs)

    efeature = make_feature('test_eFELFeature')
    efeature_ds = make_feature(
        'test_eFELFeature_other_perc',
        double_settings={'voltage_base_start_perc': 0.01})
    response = TimeVoltageResponse('mock_response')
    testdata_dir = joinp(os.path.dirname(os.path.abspath(__file__)),
                         'testdata')
    response.read_csv(joinp(testdata_dir, 'TimeVoltageResponse.csv'))
    responses = {'square_pulse_step1.soma.v': response}
    vb_other_perc = efeature_ds.calculate_feature(responses,
                                                  raise_warnings=True)
    vb = efeature.calculate_feature(responses, raise_warnings=True)
    nt.assert_true(vb_other_perc != vb)
def copy_working_file(finfo):
    """
    Copy the initial file to the temp working dir renamed with a unique hash.
    Also create the other working directories.
    """
    create_dir(finfo['workingdir'])
    src = joinp(finfo['origdir'], finfo['origfilename'])
    dst = joinp(finfo['workingdir'], finfo['tempfilehash'] + '.pdf')
    shutil.copyfile(src, dst)
def merge_tiff(finfo):
    """Merge the working TIFF back into a PDF using tiff2pdf.

    Appends any tool output to finfo['errors'].
    """
    tiff2pdfargs = [
        'tiff2pdf',
        '-o', joinp(finfo['workingdir'],
                    finfo['tempfilehash'] + '.pdf.complete'),
        joinp(finfo['workingdir'], finfo['tempfilehash'] + '.tif'),
    ]
    t2pproc = subprocess.Popen(tiff2pdfargs,
                               stdout=subprocess.PIPE,
                               stderr=subprocess.PIPE)
    out, errs = t2pproc.communicate()
    if out:
        # communicate() returns bytes; decode before joining with str
        # (the original raised TypeError on `str + bytes` here).
        finfo['errors'] += 'Tiff2Pdf:\n' + out.decode(errors='replace') + '\n'
def context_url(self, context):
    '''return url to navitem by context'''
    lookup_url = joinp(self._host, 'collab/context/%s/' % context)
    resp = requests.get(lookup_url, headers=self._get_headers())
    resp.raise_for_status()
    nav_item = resp.json()
    collab_base = get_services()['collaboratory'][self.environment]['url']
    return joinp(collab_base,
                 '#/collab/%s/nav/%s' % (nav_item['collab']['id'],
                                         nav_item['id']))
def _cache_token_path(user, oauth_url):
    '''returns the expected token path based on user
    ex. /home/user/.credentials/py_oidc_user_authserv
    '''
    hostname = urlparse(oauth_url).hostname
    token_folder = joinp(os.path.expanduser('~'), '.credentials')
    token_file = joinp(token_folder, 'py_oidc_%s_%s' % (user, hostname))
    L.debug('token_file: %s', token_file)
    return token_file
def _retrieve_pdf(self):
    """Collects pdf from temporary directory and moves it to target_path."""
    pdf_src = joinp(self.build_path, 'output', self.paper, 'paper.pdf')
    try:
        shutil.copy(pdf_src, self.target_path)
    except IOError:
        # No PDF was produced: record the failure and escalate.
        self.add_output('[X] Paper build failed.\n')
        self.build_status = 'Build failed, no pdf can be found'
        raise BuildError('retrieve_pdf')
def makedirs(self, path):
    '''Recursive directory creation function'''
    norm_path = self._norm_path(path)
    L.debug('makedirs %s', path)
    parts = norm_path.split('/')
    # A normalized absolute path starts with '/', so the first split
    # component must be empty.
    assert not parts[0]
    current = joinp('/', parts[1])
    self.mkdir(current, ignore_error=True)
    # Create each deeper level in turn, ignoring already-exists errors.
    for component in parts[2:]:
        current = joinp(current, component)
        self.mkdir(current, ignore_error=True)
def makedirs(self, path):
    '''Recursive directory creation function'''
    norm_path = self._norm_path(path)
    L.debug('makedirs %s', path)
    components = norm_path.split('/')
    # Normalized absolute paths start with '/', so components[0] is empty.
    assert not components[0]
    prefix = joinp('/', components[1])
    self.mkdir(prefix, ignore_error=True)
    # Walk down the remaining levels, creating each one best-effort.
    for part in components[2:]:
        prefix = joinp(prefix, part)
        self.mkdir(prefix, ignore_error=True)
def burst_pdf(finfo):
    """Burst the working PDF into a multi-page G4 TIFF via Ghostscript.

    Appends Ghostscript's stderr to finfo['errors'] on failure.
    """
    gsargs = [
        'gswin64c', '-SDEVICE=tiffg4', '-r300x300',
        '-o', joinp(finfo['workingdir'], finfo['tempfilehash'] + '.tif'),
        joinp(finfo['workingdir'], finfo['tempfilehash'] + '.pdf'),
    ]
    gsproc = subprocess.Popen(gsargs,
                              stdout=subprocess.PIPE,
                              stderr=subprocess.PIPE)
    out, errs = gsproc.communicate()
    if gsproc.returncode > 0:
        # communicate() returns bytes; decode before joining with str
        # (the original raised TypeError on `str + bytes` here).
        finfo['errors'] += ('Ghostscript:\n'
                            + errs.decode(errors='replace') + '\n')
def collect_from_local_fs(root_src, root_dst, upload=False):
    '''recursively explores a local directory and returns a generator that
    builds objects of type ImportInfo representing local folders or files
    and the desired path of the imported document service equivalent
    '''
    if not os.path.exists(root_src):
        raise ValueError("Source path doesn't exist")
    for src, _, files in os.walk(root_src):
        dst = src.replace(root_src, root_dst)
        # One FOLDER entry per directory, followed by its files.
        yield ImportInfo(src, dst, ImportInfo.FOLDER, upload=upload)
        for fname in files:
            yield collect_single_file(joinp(src, fname), joinp(dst, fname),
                                      upload=upload)
def add_datafiles(self, data_files=None, barcode_files=None):
    ''' Add datafiles and barcodes in pairs to the database.

    Each pair defines the samples present in the datafiles listed.

    If 'files' or 'barcodes' is a str, it is interpreted as a glob relative
    to the data_path / barcode_path respectively.
    '''
    # Use isinstance rather than `type(...) is str` so str subclasses
    # are handled too.
    if isinstance(data_files, str):
        data_files = glob.glob(joinp(self.c.data_inpath, data_files))
    if isinstance(barcode_files, str):
        barcode_files = glob.glob(joinp(self.c.barcode_inpath,
                                        barcode_files))
    # Record the samples-to-datafiles association in the database.
    self.db.add_barcodes_datafiles(barcode_files, data_files,
                                   datafile_type='raw_mixed')
def _static_file_read(fl):
    """Read a static file and return its contents as UTF-8 text.

    Returns None when the file does not exist.
    """
    full_path = joinp(DIR.static, fl)
    if not path.exists(full_path):
        return None
    with open(full_path, "rb") as f:
        return f.read().decode('UTF-8')
def __init__(self):
    """Collect boot-device info and enumerate block-device partitions.

    NOTE(review): parses `mount` and `blkid -o export` output directly;
    assumes a Linux host with both tools on PATH — confirm.
    """
    # Ensure the mount target directory exists.
    if not os.path.exists(self.TGT):
        os.mkdir(self.TGT)
    # Detect the live boot device: take the device column of the line that
    # mounts /boot; dropping the last character (partition number) gives the
    # whole-disk device — assumes /dev/sdXN-style names, TODO confirm for
    # NVMe-style names like /dev/nvme0n1p1.
    for line in runcmd(['mount']).split(b'\n'):
        if b' /boot ' in line:
            self.info.boot_partition = line.split(None, 1)[0].decode()
            self.info.boot_device = self.info.boot_partition[:-1]
            break
    # Compute required size: total bytes of all files under /boot.
    for root, dirs, files in os.walk('/boot'):
        self.info.boot_part_size += sum(getsize(joinp(root, name))
                                        for name in files)
    # List all partitions & store them in self._parts by parsing the
    # KEY=value blocks of `blkid -o export`; a blank line separates devices.
    global cur_part
    cur_part = O()
    out = runcmd(['blkid', '-o', 'export'])
    for line in out.split(b'\n'):
        if not line.strip():
            # Blank line = end of one device's block: flush accumulated
            # attributes, marking read-only filesystem types.
            if hasattr(cur_part, 'type') and cur_part.type in ('iso9660',
                                                               'squashfs',
                                                               'udf',
                                                               'cramfs'):
                cur_part.ro = True
            self._parts[cur_part.devname] = cur_part
            cur_part = O()
        else:
            # Each non-blank line is a KEY=value pair (bytes); store the
            # lower-cased key as an attribute on the current partition.
            k, v = line.split(b'=')
            setattr(cur_part, k.lower().decode(), v.decode())
    self.DISKLABEL = self._parts[self.info.boot_partition].label
    self._list_disks()
def test_decay_time_constant_after_stim2():
    """basic: decay_time_constant_after_stim 2"""
    import efel
    efel.reset()
    import numpy
    stim_start = 100.0
    stim_end = 1000.0
    data = numpy.loadtxt(joinp(testdata_dir, 'basic', 'tau20.0.csv'))
    trace = {
        'T': data[:, 0],
        'V': data[:, 1],
        'stim_start': [stim_start],
        'stim_end': [stim_end],
        'decay_start_after_stim': [1.0],
        'decay_end_after_stim': [10.0],
    }
    feature_values = efel.getFeatureValues(
        [trace], ['decay_time_constant_after_stim'])[0]
    # The synthetic trace was generated with tau = 20 ms.
    nt.assert_almost_equal(
        20.0, feature_values['decay_time_constant_after_stim'][0], places=1)
def test_APlast_amp():
    """basic: Test APlast_amp"""
    import efel
    efel.reset()
    import numpy
    stim_start = 500.0
    stim_end = 900.0
    test_data_path = joinp(testdata_dir, 'basic', 'mean_frequency_1.txt')
    data = numpy.loadtxt(test_data_path)
    trace = {
        'T': data[:, 0],
        'V': data[:, 1],
        'stim_start': [stim_start],
        'stim_end': [stim_end],
    }
    features = ['AP_amplitude', 'APlast_amp']
    feature_values = efel.getFeatureValues([trace], features)
    APlast_amp = feature_values[0]['APlast_amp'][0]
    # BUG FIX: the original read feature_values[0]['APlast_amp'] here, so
    # the assertion compared the value with itself (vacuously true).
    # APlast_amp must equal the amplitude of the LAST action potential.
    AP_amplitude = feature_values[0]['AP_amplitude']
    nt.assert_equal(APlast_amp, AP_amplitude[-1])
def test_voltagebase1():
    """basic: Test voltagebase 1"""
    import efel
    efel.reset()
    import numpy
    stim_start = 500.0
    stim_end = 900.0
    data = numpy.loadtxt(joinp(testdata_dir, 'basic',
                               'mean_frequency_1.txt'))
    time = data[:, 0]
    voltage = data[:, 1]
    trace = {
        'T': time,
        'V': voltage,
        'stim_start': [stim_start],
        'stim_end': [stim_end],
    }
    feature_values = efel.getFeatureValues([trace], ['voltage_base'])
    # Reference: mean voltage over the last 10% of the pre-stimulus window.
    in_window = numpy.where((time >= 0.9 * stim_start)
                            & (time <= stim_start))
    voltage_base = numpy.mean(voltage[in_window])
    nt.assert_almost_equal(voltage_base,
                           feature_values[0]['voltage_base'][0])
def on_exit(self):
    """Write the collected unique tokens and their NER tags to the corpus
    .ner file (tab-separated, one token per row, no quoting)."""
    ner_path = joinp(self.dir, self.corpus_name + ".ner")
    with io.open(ner_path, 'w', encoding="utf-8") as f:
        writer = csv.writer(f, delimiter='\t', lineterminator='\n',
                            quotechar='', quoting=csv.QUOTE_NONE)
        for token, ner in self.unique_token.items():
            writer.writerow([token, ner])
def majvote(target, multiatlas=False):
    """Generate the commands to vote on this target image.

    This "function" relies on lots of stuff being in module scope,
    specifically:
        - atlases
        - templates
        - registrations_dir
        - fusion_dir
        - score_dir
        - options
        - logger
        - voting_cmds
        - resample_cmds
    """
    target_vote_dir = mkdirp(fusion_dir, "majvote", target.stem)
    # Guard clause: skip targets whose fused labels already exist,
    # unless --clobber was given.
    if not options.clobber and os.path.exists(
            joinp(target_vote_dir, 'labels.mnc')):
        return
    if multiatlas:
        vote_cmd, resamples = multiatlas_vote(target_vote_dir,
                                              template_labels_dir)
    else:
        vote_cmd, resamples = mb_vote(templates, target_vote_dir,
                                      template_labels_dir)
    voting_cmds.append(vote_cmd)
    resample_cmds.extend(resamples)
def get_path_by_id(self, _id):
    '''returns a path on the DS from the uuid of an existing object'''
    attr = self.get_standard_attr_by_id(_id)
    # NOTE(review): _parent is compared to the string 'None' (not the None
    # singleton) — presumably serialized that way by the server; confirm.
    if attr._parent == 'None':
        return '/' + attr._name
    # Recurse up the parent chain and append this entity's name.
    return joinp(self.get_path_by_id(attr._parent), attr._name)
def add_item(self, parent_id, prop, _type='Item'):
    '''post an item to the nav tree, returns the id'''
    name = prop['name']
    L.info('Adding %s to parent_id: %d (type: %s)', name, parent_id, _type)
    _type = Client.ITEM_TYPES[_type.lower()]
    url = joinp(self._host, 'collab/%d/nav/' % self.collab_id)
    # Fields common to folders and items.
    data = {
        'name': name,
        'collab': self.collab_id,
        'parent': str(parent_id),
    }
    if 'folder' == _type:
        data['type'] = 'FO'
        data['order_index'] = '-1'
    else:
        data['type'] = 'IT'
        data['app_id'] = self.get_app_id(prop['app_id'])
        data['context'] = prop.get('context', self._get_uuid4())
        data['order_index'] = prop.get('order_index', '-1')
    resp = requests.post(url, headers=self._get_headers(), data=data)
    if resp.status_code != 201:
        raise CollabException('Failed to get add_item to collab %s, "%s"'
                              % (resp.status_code, resp.text))
    return resp.json()
def setup_preprocessing(self, infiles_pattern, params=None, param_id=None):
    ''' Setup the preprocessing function for the workflow.

    Args:
        infiles_pattern: glob pattern for the input data files.
        params: filtering-parameters dictionary; mutually fallback with
            param_id (one of the two must be given).
        param_id: id of a previously-stored parameter set to look up.
    '''
    # Get params if id given
    if params is None and param_id is None:
        raise Exception("No parameters and no parameter id to lookup.")
    if param_id:
        params = self.db.get_binary('params', 'filtering_parameterID',
                                    param_id, table='filtering_parameters')
        assert params, \
            "No data returned from database for param_id: %s" % param_id
        self.c.filterparam_id = param_id
    else:
        # Insert parameters dictionary into filter_parameters table
        self.c.filterparam_id = self.db.insert_binary(
            params, col='params', table='filtering_parameters')
    # Define Preprocessing Class and set inputs
    self.Preprocessor = Preprocessor(self.c)
    self.Preprocessor.db = self.db  # Pass database reference to Preprocessor
    self.Preprocessor.set_input_files(data_files=infiles_pattern,
                                      data_inpath=self.c.data_inpath)
    self.Preprocessor.filter_functions = [
        self.Preprocessor.make_propN_filter(
            params['filtering']['propN']),
        self.Preprocessor.make_phred_filter(
            params['filtering']['phred']),
        self.Preprocessor.make_cutsite_filter(
            max_edit_dist=params['filtering']['cutsite_edit_dist']),
        self.Preprocessor.make_overhang_filter(
            'TCGAGG', 'GG', params['filtering']['overhang_edit_dist']),
    ]
    # Save addition to config file.
    path = joinp(self.c.db_path, '.' + self.c.root_name + '-config.pkl')
    if os.path.exists(path):
        os.remove(path)
    # BUG FIX: the original did pkl.dump(self.c, open(path, 'w')), leaking
    # the file handle and writing pickle data in text mode; use a context
    # manager and binary mode.
    with open(path, 'wb') as config_file:
        pkl.dump(self.c, config_file)
def _upload_content(self, content, dst, mimetype, st_attr):
    '''upload the content

    Args:
        content(string or open file): follows the same conventions as
            HTTPConnection.request
        dst(string path): on the server, including the
            /project/folder/...folders/file
        mimetype(str): set the _contentType property to this mimetype
        st_attr(dict): standard attributes

    Returns:
        uuid of created entity
    '''
    parent, entity = self._create_placeholder(dst, mimetype, st_attr)
    upload_url = joinp(self.host, 'file', entity._uuid, 'content/upload')
    resp = requests.post(upload_url,
                         headers=copy.copy(self._get_headers()),
                         data=content)
    if resp.status_code != 201:
        raise DocException('Could not upload file (%s): %s'
                           % (resp.status_code, resp.text))
    entity = self._api.deserialize(json.loads(resp.text),
                                   EntityReturn.EntityReturn)
    # Add the entity to the cache, since everything was successful.
    self._add_to_cache(parent, (entity,))
    return entity._uuid
def post_prov_dm(self, prov_dm_json):
    '''post Prov-DM to operation REST endpoint'''
    response = requests.post(joinp(self.host, 'operation'),
                             json=prov_dm_json,
                             headers=self._get_headers())
    L.debug('Provenance service POST /operation call took: %s',
            response.elapsed)
    if response.status_code != 200:
        raise ProvException('Failed to send prov_dm %s\n%s'
                            % (response.status_code, response.text))
def __init__(self, oauth_url=None, oidcconfig=None):
    '''this is generally not used, as the staticmethod's are better:
    bearer_auth(oauth_url, token)
    implicit_auth(user=None, password=None, oauth_url=None, use_cache=True)
    secret_auth(oauth_url, oidcconfig=None)
    file_auth(yaml_path, oauth_url=None)
    '''
    service = get_services()['oidc_service']
    # oauth_url may be a service alias key or a literal URL; fall back to
    # the production endpoint when nothing usable is supplied.
    if oauth_url in service:
        self.oauth_url = service[oauth_url]['url']
    else:
        self.oauth_url = oauth_url or service['prod']['url']
    L.debug('Using url: %s', self.oauth_url)
    self.oidcconfig = oidcconfig or DEFAULT_OIDC_CONFIG
    cfg = self.oidcconfig
    super(BBPOIDCClient, self).__init__(
        client_id=cfg['client_id'],
        client_secret=cfg.get('client_secret', None),
        user_agent=cfg['user_agent'],
        scope=cfg['scope'],
        auth_uri=self.oauth_url + 'authorize',
        token_uri=self.oauth_url + 'token',
        tokeninfo_uri=self.oauth_url + 'tokeninfo',
        userinfo_uri=self.oauth_url + 'userinfo'
    )
    self.redirect_uri = urljoin(self.oauth_url,
                                'resources/oauth_code.html')
    # Bundle of X.509 certificates of public Certificate Authorities.
    cacerts_path = joinp(os.path.dirname(__file__), 'cacert/cacert.pem')
    self.http = httplib2.Http(ca_certs=cacerts_path)
    self.credentials = None
def test_mean_frequency1():
    """basic: Test mean_frequency 1"""
    import efel
    efel.reset()
    import numpy
    stim_start = 500.0
    stim_end = 900.0
    data = numpy.loadtxt(joinp(testdata_dir, 'basic',
                               'mean_frequency_1.txt'))
    trace = {
        'T': data[:, 0],
        'V': data[:, 1],
        'stim_start': [stim_start],
        'stim_end': [stim_end],
    }
    feature_values = efel.getFeatureValues([trace], ['mean_frequency'])
    nt.assert_almost_equal(feature_values[0]['mean_frequency'][0],
                           15.2858453)
def download_file_by_id(self, _id, dst_path=None):
    '''download a file from the server

    Args:
        _id(string): the id of the file entity
        dst_path: the path to store the downloaded contents

    Returns:
        path to the file if dst_path was provided
        contents of the file as a string otherwise
    '''
    url = joinp(self.host, 'file', _id, 'content/download')
    resp = requests.get(url, headers=self._get_headers())
    if resp.status_code != 200:
        raise DocException('Could not download file (%s): %s'
                           % (resp.status_code, resp.text))
    if not dst_path:
        return resp.text
    # Stream the body to disk in chunks rather than holding it in memory.
    chunk_size = 10 * 1024
    with open(dst_path, 'wb') as out:
        for chunk in resp.iter_content(chunk_size):
            out.write(chunk)
    return dst_path
def test_getDistance1():
    """basic: Test getDistance 1"""
    import efel
    efel.reset()
    import numpy
    stim_start = 500.0
    stim_end = 900.0
    data = numpy.loadtxt(joinp(testdata_dir, 'basic',
                               'mean_frequency_1.txt'))
    trace = {
        'T': data[:, 0],
        'V': data[:, 1],
        'stim_start': [stim_start],
        'stim_end': [stim_end],
    }
    nt.assert_almost_equal(
        3.09045815935,
        efel.getDistance(trace, 'AP_amplitude', 50, 10))
def test_spikecount1():
    """basic: Test Spikecount 1"""
    import efel
    efel.reset()
    import numpy
    stim_start = 500.0
    stim_end = 900.0
    data = numpy.loadtxt(joinp(testdata_dir, 'basic',
                               'mean_frequency_1.txt'))
    trace = {
        'T': data[:, 0],
        'V': data[:, 1],
        'stim_start': [stim_start],
        'stim_end': [stim_end],
    }
    feature_values = efel.getFeatureValues([trace],
                                           ['peak_indices', 'Spikecount'])
    peak_indices = feature_values[0]['peak_indices']
    spikecount = feature_values[0]['Spikecount'][0]
    # Spikecount must agree with the number of detected peaks.
    nt.assert_equal(len(peak_indices), spikecount)
def _static_file_get_modified_time(f):
    """Return the static file's mtime as a local-timezone-aware datetime.

    Returns None when the file does not exist.
    """
    full_path = joinp(DIR.static, f)
    if not path.exists(full_path):
        return None
    mtime = path.getmtime(full_path)
    return dt.fromtimestamp(mtime).replace(tzinfo=tz.tzlocal())
def cache(path='../cache'):
    """Ensure the cache directory exists and return its path.

    Args:
        path (str): Cache directory, relative to base_path.

    Returns:
        str: The cache directory path.
    """
    cache_path = joinp(base_path, path)
    try:
        os.mkdir(cache_path)
    except OSError:
        # Best-effort creation, as in the original: the directory usually
        # already exists. NOTE(review): this also swallows permission /
        # missing-parent errors and may return a nonexistent path —
        # the unused `as e` binding has been dropped.
        pass
    return cache_path
def log(message):
    """Print *message* and append it, timestamped and with the caller's
    file:line, to ../flask.log."""
    print(message)
    log_path = joinp(os.path.dirname(__file__), '../flask.log')
    with io.open(log_path, 'a') as f:
        stamp = time.strftime("%Y-%m-%d %H:%M:%S", time.gmtime())
        caller = inspect.currentframe().f_back
        where = '{}:{}'.format(caller.f_code.co_filename, caller.f_lineno)
        f.write(" ".join([stamp, where, message, '\n']))
        f.flush()
def _static_file_write(tkmd):
    """Write tkmd data to a static file.

    :param tkmd: TKM_DATA type object
    :type tkmd: TKM_DATA
    :rtype: None
    """
    target = joinp(DIR.static, tkmd.filename)
    _write_to_file(target, tkmd.data, tkmd.date)
def log(message):
    """Print *message* and append it, timestamped and with the caller's
    file:line, to the package's ../flask.log."""
    print(message)
    log_path = joinp(package_path, '../flask.log')
    with io.open(log_path, 'a') as f:
        stamp = time.strftime("%Y-%m-%d %H:%M:%S", time.gmtime())
        caller = inspect.currentframe().f_back
        where = f'{caller.f_code.co_filename}:{caller.f_lineno}'
        f.write(" ".join([stamp, where, message, '\n']))
        f.flush()
def get_current_tree(self):
    '''return the current tree in the collab'''
    tree_url = joinp(self._host, 'collab/%d/nav/all/' % self.collab_id)
    resp = requests.get(tree_url, headers=self._get_headers())
    if resp.status_code != 200:
        raise CollabException('Failed to get collab current_tree %s, "%s"'
                              % (resp.status_code, resp.text))
    return self._create_tree(resp.json())
def permissions(self):
    '''return collab permissions for current user'''
    perm_url = joinp(self._host,
                     'collab/%s/permissions/' % self.collab_id)
    resp = requests.get(perm_url, headers=self._get_headers())
    if resp.status_code != 200:
        raise CollabException('Failed to get collab permissions %s, "%s"'
                              % (resp.status_code, resp.text))
    return resp.json()
def _norm_path(self, path=None): '''returns a normalized path''' path = str(path) # convert from unicode, potentially if path: ret = os.path.normpath(joinp(self._cwd, path)) else: ret = self._cwd return ret
def register_activity(self, activity):
    '''Post Activity'''
    # Stamp the activity with the current time if the caller didn't.
    if not activity.get('time'):
        activity['time'] = str(datetime.now())
    resp = requests.post(joinp(self._host, 'activity/'),
                         json=activity,
                         headers=self._get_headers())
    resp.raise_for_status()