def __init__(self, **kwargs):
    Compression.__init__(self)
    self.log = _helpers.get_logger(__name__)
    self.source_path = kwargs.get('source_path', None)
    # validate source_path before it is used to derive the default dest_dir
    assert self.source_path, 'source_path missing'
    self.dest_dir = kwargs.get(
        'dest_dir', '%s/' % os.path.dirname(self.source_path)
    )
    self.remove_source = kwargs.get('remove_source', False)
    self.compress_success = None
    self.decompress_success = None
    self.compressed_path = None
    self.decompressed_path = None

    # validations
    assert isinstance(self.remove_source, bool), \
        "remove_source must be True/False"
    assert os.path.exists(self.source_path), \
        '%s does not exist, skipping compression' % self.source_path

    # real paths please
    self.source_path = os.path.realpath(self.source_path)
    self.dest_dir = os.path.realpath(self.dest_dir)
    _helpers.ensure_dir(self.dest_dir)
def __init__(self, **kwargs):
    self.log = _helpers.get_logger(__name__)
    Compression.__init__(self)
    self.stream = None
    self.output_path = kwargs.get('output_path', None)
    # output_path is required; os.path.exists(None) would raise a TypeError
    assert self.output_path, 'output_path missing'
    assert not os.path.exists(self.output_path), \
        '%s already exists!' % self.output_path
    _helpers.ensure_dir(os.path.dirname(self.output_path))
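# The constructors above rely on an ensure_dir helper from _helpers that is
# not shown here. A minimal sketch, assuming it only needs to create a
# directory (with any missing parents) when it does not already exist:
import os

def ensure_dir(path):
    """Create directory `path`, including parents, if it is missing."""
    os.makedirs(path, exist_ok=True)  # exist_ok avoids check-then-create races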
def pytest_configure(config):
    if not hasattr(config, 'slaveinput'):
        current_day = '{:%Y_%m_%d_%H_%S}'.format(datetime.datetime.now())
        ensure_dir('results')
        ensure_dir(os.path.join('results', current_day))
        result_dir = os.path.join(os.path.dirname(__file__),
                                  'results', current_day)
        ensure_dir(result_dir)
        result_dir_test_run = result_dir
        ensure_dir(os.path.join(result_dir_test_run, 'screenshots'))
        ensure_dir(os.path.join(result_dir_test_run, 'logcat'))
        config.screen_shot_dir = os.path.join(result_dir_test_run, 'screenshots')
        config.logcat_dir = os.path.join(result_dir_test_run, 'logcat')
def pytest_configure(config): if not hasattr(config, "slaveinput"): current_day = (datetime.datetime.now().strftime("%Y_%m_%d_%H_%S")) ensure_dir("results") ensure_dir(os.path.join("results", current_day)) result_dir = os.path.join(os.path.dirname(__file__), "results", current_day) ensure_dir(result_dir) result_dir_test_run = result_dir ensure_dir(os.path.join(result_dir_test_run, "screenshots")) ensure_dir(os.path.join(result_dir_test_run, "logcat")) config.screen_shot_dir = os.path.join(result_dir_test_run, "screenshots") config.logcat_dir = os.path.join(result_dir_test_run, "logcat")
def string_to_file(request, input_dict, output_dict, widget):
    import helpers
    filename = str(request.user.id) + '/' + str(widget.id) + '.txt'
    destination = helpers.get_media_root() + '/' + filename
    helpers.ensure_dir(destination)
    with open(destination, 'w') as f:
        f.write(str(input_dict['string']))
    output_dict['filename'] = filename
    return render(request, 'visualizations/string_to_file.html', {
        'widget': widget,
        'input_dict': input_dict,
        'output_dict': output_dict,
    })
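# string_to_file assumes a get_media_root helper that is not shown (and note
# that this project's ensure_dir is handed a *file* path, so it presumably
# creates the parent directory). A plausible sketch of get_media_root, under
# the assumption that this is a Django project:
from django.conf import settings

def get_media_root():
    """Return the configured MEDIA_ROOT as a plain string."""
    return str(settings.MEDIA_ROOT)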
def download(self, out_dir=None, prefix=None, build_filter=None):
    dld_builds = []
    for build in self.builds:
        if build_filter and not build.match_filter(build_filter):
            log.info("SKIPPING: %s" % build)
            continue
        if out_dir:
            updir_path = out_dir
        else:
            updir_path = ''
        if prefix:
            updir_path = os.path.join(updir_path, prefix)
        build_path = build.path(prefix=updir_path)
        helpers.ensure_dir(build_path)
        with helpers.cdir(build_path):
            build.download()
        dld_builds.append(build)
    return dld_builds
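# download() enters each build directory via a cdir context manager that is
# not shown above. A minimal sketch, assuming it only needs to chdir in and
# restore the previous working directory on exit:
import os
from contextlib import contextmanager

@contextmanager
def cdir(path):
    """Temporarily change the working directory to `path`."""
    previous = os.getcwd()
    os.chdir(path)
    try:
        yield path
    finally:
        os.chdir(previous)  # always restore, even if the body raises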
def save_data(self):
    info = self.eegInlet.info()
    desc = info.desc()
    chanNum = info.channel_count()

    channels = desc.child('channels').first_child()
    channelNames = [channels.child_value('label')]
    for i in range(1, chanNum):
        channels = channels.next_sibling()
        channelNames.append(channels.child_value('label'))

    startTime = datetime.datetime.fromtimestamp(self.startTime).strftime(
        Constants.SESSION_FILE_DATETIME_FORMAT)
    finishTime = datetime.datetime.fromtimestamp(self.finishTime).strftime(
        Constants.SESSION_FILE_DATETIME_FORMAT)

    # Colons are not legal in Windows filenames, so substitute them once in
    # the shared base path instead of only for the marker file.
    fileBase = os.path.join(
        'session_data', self.user, self.mode.name,
        self.user + '_' + self.mode.name + '_' + startTime + '_' + finishTime
    ).replace(':', '\ua789')

    # Save EEG Data
    file = fileBase + '_EEG.csv'
    helpers.ensure_dir(file)
    with open(file, 'w', newline='') as csvfile:
        writer = csv.writer(csvfile, delimiter=',', quotechar='"',
                            quoting=csv.QUOTE_MINIMAL)
        writer.writerow(['timestamp'] + channelNames)
        for data in self.eegData:
            writer.writerow(data)
    print('Saved EEG data to: ' + file)

    # Save Marker Data
    file = fileBase + '_MRK.csv'
    helpers.ensure_dir(file)
    with open(file, 'w', newline='') as csvfile:
        writer = csv.writer(csvfile, delimiter=',', quotechar='"',
                            quoting=csv.QUOTE_MINIMAL)
        writer.writerow(['timestamp', 'key marker'])
        for data in self.markers:
            writer.writerow(data)
    print('Saved Marker data to: ' + file)
def saveAssertScreenShot(self, driver, testcaseName):
    current_day = datetime.datetime.now().strftime("%Y_%m_%d_%H_%S")
    ensure_dir("Screenshot")
    result_dir = ensure_dir(os.path.join("Screenshot", current_day))
    result_dir_test_run = result_dir
    Execution_screen_shot_dir = os.path.join(result_dir_test_run, "Assertion")
    ensure_dir(Execution_screen_shot_dir)
    ExeImagePath = os.path.join(Execution_screen_shot_dir,
                                current_day + '_' + testcaseName + ".png")
    driver.save_screenshot(ExeImagePath)
    return ExeImagePath
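# saveAssertScreenShot assigns the result of ensure_dir, which only works if
# that project's helper returns the path it ensured. A minimal sketch
# assuming that return-the-path convention:
import os

def ensure_dir(path):
    """Create `path` if it is missing and return it, so calls can chain."""
    os.makedirs(path, exist_ok=True)
    return path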
cluster_id = args.cluster_id
host_path = args.host_path
threads = args.threads
verbose = args.verbose
remove_files = args.remove_files

###
log('Building clustering table from amplicon data...')

merged_sorted_output_file = OUTPATH + sample_name + '_SORTED.fna'
merged_output_file = OUTPATH + sample_name + '.fna'

# make directories for processed files
output_subfolders = ['trim/', 'derep/']
for dirname in output_subfolders:
    ensure_dir(OUTPATH + dirname)

demux_files = [f for f in os.listdir(INPATH)
               if os.path.isfile(os.path.join(INPATH, f))]
log('%s files found...' % len(demux_files))

for i, filename in enumerate(demux_files):
    if i % 100 == 0:
        log('%s files processed...' % i)
    # trim
    input_full_path = INPATH + filename
    suffix = os.path.splitext(filename)[1]
    output_filename = filename.replace(suffix, '.trim' + suffix)
    output_full_path = OUTPATH + 'trim/' + output_filename
    cmd = ('vsearch '
list_of_clustering_dataframes = args.list_of_clustering_dataframes
OUTPATH = args.outpath
min_pair_count = args.min_shared_occurances
alpha = args.alpha
merge_similar_id = args.merge_similar_id
flag_edges = args.flag_edges
threads = args.threads
verbose = args.verbose
override = args.override

###
ensure_dir(OUTPATH)

frames = []
for clustering_dataframe_file in list_of_clustering_dataframes:
    try:
        frame = pd.read_csv(clustering_dataframe_file, index_col=0)
        frames.append(frame)
    except Exception:
        log('Unable to load clustering dataframe file -> %s...'
            % clustering_dataframe_file)
        sys.exit()

# construct multi-domain tables
log('Concatenating domain dataframes...')
merged_filtered_clustering_table = pd.concat(frames).reset_index()
DOMAINS = sorted(merged_filtered_clustering_table['domain'].dropna().unique())
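# Both script fragments above report progress through a bare log() call that
# is never defined. A minimal sketch of such a helper, assuming it only needs
# to timestamp messages on stdout (the real project may use logging instead):
import datetime

def log(message):
    """Print `message` prefixed with the current time."""
    print('[%s] %s' % (datetime.datetime.now().strftime('%H:%M:%S'), message))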