def DownloadUpdate(self, file):
    """Download a single repository file into the update temp directory.

    file: repository-relative path of the file to fetch.
    Returns 1 on success, 0 on failure; successful URLs are appended to
    self.DownloadedFiles, failed ones to self.DownloadFailedFiles.
    """
    self.log('Downloading: %s' % file)
    dirfile = os.path.join(self.UpdateTempDir, file)
    dirname, filename = os.path.split(dirfile)
    if not os.path.isdir(dirname):
        try:
            os.makedirs(dirname)
        except OSError:
            # Best effort: the retrieve below will fail loudly if this mattered.
            self.log('Error creating directory: ' + dirname)
    url = self.SVNPathAddress + urllib.quote(file)

    def _fetch():
        # For .xbt textures, read the reported size first (used for progress).
        # FIX: original used re.findall(".xbt", url) -- the unescaped dot
        # matched any character; escape it so only a literal ".xbt" matches.
        if re.search(r"\.xbt", url):
            self.totalsize = int(re.findall("File length: ([0-9]*)",
                                            urllib2.urlopen(url + "?view=log").read())[0])
        urllib.urlretrieve(url.decode("utf-8"), dirfile.decode("utf-8"))

    # Try once, then retry once after a short pause.
    # FIX: the original retry branch downloaded the file a second time
    # unconditionally after its if/else (three transfers total on retry).
    for attempt in range(2):
        try:
            if attempt:
                time.sleep(2)
            _fetch()
            self.DownloadedFiles.append(urllib.unquote(url))
            return 1
        except Exception:
            continue
    self.log("Download failed: %s" % url)
    self.DownloadFailedFiles.append(urllib.unquote(url))
    return 0
def processJarSection(self, jarinfo, jardir): '''Internal method called by makeJar to actually process a section of a jar.mn file. ''' # chromebasepath is used for chrome registration manifests # {0} is getting replaced with chrome/ for chrome.manifest, and with # an empty string for jarfile.manifest chromebasepath = '{0}' + os.path.basename(jarinfo.name) if self.outputFormat == 'jar': chromebasepath = 'jar:' + chromebasepath + '.jar!' chromebasepath += '/' jarfile = os.path.join(jardir, jarinfo.base, jarinfo.name) jf = None if self.outputFormat == 'jar': # jar jarfilepath = jarfile + '.jar' try: os.makedirs(os.path.dirname(jarfilepath)) except OSError, error: if error.errno != errno.EEXIST: raise jf = ZipFile(jarfilepath, 'a', lock=True) outHelper = self.OutputHelper_jar(jf)
def __enter__(self):
    """Open the shared state file under a file lock and return its contents.

    Takes an exclusive lock when opened for writing, a shared lock for
    read-only access; in read-only mode the file is closed again before
    returning, releasing the lock.  Returns the unpickled state dict.
    """
    # Create directory if it doesn't exist
    dir_name = os.path.dirname(FILE_NAME)
    try:
        os.makedirs(dir_name)
    except OSError as e:
        if e.errno != errno.EEXIST:
            raise
    # Open file with a lock and create it if it doesn't exist
    flag = os.O_RDWR if self._write is True else os.O_RDONLY
    mode = "rb+" if self._write is True else "rb"
    self._file = os.fdopen(os.open(FILE_NAME, os.O_CREAT | flag), mode)
    # Acquire a file lock: exclusive for writers, shared for readers.
    op = fcntl.LOCK_EX if self._write is True else fcntl.LOCK_SH
    fcntl.flock(self._file.fileno(), op)
    try:
        self.data = pickle.load(self._file)
    except EOFError:
        # Freshly created (empty) file: start from the default schema.
        self.data = {
            'jobs': [],
            'schedules': [],
            'workers': deque(),
            'next_job_id': 1,
            'next_schedule_id': 1,
            'next_worker_id': 1
        }
    if self._write is False:
        # Read-only: closing releases the shared flock immediately.
        self._file.close()
    return self.data
def buildApp():
    """Assemble the Windows application directory.

    Rebrands xulrunner-stub.exe as "Kylo" (icon, version info), copies the
    required Mozilla DLLs, then syncs the XULRunner runtime into place.
    """
    app_dir = os.path.join(Settings.prefs.build_dir, "application")

    # Stage 1: run Resource Hacker over xulrunner-stub.exe via build_stub.
    stub_temp = os.path.join(Settings.prefs.build_dir, "stub")
    if not os.path.exists(stub_temp):
        os.makedirs(stub_temp)
    build_stub.main(Settings.config.get('App', 'Version'),
                    Settings.config.get('App', 'BuildID'),
                    temp_dir=stub_temp,
                    stub_dir=app_dir)

    # Stage 2: copy the Mozilla runtime DLLs that the stub needs.
    for dll in ["mozcrt19.dll", "mozutils.dll", "gkmedias.dll"]:
        dll_path = os.path.join(Settings.prefs.xul_dir, dll)
        if os.path.isfile(dll_path):
            shutil.copy2(dll_path, app_dir)

    # Stage 3: drop the XULRunner directory into the application directory,
    # excluding the stub we already rebranded.
    xul_dest = os.path.join(app_dir, "xulrunner")
    if not os.path.exists(xul_dest):
        os.makedirs(xul_dest)
    build_util.syncDirs(Settings.prefs.xul_dir, xul_dest,
                        exclude=["xulrunner-stub.exe"])
def consume_in_thread(self):
    """Runs the ZmqProxy service.

    Ensures the IPC directory exists, registers the PULL consumer socket,
    and then delegates to the base class to start consuming.  All error
    paths log and re-raise via save_and_reraise_exception.
    """
    ipc_dir = CONF.rpc_zmq_ipc_dir
    consume_in = "tcp://%s:%s" % \
        (CONF.rpc_zmq_bind_address, CONF.rpc_zmq_port)
    consumption_proxy = InternalContext(None)
    # Create the IPC directory; only treat failure as fatal if the
    # directory really is missing afterwards (EEXIST is fine).
    try:
        os.makedirs(ipc_dir)
    except os.error:
        if not os.path.isdir(ipc_dir):
            with excutils.save_and_reraise_exception():
                LOG.error(_LE("Required IPC directory does not exist at"
                              " %s") % (ipc_dir, ))
    try:
        self.register(consumption_proxy, consume_in, zmq.PULL)
    except zmq.ZMQError:
        # Distinguish a permissions problem from a socket-in-use problem.
        if os.access(ipc_dir, os.X_OK):
            with excutils.save_and_reraise_exception():
                LOG.error(_LE("Permission denied to IPC directory at"
                              " %s") % (ipc_dir, ))
        with excutils.save_and_reraise_exception():
            LOG.error(_LE("Could not create ZeroMQ receiver daemon. "
                          "Socket may already be in use."))
    super(ZmqProxy, self).consume_in_thread()
def validate_references(references=None, path_to_bowtie2='bowtie2', logger=None,
                        environ_key='SOT_DEFAULT_REFERENCES', target_dir=curdir,
                        **kwargs):
    """Return a list of usable bowtie2 index names.

    References come from the `references` argument or, failing that, from
    the environment variable named by `environ_key`.  References without an
    existing index are built from FASTA into `target_dir` when possible;
    unusable references are dropped with a critical log message.
    """
    # FIX: the original called makedirs(target_dir, mode=0755) bare, which
    # raises OSError when target_dir already exists (and 0755 is Python-2-only
    # octal syntax).  Tolerate a pre-existing directory.
    try:
        makedirs(target_dir, mode=0o755)
    except OSError:
        pass
    debug('Validating references')
    new_references = []
    if references is None:
        if environ_key in environ:
            references = environ[environ_key].split()
        else:
            critical('no reference genomes specified')
            return []
    for r in references:
        bowtie2_index = find_bowtie2_index(r, path_to_bowtie2=path_to_bowtie2)
        if bowtie2_index is not None:
            new_references.append(bowtie2_index)
            continue
        # No pre-built index: try to build one from a FASTA file.
        if exists(r):
            debug('Attempting to build bowtie2 index from %s' % r)
            new_index = fasta_to_bowtie2(r, target_dir=target_dir,
                                         path_to_bowtie2=path_to_bowtie2)
            if new_index is not None:
                new_references.append(new_index)
                continue
            critical('Failed to build bowtie2 index.')
        critical('bowtie2 could not find the index for %s', r)
        critical('we will not align to %s', r)
    return new_references
def save_data(self, filename, data):
    '''
    Saves the data structure using pickle.  If the addon data path does
    not exist it will be automatically created.  This save function has
    the same restrictions as the pickle module.

    Args:
        filename (string): name of the file you want to save data to.
            This file will be saved in your addon's profile directory.
        data (data object/string): the object you want to save.

    Returns:
        True on success, False on failure.
    '''
    profile_path = self.get_profile()
    # Profile directory may already exist; any real problem surfaces below.
    try:
        os.makedirs(profile_path)
    except OSError:
        pass
    save_path = os.path.join(profile_path, filename)
    try:
        # FIX: original passed open(save_path, 'wb') directly to pickle.dump
        # and never closed the handle; use a with-block.
        with open(save_path, 'wb') as f:
            pickle.dump(data, f)
        return True
    except (IOError, OSError, pickle.PickleError):
        # FIX: an IOError from open() previously escaped even though the
        # documented contract is "False on failure".
        return False
def _create_output_dir(self, path, type_):
    """Create the output directory for `type_` files, wrapping any OS-level
    failure in a DataError that names the file type and path."""
    try:
        if not os.path.exists(path):
            os.makedirs(path)
    # FIX: 'except EnvironmentError, err' is Python-2-only syntax; the
    # 'as' form is valid on both Python 2.6+ and Python 3.
    except EnvironmentError as err:
        raise DataError("Creating %s file directory '%s' failed: %s"
                        % (type_.lower(), path, err.strerror))
def create_dir(path):
    """Create directory *path* (with parents), ignoring an already-exists error."""
    try:
        makedirs(path)
    except OSError as err:
        # Idempotent: a pre-existing directory is not an error.
        if err.errno == EEXIST:
            return
        raise
def generatePara(filename):
    """Generate per-size Para_<n>.ini files from a template config.

    Reads `filename` (section 'modelABrc'), builds an output directory named
    from the chiN/miuN/Beta values, writes one Para_<n>.ini per size entry
    listed in the JSON file named by 'listName', and symlinks the support
    files in FILELIST into the directory.

    Returns (output_path, [generated_filenames]).

    NOTE(review): relies on module-level KEYS, sz and FILELIST -- not visible
    here; assumes each line of the list file is a JSON array of dicts keyed
    by the names in `sz` (only the last line is kept) -- confirm with callers.
    """
    _config = ConfigParser.ConfigParser()
    # Preserve option-name case (default optionxform lowercases keys).
    _config.optionxform=str
    _config.read(filename)
    _filenames = []
    sec = 'modelABrc'
    Vals, sizes, = {}, []
    for k in KEYS:
        Vals[k] = _config.get(sec,k)
    # Output directory is named from the model parameters, e.g. c10m2b0.5.
    _path = 'c'+Vals['chiN']+'m'+Vals['miuN']+'b'+Vals['Beta']
    if not os.path.exists(_path):
        os.makedirs(_path)
    with open(Vals['listName'],'r') as f:
        for l in f.readlines():
            sizes = json.loads(l)
    # Continue numbering from however many Para*.ini already exist.
    count = len(glob.glob(os.path.join(_path,'Para*.ini')))
    for k in sizes:
        for name in sz:
            _config.set(sec,name,k[name])
        _fn = 'Para_' + str(count) + '.ini'
        count += 1
        with open(_path + '//' + _fn,'wb') as f:
            _config.write(f)
        _filenames.append(_fn)
    # Symlink (rather than copy) the shared support files into place.
    for ff in FILELIST:
        if not os.path.isfile(os.path.join(_path,ff)):
            #shutil.copy('.//'+ff,_path+'//'+ff)
            os.symlink(os.path.join(os.path.realpath('.'),ff), \
                os.path.join(_path,ff))
    #if not os.path.isfile(_path+'//scfRigid'):
    #    shutil.copy('.//scfRigid',_path+'//scfRigid')
    return _path, _filenames
def extract(self, file, dir):
    """Extract the zip archive `file` into directory `dir`.

    Prints each member name when self.verbose is set; otherwise tracks a
    rough completion percentage (self.percent granularity).
    """
    # A trailing ':' means a bare drive letter on Windows -- don't mkdir it.
    if not dir.endswith(':') and not os.path.exists(dir):
        os.mkdir(dir)
    zf = zipfile.ZipFile(file)
    # create directory structure to house files
    self._createstructure(file, dir)
    num_files = len(zf.namelist())
    percent = self.percent
    divisions = 100 / percent
    perc = int(num_files / divisions)
    # extract files to directory structure
    for i, name in enumerate(zf.namelist()):
        if self.verbose == True:
            # FIX: py2 print statement -> single-arg call form (same output
            # on both Python 2 and 3).
            print("Extracting %s" % name)
        elif perc > 0 and (i % perc) == 0 and i > 0:
            # NOTE(review): 'complete' is computed but never used -- looks
            # like a progress-callback hook was removed; kept for parity.
            complete = int(i / perc) * percent
        if not name.endswith('/'):
            (path, namet) = os.path.split(os.path.join(dir, name))
            # FIX: narrowed the bare except; only directory-creation
            # failures (e.g. already exists) are ignored here.
            try:
                os.makedirs(path)
            except OSError:
                pass
            # FIX: original leaked `outfile` on a write error; with-block
            # guarantees the handle is flushed and closed.
            with open(os.path.join(path, namet), 'wb') as outfile:
                outfile.write(zf.read(name))
def make_dir(dir):
    '''
    Creates a directory if it does not exist

    dir: absolute path to directory to create
    '''
    # Guard clause: nothing to do when the directory is already there.
    if os.path.isdir(dir):
        return
    os.makedirs(dir)
def download_entire_dataset(dataset_name, num_data, labels, method, cache_dir):
    """Downloads the train/valid/test parts of a dataset and stores them in
    the cache directory.

    Args:
        dataset_name: Dataset to be downloaded.
        num_data: Amount of data samples to be parsed from the dataset.
        labels: Target labels for regression.
        method: Method name. See `parse_arguments`.
        cache_dir: Directory to store the dataset to.
    """
    print('Downloading {}...'.format(dataset_name))
    preprocessor = preprocess_method_dict[method]()

    # Restrict parsing to the first `num_data` samples when non-negative;
    # a negative value means "take everything".
    target_index = None if num_data < 0 else numpy.arange(num_data)
    molnet_result = D.molnet.get_molnet_dataset(dataset_name,
                                                preprocessor,
                                                labels=labels,
                                                target_index=target_index)
    dataset_parts = molnet_result['dataset']

    # Cache each split to disk under a name derived from the split and size.
    if not os.path.exists(cache_dir):
        os.makedirs(cache_dir)
    for index, split in enumerate(['train', 'valid', 'test']):
        split_path = os.path.join(cache_dir,
                                  dataset_part_filename(split, num_data))
        NumpyTupleDataset.save(split_path, dataset_parts[index])
    return dataset_parts
def initDb(self):
    """Create the jdoc database directory (if missing) and its two tables."""
    db_dir = os.path.dirname(self.jdoc_db_path)
    if not os.path.exists(db_dir):
        os.makedirs(db_dir)
    # jdoc: one row per documented class; homelinks: javadoc root URLs.
    statements = [
        "create table jdoc(class_name varchar(100), package varchar(200), homeid int )",
        "create table homelinks( id INTEGER PRIMARY KEY, url varchar(200))",
    ]
    self.dbm.update(statements)
def convert_mp3_to_wav(filename, sample_frequency):
    """Decode an .mp3 file to mono .wav at `sample_frequency` Hz using lame.

    The wav lands in a sibling 'wave' directory, with an intermediate copy
    in 'tmp'.  Returns the new .wav path, or None for non-mp3 input.
    """
    if filename[-4:] != '.mp3':
        return
    parts = filename.split('/')
    base_name = parts[-1][0:-4]
    # Rebuild the parent directory string exactly as the original did
    # (note: an absolute path yields a leading '//', harmless on POSIX).
    prefix = '/' if filename[0] == '/' else ''
    parent = prefix + ''.join(p + '/' for p in parts[:-1])
    tmp_dir = parent + 'tmp'
    wav_dir = parent + 'wave'
    if not os.path.exists(wav_dir):
        os.makedirs(wav_dir)
    if not os.path.exists(tmp_dir):
        os.makedirs(tmp_dir)
    filename_tmp = tmp_dir + '/' + base_name + '.mp3'
    new_name = wav_dir + '/' + base_name + '.wav'
    sample_freq_str = "{0:.1f}".format(float(sample_frequency) / 1000.0)
    # Pass 1: re-encode to mono; pass 2: decode to wav at the target rate.
    os.system('lame -a -m m {0} {1}'.format(quote(filename), quote(filename_tmp)))
    os.system('lame --decode {0} {1} --resample {2}'.format(
        quote(filename_tmp), quote(new_name), sample_freq_str))
    return new_name
def _get_cache_dir():
    """Return the calibre cache directory, creating it if needed.

    Resolution order: config-dir cache (portable builds), the
    CALIBRE_CACHE_DIRECTORY override, then the per-platform OS cache
    location.  Falls back to the config-dir cache on any failure.
    """
    confcache = os.path.join(config_dir, u'caches')
    if isportable:
        # Portable installs keep everything under the config directory.
        return confcache
    if 'CALIBRE_CACHE_DIRECTORY' in os.environ:
        return os.path.abspath(os.environ['CALIBRE_CACHE_DIRECTORY'])
    if iswindows:
        w = plugins['winutil'][0]
        try:
            candidate = os.path.join(w.special_folder_path(w.CSIDL_LOCAL_APPDATA), u'%s-cache'%__appname__)
        except ValueError:
            # Shell folder lookup failed; fall back to the config cache.
            return confcache
    elif isosx:
        candidate = os.path.join(os.path.expanduser(u'~/Library/Caches'), __appname__)
    else:
        # Linux/other: honour the XDG spec, defaulting to ~/.cache.
        candidate = os.environ.get('XDG_CACHE_HOME', u'~/.cache')
        candidate = os.path.join(os.path.expanduser(candidate), __appname__)
    if isinstance(candidate, bytes):
        # Normalise byte paths (possible from the environment) to unicode.
        try:
            candidate = candidate.decode(filesystem_encoding)
        except ValueError:
            candidate = confcache
    if not os.path.exists(candidate):
        try:
            os.makedirs(candidate)
        except:
            # Unwritable location: fall back to the config cache.
            candidate = confcache
    return candidate
def cat_counter_references(counter_references=None, target_dir=curdir,
                           path_to_bowtie2='bowtie2', logger=None,
                           **kwargs):
    """Concatenate the counter-reference sequences into one FASTA and build
    a single bowtie2 index from it.  Returns the index name, or None on
    builder error (or immediately when no counter-references are given).

    NOTE(review): several suspect spots, left untouched here:
    - join(target_dir, counter_references) passes a *list* to join, which
      raises TypeError -- presumably a string name was intended; confirm.
    - `crefs_fa` (a file object) is placed into the bowtie2-build argv
      instead of its path, and the handle is never closed/flushed first.
    - `stderr.startswith('Error')` assumes text; Popen returns bytes on
      Python 3.
    """
    if counter_references is None:
        return
    # Tolerate a pre-existing target directory.
    try:
        makedirs(target_dir, mode=0755)
    except OSError:
        pass
    debug('Validating counter-references and building counter-reference index')
    valid_references = validate_references(references=counter_references,
                                           target_dir=target_dir,
                                           path_to_bowtie2=path_to_bowtie2,
                                           logger=logger,
                                           environ_key=
                                           'SOT_DEFAULT_COUNTER_REFERENCES')
    # Dump every valid index back to FASTA into one combined file.
    crefs_fa = open(join(target_dir, 'counter_references.fa'), 'w')
    for ref in valid_references:
        Popen([path_to_bowtie2 + '-inspect', ref], stdout=crefs_fa).wait()
    crefs_index = join(target_dir, counter_references)
    args = [path_to_bowtie2 + '-build', crefs_fa, crefs_index]
    P = Popen(args, stderr=PIPE)
    stderr = P.communicate()[1]
    if stderr.startswith('Error'):
        critical(stderr)
        critical('No counter-references will be used.')
        return
    return crefs_index
def populate_dir(path, files):
    """Create directory `path` (if needed) and write each mapping entry of
    `files` ({name: content}) into it as a text file."""
    if not os.path.exists(path):
        os.makedirs(path)
    # FIX: iteritems() is Python-2-only; items() behaves identically here.
    for name, content in files.items():
        with open(os.path.join(path, name), "w") as fp:
            fp.write(content)
        # FIX: dropped the redundant fp.close() -- the with-block already
        # closes the file.
def populate():
    """Sort the files in the current working directory into subfolders
    named after each file's extension, creating the folders as needed."""
    cwd = os.getcwd()
    extension_of = {}      # file name -> its extension
    existing_dirs = set()  # directories already present in cwd
    seen_exts = set()      # extensions encountered among the files

    # Scan the directory, classifying entries as files or folders.
    for entry in os.listdir(cwd):
        if isfile(join(cwd, entry)):
            ext = entry.split('.')[-1]
            extension_of[entry] = ext
            print(ext)
            seen_exts.add(ext)
        else:
            existing_dirs.add(entry)

    # Create a folder for every extension that doesn't already have one.
    for ext in seen_exts:
        if ext not in existing_dirs:
            os.makedirs(cwd + '/' + ext)

    # Move each file into its extension's folder.
    for name, ext in extension_of.items():
        os.rename(cwd + '/' + name, cwd + '/' + ext + '/' + name)
def doCopy(self, res):
    """Copy the source tree into the working directory.

    On non-POSIX platforms uses shutil.copytree in a thread; on POSIX
    shells out to `cp -R -P -p` via RunProcess.  Returns a Deferred firing
    with the rc (0 success, -1 on copytree exception).
    """
    # now copy tree to workdir
    fromdir = os.path.join(self.builder.basedir, self.srcdir)
    todir = os.path.join(self.builder.basedir, self.workdir)
    if runtime.platformType != "posix":
        # No cp available: do the copy in a worker thread instead.
        d = threads.deferToThread(shutil.copytree, fromdir, todir)

        def cb(_):
            return 0  # rc=0

        def eb(f):
            self.sendStatus(
                {'header': 'exception from copytree\n' + f.getTraceback()})
            return -1  # rc=-1
        d.addCallbacks(cb, eb)
        return d
    if not os.path.exists(os.path.dirname(todir)):
        os.makedirs(os.path.dirname(todir))
    if os.path.exists(todir):
        # I don't think this happens, but just in case..
        log.msg(
            "cp target '%s' already exists -- cp will not do what you think!"
            % todir)
    # -P: don't follow symlinks; -p: preserve mode/ownership/timestamps.
    command = ['cp', '-R', '-P', '-p', fromdir, todir]
    c = runprocess.RunProcess(self.builder, command, self.builder.basedir,
                              sendRC=False, timeout=self.timeout,
                              maxTime=self.maxTime,
                              logEnviron=self.logEnviron, usePTY=False)
    self.command = c
    d = c.start()
    d.addCallback(self._abandonOnFailure)
    return d
def create_directory(dir):
    """Ensure directory *dir* exists, logging whether it was created or
    was already present."""
    if not os.path.exists(dir):
        os.makedirs(dir)
        #os.fchmod(dir,770)
        logger.info('Directory created: ' + dir, __name__)
    else:
        logger.info('Directory exists: ' + dir, __name__)
def configure(self):
    """Set up ATLAS configure options and run configure from a separate
    build directory (ATLAS refuses to build in-tree)."""
    # configure for 64-bit build
    self.updatecfg('configopts', "-b 64")

    if self.getcfg('ignorethrottling'):
        # ignore CPU throttling check
        # this is not recommended, it will disturb the measurements done by ATLAS
        # used for the EasyBuild demo, to avoid requiring root privileges
        self.updatecfg('configopts', '-Si cputhrchk 0')

    # if LAPACK is found, instruct ATLAS to provide a full LAPACK library
    # ATLAS only provides a few LAPACK routines natively
    if self.getcfg('full_lapack'):
        lapack = get_software_root('LAPACK')
        if not lapack:
            self.log.error("netlib's LAPACK library not available,"
                           " required to build ATLAS with a full LAPACK library.")
        else:
            self.updatecfg('configopts',
                           ' --with-netlib-lapack=%s/lib/liblapack.a' % lapack)

    # enable building of shared libraries (requires -fPIC)
    if self.getcfg('sharedlibs') or self.toolkit().opts['pic']:
        self.log.debug("Enabling -fPIC because we're building shared ATLAS libs, or just because.")
        self.updatecfg('configopts', '-Fa alg -fPIC')

    # ATLAS only wants to be configured/built in a separate dir'
    try:
        build_subdir = "obj"
        os.makedirs(build_subdir)
        os.chdir(build_subdir)
    except OSError as err:
        self.log.error("Failed to create obj directory to build in: %s" % err)
def _exec(self, args, run_in_check_mode=False, check_rc=True):
    """Run an npm command with the module's configured flags.

    Returns the command's stdout, or '' when skipped in check mode.
    """
    # Skip real execution in check mode unless explicitly allowed
    # (equivalent to: not check_mode or (check_mode and run_in_check_mode)).
    if self.module.check_mode and not run_in_check_mode:
        return ''
    cmd = self.executable + args
    if self.glbl:
        cmd.append('--global')
    if self.production:
        cmd.append('--production')
    if self.ignore_scripts:
        cmd.append('--ignore-scripts')
    if self.name:
        cmd.append(self.name_version)
    if self.registry:
        cmd.append('--registry')
        cmd.append(self.registry)
    # If path is specified, cd into that path and run the command.
    cwd = None
    if self.path:
        self.path = os.path.abspath(os.path.expanduser(self.path))
        if not os.path.exists(self.path):
            os.makedirs(self.path)
        if not os.path.isdir(self.path):
            self.module.fail_json(msg="path %s is not a directory" % self.path)
        cwd = self.path
    rc, out, err = self.module.run_command(cmd, check_rc=check_rc, cwd=cwd)
    return out
def clean_destination(self):
    """Django form clean hook for the destination path.

    Resolves the jail, validates the combined path length, creates the
    directory inside the jail if missing, and returns the cleaned
    destination (relative to the jail root).

    NOTE(review): `create` is read but never used here -- confirm intent.
    NOTE(review): stripping ".." via replace() is weak path sanitisation
    (e.g. "...." collapses to ".."); consider os.path.normpath + prefix
    checks if this input is untrusted.
    """
    create = self.cleaned_data.get("create")
    dest = self.cleaned_data.get("destination")
    dest = os.path.abspath(dest.strip().replace("..", ""))
    if not self.jail:
        jail = self.cleaned_data.get("jail")
        if jail:
            self.jail = Jails.objects.get(jail_host=jail)
    if not self.jail:
        raise forms.ValidationError(
            _("This shouldn't happen, but the jail could not be found")
        )
    # Full on-disk path: <jails root>/<jail name><dest>.
    full = "%s/%s%s" % (self.jc.jc_path, self.jail.jail_host, dest)
    if len(full) > 88:
        # Mount-path length limit.
        raise forms.ValidationError(
            _("The full path cannot exceed 88 characters")
        )
    if not os.path.exists(full):
        os.makedirs(full)
    return dest
def generate_report(task, db):
    """Write a timestamped text report for `task` (params, stats, history)
    into <task directory>/reports/ and return True.

    NOTE(review): the key/value line appears to be written twice (once in
    the filtered branch, once in the bare try) and the creation_date
    formatting branch is dead since "creation_date" is in the exclusion
    list -- the nesting below is a best-effort reconstruction; confirm
    against the original file.
    """
    date = dt.now()
    date = date.strftime('%d-%m-%Y_%H-%M')
    directory = os.path.join(task['directory'], 'reports')
    if not os.path.exists(directory):
        os.makedirs(directory)
    filename = "%s/%s.txt" %(directory, date)
    with open(filename, 'w') as f:
        f.write("\n======PROJECT PARAMS======\n")
        for k, v in task.items():
            if k not in ["action", "status","msg", "date", "creation_date", "_id"]:
                if k == "creation_date":
                    v = task[k].strftime('%d-%m-%Y %H-%M-%S')
                f.write(str(k)+": "+str(v)+"\n")
            try:
                f.write(str(k)+": "+str(v)+"\n")
            except Exception:
                pass
        f.write(db.show_stats())
        f.write("\n\n======HISTORY OF THE PROJECT======\n")
        # One (action, status, msg, date) tuple per history entry.
        date_list = [n.strftime('%d-%m-%Y %H-%M-%S') for n in task["date"]]
        status_list = list(zip(task["action"], task["status"],task["msg"], date_list))
        for msg in status_list:
            f.write("\n-"+str(msg))
    print "Your report is ready!\nCheck here: %s" %(filename)
    return True
def save_template(data=None):
    """\
    Returns an out file name and template description for saving a template

    Shows the TemplateInfoDialog (pre-filled from `data` when given), reads
    the user's entries into a fresh Template, and resolves the output path
    under ~/.wxglade/templates/<name>.wgt.

    NOTE(review): this block appears truncated -- the final return of
    (ret, retdata) is presumably below the visible region.
    """
    dlg = templates_ui.TemplateInfoDialog(None, -1, "")
    if data is not None:
        # Pre-populate the dialog from the existing template's metadata.
        dlg.template_name.SetValue(
            misc.wxstr(os.path.basename(os.path.splitext(data.filename)[0])))
        dlg.author.SetValue(misc.wxstr(data.author))
        dlg.description.SetValue(misc.wxstr(data.description))
        dlg.instructions.SetValue(misc.wxstr(data.instructions))
    ret = None
    retdata = Template()
    if dlg.ShowModal() == wx.ID_OK:
        ret = dlg.template_name.GetValue().strip()
        retdata.author = dlg.author.GetValue()
        retdata.description = dlg.description.GetValue()
        retdata.instructions = dlg.instructions.GetValue()
        if not ret:
            wx.MessageBox(_("Can't save a template with an empty name"),
                          _("Error"), wx.OK|wx.ICON_ERROR)
    dlg.Destroy()
    name = ret
    if ret:
        d = os.path.join(config._get_appdatapath(), '.wxglade', 'templates')
        if not os.path.exists(d):
            try:
                os.makedirs(d)
            except (OSError, IOError), e:
                print _("ERROR creating %s: %s") % (d, e)
                return None, retdata
        ret = os.path.join(d, ret + '.wgt')
def buildInstaller():
    """ Build the installer"""
    logger = build_util.getLogger('build_Installer')
    makensis = os.path.normpath(Settings.config.get("env", "NSIS"))
    nsi_file = os.path.normpath(
        os.path.join(module_path, "installer", "kylo.nsi"))
    version = build_util.VersionFormat()

    # NSIS /D defines handed to makensis on the command line.
    nsis_defs = {
        "APP_DIR": Settings.prefs.kylo_build_dir,
        "BUILD_ID": Settings.config.get("App", "BuildID"),
        "WIN_VERSION": version.win,
        "FULL_VERSION": version.full,
        "FILENAME_VERSION": version.full.replace(".", "_"),
        "VERSION_MAJOR": version.ints[0],
        "VERSION_MINOR": version.ints[1],
        "DISPLAY_VERSION": version.display,
        "OUT_FILE_DIR": Settings.prefs.dist_dir,
        "LOCALE": "en-US",
    }

    # Create dist_dir if it doesn't exist
    if not os.path.exists(Settings.prefs.dist_dir):
        logger.info("Creating dist directory: %s", Settings.prefs.dist_dir)
        os.makedirs(Settings.prefs.dist_dir)

    define_args = ["/D%s=%s" % (k, v) for (k, v) in nsis_defs.iteritems()]
    args = [makensis] + define_args + [nsi_file]
    logger.debug("Running: **" + " ".join(args))
    build_util.runSubprocess(args, logger)
def main():
    """Scrape Sephora product pages and reviews for one skincare category.

    Pages through the category listing with a Chrome webdriver, saving each
    product's main page and reviews via scrap_maincontent/scrap_reviews,
    and skips products whose html files already exist (resume support).

    NOTE(review): this block appears truncated (no visible end of the
    while-loop / function), and the URL below contains '¤tPage' which
    looks like mojibake for '&currentPage' -- confirm against the
    original source.
    """
    path_to_chromedriver = "/Users/xuanzhang/Developer/python/scrapping/chromedriver"
    browser0 = webdriver.Chrome(executable_path = path_to_chromedriver)
    website = "http://www.sephora.com/"
    # categories to work for now
    categories = ['face-wash-facial-cleanser',\
                  'facial-toner-skin-toner',\
                  'night-cream',\
                  'eye-cream-dark-circles',\
                  'moisturizer-skincare',\
                  'bb-cream-cc-cream',\
                  'face-serum']
    category = categories[-1]
    # if no such directory for the html, create on
    category_path = '../scraping/sephora/' + category
    if not os.path.isdir(category_path):
        os.makedirs(category_path)
    # in case aborted, check what pages have been scraped
    files = [f for f in os.listdir(category_path) if f[-4:]=='html' ]
    product_list = [re.findall(r'(P\w+)',file)[0] for file in files]
    page_number = 0
    # scrap and hit 'arrow-right' to scrap
    while EC.element_to_be_clickable((By.CLASS_NAME, "arrow arrow-right")):
        page_number += 1
        # pageSize = -1 s.t. each page has 300 items
        url = website + category + '?pageSize=-1¤tPage=' + str(page_number)
        browser0.get(url)
        # get item list by tag sku-item, and go through the list to scrap
        items = browser0.find_elements_by_class_name('sku-item')
        for item in items:
            product_id = item.get_attribute('data-product_id')
            product_path = category_path + '/' + product_id
            # if product page is already scrapped, skip to next product
            if (product_id in product_list) and\
               (product_id+'_reviews' in product_list):
                continue
            time.sleep(.5)
            # open a new browser window for reviews
            browser1 = webdriver.Chrome(executable_path = path_to_chromedriver)
            browser1.get(item.get_attribute('href'))
            # scrap maincontent or reviews, whichever is not scraped
            if product_id not in product_list:
                flag1 = scrap_maincontent(browser1, product_path)
            else:
                flag1 = True
            if product_id+'_reviews' not in product_list:
                flag2 = scrap_reviews(browser1, product_path, product_id)
            else:
                flag2 = True
            print product_id, flag1, flag2
def on_login_succeed(self):
    '''callback called on login succeed

    Persists window geometry and config, loads plugins (bundled and from
    the user's config-dir plugins folder), draws the main screen, starts
    the user's active plugins, and wires up session-dependent services.
    '''
    self._save_login_dimensions()
    self.config.save(self.config_path)
    plugin_manager = get_pluginmanager()
    # Bundled plugins first, then user plugins from the config directory.
    plugin_manager.scan_directory('plugins')
    plugin_dir = self.config_dir.join('plugins')
    if not os.path.exists(plugin_dir):
        os.makedirs(plugin_dir)
    plugin_manager.scan_directory(plugin_dir)
    self.draw_main_screen()
    # Ensure the active-plugins key exists before iterating it.
    self.session.config.get_or_set('l_active_plugins', [])
    for plugin in self.session.config.l_active_plugins:
        plugin_manager.plugin_start(plugin, self.session)
        # hack: where do we start this? how do we generalize for other
        # extensions?
        if plugin == "music":
            extension.get_and_instantiate('listening to', self.window.content)
    self.set_default_extensions_from_config()
    self._sync_emesene1()
    self.logged_in = True
    if self.network_checker is not None:
        self.network_checker.set_new_session(self.session)
def export_data(input_path, output_path, output_format):
    """Export a QIIME 2 Result at `input_path` to `output_path`.

    When `output_format` is None the result's native format is exported;
    otherwise the result is viewed as the requested format first
    (not supported for visualizations).  Prints a colored status line.
    """
    import qiime2.util
    import qiime2.sdk
    # FIX: 'import distutils' alone does not make distutils.dir_util
    # available; import the submodule explicitly.
    import distutils.dir_util
    result = qiime2.sdk.Result.load(input_path)
    if output_format is None:
        if isinstance(result, qiime2.sdk.Artifact):
            output_format = result.format.__name__
        else:
            output_format = 'Visualization'
        result.export_data(output_path)
    else:
        if isinstance(result, qiime2.sdk.Visualization):
            error = '--output-format cannot be used with visualizations'
            click.secho(error, fg='red', bold=True, err=True)
            click.get_current_context().exit(1)
        else:
            source = result.view(qiime2.sdk.parse_format(output_format))
            if os.path.isfile(str(source)):
                if os.path.isfile(output_path):
                    os.remove(output_path)
                else:
                    # create directory (recursively) if it doesn't exist yet
                    # FIX: os.makedirs('') raises FileNotFoundError when
                    # output_path has no directory component; guard it.
                    parent = os.path.dirname(output_path)
                    if parent:
                        os.makedirs(parent, exist_ok=True)
                qiime2.util.duplicate(str(source), output_path)
            else:
                distutils.dir_util.copy_tree(str(source), output_path)

    output_type = 'file' if os.path.isfile(output_path) else 'directory'
    success = 'Exported %s as %s to %s %s' % (input_path, output_format,
                                              output_type, output_path)
    click.secho(success, fg='green')