def _prepare_pgk_base(self, template_name, root='', gpl=True):
    """Create and populate the src base directory for the OS X package.

    template_name[in] name template for the resulting package; it is
                      formatted with self.name and self.version
    root[in]          the root path for the package contents
    gpl[in]           if false license and readme will correspond to the
                      commercial package instead of GPL files.
                      default=True
    """
    # copy and create necessary files
    osx_dist_name = template_name.format(self.name, self.version)
    osx_pkg_name = "{0}.pkg".format(osx_dist_name)
    osx_pkg_contents = os.path.join(root, osx_pkg_name, 'Contents')
    osx_pkg_resrc = os.path.join(osx_pkg_contents, 'Resources')
    self.mkpath(osx_pkg_resrc)
    # (source, destination) pairs of support files copied verbatim
    copy_file_src_dst = [
        (os.path.join("support", "osx", "PkgInfo"),
         os.path.join(osx_pkg_contents, "PkgInfo")),
        #(os.path.join("support", "osx", "background.jpg"),
        # os.path.join(osx_pkg_resrc, "background.jpg")),
        (os.path.join("support", "osx", "Welcome.rtf"),
         os.path.join(osx_pkg_resrc, "Welcome.rtf"))
    ]
    # choose the GPL or commercial readme/license for the installer
    if gpl:
        copy_file_src_dst += [
            (os.path.join(os.getcwd(), "README"),
             os.path.join(osx_pkg_resrc, "ReadMe.txt")),
            (os.path.join(os.getcwd(), "COPYING"),
             os.path.join(osx_pkg_resrc, "License.txt"))
        ]
    else:
        copy_file_src_dst += [
            (os.path.join(os.getcwd(), "README_com.txt"),
             os.path.join(osx_pkg_resrc, "ReadMe.txt")),
            (os.path.join(os.getcwd(), "LICENSE_com.txt"),
             os.path.join(osx_pkg_resrc, "License.txt"))
        ]
    # plist templates that need version placeholders substituted
    property_files = [
        (os.path.join("support", "osx", "Info.plist"),
         os.path.join(osx_pkg_contents, "Info.plist")),
        (os.path.join("support", "osx", "Description.plist"),
         os.path.join(osx_pkg_resrc, "Description.plist"))
    ]
    for pro_file, dest_file in property_files:
        with open(pro_file) as temp_f:
            lines = (ln.replace('\n', '') for ln in temp_f)
            major_version = self.version.split('.')[0]
            minor_version = self.version.split('.')[1]
            # fill in the {VERSION}/{MAJORVERSION}/{MINORVERSION}
            # placeholders of the plist template
            content = '\n'.join(lines).format(
                VERSION=self.version,
                MAJORVERSION=major_version,
                MINORVERSION=minor_version)
            with open(dest_file, 'w') as dest_f:
                dest_f.write('{0}\n'.format(content))
    for src, dst in copy_file_src_dst:
        copy_file(src, dst)
def export_note_to_hexo_path(from_path, to_path, categories):
    """Convert one Quiver note (.qvnote) into a hexo markdown post.

    from_path[in]  path of the .qvnote directory; must contain meta.json,
                   content.json and may contain a resources/ subdirectory
    to_path[in]    hexo post directory the .md file is written into
    categories[in] categories passed to the hexo front-matter template
    """
    print('Processing qvnote:', from_path)
    # BUGFIX: the json files were opened without ever being closed;
    # use context managers so the handles are released deterministically
    with open(os.path.join(from_path, 'meta.json'), 'r') as meta_f:
        meta = json.loads(meta_f.read())
    with open(os.path.join(from_path, 'content.json'), 'r') as content_f:
        content = json.loads(content_f.read())
    md_filename = fetch_hexomd_filename(content)
    md_tpl = gen_hexomd_template(meta, categories)
    md_content, resources = note_to_md(meta, content, md_filename)
    # write markdown content to hexo post dir
    with open(os.path.join(to_path, md_filename + '.md'), 'w') as f:
        f.write(md_tpl + md_content)
    print('Write markdown content to', os.path.join(to_path, md_filename + '.md'))
    if resources:
        # make dir for resources; resources stored in
        # to_path/static/img/md_filename/
        res_dirpath = os.path.join(to_path, 'static', 'img', md_filename)
        if not os.path.exists(res_dirpath):
            # BUGFIX: os.mkdir fails when static/img does not exist yet;
            # makedirs creates the intermediate directories as well
            os.makedirs(res_dirpath)
            print('Make dir for resources:', res_dirpath)
        # copy resources to corresponding dir of the note
        if os.path.exists(res_dirpath):
            for f in resources:
                file_util.copy_file(os.path.join(from_path, 'resources', f),
                                    os.path.join(res_dirpath, f))
                print('Copy', f, 'to resources dir')
def run(self):
    """Run the install, then stage desktop/locale/license files.

    Aborts on unsupported platforms.  On Linux the whole built share/
    tree is installed; on Windows only the i18n files are installed.
    The LICENSE file is installed on all supported platforms.
    """
    if sys.platform not in ('linux3', 'linux2', 'win32', 'darwin'):
        msg = "**Error: Can't install on this platform: %s" % sys.platform
        print msg
        sys.exit(1)
    # choose between egg-style and plain distutils installation
    if not self.single_version_externally_managed:
        self.do_egg_install()
    else:
        _install.run(self)
    # install bauble.desktop and icons
    if sys.platform in ('linux3', 'linux2'):
        # install everything in share
        dir_util.copy_tree(os.path.join(self.build_base, 'share'),
                           os.path.join(self.install_data, 'share'))
    elif sys.platform == 'win32':
        # install only i18n files
        # NOTE(review): locale_path is a module-level name — assumed to
        # point at the built locale directory; confirm at the file top.
        locales = os.path.dirname(locale_path)
        install_cmd = self.get_finalized_command('install')
        build_base = install_cmd.build_base
        src = os.path.join(build_base, locales)
        dir_util.copy_tree(src, os.path.join(self.install_data, locales))
    file_util.copy_file(
        "LICENSE",
        os.path.join(self.install_data, 'share', 'LICENSE.bauble'))
def run(self):
    """Regenerate the gettext .pot file and create/update the .po files
    for every language listed in data/locale/LINGUAS."""
    # build the string of all the source files to be translated
    sources = ''
    for dirpath, dirs, files in os.walk('epymc'):
        for name in fnmatch.filter(files, '*.py'):
            sources += ' ' + os.path.join(dirpath, name)
    # create or update the reference pot file
    pot_file = os.path.join('data', 'locale', 'epymc.pot')
    info('updating pot file: %s' % (pot_file))
    cmd = 'xgettext --from-code=UTF-8 --force-po ' \
          '--output=%s %s' % (pot_file, sources)
    os.system(cmd)
    # create or update all the .po files
    linguas_file = os.path.join('data', 'locale', 'LINGUAS')
    for lang in open(linguas_file).read().split():
        po_file = os.path.join('data', 'locale', lang + '.po')
        # NOTE(review): mo_file is computed but never used in this
        # method — presumably the .mo is compiled by another command.
        mo_file = os.path.join('epymc', 'locale', lang,
                               'LC_MESSAGES', 'epymc.mo')
        if os.path.exists(po_file):
            # update an existing po file
            info('updating po file: %s' % (po_file))
            cmd = 'msgmerge -N -U -q %s %s' % (po_file, pot_file)
            os.system(cmd)
        else:
            # create a new po file
            info('creating po file: %s' % (po_file))
            mkpath(os.path.dirname(po_file), verbose=False)
            copy_file(pot_file, po_file, verbose=False)
def copy_python_framework(info, dst):
    """Copy the minimal set of Python.framework files into dst.

    info[in] mapping with at least 'location' and 'name' describing the
             framework binary being bundled
    dst[in]  destination root directory the framework is copied into
    """
    # XXX - In this particular case we know exactly what we can
    # get away with.. should this be extended to the general
    # case? Per-framework recipes?
    indir = os.path.dirname(os.path.join(info["location"], info["name"]))
    outdir = os.path.dirname(os.path.join(dst, info["name"]))
    mkpath(os.path.join(outdir, "Resources"))
    # Since python 3.2, the naming scheme for config files location has
    # considerably complexified. The old, simple way doesn't work anymore.
    # Fortunately, a new module was added to get such paths easily.
    # It's possible that virtualenv is messing with us here, so we only use
    # the rightmost part of each of the two paths below. For pyconfig_path,
    # it's the last 3 elements of the path (include/python3.2m/pyconfig.h)
    # and for makefile_path it's the last 4
    # (lib/python3.2/config-3.2m/Makefile). Yes, this kind of location can
    # change depending on the platform, but we're only supporting Mac OS X
    # eh? We'll take these last path parts and append them to indir and
    # we'll have our non-virtualenv paths.
    pyconfig_path = sysconfig.get_config_h_filename()
    makefile_path = sysconfig.get_makefile_filename()
    pyconfig_path = op.join(*pyconfig_path.split(os.sep)[-3:])
    makefile_path = op.join(*makefile_path.split(os.sep)[-4:])
    # sanity-check the framework-relative layout we just derived
    assert pyconfig_path.startswith("include")
    assert makefile_path.startswith("lib")
    # distutils looks for some files relative to sys.executable, which
    # means they have to be in the framework...
    mkpath(op.join(outdir, op.dirname(pyconfig_path)))
    mkpath(op.join(outdir, op.dirname(makefile_path)))
    fmwkfiles = [
        os.path.basename(info["name"]),
        "Resources/Info.plist",
        pyconfig_path,
        makefile_path,
    ]
    for fn in fmwkfiles:
        copy_file(os.path.join(indir, fn), os.path.join(outdir, fn))
def build_extension(self, ext):
    """Build the extension; on non-POSIX platforms also copy the import
    library (.lib) next to the compiled module so dependents can link.

    ext[in] the distutils Extension being built

    Note: command-line arguments prevail over extension arguments, but if
    no command-line argument is defined, the extension argument is taken
    into account.
    """
    self.__ext = ext
    build_ext.build_extension(self, ext)
    if os.name != 'posix':
        # Copy the .lib file produced by the MSVC toolchain.
        fullname = self.get_ext_fullname(ext.name)
        # BUGFIX: use str methods instead of the deprecated `string`
        # module functions (string.split/string.join were removed in
        # Python 3; the str methods behave identically on Python 2).
        modpath = fullname.split('.')
        package = '.'.join(modpath[0:-1])
        base = modpath[-1]
        if self.inplace:
            # ignore build-lib -- put the compiled extension into
            # the source tree along with pure Python modules
            build_py = self.get_finalized_command('build_py')
            package_dir = build_py.get_package_dir(package)
            dstLib = package_dir
        else:
            dstLib = self.build_lib
        srcLib = os.path.join(self.build_temp, base + ".lib")
        dstLib = os.path.join(dstLib, package)
        copy_file(srcLib, dstLib)
def build_extension(self, ext):
    """Install the extension's single pre-built source file as the
    extension binary (no compilation is performed)."""
    source = ext.sources[0]
    target = self.get_ext_fullpath(ext.name)
    copy_file(source, target,
              verbose=self.verbose, dry_run=self.dry_run)
def add_docs(doc_path, doc_files=None):
    """Prepare documentation files for Connector/Python.

    doc_path[in]  directory the documentation is copied into
                  (created if needed)
    doc_files[in] optional sequence of file names to install; defaults
                  to the standard PDF/HTML/CSS set

    Each file is looked up first in docs/, then in build/; when found in
    neither place an empty placeholder is created so packaging can
    proceed, and the (possibly empty) file is copied into doc_path.
    """
    mkpath(doc_path)
    if not doc_files:
        doc_files = [
            'mysql-connector-python.pdf',
            'mysql-connector-python.html',
            'mysql-html.css',
        ]
    for file_name in doc_files:
        # Check if we have file in docs/
        doc_file = os.path.join('docs', file_name)
        if not os.path.exists(doc_file):
            # it might be in build/
            doc_file = os.path.join('build', file_name)
            if not os.path.exists(doc_file):
                # we do not have it, create a fake one
                log.warn("documentation '%s' does not exist; creating"
                         " empty", doc_file)
                open(doc_file, "w").close()
        # BUGFIX: this copy was guarded by `if not os.path.exists(doc_file)`
        # which is always False at this point (the file was just found or
        # created), so nothing was ever copied.  Copy unconditionally.
        copy_file(doc_file, doc_path)
def wsjt_install(install):
    """Optionally stage the prebuilt Audio.so, then run distutils setup().

    install[in] when 1, copy the prebuilt WsjtMod/Audio.so into
                build/lib/WsjtMod before invoking setup()
    """
    #
    # In a true python environment, Audio.so would be compiled from python
    # I'm doing a nasty hack here to support our hybrid build system -db
    #
    if install == 1:
        os.makedirs('build/lib/WsjtMod')
        copy_file('WsjtMod/Audio.so', 'build/lib/WsjtMod')
    setup(name='Wsjt',
          version=version,
          description='Wsjt Python Module for Weak Signal detection',
          long_description='''
WSJT is a computer program designed to facilitate Amateur Radio
communication under extreme weak-signal conditions. Three very different
coding and modulation methods are provided: one for communication by
"meteor scatter" techniques on the VHF bands; one for meteor and
ionospheric scatter, primarily on the 6 meter band; and one for the very
challenging EME (Earth-Moon-Earth) path.
''',
          author='Joe Taylor',
          author_email='*****@*****.**',
          license='GPL',
          url='http://physics.princeton.edu/pulsar/K1JT',
          scripts=['wsjt', 'wsjt.py'],
          packages=['WsjtMod'],
          )
def copy_file_from(self, source, incremental=False):
    """
    Copy *source* into this directory.

    When *incremental* is True, the copy happens only if the source
    file is newer than the existing destination file.
    """
    src_path = str(source)
    file_util.copy_file(src_path, self.path, update=incremental)
def run(self):
    """Build the frozen executable: analyze dependencies, stamp the
    version resource, append the zipped Python modules to the stub exe,
    and copy required DLLs into the dist directory."""
    if not isdir(self.dist_dir):
        os.makedirs(self.dist_dir)
    # Analyze dependencies
    modules, dlls = self.analyze()
    # Create the executable.
    #
    # Note that you must add resources before appending the zip file.
    shutil.copyfile(self.get_src_exe(), self.exe_path)
    # Add the version as a resource.
    resources.AddVersionResource(self.exe_path,
                                 self.distribution.metadata.version,
                                 self.version_strings)
    # Append the Python modules and extra files to the executable.
    fd = open(self.exe_path, 'ab')
    fd.write(self.zip_files(modules))
    fd.close()
    # Copy extensions and DLLs to the dist directory.
    # self.verbose defaults to 1, which stinks (in my opinion). Shell
    # commands should be silent unless you ask for output. Therefore,
    # we'll only set the copy verbosity if self.verbose > 1. (Put this
    # somewhere at the top of the file or in a hacking document.)
    for dll in dlls:
        copy_file(dll, join(self.dist_dir, basename(dll)),
                  verbose=(self.verbose > 1))
def copy_gstreamer_dlls(self, dest_path):
    """Copy gstreamer DLLs to a directory.

    This method does the work for setup_gstreamer_bin_dir().  It copies
    over DLLs from the gstreamer bin directory, unless they also exist
    in GTK_BIN_PATH.
    """
    src_path = os.path.join(GSTREAMER_PATH, 'bin')
    files_to_copy = []
    for name in os.listdir(src_path):
        if name == 'libgtkgl-2.0-1.dll':
            # This one isn't in the GTK bin directory, but we still want
            # to skip it.  It's definitely not needed, and might get in
            # the way.
            continue
        if os.path.exists(os.path.join(GTK_BIN_PATH, name)):
            # This file is also in the GTK runtime directory. We want to
            # use the GTK version, so don't copy the file
            continue
        files_to_copy.append(name)
    dir_util.create_tree(dest_path, files_to_copy)
    for name in files_to_copy:
        # update=True: only overwrite when the source file is newer
        file_util.copy_file(os.path.join(src_path, name), dest_path,
                            update=True)
def createMoFile(self):
    """Compile self.POFILE into a binary .mo catalog in self.LOCALEDIR.

    Uses the GNU msgfmt binary when self.USE_MSGFMT_BINARY is set
    (stricter checks), otherwise the pure-Python msgfmt.py shipped with
    Chandler.  Any failure is reported via self.raiseError.
    """
    try:
        mkpath(self.LOCALEDIR)
        copy_file(self.POFILEPATH, self.LOCALEDIR)
        # msgfmt is run from inside LOCALEDIR; restore cwd afterwards
        cwd = os.getcwd()
        os.chdir(self.LOCALEDIR)
        if self.USE_MSGFMT_BINARY:
            # The msgfmt binary that ships as part of GNU gettext tools
            # is more robust then the Python version and includes
            # error checking capabilities.
            moFile = self.POFILE[:-2] + "mo"
            exp = ["msgfmt", "-c", "--check-accelerators",
                   "-o%s" % moFile, self.POFILE]
        else:
            # The msgfmt gettext binary is not installed by default on
            # Windows and OS X. The Python version of msgfmt is included
            # however with Chandler.
            msgfmt = os.path.join(self.CHANDLERHOME, "tools", "msgfmt.py")
            exp = [self.PYTHON, msgfmt, self.POFILE]
        result = build_lib.runCommand(exp, timeout=TIMEOUT, logger=ignore,
                                      ignorepreexec=True)
        os.chdir(cwd)
        if result != 0:
            raise Exception(' '.join(exp) +
                            ' failed with error code %d' % result)
    except Exception, e:
        self.raiseError("Unable to create mo file from %s': %s."
                        % (self.POFILEPATH, e))
def build_extension(self, ext, copy=True):
    """Build one make-based extension.

    ext[in]  a (directory, target) pair: a Makefile in ext[0] produces
             the file ext[1]
    copy[in] when True, copy the built file into the build/lib (or
             in-place) package directory
    """
    self.announce("building '%s' in %s"%(ext[1], ext[0]))
    if os.name == 'nt':
        # same as in config.py here: even on 'nt' we have to
        # use posix paths because we run in a cygwin shell at this point
        path = self.build_temp.replace('\\', '/') + '/' + ext[0]
        temp_target = self.build_temp.replace('\\', '/') + '/' + ext[0]
    else:
        path = os.path.join(self.build_temp, ext[0])
        temp_target = os.path.join(self.build_temp, ext[0])
    # honour a MAKE override from the environment
    make = os.environ.get('MAKE', 'make')
    command = '%s -C "%s" %s'%(make, path, ext[1])
    spawn(['sh', '-c', command], self.verbose, self.dry_run)
    #The extension may not be compiled. For now just skip it.
    if copy and os.path.isfile(os.path.join(temp_target, ext[1])):
        if self.inplace:
            build_path = ext[0]
        else:
            build_path = os.path.join(self.build_lib, ext[0])
        mkpath (build_path, 0777, self.verbose, self.dry_run)
        copy_file(os.path.join(path, ext[1]),
                  os.path.join(build_path, ext[1]),
                  1, 1, 0, None, self.verbose, self.dry_run)
def _populate_debian(self):
    """Copy and make files ready in the debian/ folder
    """
    for afile in self.debian_files:
        copy_file(os.path.join(self.debian_support_dir, afile),
                  self.debian_base)
    copy_file(os.path.join(self.debian_support_dir, 'source', 'format'),
              os.path.join(self.debian_base, 'source'))

    # This code is for now disabled. It allows to make distribution
    # specific Debian packages, but we seem not to need this for now.
    if False:
        changelog_file = os.path.join(self.debian_base, 'changelog')
        changelog = open(changelog_file, 'r').readlines()
        log.info(
            "changing changelog '%s' to include platform" % changelog_file)
        newchangelog = []
        # match version strings such as (1.2.3) in the changelog header
        regex = re.compile(r'.*\((\d+\.\d+.\d+)\).*')
        for line in changelog:
            line = line.rstrip()
            match = regex.match(line)
            if match:
                version = match.groups()[0]
                line = line.replace(
                    version,
                    '%s-%s%s' % (version, self.platform,
                                 self.platform_version)
                )
            newchangelog.append(line)
            newchangelog.append('\n')
        changelog = open(changelog_file, 'w')
        changelog.write('\n'.join(newchangelog))
def postprocess(self):
    '''
    Update the metadata after processing is performed:
    append the GIPP filename entry to the auxiliary data and publish the
    processing log as a report under QI_DATA.
    '''
    xp = L3_XmlParser(self, 'UP2A')
    auxdata = xp.getTree('L2A_Auxiliary_Data_Info', 'Aux_Data')
    gipp = auxdata.L2A_GIPP_List
    dirname, basename = os.path.split(self.product.L3_TILE_MTD_XML)
    # derive the GIPP filename from the tile metadata filename
    fn1r = basename.replace('_MTD_', '_GIP_')
    fn2r = fn1r.replace('.xml', '')
    gippFn = etree.Element('GIPP_FILENAME', type='GIP_Level-2Ap',
                           version=self._processorVersion)
    gippFn.text = fn2r
    gipp.append(gippFn)
    xp.export()

    # copy log to QI data as a report:
    report = basename.replace('.xml', '_Report.xml')
    report = dirname + '/QI_DATA/' + report

    if((os.path.isfile(self._fnLog)) == False):
        self.logger.fatal('Missing file: ' + self._fnLog)
        self.exitError()

    # close the report's XML root element before copying the log
    f = open(self._fnLog, 'a')
    f.write('</Sen2Cor_Level-2A_Report_File>')
    f.close()
    copy_file(self._fnLog, report)
    '''
    if os.path.exists(self._fnTrace):
        os.remove(self._fnTrace)
    if os.path.exists(self._fnLog):
        os.remove(self._fnLog)
    '''
    return
def handle(self, *args, **options):
    """Scaffold django-social-auth support into an existing app.

    args[0] is the importable module name of the target app.  Copies the
    account templates/controller and the social auth settings into the
    app and project, then wires up the project's urls.py/settings.py.
    """
    app_name = args[0]
    module = importlib.import_module(app_name)
    path = os.path.dirname(module.__file__) + os.sep
    project_path = os.path.dirname(os.path.normpath(
        os.sys.modules[settings.SETTINGS_MODULE].__file__))
    install_app('social_auth')
    # copy scaffold files shipped with djangobp into the app/project
    copy_tree(os.path.dirname(djangobp.__file__) + os.sep +
              'scaffold/socialauth/templates/accounts',
              path + '/templates/accounts', update=True)
    copy_file(os.path.dirname(djangobp.__file__) + os.sep +
              'scaffold/socialauth/controllers/accounts.py',
              path + '/controllers', update=True)
    copy_file(os.path.dirname(djangobp.__file__) + os.sep +
              'scaffold/scaffold/socialauthsettings.py',
              project_path, update=True)
    # register the social auth urls in the app's urls.py
    urls_edit = CodeEditor(path + 'urls.py')
    urls_edit.insert_line("url(r'', include('social_auth.urls')),",
                          'urlpatterns')
    urls_edit.commit()
    # pull the copied settings into the project settings module
    settings_edit = CodeEditor(project_path + os.sep + 'settings.py')
    settings_edit.append_line("from socialauthsettings import *")
    settings_edit.commit()
    # TODO copy controllers/accounts.py
    # TODO copy templates/accounts/login.html
    # TODO urls social auth
    # TODO django-social-auth settings
def _stage_files_recursive(src, dest, skip=None): """Hard link or copy all of the files in the path src into the path dest. Subdirectories are created as needed, and files in dest are overwritten.""" # Use hard links if they are supported on this system. if hasattr(os, "link"): link = "hard" elif hasattr(os, "symlink"): link = "sym" else: link = None for dirpath, dirnames, filenames in os.walk(src, followlinks=True): if not any(p.startswith(".") for p in dirpath.split(os.sep)): dest_dirpath = os.path.join( dest, dirpath.split(src, 1)[1].lstrip(os.sep) ) mkpath(dest_dirpath) for filename in filenames: if not filename.startswith("."): src_path = os.path.join(dirpath, filename) dest_path = os.path.join(dest_dirpath, filename) if not os.path.exists(dest_path): copy_file( os.path.join(dirpath, filename), os.path.join(dest_dirpath, filename), )
def reown_paxboms(base_path, user, group, TOOL=CHOWN_TOOL):
    """ Change ownership files in pax/boms within `base_path`

    Parameters
    ----------
    base_path : str
        path to tree somewhere containing bom / pax pairs. We will change
        ownership of files within the pax archive, and record this in the
        bom.
    user : str
        user to which to change ownership
    group : str
        group to which to change ownership
    TOOL : str, optional
        path to ``chown`` binary
    """
    for pxbom in find_paxboms(base_path):
        px, bm = [abspath(f) for f in pxbom]
        with InTemporaryDirectory() as tmpdir:
            # unpack the archive, chown its contents, then rebuild both
            # the pax archive and the matching bom in ./Contents
            arch_path = pjoin(tmpdir, 'archive')
            os.mkdir(arch_path)
            unpax(px, arch_path)
            spawn([TOOL, '-R', '%s:%s' % (user, group), arch_path])
            os.mkdir('Contents')
            pax(arch_path, tmpdir)
            mkbom(arch_path, tmpdir)
            # copy_file returns (dest, copied); assert the originals were
            # actually replaced before the temp dir is cleaned up
            rs1 = copy_file(pjoin('Contents', 'Archive.bom'), bm)
            rs2 = copy_file(pjoin('Contents', 'Archive.pax.gz'), px)
            assert rs1 == (bm, True)
            assert rs2 == (px, True)
def _libs_with_msvc_and_fortran(self, fcompiler, c_libraries,
                                c_library_dirs):
    """Adapt library lists so MSVC can link against g77-compiled libs.

    Mutates c_libraries and c_library_dirs in place: copies found
    ``lib<name>.a`` archives into build_temp as ``<name>.lib`` so the
    MSVC linker can find them, and appends the Fortran compiler's
    library dirs and libraries.  No-op when fcompiler is None.
    """
    if fcompiler is None:
        return

    for libname in c_libraries:
        if libname.startswith('msvc'):
            continue
        # first look for an existing MSVC-style <name>.lib
        fileexists = False
        for libdir in c_library_dirs or []:
            libfile = os.path.join(libdir, '%s.lib' % (libname))
            if os.path.isfile(libfile):
                fileexists = True
                break
        if fileexists:
            continue

        # make g77-compiled static libs available to MSVC
        fileexists = False
        for libdir in c_library_dirs:
            libfile = os.path.join(libdir, 'lib%s.a' % (libname))
            if os.path.isfile(libfile):
                # copy libname.a file to name.lib so that MSVC linker
                # can find it
                libfile2 = os.path.join(self.build_temp, libname + '.lib')
                copy_file(libfile, libfile2)
                if self.build_temp not in c_library_dirs:
                    c_library_dirs.append(self.build_temp)
                fileexists = True
                break
        if fileexists:
            continue
        log.warn('could not find library %r in directories %s'
                 % (libname, c_library_dirs))

    # Always use system linker when using MSVC compiler.
    f_lib_dirs = []
    for dir in fcompiler.library_dirs:
        # correct path when compiling in Cygwin but with normal Win
        # Python
        if dir.startswith('/usr/lib'):
            try:
                dir = subprocess.check_output(['cygpath', '-w', dir])
            except (OSError, subprocess.CalledProcessError):
                pass
            else:
                dir = filepath_from_subprocess_output(dir)
        f_lib_dirs.append(dir)
    c_library_dirs.extend(f_lib_dirs)

    # make g77-compiled static libs available to MSVC
    for lib in fcompiler.libraries:
        if not lib.startswith('msvc'):
            c_libraries.append(lib)
            p = combine_paths(f_lib_dirs, 'lib' + lib + '.a')
            if p:
                dst_name = os.path.join(self.build_temp, lib + '.lib')
                if not os.path.isfile(dst_name):
                    copy_file(p[0], dst_name)
                if self.build_temp not in c_library_dirs:
                    c_library_dirs.append(self.build_temp)
def generate_deb(packagename):
    """Assemble the UpStage file tree and build a Debian package.

    packagename[in] name of the package root dir; when empty it is read
                    from the debian control file.

    NOTE(review): shells out via os.system and removes any .git
    directories from the working tree as a side effect.
    """
    if(not len(packagename)>0):
        packagename = parse_control_file()
    print packagename
    # strip any .git directories before packaging
    os.system('rm -rf `find . -type d -name .git`')
    rootpath = appRootDir+'/'+packagename;
    os.makedirs(rootpath) #create root direction
    #copy the directory tree to the root of the destination
    dir_util.copy_tree(appServerDir+'/html',
                       rootpath+baseDir+'upstage/DEFAULT/html/')
    dir_util.copy_tree(appServerDir+'/config',
                       rootpath+baseDir+'upstage/DEFAULT/config/')
    dir_util.copy_tree(appServerDir+'/upstage',
                       rootpath+serverDir+'upstage/upstage/')
    dir_util.copy_tree(appRootDir+'/DEBIAN', rootpath+'/DEBIAN')
    for file in server_files:
        if(file == 'upstage-admin.conf'):
            # the admin config lives under /etc/.../upstage
            if(not os.path.exists(rootpath+config_path+'upstage/')):
                print 'Creating: '+rootpath+config_path+'upstage/'
                os.makedirs(rootpath+config_path)
                os.makedirs(rootpath+config_path+'upstage/')
            file_util.copy_file(appServerDir+'/'+file,
                                rootpath+config_path+'upstage/'+file)
            print 'Copied: '+appServerDir+'/'+file+ ' -to- '+ rootpath+config_path+'upstage/'+file
        if(file == 'upstage-backup'):
            # the backup script is installed as a weekly cron job
            os.makedirs(rootpath+'/etc')
            os.makedirs(rootpath+'/etc/cron.weekly')
            file_util.copy_file(appServerDir+'/'+file,
                                rootpath+backup_location+file)
            print 'copied: '+ appServerDir+'/'+file+' -to- '+ rootpath+backup_location+file
        # every server file is also copied into the server tree
        shutil.copyfile(appServerDir+'/'+file,
                        rootpath+serverDir+'upstage/'+file)
        print 'copied: '+ appServerDir+'/'+file+' -to- '+ rootpath+serverDir+'upstage/'
    os.system('dpkg -b '+packagename) #create the deb package
    os.system('rm -r '+packagename) #cleanup
def handle(self, *args, **options):
    """Scaffold a new djangobp app.

    args[0] is the importable module name of the app.  Copies the
    starter controllers/templates/static/urls into the app, rewrites the
    copied urls.py for this app's name, then registers the app in the
    project's urls.py and settings.py.
    """
    app_name = args[0]
    module = importlib.import_module(app_name)
    path = os.path.dirname(module.__file__) + os.sep
    # copy scaffold files shipped with djangobp into the app
    copy_tree(os.path.dirname(djangobp.__file__) + os.sep +
              'scaffold/app/controllers', path + 'controllers',
              update=True)
    copy_tree(os.path.dirname(djangobp.__file__) + os.sep +
              'scaffold/app/templates', path + '/templates', update=True)
    copy_tree(os.path.dirname(djangobp.__file__) + os.sep +
              'scaffold/app/static', path + '/static', update=True)
    copy_file(os.path.dirname(djangobp.__file__) + os.sep +
              'scaffold/app/urls.py', path, update=True)
    # rewrite the copied urls.py so routes point at this app
    urls_edit = CodeEditor(path + 'urls.py')
    urls_edit.replace_all('app', app_name)
    urls_edit.commit()
    project_path = os.path.dirname(os.path.normpath(
        os.sys.modules[settings.SETTINGS_MODULE].__file__))
    # include the app's urls from the project urls.py
    main_urls_edit = CodeEditor(project_path + os.sep + 'urls.py')
    main_urls_edit.insert_line(" (r'', include('%s'))," %
                               (app_name + '.urls'), after='urlpatterns')
    main_urls_edit.commit()
    # add the app to INSTALLED_APPS
    settings_edit = CodeEditor(project_path + os.sep + 'settings.py')
    settings_edit.insert_line(" '%s'," % app_name, 'INSTALLED_APPS')
    settings_edit.commit()
    # TODO urls.py edit: urlpatterns += (controller_method_resource_pattern, route(controller))
    # TODO settings.py edit: app
def copy_extensions_to_source(self):
    """This function is only-called when doing inplace build. It is
    customized to ensure the extensions compiled using distutils
    are copied back to the source tree instead of the
    :func:`skbuild.constants.CMAKE_INSTALL_DIR()`.
    """
    build_py = self.get_finalized_command('build_py')
    for ext in self.extensions:
        fullname = self.get_ext_fullname(ext.name)
        filename = self.get_ext_filename(fullname)
        modpath = fullname.split('.')
        package = '.'.join(modpath[:-1])
        package_dir = build_py.get_package_dir(package)
        # skbuild: strip install dir for inplace build
        package_dir = package_dir[len(CMAKE_INSTALL_DIR()) + 1:]
        dest_filename = os.path.join(package_dir,
                                     os.path.basename(filename))
        src_filename = os.path.join(self.build_lib, filename)

        # Always copy, even if source is older than destination, to ensure
        # that the right extensions for the current Python/platform are
        # used.
        copy_file(
            src_filename, dest_filename, verbose=self.verbose,
            dry_run=self.dry_run
        )
        if ext._needs_stub:
            # write the .py stub loader next to the copied extension
            self.write_stub(package_dir or os.curdir, ext, True)
def build_extension(self, ext):
    """
    Compile manually the py_mini_racer extension, bypass setuptools.

    Builds V8 first if needed; when V8_PATH points at a prebuilt
    library it is installed directly instead of compiling.  Any build
    failure is re-raised with a hint to use the wheel instead.
    """
    try:
        if not is_v8_built():
            self.run_command('build_v8')

        self.debug = True

        if V8_PATH:
            # a prebuilt V8 library was provided; just install it
            dest_filename = join(self.build_lib, "py_mini_racer")
            copy_file(V8_PATH, dest_filename, verbose=self.verbose,
                      dry_run=self.dry_run)
        else:
            build_ext.build_extension(self, ext)
    except Exception as e:
        traceback.print_exc()

        # Alter message
        err_msg = """py_mini_racer failed to build, ensure you have an up-to-date pip (>= 8.1) to use the wheel instead
To update pip: 'pip install -U pip'
See also: https://github.com/sqreen/PyMiniRacer#binary-builds-availability

Original error: %s"""

        raise Exception(err_msg % repr(e))
def write_projects_for_main_categories(self):
    """Fetch closed projects for the 100 largest job categories and
    append them page by page to spring_2012.csv, keeping a per-page
    snapshot copy (spring_2012_<page>.csv) as a crash safety net."""
    url = "Job/getCategoryJobList.json"
    resp = self.auth.send_request(url)
    jobs = self.parser.parseMainCategories(resp)
    # keep the 100 categories with the most projects
    jobs.sort(key=lambda x: x.projects_count, reverse=True)
    top_jobs = jobs[:100]
    #self.foa.writeJobsToFile(top_jobs, file_name="jobs_TOP100_in_main_categories.json")
    # print "finished getting top jobs..."
    top_job_names = [job.name for job in top_jobs]
    # print top_job_names
    # url-escape the names so they can be used as a csv query parameter
    escaped_top_job_names = [urllib2.quote(job_name) for job_name in top_job_names]
    result = ",".join(escaped_top_job_names)
    #print "Joined job names" + result + "\n\n\n\n\n"
    # NOTE(review): all_projects_in_season is never written to — confirm
    # whether it is dead code.
    all_projects_in_season = {}
    # page numbers i + base cover 0 .. 2000199 without overlap
    for base in range(0,2000000,200):
        for i in range(200):
            url = "Project/searchProjects.json?searchjobtypecsv="+result+"&status=Closed&count=200&page=%d"%(i + base)
            resp = self.auth.send_request(url)
            #print "\n"
            projects = self.parser.parseProjects(resp)
            print "\tpage %d of %d"%(i + base, 2000000)
            self.foa.appendProjectsToCSVFile(projects, file_name="spring_2012.csv")
            # snapshot the growing csv after every page
            dest = "spring_2012_%d.csv"%(i + base)
            print "COPYING FILE spring_2012.csv to "+dest
            copy_file("spring_2012.csv", dest)
def problem_part(part_name, metadata_sorter):
    """Render and copy one part of a problem into its output directory.

    part_name[in]       attribute/subdirectory name of the part
    metadata_sorter[in] NOTE(review): currently unused in this function
                        — confirm it is still needed.

    Inline (.inline.html/.inline.md) parts are skipped.  Parts without
    an external url are wrapped in the file.html template; all parts are
    copied into the part directory and their metadata collected on
    prob.parts[part_name].
    """
    part_metadata = []
    part_dir = prob.prob_dir + "/" + part_name + "/"
    makedirs_exist_ok(part_dir)
    # NOTE(review): raw_htmls is never populated or read below.
    raw_htmls = []
    for part in getattr(prob, part_name):
        fp = path.join(prob.prob_meta['prob_base'], part_name)
        if (part.endswith('.inline.html')) or (part.endswith('.inline.md')):
            continue
        (content, metadata, url) = get_content_and_metadata(part, fp)
        logger.debug( (part, metadata, content[0:5]))
        if not url:
            # no external url: render the content through the template
            name = path.basename(part)
            filename = name + ".html"
            res = apply_template("file.html", problemContent=content,
                                 name=name, part=part_name,
                                 rel_path="{0}/{1}".format(part_name, name),
                                 prob=prob, **prob.prob_meta)
            write(res, part_name + "/" + filename)
            file_util.copy_file(part, path.join(part_dir, name))
        else:
            # external url: copy the file as-is under the url's basename
            filename = path.basename(url)
            name = path.basename(filename)
            file_util.copy_file(part, path.join(part_dir, name))
        part_metadata.append({"name": name, "filename": filename,
                              "meta": metadata})
    prob.parts[part_name] = part_metadata
def run(self):
    """Run py2exe, then stage locale files and a trimmed GTK runtime
    (PyGTK all-in-one layout) into the dist directory.  Windows only."""
    # TODO: make sure we have everything installed that we need to
    # bundle e.g. sqlite, psycopg2, others...
    _py2exe_cmd.run(self)
    # install locale files
    # NOTE(review): locale_path is a module-level name — assumed to point
    # at the built locale directory.
    locales = os.path.dirname(locale_path)
    build_base = self.get_finalized_command("build").build_base
    # print build_base
    src = os.path.join(build_base, locales)
    dir_util.copy_tree(src, os.path.join(self.dist_dir, locales))

    # copy GTK to the dist directory, assuming PyGTK
    # all-in-one installer
    gtk_root = "c:\\python27\\lib\\site-packages\\gtk-2.0\\runtime"
    dist_gtk = os.path.join(self.dist_dir, "gtk")
    import shutil
    if not os.path.exists(dist_gtk):
        # leave out pieces of the runtime that the app does not need
        ignore = shutil.ignore_patterns("src", "gtk-doc", "icons", "man",
                                        "demo", "aclocal", "doc",
                                        "include")
        shutil.copytree(gtk_root, dist_gtk, ignore=ignore)

    # register the pixbuf loaders
    exe = "%s\\bin\\gdk-pixbuf-query-loaders.exe" % dist_gtk
    dest = "%s\\etc\\gtk-2.0\\gdk-pixbuf.loaders" % dist_gtk
    cmd = 'call "%s" > "%s"' % (exe, dest)
    print cmd
    os.system(cmd)

    # copy the MS-Windows gtkrc to make it the default theme
    rc = "%s\\share\\themes\\MS-Windows\\gtk-2.0\\gtkrc" % dist_gtk
    dest = "%s\\etc\\gtk-2.0" % dist_gtk
    file_util.copy_file(rc, dest)
def _create_rpm(self, rpm_name, spec):
    """Build a binary RPM from `spec` with rpmbuild and copy the
    resulting .rpm files into self.dist_dir.

    rpm_name[in] directory name passed as the bdist_dir rpm macro
    spec[in]     path to the RPM spec file
    """
    log.info("creating RPM using rpmbuild")
    macro_bdist_dir = "bdist_dir " + os.path.join(rpm_name, '')
    cmd = ['rpmbuild',
           '-bb',
           '--define', macro_bdist_dir,
           '--define', "_topdir " + os.path.abspath(self.rpm_base),
           '--define', "version " + VERSION_TEXT_SHORT,
           spec
           ]
    if not self.verbose:
        cmd.append('--quiet')
    # optional build variants controlled by command options
    if self.edition:
        cmd.extend(['--define', "edition " + self.edition])
    if self.with_mysql_capi:
        cmd.extend(['--define', "mysql_capi " + self.with_mysql_capi])
    self.spawn(cmd)
    # collect every .rpm rpmbuild produced (arch subdirs included)
    rpms = os.path.join(self.rpm_base, 'RPMS')
    for base, dirs, files in os.walk(rpms):
        for filename in files:
            if filename.endswith('.rpm'):
                filepath = os.path.join(base, filename)
                copy_file(filepath, self.dist_dir)
def copy_doc(path, name, pkgdir, exts=TEXT_EXTS, language=None, dry_run=0,
             copy_tree=copy_tree, copy_file=copy_file, mkpath=mkpath):
    """Install a documentation file (or in-memory text) into a bundle.

    path[in]     source file path, a file-like object exposing
                 .getvalue() (e.g. StringIO), or None (no-op)
    name[in]     base name for the destination file; the extension is
                 derived from the source (default '.txt')
    pkgdir[in]   bundle directory; files go under Contents/Resources
    exts[in]     allowed extensions; anything else raises ValueError
    language[in] optional language code; installs into <language>.lproj
    dry_run[in]  when true, in-memory content is not written to disk

    Raises ValueError when the derived extension is not in exts.
    """
    if path is None:
        return
    # StringIO-like sources carry their content via getvalue()
    is_string = hasattr(path, 'getvalue')
    if is_string:
        ext = '.txt'
    else:
        ext = os.path.splitext(path)[1].lower()
        if ext == '':
            ext = '.txt'
    if ext not in exts:
        raise ValueError('Invalid extension for %s' % (path,))
    destdir = os.path.join(pkgdir, 'Contents', 'Resources')
    if language is not None:
        destdir = os.path.join(destdir, language + '.lproj')
    mkpath(destdir)
    dest = os.path.join(destdir, name + ext)
    if is_string:
        if not dry_run:
            # BUGFIX: use a context manager so the file handle is closed
            # even if the write raises (was open/write/close)
            with open(dest, 'w') as f:
                f.write(path.getvalue())
    elif ext == '.rtfd':
        # .rtfd documents are bundles (directories)
        copy_tree(path, dest)
    else:
        copy_file(path, dest)
def test_copy_file(self):
    """copy_file into a directory creates a same-named file there."""
    source_dir = self.mkdtemp()
    src = os.path.join(source_dir, 'foo')
    write_file(src, 'content')
    target_dir = self.mkdtemp()
    copy_file(src, target_dir)
    self.assertTrue(os.path.exists(os.path.join(target_dir, 'foo')))
def get_updated_configure_params(self):
    """Interactively stage the MARCIE installer into the payload dir.

    Prompts for the installer file placed under ./package, validates it
    (exists, non-empty, is a tar archive), copies it into payload_marcie
    and records its name in package_metadata.txt.

    Returns a dict carrying the chiminey 'settings' to apply.
    """
    print "\n\nPlease download MARCIE installer from http://www-dssz.informatik.tu-cottbus.de/DSSZ/Software/Marcie#download, click at \"Linux (64bit)\" link"
    print "\nPlace the MARCIE installer file in ./package directory\n\n"
    MESSAGE = "Is the MARCIE installer file (already) available is ./package directory? [Yes/No]"
    confirm = raw_input(MESSAGE)
    if confirm != "Yes":
        print "action aborted by user"
        # NOTE(review): `raise sys.exit()` works only because sys.exit()
        # raises SystemExit before `raise` executes; plain sys.exit()
        # would be clearer.
        raise sys.exit()
    MESSAGE = "Please enter the MARCIE package name:"
    package_name = raw_input(MESSAGE)
    package_path = "/var/chiminey/package/" + package_name
    if not os.path.isfile(package_path):
        print "package does not exist : ./package/%s" %package_name
        print "exiting..."
        raise sys.exit()
    elif os.path.getsize(package_path) <= 0 or not tarfile.is_tarfile(package_path):
        print "invalid package : ./package/%s" %package_name
        print "exiting..."
        raise sys.exit()
    destination = os.path.join(django_settings.LOCAL_FILESYS_ROOT_PATH,
                               django_settings.PAYLOAD_DESTINATION,
                               'payload_marcie')
    print destination
    copy_file(package_path, destination)
    # record the staged package name for later deployment steps
    with open ( destination + "/" + "package_metadata.txt", "w") as fh:
        fh.write("MARCIE_PACKAGE_NAME=" + package_name)
    settings = \
        { u'http://rmit.edu.au/schemas/system':
            {
                u'random_numbers': 'file://127.0.0.1/randomnums.txt',
                #u'metadata_builder': 'chiminey.mytardis.metadata.MetadataBuilder',
            },
        }
    return { 'settings': settings}
def backup_database():
    """Copy the default database file into BACKUPDIR.

    Does nothing when the database file does not exist.  The backup
    directory is created on demand (tolerating a concurrent creation
    race) and the copy is skipped when the backup is already up to date.
    """
    db_path = DATABASES['default']['NAME']
    if not os.path.isfile(db_path):
        return
    # Create any intermediate directories that do not exist.
    # There is a race between os.path.exists and os.makedirs: if
    # makedirs fails with EEXIST the directory was created concurrently
    # and we can continue normally.  Refs #16082.
    if not os.path.exists(BACKUPDIR):
        try:
            os.makedirs(BACKUPDIR)
        except OSError as e:
            if e.errno != errno.EEXIST:
                raise
    if not os.path.isdir(BACKUPDIR):
        raise IOError("%s exists and is not a directory." % BACKUPDIR)
    backup_target = os.path.join(BACKUPDIR, os.path.basename(db_path))
    # update=1: only copy when the database is newer than the backup
    copy_file(src=db_path, dst=backup_target, update=1)
    return
def copyGraphspellCore(bJavaScript=False):
    "copy Graphspell package in Grammalecte package"
    print("> Copy Graphspell package in Grammalecte package")
    helpers.createCleanFolder("grammalecte/graphspell")
    dir_util.mkpath("grammalecte/graphspell/_dictionaries")
    # copy only the top-level files (not the dictionary subfolders)
    for sf in os.listdir("graphspell"):
        if not os.path.isdir("graphspell/" + sf):
            file_util.copy_file("graphspell/" + sf, "grammalecte/graphspell")
    if bJavaScript:
        helpers.createCleanFolder("grammalecte-js/graphspell")
        dir_util.mkpath("grammalecte-js/graphspell/_dictionaries")
        # template variables: file stem -> content of each js extension
        dVars = {}
        for sf in os.listdir("js_extension"):
            dVars[sf[:-3]] = open("js_extension/" + sf, "r", encoding="utf-8").read()
        for sf in os.listdir("graphspell-js"):
            if not os.path.isdir("graphspell-js/" + sf):
                file_util.copy_file("graphspell-js/" + sf, "grammalecte-js/graphspell")
                # NOTE(review): the file is copied and then overwritten by
                # the templated copy below — confirm the plain copy above
                # is still needed.
                helpers.copyAndFileTemplate("graphspell-js/" + sf, "grammalecte-js/graphspell/" + sf, dVars)
def run(self):
    """Build the extension, then copy the resulting shared library into
    ../Plugins with a platform-appropriate suffix (.dll on Windows, .so
    elsewhere)."""
    setuptools.command.build_ext.build_ext.run(self)
    # Assumes exactly one extension is configured on this distribution.
    full_name = self.get_ext_fullname(self.extensions[0].name)
    built_path = os.path.join(self.build_lib, self.get_ext_filename(full_name))
    build_py = self.get_finalized_command('build_py')
    suffix = '.dll' if os.name == 'nt' else '.so'
    target = os.path.join('../Plugins', full_name + suffix)
    copy_file(
        built_path,
        target,
        verbose=self.verbose,
        dry_run=self.dry_run
    )
def get_updated_configure_params(self):
    """Prompt the operator to stage the Spike binary package and return updated settings.

    Confirms the package is present under ./package, validates it (exists,
    non-empty, is a zip file), copies it into the Spike payload destination
    and records its name in package_metadata.txt.

    Returns:
        dict with a 'settings' key holding the schema settings.

    Raises:
        SystemExit: if the user aborts or the package is missing/invalid.
    """
    print("\nPlease place the Spike binary package file in ./package directory\n\n")
    MESSAGE = "Is Spike (already) available in ./package directory? [Yes/No]"
    confirm = raw_input(MESSAGE)
    if confirm != "Yes":
        print("action aborted by user")
        # sys.exit() itself raises SystemExit; the original 'raise sys.exit()'
        # never reached the raise.
        sys.exit()
    MESSAGE = "Please enter the SPIKE package name:"
    package_name = raw_input(MESSAGE)
    package_path = "/var/chiminey/package/" + package_name
    if not os.path.isfile(package_path):
        print("package does not exist : ./package/%s" % package_name)
        print("exiting...")
        sys.exit()
    elif os.path.getsize(package_path) <= 0 or not zipfile.is_zipfile(package_path):
        print("invalid package : ./package/%s" % package_name)
        print("exiting...")
        sys.exit()
    destination = os.path.join(django_settings.LOCAL_FILESYS_ROOT_PATH,
                               django_settings.PAYLOAD_DESTINATION,
                               'payload_spike')
    print(destination)
    copy_file(package_path, destination)
    with open(destination + "/" + "package_metadata.txt", "w") as fh:
        fh.write("SPIKE_PACKAGE_NAME=" + package_name)
    settings = {
        u'http://rmit.edu.au/schemas/system':
            {
                u'random_numbers': 'file://127.0.0.1/randomnums.txt',
            },
    }
    return {'settings': settings}
def setup_example():
    ''' Setup example '''
    # Refuse to clobber an existing example folder.
    if os.path.exists(EXAMPLE_TARGET):
        print("Error, `psiturk-example` directory already exists. Please \
remove it then re-run the command.")
        return
    print("Creating new folder `psiturk-example` in the current working \
directory")
    os.mkdir(EXAMPLE_TARGET)
    print("Copying", EXAMPLE_DIR, "to", EXAMPLE_TARGET)
    dir_util.copy_tree(EXAMPLE_DIR, EXAMPLE_TARGET)
    print("Creating default configuration file (config.txt)")
    file_util.copy_file(DEFAULT_CONFIG_FILE, CONFIG_TARGET)
    # Switch into the new example and activate the custom module.
    os.chdir(EXAMPLE_TARGET)
    os.rename('custom.py.txt', 'custom.py')
    if not os.path.exists(GLOBAL_CONFIG_FILE):
        print("The following config file does not exist:\n{}\
\nCreating default config file at that \
location.".format(GLOBAL_CONFIG_FILE))
        file_util.copy_file(DEFAULT_GLOBAL_CONFIG_FILE, GLOBAL_CONFIG_FILE)
def copy_file(self, infile, outfile, preserve_mode=1, preserve_times=1, link=None, level=1):
    """Copy a file respecting verbose, dry-run and force flags.  (The
    former two default to whatever is in the Distribution object, and
    the latter defaults to false for commands that don't define it.)"""
    # 'force' disables the update-only-if-newer optimisation; verbosity
    # kicks in once the command's verbose level reaches this call's level.
    update = not self.force
    verbose = self.verbose >= level
    return file_util.copy_file(infile, outfile, preserve_mode,
                               preserve_times, update, link,
                               verbose, self.dry_run)
def CopyCygwinObjects(platform):
    """Collect the Cygwin DLLs and terminfo data the toolchain needs into ./my-ide.

    platform[in]  falsy selects the 64-bit Cygwin install ("cygwin64\\bin"),
                  truthy the 32-bit one ("cygwin\\bin").

    Exits the process when no Cygwin installation is found on Path.
    """
    # distribute necessary Cygwin components
    # os.getenv('Path') can be None; default to '' so .split does not crash.
    winPathList = os.environ.get('Path', '').split(';')
    cygwinDll = (
        'cygwin1.dll',  # add the Cygwin libraries your toolchain needs;
        'cygz.dll'      # these two are always needed
        # 'cygiconv-2.dll',
        # 'cygintl-8.dll',
        # be careful with platform-dependent Cygwin library names
    )
    print("\nPreparing Cygwin distribution\n------------------------------------")
    # Find the Cygwin installation path in the PATH variable.
    marker = "cygwin" + "64" * (not platform) + "\\bin"
    cygPath = ""
    for _path in winPathList:
        if marker in _path:
            cygPath = _path
    if cygPath == "":
        print("\nERROR: Can't find current Cygwin installation in Path. Aborting")
        # Use sys.exit: the site-provided builtin exit() may be absent
        # in non-interactive runs.
        sys.exit(-1)
    if not os.path.isdir("my-ide"):
        os.mkdir("my-ide")
    os.chdir("my-ide")
    if not os.path.isdir("lib"):  # tolerate re-runs over an existing tree
        os.mkdir("lib")
    for dll in cygwinDll:
        copy_file(cygPath + "\\" + dll, "lib\\" + dll)  # copy the dll's
    # File needed for correct console input behaviour.
    terminfoEncoderPath = cygPath + "\\..\\usr\\share\\terminfo\\63\\cygwin"
    terminfoDstPath = os.path.dirname("usr\\share\\terminfo\\63\\cygwin")
    if not os.path.exists(terminfoDstPath):
        os.makedirs(terminfoDstPath)
    copy_file(terminfoEncoderPath, terminfoDstPath)
    os.chdir("../")
    print("Done.\n")
def test_copy_file_hard_link_failure(self):
    # If hard linking fails, copy_file() must fall back to a plain copy
    # (some special filesystems refuse hard links even on Unix, see
    # issue #8876).
    with open(self.source, 'w') as f:
        f.write('some content')
    before = os.stat(self.source)

    def _os_link(*args):
        raise OSError(0, "linking unsupported")

    # Temporarily replace os.link so the hard-link attempt always fails.
    real_link = os.link
    os.link = _os_link
    try:
        copy_file(self.source, self.target, link='hard')
    finally:
        os.link = real_link
    after_src = os.stat(self.source)
    after_dst = os.stat(self.target)
    # Source must be untouched and target must be a distinct file, not a link.
    self.assertTrue(os.path.samestat(before, after_src), (before, after_src))
    self.assertFalse(os.path.samestat(after_src, after_dst), (after_src, after_dst))
    for fn in (self.source, self.target):
        with open(fn, 'r') as f:
            self.assertEqual(f.read(), 'some content')
def copy_deps():
    """Stage third-party headers and libraries from the temporary download
    directory into the project's include/lib directories."""
    print("\nCopying dependencies...")
    # Copy GLFW files
    print("- Copying GLFW")
    glfw_root = DEPS_TEMP_DIR + "/glfw-3.3.bin.WIN64/glfw-3.3.bin.WIN64"
    copy_tree(glfw_root + "/include", DEPS_INCLUDE_DIR)
    copy_tree(glfw_root + "/lib-vc" + VS_DEFAULT_YEAR, DEPS_LIB_DIR)
    # Copy glad files
    print("- Copying glad")
    copy_tree(DEPS_TEMP_DIR + "/glad/include", DEPS_INCLUDE_DIR)
    # Copy glm files
    print("- Copying glm")
    copy_tree(DEPS_TEMP_DIR + "/glm-0.9.9.5/glm/glm", DEPS_INCLUDE_DIR + "/glm")
    # Copy stb image file
    print("- Copying stb image")
    copy_file(DEPS_TEMP_DIR + "/stb_image.h", DEPS_INCLUDE_DIR)
    print("Done")
def run(self):
    """Copy each prebuilt extension binary into its install location,
    rewriting the runtime library search path on POSIX platforms so the
    loader finds the bundled libs relative to the module."""
    for extension in self.extensions:
        if not isinstance(extension, PrebuiltExtension):
            raise DistutilsSetupError(
                f"copy_ext can accept PrebuiltExtension only, but got {extension.name}"
            )
        source = extension.sources[0]
        target = self.get_ext_fullpath(extension.name)
        os.makedirs(os.path.dirname(target), exist_ok=True)
        if sys.platform != "win32":
            # Relative path from the binary back to the installed package.
            rel = os.path.relpath(get_package_dir(PY_INSTALL_CFG),
                                  os.path.dirname(source))
            if sys.platform == "linux":
                rel = os.path.join("$ORIGIN", rel, WHEEL_LIBS_INSTALL_DIR)
            elif sys.platform == "darwin":
                rel = os.path.join("@loader_path", rel, WHEEL_LIBS_INSTALL_DIR)
            set_rpath(rel, source)
        copy_file(source, target, verbose=self.verbose, dry_run=self.dry_run)
def install_scons(self):
    """Install scons 2.2.0 from SourceForge if it is not already on PATH.

    Downloads and extracts the zip, runs its setup.py with --standard-lib,
    copies the scons.py launcher into the Python home and removes the
    extracted tree afterwards.
    """
    os.chdir(eiffel_loop_home_dir)
    if not environ.command_exists(['scons', '-v'], shell=True):
        scons_package = ZIP_SOFTWARE_PACKAGE(
            'http://freefr.dl.sourceforge.net/project/scons/scons/2.2.0/scons-2.2.0.zip'
        )
        scons_package.extract_all(package.download_dir)
        scons_name = path.basename(scons_package.url)
        # Name of the extracted directory, e.g. 'scons-2.2.0'.
        scons_dir = path.splitext(scons_name)[0]
        os.chdir(scons_dir)
        install_scons_cmd = [
            'python', 'setup.py', 'install', '--standard-lib'
        ]
        print(install_scons_cmd)
        if subprocess.call(install_scons_cmd) == 0:
            file_util.copy_file(
                path.join(python_home_dir, r'Scripts\scons.py'),
                python_home_dir)
            # Step back out before deleting the extracted tree. The
            # original called remove_tree(scons_name[0]), i.e. tried to
            # remove a single-character path while still inside the tree.
            os.chdir(eiffel_loop_home_dir)
            dir_util.remove_tree(scons_dir)
        else:
            print('ERROR: scons installation failed')
def _move_other(file_dir_name: str = "", download_path: str = ""):
    '''
    Move downloaded other file/folder to other directory specified below.
    '''
    # Nothing to do without both a name and a download location.
    if not file_dir_name.strip() or not download_path.strip():
        return
    other_dir = download_path + directory_names.other_directory()
    if not os.path.isdir(other_dir):
        os.mkdir(other_dir)
    src = os.path.join(download_path, file_dir_name)
    dst = os.path.join(other_dir, file_dir_name)
    if os.path.isdir(src):
        # Best effort: a failed copy/remove leaves the download in place.
        try:
            copy_tree(src=src, dst=dst)
            remove_tree(directory=src)
        except (PermissionError, distutils_errors.DistutilsFileError):
            return
    elif os.path.isfile(src):
        try:
            copy_file(src=src, dst=dst)
            os.remove(path=src)
        except (PermissionError, distutils_errors.DistutilsFileError):
            return
def copy_extensions_to_source(self):
    """
    Support inplace installation used in::

        setup.py develop

    or::

        pip install -e .
    """
    from distutils.file_util import copy_file
    from distutils.dir_util import copy_tree

    build_py = self.get_finalized_command('build_py')
    for ext in self.extensions:
        fullname = self.get_ext_fullname(ext.name)
        filename = self.get_ext_filename(fullname)
        package = '.'.join(fullname.split('.')[:-1])
        package_dir = build_py.get_package_dir(package)
        if isinstance(ext, CMakeExtension):
            # CMake installs a whole directory tree; force non-inplace
            # temporarily so get_ext_fullpath points at the build dir.
            saved_inplace, self.inplace = self.inplace, 0
            output_dir = os.path.abspath(
                os.path.dirname(self.get_ext_fullpath(ext.name)))
            self.inplace = saved_inplace
            copy_tree(output_dir, package_dir)
        else:
            # Always copy, even if the source is older than the
            # destination, so the extension matches this Python/platform.
            copy_file(os.path.join(self.build_lib, filename),
                      os.path.join(package_dir, os.path.basename(filename)),
                      verbose=self.verbose, dry_run=self.dry_run)
        if ext._needs_stub:
            self.write_stub(package_dir or os.curdir, ext, True)
def _copy_data_from_dot_folders_xdg_folders():
    """Migrate legacy dot-folder CONFIG, DATA and CACHE contents into the
    XDG config/data/cache directories."""
    # ---------------------- CONFIG
    print("Copying CONFIG...")
    for fname in ("prefs", "recent"):
        file_util.copy_file(_dot_dir + fname, get_config_dir() + fname, verbose=1)
    # --------------------- DATA
    print("Copying DATA...")
    dir_util.copy_tree(_dot_dir + appconsts.USER_PROFILES_DIR,
                       get_data_dir() + appconsts.USER_PROFILES_DIR, verbose=0)
    dir_util.copy_tree(_dot_dir + appconsts.RENDERED_CLIPS_DIR,
                       get_render_dir(), verbose=1)
    # --------------------- CACHE
    print("Copying CACHE...")
    for cache_dir in (appconsts.AUTOSAVE_DIR, appconsts.THUMBNAILS_DIR,
                      appconsts.GMIC_DIR, appconsts.MATCH_FRAME_DIR,
                      appconsts.AUDIO_LEVELS_DIR, appconsts.TRIM_VIEW_DIR,
                      appconsts.BATCH_DIR):
        dir_util.copy_tree(_dot_dir + cache_dir, get_cache_dir() + cache_dir, verbose=1)
    print("XDG Copy done.")
def testAddToAutostart(base_fixture):
    """add_to_autostart appends the entry exactly once and is idempotent."""
    expected = [
        "@lxpanel --profile LXDE-pi\n",
        "@pcmanfm --desktop --profile LXDE-pi\n",
        "@xscreensaver -no-splash\n",
    ]
    auto_update_file = base_fixture.testdata_path / "auto_updater" / "autostart.txt"
    temp_autostart_file = Path(tempfile.gettempdir()) / "tmp.txt"
    copy_file(str(auto_update_file), str(temp_autostart_file))

    setup_system.add_to_autostart("xscreensaver -no-splash", ["waqd"], temp_autostart_file)
    with open(temp_autostart_file) as ft:
        read = ft.readlines()
    assert read[0] == expected[0]
    assert read[1] == expected[1]
    assert read[2] == expected[2]

    # 2nd run - must not change anything
    setup_system.add_to_autostart("xscreensaver -no-splash", [], temp_autostart_file)
    with open(temp_autostart_file) as ft:
        read = ft.readlines()
    assert read[0] == expected[0]
    assert read[1] == expected[1]
    assert read[2] == expected[2]
    assert len(read) == 3
def transcribe(self, on_complete: Callable = None):
    """Run ESPnet decoding over the staged audio and store the results.

    Copies this transcription's data and audio into the model's
    espnet-asr1 tree, runs decode.sh twice (preparation stages 0-2, then
    decode stage 5), copies the single resulting data.json back as
    results.txt and converts it to text and Elan formats.

    Args:
        on_complete: optional zero-argument callback invoked at the end.

    Raises:
        AssertionError: if decoding produced other than exactly one
            result directory.
    """
    self.status = "transcribing"
    infer_path = self.model.path.joinpath('espnet-asr1', 'data', 'infer')
    exp_path = self.model.path.joinpath('espnet-asr1', 'exp')
    os.makedirs(f"{infer_path}", exist_ok=True)
    # Stage this transcription's data and the audio where decode.sh expects them.
    dir_util.copy_tree(f'{self.path}', f"{infer_path}")
    file_util.copy_file(f'{self.audio_file_path}', f"{self.model.path.joinpath('espnet-asr1', 'audio.wav')}")
    local_espnet_path = Path(self.model.path) / "espnet-asr1"
    # TODO This is now not a single point of control. Make this dir an attribute of the model.
    prepare_log_path = Path(self.model.path) / "prepare_transcribe_log.txt"
    transcribe_log_path = Path(self.model.path) / "transcribe_log.txt"
    from elpis.engines.common.objects.command import run
    # Stages 0-2 prepare features; stage 5 performs the actual decoding.
    run(f"cd {local_espnet_path}; ./decode.sh --nj 1 --stage 0 --stop_stage 2 --recog_set infer &> {prepare_log_path}")
    run(f"cd {local_espnet_path}; ./decode.sh --nj 1 --stage 5 --recog_set infer &> {transcribe_log_path}")
    result_paths = list(exp_path.glob("train_nodev*/decode_infer*"))
    assert len(result_paths) == 1, f"Incorrect number of result files ({len(result_paths)})"
    result_path = result_paths[0] / "data.json"
    # copy_file returns (dest_name, copied); keep only the destination path.
    self.result_path = file_util.copy_file(result_path, f'{self.path}/results.txt')[0]
    self.convert_to_text()
    self.convert_to_elan()
    self.status = "transcribed"
    if on_complete is not None:
        on_complete()
def _copy_hc_files(source_paths, dest_path): """Copies all .js files (excluding .src.js) from source_path into dest_path.""" if source_paths is None: logging.warn( "***** Option --highcharts not specified. Highcharts blocks will not work! ******" ) return hc_files = [] for source_path in source_paths: hc_files += set([ i for i in glob.glob(os.path.join(source_path, '*.js')) if not i.endswith('.src.js') ]) if len(hc_files) > 0: for f in hc_files: copy_file(f, dest_path) else: logging.error( " No *.js files (excluding *.src.js) found in highcharts-paths: {}" .format(source_paths)) sys.exit(1)
def copy_extensions_to_source(self):
    """Like the base class method, but copy libs into proper directory in develop."""
    build_py = self.get_finalized_command("build_py")
    for ext in self.extensions:
        fullname = self.get_ext_fullname(ext.name)
        filename = self.get_ext_filename(fullname)
        pkg = ".".join(fullname.split(".")[:-1])
        pkg_dir = build_py.get_package_dir(pkg)
        # Unlike the setuptools base method we keep the full relative
        # path (no os.path.basename), so nested libs land in their
        # proper subdirectories.
        dest_filename = os.path.join(pkg_dir, filename)
        src_filename = os.path.join(self.build_lib, filename)
        os.makedirs(os.path.dirname(dest_filename), exist_ok=True)
        copy_file(src_filename, dest_filename,
                  verbose=self.verbose, dry_run=self.dry_run)
        if ext._needs_stub:
            self.write_stub(pkg_dir or os.curdir, ext, True)
def get_updated_configure_params(self):
    """Prompt the operator to stage the LoLA binary package and return updated settings.

    Confirms the tar.gz with the lola binary is present under ./package,
    validates it (exists, non-empty, is a tar file), copies it into the
    LoLA payload destination and records its name in package_metadata.txt.

    Returns:
        dict with a 'settings' key holding the schema settings.

    Raises:
        SystemExit: if the user aborts or the package is missing/invalid.
    """
    print("\nPlease download LoLA from http://service-technology.org/files/lola/")
    print("\nFollow the build instructions and build binary file for Linux OS. Create a tar.gz file for the lola binary")
    print("\nPlace tar.gz file for the lola binary executable in ./package directory")
    MESSAGE = "Is tar.gz file for the lola binary executable (already) available in ./package directory? [Yes/No]"
    confirm = raw_input(MESSAGE)
    if confirm != "Yes":
        print("action aborted by user")
        # sys.exit() itself raises SystemExit; the original 'raise sys.exit()'
        # never reached the raise.
        sys.exit()
    MESSAGE = "Please enter the LoLA package name:"
    package_name = raw_input(MESSAGE)
    package_path = "/var/chiminey/package/" + package_name
    if not os.path.isfile(package_path):
        print("package does not exist : ./package/%s" % package_name)
        print("exiting...")
        sys.exit()
    elif os.path.getsize(package_path) <= 0 or not tarfile.is_tarfile(package_path):
        print("invalid package : ./package/%s" % package_name)
        print("exiting...")
        sys.exit()
    destination = os.path.join(django_settings.LOCAL_FILESYS_ROOT_PATH,
                               django_settings.PAYLOAD_DESTINATION,
                               'payload_lola')
    print(destination)
    copy_file(package_path, destination)
    with open(destination + "/" + "package_metadata.txt", "w") as fh:
        fh.write("LOLA_PACKAGE_NAME=" + package_name)
    settings = {
        u'http://rmit.edu.au/schemas/system':
            {
                u'random_numbers': 'file://127.0.0.1/randomnums.txt',
            },
    }
    return {'settings': settings}
def postProcessing(self):
    '''
    update the user product and product metadata, copy the logfile to QI data as a report,
    Check if at target product is present.
    '''
    self.updateProductMetadata()
    # Replace the auxiliary-data section of the UP03 metadata with a GIPP
    # filename derived from the tile metadata filename.
    xp = L3_XmlParser(self.config, 'UP03')
    auxdata = xp.getTree('L3_Auxiliary_Data_Info', 'Aux_Data')
    auxdata.clear()
    dirname, basename = os.path.split(self.config.L3_TILE_MTD_XML)
    fn1r = basename.replace('_MTD_', '_GIP_')  # metadata name -> GIPP name
    fn2r = fn1r.replace('.xml', '')            # GIPP entries carry no extension
    gippFn = etree.Element('GIPP_FILENAME', type='GIP_Level-3p', version=self.config.processorVersion)
    gippFn.text = fn2r
    gippList = objectify.Element('L3_GIPP_LIST')
    gippList.append(gippFn)
    auxdata.append(gippList)
    xp.export()
    # Derive the report filename and place it in the tile's QI_DATA folder.
    dirname, basename = os.path.split(self.config.L3_TILE_MTD_XML)
    report = basename.replace('.xml', '_Report.xml')
    report = os.path.join(dirname, 'QI_DATA', report)
    if ((os.path.isfile(self.config.fnLog)) == False):
        self.logger.fatal('Missing file: ' + self.config.fnLog)
        self.config.exitError()
    # Close the report's root element before copying the log out as the report.
    f = open(self.config.fnLog, 'a')
    f.write('</Sen2Three_Level-3_Report_File>')
    f.flush()
    f.close()
    copy_file(self.config.fnLog, report)
    return
def new():
    """Scaffold a new ofcourse site in the current working directory:
    static assets, Mako templates and starter yaml data files."""
    # \u2714 is a check mark
    # \u2717 is an x
    # TODO: include default README with instructions for starting your course
    click.echo(u'\u2714 Glorious README')
    source_dir = os.path.split(__file__)[0].replace('cli', '')
    cwd = os.getcwd()

    dir_util.copy_tree(source_dir + 'static',
                       os.path.join(cwd, 'static'), update=True)
    click.echo(u'\u2714 CSS/Javascript for browser art')

    dir_util.copy_tree(os.path.join(source_dir, '..', 'ofcourse-templates'),
                       os.path.join(cwd, 'templates'), update=True)
    click.echo(u'\u2714 Starter Mako templates for great good')

    yamls_dir = os.path.join(source_dir, 'yamls')
    people_dir = os.path.join(cwd, 'people', year(), season())
    if not os.path.isdir(people_dir):
        os.makedirs(people_dir)

    file_util.copy_file(os.path.join(yamls_dir, 'fake_student.yaml'),
                        people_dir, update=True)
    for fname in ('assignments.yaml', 'site.yaml', 'oer.yaml'):
        file_util.copy_file(os.path.join(yamls_dir, fname), cwd, update=True)
    file_util.copy_file(os.path.join(source_dir, ".travis.yml"), cwd, update=True)
    click.echo(u'\u2714 Starter yaml files for data driven education')
def copy_package_data(package, target_dir):
    """
    Copy any package data in a python package into the target_dir.

    This is a bit of a hack, it would be better to identify python eggs
    and copy those in whole.
    """
    # Anything NOT ending in a python source/bytecode/extension suffix
    # counts as package data.
    exts = [i[0] for i in imp.get_suffixes()]
    exts.append('.py')
    exts.append('.pyc')
    exts.append('.pyo')

    def datafilter(item):
        # True for data files (name does not end in a python suffix).
        for e in exts:
            if item.endswith(e):
                return False
        return True

    # Mirror the package's dotted path under target_dir.
    target_dir = os.path.join(target_dir, *(package.identifier.split('.')))
    for dname in package.packagepath:
        filenames = list(filter(datafilter, os_listdir(dname)))
        for fname in filenames:
            if fname in SCMDIRS:
                # Scrub revision manager junk
                continue
            if fname in ('__pycache__', ):
                # Ignore PEP 3147 bytecode cache
                continue
            pth = os.path.join(dname, fname)
            # Check if we have found a package, exclude those
            if is_python_package(pth):
                continue
            copydest = op.join(target_dir, fname)
            if op.isdir(pth):
                copy_tree(pth, copydest)
            else:
                copy_file(pth, copydest)
def run(self):
    """Build the add-on, bundle its required packages/modules into the
    build tree, strip __pycache__ directories and zip the result into the
    bdist dist directory."""
    for cmd_name in self.get_sub_commands():
        self.run_command(cmd_name)
    addon_name = self.distribution.get_name()
    dist_dir = Path(self.get_finalized_command('bdist').dist_dir)
    archive_name = '{}-v{}'.format(addon_name, self.distribution.get_version())
    if self.tag_plat:
        # Optionally tag the archive with the platform name.
        archive_name = '{}.{}'.format(archive_name, self.plat_name)
    addon_archive = dist_dir / archive_name
    build_lib = Path(self.get_finalized_command('build').build_lib)
    build_addon = build_lib / addon_name
    for name in self.addon_require:
        # spec2path presumably yields (path, is_package) — TODO confirm.
        p = spec2path(find_spec(name))
        if(p[1]):
            structure = name.split(".")
            if len(structure) > 1:
                # Need to create separate files for it
                print("Splitting the compound directory")
                # Dotted package: recreate each intermediate package dir
                # with an empty __init__.py, then copy into the leaf dir.
                destination = build_addon
                for pathsection in structure:
                    destination = destination / pathsection
                    mkpath(str(destination))
                    write_file(str(destination / "__init__.py"), "")
                copy_tree(str(p[0]), str(destination))
            else:
                copy_tree(str(p[0]), str(build_addon / name))
        else:
            # Single-module requirement: copy the file itself.
            copy_file(str(p[0]), str(build_addon))
    # Bytecode caches must not ship in the archive.
    for pycache in build_addon.glob('**/__pycache__'):
        remove_tree(str(pycache))
    print(addon_archive)
    print(build_lib)
    print(addon_name)
    self.make_archive(str(addon_archive), 'zip', str(build_lib), addon_name)
def build_extension(self, ext):
    """Configure and build the CMake extension, then copy the generated
    python artifacts (shared library tree plus tflogger.py) into the
    tflogger package directory."""
    extdir = os.path.abspath(
        os.path.join(os.path.dirname(self.get_ext_fullpath(ext.name)),
                     'tflogger'))
    cmake_args = [
        '-DCMAKE_LIBRARY_OUTPUT_DIRECTORY=' + extdir,
        '-DPython_ROOT_DIR=' + sys.exec_prefix,
        '-DINTERFACE_PYTHON=ON'
    ]
    cfg = 'Debug' if self.debug else 'Release'
    build_args = ['--config', cfg]
    if platform.system() == "Windows":
        # MSVC generators are multi-config: the output dir must be set
        # per configuration.
        cmake_args += [
            '-DCMAKE_LIBRARY_OUTPUT_DIRECTORY_{}={}'.format(
                cfg.upper(), extdir)
        ]
        if sys.maxsize > 2**32:
            # 64-bit interpreter -> 64-bit generator platform.
            cmake_args += ['-A', 'x64']
        build_args += ['--', '/m']
    else:
        cmake_args += ['-DCMAKE_BUILD_TYPE=' + cfg]
        build_args += ['--', '-j2']
    env = os.environ.copy()
    # Embed the distribution version into the compiled module.
    env['CXXFLAGS'] = '{} -DVERSION_INFO=\\"{}\\"'.format(
        env.get('CXXFLAGS', ''), self.distribution.get_version())
    if not os.path.exists(self.build_temp):
        os.makedirs(self.build_temp)
    subprocess.check_call(['cmake', ext.sourcedir] + cmake_args,
                          cwd=self.build_temp, env=env)
    subprocess.check_call(['cmake', '--build', '.'] + build_args,
                          cwd=self.build_temp)
    dir_util.copy_tree(os.path.join(self.build_temp, 'python', 'tflogger'),
                       extdir)
    file_util.copy_file(
        os.path.join(self.build_temp, 'python', 'tflogger.py'), extdir)
def copy_static_dir(src, dst, cb_checksum=None, cb_extensions=(None,), cb_ignores=[], _recurse=None):
    '''
    To copy a src directory to dst directory
    Files or folder starting with _ or . will not be copied over
    '''
    try:
        entries = os.listdir(src)
    except OSError as e:
        raise DistutilsFileError("error listing files in '%s': %s" % (src, e.strerror))
    mkpath(dst)
    copied = []
    # Base directory used to derive the relative name matched against
    # cb_ignores; nested calls receive the caller's src via _recurse.
    base_src = src if _recurse is None else _recurse
    for entry in entries:
        src_path = os.path.join(src, entry)
        # skip files and folders starting with . or _ or a symlink
        if entry.startswith(('.', '_')) or os.path.islink(src_path):
            continue
        rel_name = src_path.replace(base_src, "").lstrip("/")
        dst_path = os.path.join(dst, entry)
        # Apply cache busting to eligible files only.
        if (cb_checksum is not None
                and len(cb_checksum) > 0
                and os.path.isfile(src_path)
                and entry.endswith(cb_extensions)
                and rel_name not in cb_ignores):
            dst_path = insert_checksum_in_filepath(dst_path, cb_checksum)
        if os.path.isdir(src_path):
            copied.extend(copy_static_dir(src_path, dst_path, cb_checksum,
                                          cb_extensions, cb_ignores, src))
        else:
            copy_file(src_path, dst_path)
            copied.append(dst_path)
    return copied
def create_story_base(title, waypoints, masks, folder=""):
    """
    Creates a new minerva-story instance under subfolder named title.
    The subfolder will be created.

    Args:
        title: Story title, the subfolder will be named
        waypoints: List of waypoints with visData and Masks
        masks: List of masks with names and paths
        folder: Parent path to contain folders
    """
    out_dir = get_story_folders(title, folder, create=True)[0]
    export_dir = os.path.join(folder, title)
    data_dir = os.path.join(export_dir, "data")
    os.makedirs(data_dir, exist_ok=True)
    os.makedirs(out_dir, exist_ok=True)

    # Stage the story's index.html; failure is reported but not fatal.
    story_dir = get_story_dir()
    try:
        file_util.copy_file(os.path.join(story_dir, "index.html"), export_dir)
    except DistutilsFileError as e:
        print(f"Cannot copy index.html from {story_dir}")
        print(e)

    vis_path_dict = deduplicate_data(waypoints, data_dir)

    # Ensure one output directory per mask.
    for idx in range(len(masks)):
        os.makedirs(mask_path_from_index(masks, idx, out_dir), exist_ok=True)

    # Only csv infovis files are copied into the story data.
    for in_path, out_path in vis_path_dict.items():
        if pathlib.Path(in_path).suffix not in [".csv"]:
            print(f"Refusing to copy non-csv infovis: {in_path}")
            continue
        try:
            file_util.copy_file(in_path, out_path)
        except DistutilsFileError as e:
            print(f"Cannot copy {in_path}")
            print(e)
def backup_database(resetTimer=True):
    """Back up the sqlite database into BACKUPDIR and, when resetTimer is
    true, reschedule this function to run again in two minutes."""
    if os.path.isfile(DATABASEPATH):
        # Create intermediate directories; tolerate a concurrent creator
        # racing us between exists() and makedirs(). Refs #16082.
        if not os.path.exists(BACKUPDIR):
            try:
                os.makedirs(BACKUPDIR)
            except OSError as e:
                if e.errno != errno.EEXIST:
                    raise
        if not os.path.isdir(BACKUPDIR):
            raise IOError("%s exists and is not a directory." % BACKUPDIR)
        backup_target = os.path.join(BACKUPDIR, os.path.basename(DATABASEPATH))
        copy_file(src=DATABASEPATH, dst=backup_target, update=1)
    if resetTimer:
        # Run the backup function every 2 mins
        Timer(120, backup_database, ()).start()
def _copy_windows_dlls(self):
    """Place each extension's dependency DLLs next to the built module,
    skipping (with a debug log) any DLL not found in the library dirs."""
    for ext in self.extensions:
        for lib_name in ext.libraries:
            dll_filename = lib_name + ".dll"
            # Search the extension's library dirs for this DLL.
            dll_fullpath = None
            for lib_dir in ext.library_dirs:
                candidate = os.path.join(lib_dir, dll_filename)
                if os.path.exists(candidate):
                    dll_fullpath = candidate
                    break
            if dll_fullpath is None:
                log.debug("cannot find '{}' in: {}".format(
                    dll_filename, ", ".join(ext.library_dirs)))
                continue
            dest_dir = os.path.dirname(self.get_ext_fullpath(ext.name))
            mkpath(dest_dir, verbose=self.verbose, dry_run=self.dry_run)
            copy_file(
                dll_fullpath,
                os.path.join(dest_dir, dll_filename),
                verbose=self.verbose,
                dry_run=self.dry_run,
            )
def _copy_data_from_dot_folders_xdg_folders():
    """Migrate legacy dot-folder CONFIG and DATA into the XDG directories;
    cache data is intentionally not migrated."""
    # ---------------------- CONFIG
    print("Copying CONFIG...")
    for fname in ("prefs", "recent"):
        file_util.copy_file(_dot_dir + fname, get_config_dir() + fname, verbose=1)
    # --------------------- DATA
    print("Copying DATA...")
    dir_util.copy_tree(_dot_dir + appconsts.USER_PROFILES_DIR,
                       get_data_dir() + appconsts.USER_PROFILES_DIR, verbose=0)
    dir_util.copy_tree(_dot_dir + appconsts.RENDERED_CLIPS_DIR,
                       get_render_dir(), verbose=1)
    # --------------------- CACHE
    print("CACHE DATA WILL BE LOST...")
    print("XDG Copy done.")