def test_by_name(self):
    '''Tests attaching to a process by process name.'''
    self.build_and_create_debug_adaptor()
    orig_program = self.getBuildArtifact("a.out")
    # Since we are going to attach by process name, we need a unique
    # process name that has minimal chance to match a process that is
    # already running. To do this we use tempfile.mktemp() to give us a
    # full path to a location where we can copy our executable. We then
    # run this copy to ensure we don't get the error "more that one
    # process matches 'a.out'".
    # NOTE(review): tempfile.mktemp() is deprecated/insecure (TOCTOU);
    # acceptable for a test fixture but worth confirming.
    program = tempfile.mktemp()
    shutil.copyfile(orig_program, program)
    # Keep the executable bit so the copy can be launched.
    shutil.copymode(orig_program, program)

    def cleanup():
        # Remove the uniquely-named copy of the test binary.
        if os.path.exists(program):
            os.unlink(program)

    # Execute the cleanup function during test case tear down.
    self.addTearDownHook(cleanup)
    self.process = subprocess.Popen([program],
                                    stdin=subprocess.PIPE,
                                    stdout=subprocess.PIPE,
                                    stderr=subprocess.PIPE)
    # Wait for a bit to ensure the process is launched
    time.sleep(5)
    self.attach(program=program)
    self.set_and_hit_breakpoint(continueToExit=True)
def copy_template_file(src, dest, replace=None):
    """
    Copy a source file to a new destination file.

    To replace boilerplate strings in the source data, pass a dictionary
    to the ``replace`` argument where each key is the boilerplate string
    and the corresponding value is the string which should replace it.

    The destination inherits the source's permission bits, and is then
    made user-writable if possible.
    """
    replace = replace or {}
    # Read the data from the source file; a context manager guarantees the
    # handle is closed even if reading raises (the original leaked handles
    # on error).
    with open(src, 'r') as src_file:
        data = src_file.read()
    # Replace boilerplate strings.
    for old_val, new_val in replace.items():
        data = data.replace(old_val, new_val)
    # Write the data to the destination file.
    with open(dest, 'w') as dest_file:
        dest_file.write(data)
    # Copy permissions from source file.
    shutil.copymode(src, dest)
    # Make new file writable.
    if os.access(dest, os.W_OK):
        st = os.stat(dest)
        new_permissions = stat.S_IMODE(st.st_mode) | stat.S_IWUSR
        os.chmod(dest, new_permissions)
def WriteIndex(self):
    """Generate an index for libnss-cache from this map."""
    for index_name in self._indices:
        # index file write to tmp file first, magic string ".ix"
        tmp_index_filename = '%s.ix%s.tmp' % (self.GetCacheFilename(), index_name)
        self.log.debug('Writing index %s', tmp_index_filename)
        index = self._indices[index_name]
        # Pad every record to a fixed width so the index can be binary-searched.
        key_length = LongestLength(index.keys())
        pos_length = LongestLength(index.values())
        max_length = key_length + pos_length
        # Open for write/truncate
        index_file = open(tmp_index_filename, 'w')
        # setup permissions: mirror the compat file's mode and ownership.
        try:
            shutil.copymode(self.GetCompatFilename(), tmp_index_filename)
            stat_info = os.stat(self.GetCompatFilename())
            uid = stat_info.st_uid
            gid = stat_info.st_gid
            os.chown(tmp_index_filename, uid, gid)
        except OSError, e:  # Python 2 except syntax
            # If the compat file doesn't exist yet, fall back to 0644.
            if e.errno == errno.ENOENT:
                os.chmod(tmp_index_filename,
                         stat.S_IRUSR|stat.S_IWUSR|stat.S_IRGRP|stat.S_IROTH)
        # Records are NUL-delimited and NUL-padded to max_length.
        for key in sorted(index):
            pos = index[key]
            index_line = ('%s\0%s\0%s\n' %
                          (key, pos, '\0' * (max_length - len(key) - len(pos))))
            index_file.write(index_line)
        index_file.close()
def main():
    """Export all successfully-verified assets from the DB as a JSON array.

    Writes to TXTPATH + ".tmp" first and renames into place at the end, so
    readers never observe a half-written export.
    """
    DB = db.open(config)
    tmppath = TXTPATH+".tmp"
    outfile = open(tmppath, 'w')
    count = util.Counter()
    storage = util.Counter()

    def onStatusUpdate(asset, status, db_asset):
        # Only export assets the bithorde node reports as SUCCESS.
        if status.status == bithorde.message.SUCCESS:
            # Comma-separate every element after the first.
            if int(count):
                outfile.write(',\n')
            count.inc()
            storage.inc(status.size)
            json.dump(db_asset, outfile, cls=Encoder, indent=2)

    outfile.write('[')
    client = bithorde.BitHordeIteratorClient(list_db(DB), onStatusUpdate)
    bithorde.connectUNIX(UNIXSOCKET, client)
    # Blocks until the iterator client has visited every asset.
    bithorde.reactor.run()
    outfile.write(']')
    outfile.close()
    if os.path.exists(TXTPATH):
        # Preserve the permissions of any previous export.
        shutil.copymode(TXTPATH, tmppath)
    os.rename(tmppath, TXTPATH)
    print "Exported %d assets, with %.2fGB worth of data." % (count, storage.inGibi())
def compile_src(srcs, exe, for_evaluation, lang, assume=None):
    """Compile `srcs` into the executable `exe` for language `lang`.

    Relies on module-level helpers/globals: base_dir, call,
    get_compilation_command, detect_task_name, SOL_DIRNAME.
    """
    if lang != 'pas' or len(srcs) == 1:
        call(base_dir, get_compilation_command(
            lang, srcs, exe, for_evaluation=for_evaluation))
    # When using Pascal with graders, file naming conventions
    # require us to do a bit of trickery, i.e., performing the
    # compilation in a separate temporary directory
    else:
        tempdir = tempfile.mkdtemp()
        task_name = detect_task_name(base_dir)
        # The solution keeps its name; the grader must be named <task>.pas.
        new_srcs = [os.path.split(srcs[0])[1], '%s.pas' % (task_name)]
        # Grader executable name: basename of srcs[1] minus the ".pas" suffix.
        new_exe = os.path.split(srcs[1])[1][:-4]
        shutil.copyfile(os.path.join(base_dir, srcs[0]),
                        os.path.join(tempdir, new_srcs[0]))
        shutil.copyfile(os.path.join(base_dir, srcs[1]),
                        os.path.join(tempdir, new_srcs[1]))
        # Optional task library (<task>lib.pas) is copied along if present.
        lib_filename = '%slib.pas' % (task_name)
        if os.path.exists(os.path.join(SOL_DIRNAME, lib_filename)):
            shutil.copyfile(os.path.join(SOL_DIRNAME, lib_filename),
                            os.path.join(tempdir, lib_filename))
        call(tempdir, get_compilation_command(
            lang, new_srcs, new_exe, for_evaluation=for_evaluation))
        # Move the built binary back, keeping its executable bit.
        shutil.copyfile(os.path.join(tempdir, new_exe),
                        os.path.join(base_dir, exe))
        shutil.copymode(os.path.join(tempdir, new_exe),
                        os.path.join(base_dir, exe))
        shutil.rmtree(tempdir)
def copy_template(app_template, copy_to, app_name):
    """copies the specified template directory to the copy_to location"""
    app_name_spaces = " ".join(word.capitalize() for word in app_name.split("_"))
    app_name_camel = "".join(word.capitalize() for word in app_name.split("_"))
    # walks the template structure and copies it
    for directory, subdirs, files in os.walk(app_template):
        # Rewrite placeholder tokens in the directory path itself.
        relative_dir = directory[len(app_template)+1:].replace('app_name_camel', app_name_camel).replace('app_name',app_name)
        if not os.path.exists(os.path.join(copy_to, relative_dir)):
            os.mkdir(os.path.join(copy_to, relative_dir))
        for f in files:
            # Skip compiled Python and hidden files.
            if f.endswith('.pyc') or f.startswith("."):
                continue
            path_old = os.path.join(directory, f)
            path_new = os.path.join(copy_to, relative_dir,
                                    f.replace('app_name_camel', app_name_camel).replace('app_name', app_name))
            LOG.info("Writing %s" % path_new)
            fp_new = open(path_new, 'w')
            if path_old.endswith(".png"):
                # Binary assets are copied verbatim (Python 2 `file()` builtin).
                shutil.copyfileobj(file(path_old), fp_new)
            else:
                # Everything else is rendered as a template — presumably
                # mako's Template(filename=...).render; confirm the import.
                fp_new.write(
                    Template(filename=path_old).render(app_name=app_name,
                                                       app_name_camel=app_name_camel,
                                                       app_name_spaces=app_name_spaces)
                )
            fp_new.close()
            # Preserve the template file's permission bits.
            shutil.copymode(path_old, path_new)
def copy_file_permissions(src, dst):
    """Give ``dst`` the same permission bits as ``src``.

    :param str src: source file
    :param str dst: destination
    """
    # Delegates to the standard library: only the mode bits are copied,
    # not ownership or timestamps.
    shutil.copymode(src, dst)
def build(self, install_dirs, reconfigure):
    """Install this project's files into self.inst_dir.

    Copies each (src, dest) pair from the manifest's [install.files]
    section; if that section is absent, mirrors the whole source tree.
    """
    print("Installing %s -> %s" % (self.src_dir, self.inst_dir))
    parent = os.path.dirname(self.inst_dir)
    if not os.path.exists(parent):
        os.makedirs(parent)
    install_files = self.manifest.get_section_as_ordered_pairs(
        "install.files", self.ctx
    )
    if install_files:
        for src_name, dest_name in self.manifest.get_section_as_ordered_pairs(
            "install.files", self.ctx
        ):
            full_dest = os.path.join(self.inst_dir, dest_name)
            full_src = os.path.join(self.src_dir, src_name)
            dest_parent = os.path.dirname(full_dest)
            if not os.path.exists(dest_parent):
                os.makedirs(dest_parent)
            if os.path.isdir(full_src):
                # Directories are only copied if not already present.
                if not os.path.exists(full_dest):
                    shutil.copytree(full_src, full_dest)
            else:
                shutil.copyfile(full_src, full_dest)
                shutil.copymode(full_src, full_dest)
                # This is a bit gross, but the mac ninja.zip doesn't
                # give ninja execute permissions, so force them on
                # for things that look like they live in a bin dir
                if os.path.dirname(dest_name) == "bin":
                    st = os.lstat(full_dest)
                    os.chmod(full_dest, st.st_mode | stat.S_IXUSR)
    else:
        # No explicit file list: install the entire source tree.
        if not os.path.exists(self.inst_dir):
            shutil.copytree(self.src_dir, self.inst_dir)
def _set_mode(self, path, like):
    """Give `path` the same permission bits as `like`.

    In dry-run mode the change is logged but not applied.
    """
    _LOG.info("Set mode of %r to '%o'", path, get_file_mode(like))
    # Honour dry-run mode: report only, leave the filesystem untouched.
    if self.run_dry:
        return
    shutil.copymode(like, path)
def copytree(src, dest, symlink=None):
    """
    This is the same as shutil.copytree, but doesn't error out if the
    directories already exist.

    NOTE(review): destination paths are built as join(dest, root, ...), so
    the walked `root` (which starts with `src`) is nested *under* `dest`;
    with an absolute `src`, os.path.join discards the `dest` prefix —
    confirm callers always pass relative paths.
    """
    for root, dirs, files in os.walk(src, True):
        for d in dirs:
            # Hidden directories are skipped entirely.
            if d.startswith('.'):
                continue
            srcpath = os.path.join(root, d)
            destpath = os.path.join(dest, root, d)
            if symlink and os.path.islink(srcpath):
                # Recreate symlinks rather than following them.
                if os.path.exists(destpath):
                    os.remove(destpath)
                os.symlink(os.readlink(srcpath), destpath)
            elif not os.path.isdir(destpath):
                os.makedirs(destpath)
            # Best-effort permission/stat copy; never fatal. The original
            # used bare `except:` which also swallowed KeyboardInterrupt
            # and SystemExit — narrowed to OSError.
            try:
                shutil.copymode(srcpath, destpath)
            except OSError:
                pass
            try:
                shutil.copystat(srcpath, destpath)
            except OSError:
                pass
        for f in files:
            # Hidden files are skipped entirely.
            if f.startswith('.'):
                continue
            srcpath = os.path.join(root, f)
            destpath = os.path.join(dest, root, f)
            if symlink and os.path.islink(srcpath):
                if os.path.exists(destpath):
                    os.remove(destpath)
                os.symlink(os.readlink(srcpath), destpath)
            else:
                # copy2 preserves metadata for regular files.
                shutil.copy2(srcpath, destpath)
def build(self, parent, filename, args, shared_args, emcc_args, native_args, native_exec, lib_builder): self.parent = parent if lib_builder: native_args = native_args + lib_builder(self.name, native=True, env_init={"CC": self.cc, "CXX": self.cxx}) if not native_exec: compiler = self.cxx if filename.endswith("cpp") else self.cc process = Popen( [compiler, "-fno-math-errno", filename, "-o", filename + ".native"] + self.args + shared_args + native_args, stdout=PIPE, stderr=parent.stderr_redirect, ) output = process.communicate() if process.returncode is not 0: print >> sys.stderr, "Building native executable with command failed" print "Output: " + output[0] else: shutil.copyfile(native_exec, filename + ".native") shutil.copymode(native_exec, filename + ".native") final = os.path.dirname(filename) + os.path.sep + self.name + "_" + os.path.basename(filename) + ".native" shutil.move(filename + ".native", final) self.filename = final
def copyf(src, dst, root):
    """Copy `src` into the tree rooted at `root` at relative location `dst`.

    A `dst` ending in '/' keeps the source file name; otherwise `dst` is
    the full destination path (allowing a rename). Directories are copied
    recursively.
    """
    # Destinations are always interpreted relative to `root`.
    if dst.startswith("/"):
        dst = dst[1:]

    if os.path.isdir(src):
        # Whole-directory copy into the target tree.
        target = os.path.join(root, dst)
        logger.debug("Copytree %s -> %s" % (src, target))
        shutil.copytree(src, target)
        return

    if dst.endswith("/"):
        # Trailing '/': copy the file as-is into that directory.
        target_dir = os.path.join(root, dst)
        if not os.path.exists(target_dir):
            os.makedirs(target_dir)
        shutil.copy(src, target_dir)
    else:
        # Full rename: ensure the parent directory exists, then copy
        # contents and permission bits.
        target_dir = os.path.join(root, os.path.dirname(dst))
        if not os.path.exists(target_dir):
            os.makedirs(target_dir)
        full_dst = os.path.join(root, dst)
        shutil.copyfile(src, full_dst)
        shutil.copymode(src, full_dst)
def copyfile(self, src, dst):
    """Copy template `src` to `dst`, applying self.replacements.

    A trailing ".in" on `dst` is stripped. If `dst` already exists its
    inode is reused (truncate + rewrite) so its permissions survive.
    """
    if dst.endswith(".in"):
        dst = dst[:-3]
    # Fixed: mode 'rU' was removed in Python 3.11; plain 'r' already uses
    # universal newlines. Context manager closes the handle (the original
    # leaked it).
    with open(src, "r") as src_file:
        text = src_file.read()
    # perform replacements
    for var, string in self.replacements:
        text = text.replace(var, string)
    # If the file exists, keep the old file. This is a
    # hopefully temporary hack to get around distutils
    # stripping the permissions on the server skeletin files.
    # We reuse the original default files, which have the
    # right permissions.
    old = os.path.exists(dst)
    if old:
        f = open(dst, "r+")
        f.truncate(0)
    else:
        f = open(dst, "w")
    with f:
        f.write(text)
    if not old:
        shutil.copymode(src, dst)
        shutil.copystat(src, dst)
    else:
        # NOTE(review): copy2 copies *contents* as well as metadata, which
        # overwrites the replaced text just written — confirm this is the
        # intended behavior for pre-existing destinations.
        shutil.copy2(src, dst)
def process_config_file(self, f, install_dir, **kwargs):
    """Install configuration file `f`, merging it into any existing copy
    at the install location and backing up the old file first."""
    # The path where the weewx.conf configuration file will be installed
    install_path = os.path.join(install_dir, os.path.basename(f))
    new_config = merge_config_files(f, install_path, install_dir)
    # Get a temporary file:
    tmpfile = tempfile.NamedTemporaryFile("w", 1)
    # Write the new configuration file to it:
    new_config.write(tmpfile)
    # Save the old config file if it exists:
    if os.path.exists(install_path):
        backup_path = save_path(install_path)
        print "Saved old configuration file as %s" % backup_path
    # Now install the temporary file (holding the merged config data)
    # into the proper place:
    rv = install_data.copy_file(self, tmpfile.name, install_path, **kwargs)
    # Set the permission bits unless this is a dry run:
    if not self.dry_run:
        shutil.copymode(f, install_path)
    return rv
def set_awk_path():
    """Locate awk on PATH and rewrite the shebang of every scripts/*.awk
    to point at it, preserving each script's permission bits."""
    # find awk
    awk_path = ''
    for path in os.environ['PATH'].split(':'):
        if os.path.isfile(os.path.join(path,'awk')):
            awk_path = os.path.join(path,'awk')
            break
    if awk_path == '':
        print >> sys.stderr, 'Cannot find Awk'
        exit(1)
    else:
        # change all scripts
        for awkf in glob.glob('scripts/*.awk'):
            # Move the original aside, then write the patched copy in place.
            os.rename(awkf,awkf+'.tmp')
            newf = open(awkf, 'w')
            # New shebang pointing at the awk we found.
            print >> newf, '#!%s -f' % awk_path
            oldf = open(awkf+'.tmp')
            # Two readline() calls: the first (old shebang) is discarded.
            line = oldf.readline()
            line = oldf.readline()
            while line:
                print >> newf, line,
                line = oldf.readline()
            oldf.close()
            newf.close()
            # Restore the script's original permission bits.
            shutil.copymode(awkf+'.tmp', awkf)
            os.remove(awkf+'.tmp')
def copy_files(target_dir, source_dirs):
    '''Copy all the files from the source directory to the target directory'''
    # Template files that must never be installed verbatim.
    skipfiles = ['policy.cfg.template', 'policy_cli.cfg.template']
    for key in source_dirs.keys():
        sdir = source_dirs[key]
        for adir, dirs, files in os.walk(sdir):
            # NOTE(review): assumes source paths are referenced via a '..'
            # prefix which is swapped for the target root — confirm against
            # the callers' source_dirs values.
            tdir = adir.replace('..', target_dir)
            if not os.path.isdir(tdir):
                try:
                    os.makedirs(tdir)
                except Exception as err:
                    print "problem creating directory %s" % tdir
                    print err
                    sys.exit(-5)
            for afile in files:
                if afile in skipfiles:
                    continue
                sfile = os.path.abspath(os.path.join(adir, afile))
                tfile = os.path.abspath(os.path.join(tdir, afile))
                try:
                    # Copy contents and permission bits.
                    shutil.copyfile(sfile, tfile)
                    shutil.copymode(sfile, tfile)
                except Exception as err:
                    print "problem copying file %s to %s" % (sfile, tfile)
                    print err
                    sys.exit(-10)
def autoconf(self, args=[], inplace=False):
    """Run this package's configure script, patching known-bad bits first.

    Returns a FileHandle for the build directory. Note: the mutable
    default `args=[]` is never mutated here, so it is harmless.
    """
    for dirname, filename in util.walk_files(self._path):
        path = os.path.join(dirname, filename)
        if filename == 'config.sub':
            # Replace config.sub files by an up-to-date copy. The copy
            # provided by the tarball rarely supports CloudABI.
            shutil.copy(
                os.path.join(
                    config.DIR_RESOURCES, 'config.sub'), path)
        elif filename == 'configure':
            # Patch up configure scripts to remove constructs that are known
            # to fail, for example due to functions being missing.
            with open(path, 'r') as fin:
                with open(path + '.new', 'w') as fout:
                    for l in fin.readlines():
                        # Bad C99 features test.
                        if l.startswith('#define showlist(...)'):
                            l = '#define showlist(...) fputs (stderr, #__VA_ARGS__)\n'
                        elif l.startswith('#define report(test,...)'):
                            l = '#define report(...) fprintf (stderr, __VA_ARGS__)\n'
                        fout.write(l)
            # Keep the script's executable bit on the patched copy before
            # atomically swapping it in.
            shutil.copymode(path, path + '.new')
            os.rename(path + '.new', path)
    # Run the configure script in a separate directory.
    builddir = self._path if inplace else self._builder.get_new_directory()
    self._builder.autoconf(
        builddir, os.path.join(self._path, 'configure'), args)
    return FileHandle(self._builder, builddir)
def copy_template(template_name, copy_to, tag_library_name):
    """copies the specified template directory to the copy_to location

    File names containing 'sample' are renamed after `tag_library_name`,
    a trailing '.tmpl' suffix is stripped, and permission bits are
    preserved where the filesystem allows.
    """
    import django_extensions
    import shutil
    template_dir = os.path.join(django_extensions.__path__[0], 'conf', template_name)
    # walks the template structure and copies it
    for d, subdirs, files in os.walk(template_dir):
        relative_dir = d[len(template_dir) + 1:]
        if relative_dir and not os.path.exists(os.path.join(copy_to, relative_dir)):
            os.mkdir(os.path.join(copy_to, relative_dir))
        # Fixed: prune hidden directories via slice assignment. The original
        # `del subdirs[i]` while enumerating skipped entries when two hidden
        # directories were adjacent.
        subdirs[:] = [subdir for subdir in subdirs if not subdir.startswith('.')]
        for f in files:
            if f.endswith('.pyc') or f.startswith('.DS_Store'):
                continue
            path_old = os.path.join(d, f)
            path_new = os.path.join(copy_to, relative_dir, f.replace('sample', tag_library_name))
            if os.path.exists(path_new):
                # Fall back to the untranslated name; skip if that exists too.
                path_new = os.path.join(copy_to, relative_dir, f)
                if os.path.exists(path_new):
                    continue
            # Fixed: strip the '.tmpl' *suffix*. str.rstrip(".tmpl") strips
            # any trailing run of the characters '.tmpl', mangling names
            # ending in 't', 'm', 'p' or 'l'.
            if path_new.endswith('.tmpl'):
                path_new = path_new[:-len('.tmpl')]
            # Context managers close both handles even on error.
            with open(path_old, 'r') as fp_old:
                with open(path_new, 'w') as fp_new:
                    fp_new.write(fp_old.read())
            try:
                shutil.copymode(path_old, path_new)
                _make_writeable(path_new)
            except OSError:
                sys.stderr.write("Notice: Couldn't set permission bits on %s. You're probably using an uncommon filesystem setup. No problem.\n" % path_new)
def __process_template_folder(path, subs):
    """Recursively expand every *TEMPLATE_EXTENSION file under `path`
    using the `subs` substitution mapping; returns the template files
    that were processed."""
    items = os.listdir(path)
    processed_items = []
    for item in list(items):
        item = os.path.abspath(os.path.join(path, item))
        # Never descend into VCS metadata.
        if os.path.basename(item) in ['.', '..', '.git', '.svn']:
            continue
        if os.path.isdir(item):
            # Recurse first; record results with their full paths.
            sub_items = __process_template_folder(item, subs)
            processed_items.extend([os.path.join(item, s) for s in sub_items])
        if not item.endswith(TEMPLATE_EXTENSION):
            continue
        with open(item, 'r') as f:
            template = f.read()
        # Remove extension
        template_path = item[:-len(TEMPLATE_EXTENSION)]
        # Expand template
        info("Expanding '{0}' -> '{1}'".format(
            os.path.relpath(item),
            os.path.relpath(template_path)))
        result = em.expand(template, **subs)
        # Write the result
        # NOTE(review): writing `result.encode('utf8')` (bytes) to a
        # text-mode file only works on Python 2 — confirm target version.
        with open(template_path, 'w') as f:
            f.write(result.encode('utf8'))
        # Copy the permissions
        shutil.copymode(item, template_path)
        processed_items.append(item)
    return processed_items
def save_file(filename, data, encoding, keep_tmp=False):
    """Atomically save text `data` to `filename`, encoded with `encoding`.

    Writes to `filename + '.bak'` first, then renames into place (unless
    `keep_tmp` is true, in which case the temp path is returned and the
    original file is left untouched). Missing parent directories are
    created. Permission bits of an existing target are preserved.
    """
    tmpfilename = realpath(filename) + '.bak'
    try:
        f = open(tmpfilename, 'wb')
    except IOError:
        dname = dirname(tmpfilename)
        if not exists(dname):
            # Fixed: `0755` is Python 2 octal syntax (a SyntaxError on
            # Python 3) — now written as 0o755.
            os.makedirs(dname, mode=0o755)
            f = open(tmpfilename, 'wb')
        else:
            raise
    # Fixed: the file is opened in binary mode so the encoded bytes can be
    # written on Python 3 (text mode rejects bytes); the context manager
    # guarantees the handle is closed.
    with f:
        f.write(data.encode(encoding))
    if exists(filename):
        # Carry over the existing file's permission bits; best-effort.
        try:
            shutil.copymode(filename, tmpfilename)
        except OSError:
            pass
    if keep_tmp:
        return tmpfilename
    else:
        os.rename(tmpfilename, filename)
def sed_i(files, expr, replace_exp, only_first_occurrence=False):
    """
    Massively search/replace matching lines in files, in place.

    Similar to: sed -i "s/expr/replace_expr/g" files...

    :type files: enumerate or list
    :param files: file names generator
    :type expr: str or pattern
    :type replace_exp: str
    :param only_first_occurrence: replace only first occurrence per line
    """
    pattern = _compiled_re(expr)
    for filename in files:
        tmp_name = filename + '.pygrep.tmp'
        # Stream the substitution into a sibling temp file.
        with open(filename, 'r') as source:
            with open(tmp_name, 'w') as dest:
                sed(source, pattern, replace_exp, dest, only_first_occurrence)
        # Preserve the original file's permission bits on the replacement.
        shutil.copymode(filename, tmp_name)
        # Swap the new file in via a short-lived backup, then drop the backup.
        backup_name = filename + '.pygrep.ori'
        os.rename(filename, backup_name)
        os.rename(tmp_name, filename)
        os.remove(backup_name)
def copy_template_file(src, dest, replace=None):
    """
    Copy a source file to a new destination file.

    To replace boilerplate strings in the source data, pass a dictionary
    to the ``replace`` argument where each key is the boilerplate string
    and the corresponding value is the string which should replace it.

    An empty ``SECRET_KEY = ''`` assignment in the data is filled with a
    freshly generated 50-character key.
    """
    replace = replace or {}
    # Read the data from the source file; the context manager closes the
    # handle even if reading raises (the original leaked handles on error).
    with open(src, 'r') as src_file:
        data = src_file.read()
    # Replace boilerplate strings.
    for old_val, new_val in replace.items():
        data = data.replace(old_val, new_val)
    # Generate SECRET_KEY for settings file.
    # NOTE(review): random.choice is not cryptographically secure; consider
    # the `secrets` module for key generation.
    secret_key = ''.join([choice('abcdefghijklmnopqrstuvwxyz0123456789!@#$%^&*(-_=+)') for i in range(50)])
    # Insert the key just before the closing quote of SECRET_KEY = ''.
    data = re.sub(r"(?<=SECRET_KEY = ')'", secret_key + "'", data)
    # Write the data to the destination file.
    with open(dest, 'w') as dest_file:
        dest_file.write(data)
    # Copy permissions from source file.
    shutil.copymode(src, dest)
    # Make new file writable.
    if os.access(dest, os.W_OK):
        st = os.stat(dest)
        new_permissions = stat.S_IMODE(st.st_mode) | stat.S_IWUSR
        os.chmod(dest, new_permissions)
def do_conf_file(src, dst, confdata, format):
    """Generate configured file `dst` from template `src` using `confdata`.

    `format` selects the substitution syntax ('meson', 'cmake' or
    'cmake@'). Returns the set of template variables that had no value
    in `confdata`.
    """
    try:
        with open(src, encoding='utf-8') as f:
            data = f.readlines()
    except Exception as e:
        raise MesonException('Could not read input file %s: %s' % (src, str(e)))
    # Only allow (a-z, A-Z, 0-9, _, -) as valid characters for a define
    # Also allow escaping '@' with '\@'
    if format in ['meson', 'cmake@']:
        regex = re.compile(r'(?:\\\\)+(?=\\?@)|\\@|@([-a-zA-Z0-9_]+)@')
    elif format == 'cmake':
        regex = re.compile(r'(?:\\\\)+(?=\\?\$)|\\\${|\${([-a-zA-Z0-9_]+)}')
    else:
        raise MesonException('Format "{}" not handled'.format(format))
    # Lines starting with the define token are handled separately.
    search_token = '#mesondefine'
    if format != 'meson':
        search_token = '#cmakedefine'
    result = []
    missing_variables = set()
    for line in data:
        if line.startswith(search_token):
            line = do_mesondefine(line, confdata)
        else:
            line, missing = do_replacement(regex, line, format, confdata)
            missing_variables.update(missing)
        result.append(line)
    # Write to a temp file and only replace the target if the contents
    # differ, so downstream rebuilds are not triggered needlessly.
    dst_tmp = dst + '~'
    with open(dst_tmp, 'w', encoding='utf-8') as f:
        f.writelines(result)
    shutil.copymode(src, dst_tmp)
    replace_if_different(dst, dst_tmp)
    return missing_variables
def traverse(path):
    """Recursively install files under `path` into options.prefix,
    honouring the module-level excludes/starts/to_install filters."""
    for item in os.listdir(path):
        source = os.path.join(path, item)
        relative = os.path.relpath(source)
        # Targets not already dotted get a '.' prefix under the install root.
        destination = os.path.join(options.prefix, ('' if relative.startswith('.') else '.') + relative)
        print('p', source)
        if source in excludes:
            print('ex')
            continue
        if not source.startswith(starts) and not source in to_install:
            print('not ins')
            continue
        if os.path.isfile(source):
            install(source, destination)
        elif os.path.isdir(source):
            # Create missing directories with the source's permission bits,
            # then recurse.
            if not os.path.exists(destination):
                os.mkdir(destination)
                shutil.copymode(source, destination)
            traverse(source)
        else:
            # Neither file nor directory (socket, device, broken link, ...).
            print >> sys.stderr, '%s: Don\'t know how to handle that' % \
                relative
def buildpyMySQL(mysqlLocation):
    """Build the pyMySQL C extension from lib/pyMySQL.zip and install
    pyMySQL.so into lib/. Exits the process on build failure."""
    # Unzip the pyMySQL zip file
    zipfile.ZipFile('lib/pyMySQL.zip').extractall(path='lib')
    # Change to the directory of the src code for pyMySQL
    os.chdir('lib/pyMySQL/src')
    # Builds the arg list
    buildArgs = [sys.executable, 'pyMySQL_setup.py', 'build']
    if mysqlLocation is not None:
        buildArgs.append('--with-mysql=' + mysqlLocation)
    # Attempts to build the pyMySQL module
    retcode = subprocess.call(buildArgs)
    if retcode != 0:
        sys.exit('Error building pyMySQL C extension module')
    # Gets the filename of library (platform-specific build subdirectory)
    libfiles = glob.glob('build/lib.*/pyMySQL.so')
    # Checks the file exists
    if len(libfiles) == 0:
        sys.exit('Error building pyMySQL C extension module')
    # Copies the file to the lib directory, preserving its mode
    shutil.copyfile(libfiles[0], '../../pyMySQL.so')
    shutil.copymode(libfiles[0], '../../pyMySQL.so')
    # Return to the original directory
    os.chdir('../../../')
def _create_repo(self):
    """Create a throwaway git repo seeded with the sample activity sources.

    Returns the repository path. Fixed: the working directory is now
    restored even if a git command fails (the original left the process
    chdir'd into the temp repo on error).
    """
    cwd = os.getcwd()
    path = tempfile.mkdtemp()
    os.chdir(path)
    try:
        subprocess.check_call(["git", "init"])
        subprocess.check_call(["git", "config", "user.name", "Test Test"])
        subprocess.check_call(["git", "config", "user.email", "*****@*****.**"])
        for source in self._source_files:
            source_path = os.path.join(data_dir, "sample.activity", source)
            dest_path = os.path.join(path, source)
            try:
                os.makedirs(os.path.dirname(dest_path))
            except OSError:
                # Parent directory already exists.
                pass
            # Copy contents and permission bits, then stage the file.
            shutil.copyfile(source_path, dest_path)
            shutil.copymode(source_path, dest_path)
            subprocess.check_call(["git", "add", source])
        subprocess.check_call(["git", "commit", "-m", "Initial commit", "-a"])
    finally:
        os.chdir(cwd)
    return path
def run(self):
    """Fill in the in/*.in template files, substituting %OPTION% markers
    with the values of this command's user options."""
    values = list()
    # Collect (name, value) pairs: '=' options carry a value, the rest
    # are boolean switches.
    for argument in self.user_options:
        if argument[0].endswith('='):
            print argument[0][:-1],'is',
            print getattr(self, argument[0][:-1])
            values.append((argument[0][:-1], getattr(self, argument[0][:-1].replace('-','_'))))
        else:
            print "Found switch",argument,getattr(self, argument[0].replace('-','_'))
            values.append((argument[0], bool(getattr(self, argument[0].replace('-','_')))))
    print 'Replacing values in template files...'
    for item in os.listdir('in'):
        if item.endswith('.in'):
            print 'Replacing values in',item,
            original_name = os.path.join('in',item)
            item_in = open(original_name, 'r')
            # '=' in a template name encodes a '/' in its output path.
            final_name = item[:-3].replace('=','/')
            print final_name
            item_out = open(final_name, 'w')
            for line in item_in.readlines():
                # Replace every %OPTION-NAME% marker on the line.
                for item, value in values:
                    line = line.replace('%' + str(item.upper().replace('-','_')) + '%', str(value))
                item_out.write(line)
            item_out.close()
            item_in.close()
            # Preserve the template's permission bits.
            shutil.copymode(original_name, final_name)
def apply(self):
    "merges all decisions for this proposal (and those with lower revisions)"
    file_path = self.get_file_path()
    merged_configpath = file_path + '.merged'
    bak_configpath = file_path + '.bak'
    # Write the merged content next to the target first.
    fd = open(merged_configpath, 'w')
    fd.writelines(self.get_merged_content())
    fd.close()
    try:
        # Carry mode/stat over from the current file, then keep the
        # current file around as .bak until the swap completes.
        shutil.copymode(file_path, merged_configpath)
        shutil.copystat(file_path, merged_configpath)
        shutil.move(file_path, bak_configpath)
    except OSError:
        # The target may not exist yet; the merged file is installed as-is.
        pass
    shutil.move(merged_configpath, file_path)
    try:
        os.unlink(file_path + '.bak')
    except OSError:
        pass
    # Reset cached merge state now that the proposal has been applied.
    self.base_lines = None
    self._changes = None
    try:
        os.unlink(self.path)
    except OSError:
        pass
    self.clear_state()
def __exit__(self, exc_type, exc_val, exc_tb):
    """Finish an atomic file replacement: rename the temp file over the
    destination on success, or remove it if the body raised."""
    if self.caps.get("rename_works"):
        if exc_type:
            # error: the with-body raised, so discard the temp file.
            try:
                os.remove(self.tmpname)
            except Exception, e:  # Python 2 except syntax
                logging.warning("An error was raised, so I was doing "
                                "some cleanup first, but I couldn't remove "
                                "'%s'!", self.tmpname)
        else:
            # copy permission bits, if needed
            if self.caps.get("chmod_works") and os.path.exists(self.destname):
                try:
                    shutil.copymode(self.destname, self.tmpname)
                except OSError, e:
                    # Ignore errno ENOENT: file does not exist. Due to a race
                    # condition, two processes could conceivably try and update
                    # the same temp file at the same time
                    if e.errno != errno.ENOENT:
                        raise
            # atomic rename into place
            try:
                os.rename(self.tmpname, self.destname)
            except OSError, e:
                # Ignore errno ENOENT: file does not exist. Due to a race
                # condition, two processes could conceivably try and update
                # the same temp file at the same time
                if e.errno != errno.ENOENT:
                    raise
def run(self):
    """Fill in the in/*.in template files, substituting %OPTION% markers
    with the values of this command's user options."""
    values = list()
    # Collect (name, value) pairs: '=' options carry a value, the rest
    # are boolean switches.
    for argument in self.user_options:
        if argument[0].endswith("="):
            print argument[0][:-1], "is",
            print getattr(self, argument[0][:-1])
            values.append((argument[0][:-1], getattr(self, argument[0][:-1].replace("-", "_"))))
        else:
            print "Found switch", argument, getattr(self, argument[0].replace("-", "_"))
            values.append((argument[0], bool(getattr(self, argument[0].replace("-", "_")))))
    print "Replacing values in template files..."
    for item in os.listdir("in"):
        if item.endswith(".in"):
            print "Replacing values in", item,
            original_name = os.path.join("in", item)
            item_in = open(original_name, "r")
            # '=' in a template name encodes a '/' in its output path.
            final_name = item[:-3].replace("=", "/")
            print final_name
            item_out = open(final_name, "w")
            for line in item_in.readlines():
                # Replace every %OPTION-NAME% marker on the line.
                for item, value in values:
                    line = line.replace("%" + str(item.upper().replace("-", "_")) + "%", str(value))
                item_out.write(line)
            item_out.close()
            item_in.close()
            # Preserve the template's permission bits.
            shutil.copymode(original_name, final_name)
(self.__target_file, line_count) self.logger.error(self.module_name, msg) break outfile.write(line + '\n') infile.close() outfile.close() action_record = tcs_utils.generate_diff_record(self.__tmp_file, self.__target_file) try: shutil.copymode(self.__target_file, self.__tmp_file) shutil.copy2(self.__tmp_file, self.__target_file) os.unlink(self.__tmp_file) except OSError: msg = "Unable to replace %s with new version." % self.__target_file self.logger.info(self.module_name, 'Apply Error: ' + msg) raise tcs_utils.ActionError('%s %s' % (self.module_name, msg)) return 1, action_record ########################################################################## def undo(self, change_record=None): """Undo previous change application.""" result, reason = self.scan() if result == 'Fail':
def copy(self, exclusions, files, path): """ exclusions - The list of exclusion names (not wildcards) files - arguments of the copy command path - search path where to take the copy files from """ # destination is the subdirectory in the distribution tree destination = "" if len(files) > 1: destination = files[-1] files = files[:-1] tree_destination = destination destination = os.path.join(self.distDir, self.packageDir, destination) # Analysing exclusions for i in exclusions: if i not in self.exclusions.keys(): raise InvalidExclusion(i) if Impl.excl not in exclusions: exclusions.append(Impl.excl) # Global exclusion is always there if self.srcDir: path = string.replace(path, "%src%", self.srcDir) print "\nCopying:%s\nExcluding %s\nFrom path %s\nTo %s" % ( files, exclusions, path, destination) # Find all the files in the given path path = os.path.abspath(path) if not os.path.isdir(path): print "Path", path, os.path.isdir(path), os.curdir raise WrongSearchPath(path) if path not in self.pathCache.keys(): self.pathCache[path] = findall(path) #print "Found in %s:\n%s" % (path, self.pathCache[path]) # Remove the exclusions: clean_file_list = [] for f in self.pathCache[path]: to_exclude = 0 for i in exclusions: for j in self.exclusions[i]: if fnmatch.fnmatch(f, j): to_exclude = 1 if not to_exclude: clean_file_list.append(f) #print "Files after exclusions:\n", clean_file_list # Extract the specified files from given wildcards requested_files = {} for i in clean_file_list: for f in files: if fnmatch.fnmatch(i, f): requested_files[i] = "" # If no requested files really found: error for f in files: is_match = 0 for i in requested_files.keys(): if fnmatch.fnmatch(i, f): is_match = 1 if not is_match: raise FilesNotFound(f, path) if len(requested_files.keys()) == 0: raise FilesNotFound(files, path) # Check before copying if file already exist!.. 
requested_files = requested_files.keys() #print "REQUESTED:", requested_files for i in requested_files: copy_path = os.path.join(destination, i) tree_path = os.path.join(tree_destination, i) if os.path.isdir(copy_path): raise CopyToDir(copy_path) original = os.path.join(path, i) print "%s -> \t\t %s" % (original, tree_path) dest_dir = os.path.dirname(copy_path) if not os.path.exists(dest_dir): os.makedirs(dest_dir) if os.path.isfile(dest_dir): raise CopyToFile(dest_dir) shutil.copyfile(original, copy_path) shutil.copymode(original, copy_path)
def copymode(src, dest):
    """Copy the permission bits of ``src`` onto ``dest``.

    Thin convenience wrapper around :func:`shutil.copymode`; ownership
    and timestamps are not copied.
    """
    import shutil
    shutil.copymode(src, dest)
with open(old_path, 'rb') as template_file: content = template_file.read() if filename.endswith(extensions) or filename in extra_files: content = content.decode('utf-8') template = Template(content) content = template.render(context) content = content.encode('utf-8') with open(new_path, 'wb') as new_file: new_file.write(content) if self.verbosity >= 2: self.stdout.write("Creating %s\n" % new_path) try: shutil.copymode(old_path, new_path) self.make_writeable(new_path) except OSError: self.stderr.write( "Notice: Couldn't set permission bits on %s. You're " "probably using an uncommon filesystem setup. No " "problem." % new_path, self.style.NOTICE) if self.paths_to_remove: if self.verbosity >= 2: self.stdout.write("Cleaning up temporary files.\n") for path_to_remove in self.paths_to_remove: if path.isfile(path_to_remove): os.remove(path_to_remove) else: shutil.rmtree(path_to_remove,
def generate_ssh_keys():
    """Ensure an id_rsa keypair exists in ~/.ssh (synced with ./keys)
    and load it into the running ssh-agent.

    Concurrent invocations are serialized with an exclusive flock on
    ~/.ssh/ssh.pid.
    """
    keys_dir = '{}/keys'.format(os.getcwd())
    ssh_dir = '{}/.ssh'.format(os.getenv('HOME'))
    pidfile = os.path.join(ssh_dir, 'ssh.pid')

    def add_ssh_keys():
        # Add every file in ~/.ssh except the bookkeeping files to the agent.
        IGNORE_FILES = ('README.md', 'ssh.pid')
        keys_to_add = [
            entry.name for entry in os.scandir(ssh_dir)
            if entry.name not in IGNORE_FILES
        ]
        keys_to_add = ' '.join(os.path.join(ssh_dir, f) for f in keys_to_add)
        subprocess.run(
            ['ssh-add {}'.format(keys_to_add)],  # nosec
            shell=True,
            stderr=subprocess.PIPE,
            # lets set the timeout if ssh-add requires a input passphrase for key
            # otherwise the process will be freezed
            timeout=30,
        )

    with open(pidfile, "w") as pid:
        fcntl.flock(pid, fcntl.LOCK_EX)
        try:
            add_ssh_keys()
            keys = subprocess.run(
                ['ssh-add', '-l'],  # nosec
                stdout=subprocess.PIPE).stdout.decode('utf-8').split('\n')
            if 'has no identities' in keys[0]:
                print('SSH keys were not found')
                volume_keys = os.listdir(keys_dir)
                if not ('id_rsa' in volume_keys and 'id_rsa.pub' in volume_keys):
                    # No keypair anywhere: generate one in ~/.ssh and copy
                    # it out to the keys volume (contents + mode bits).
                    print('New pair of keys are being generated')
                    subprocess.run([
                        'ssh-keygen -b 4096 -t rsa -f {}/id_rsa -q -N ""'.
                        format(ssh_dir)
                    ], shell=True)  # nosec
                    shutil.copyfile('{}/id_rsa'.format(ssh_dir),
                                    '{}/id_rsa'.format(keys_dir))
                    shutil.copymode('{}/id_rsa'.format(ssh_dir),
                                    '{}/id_rsa'.format(keys_dir))
                    shutil.copyfile('{}/id_rsa.pub'.format(ssh_dir),
                                    '{}/id_rsa.pub'.format(keys_dir))
                    shutil.copymode('{}/id_rsa.pub'.format(ssh_dir),
                                    '{}/id_rsa.pub'.format(keys_dir))
                else:
                    # Keypair exists on the volume: install it into ~/.ssh.
                    print('Copying them from keys volume')
                    shutil.copyfile('{}/id_rsa'.format(keys_dir),
                                    '{}/id_rsa'.format(ssh_dir))
                    shutil.copymode('{}/id_rsa'.format(keys_dir),
                                    '{}/id_rsa'.format(ssh_dir))
                    shutil.copyfile('{}/id_rsa.pub'.format(keys_dir),
                                    '{}/id_rsa.pub'.format(ssh_dir))
                    shutil.copymode('{}/id_rsa.pub'.format(keys_dir),
                                    '{}/id_rsa.pub'.format(ssh_dir))
                subprocess.run(['ssh-add', '{}/id_rsa'.format(ssh_dir)])  # nosec
        finally:
            fcntl.flock(pid, fcntl.LOCK_UN)
def handle(self, app_or_project, name, target=None, **options):
    """Render the app/project template tree into *target* (or ./<name>).

    Walks the template directory, rewriting placeholder names, rendering
    files whose extension/filename is registered through the Django
    template engine, and byte-copying everything else.  Permission bits
    are mirrored from the template files onto the created files.
    """
    self.app_or_project = app_or_project
    self.paths_to_remove = []
    self.verbosity = options["verbosity"]

    self.validate_name(name, app_or_project)

    # if some directory is given, make sure it's nicely expanded
    if target is None:
        top_dir = path.join(os.getcwd(), name)
        try:
            os.makedirs(top_dir)
        except FileExistsError:
            raise CommandError("'%s' already exists" % top_dir)
        except OSError as e:
            raise CommandError(e)
    else:
        top_dir = os.path.abspath(path.expanduser(target))
        if not os.path.exists(top_dir):
            raise CommandError("Destination directory '%s' does not "
                               "exist, please create it first." % top_dir)

    extensions = tuple(handle_extensions(options["extensions"]))
    extra_files = []
    for file in options["files"]:
        extra_files.extend(map(lambda x: x.strip(), file.split(",")))
    if self.verbosity >= 2:
        self.stdout.write("Rendering %s template files with "
                          "extensions: %s\n"
                          % (app_or_project, ", ".join(extensions)))
        self.stdout.write("Rendering %s template files with "
                          "filenames: %s\n"
                          % (app_or_project, ", ".join(extra_files)))

    base_name = "%s_name" % app_or_project
    base_subdir = "%s_template" % app_or_project
    base_directory = "%s_directory" % app_or_project
    camel_case_name = "camel_case_%s_name" % app_or_project
    camel_case_value = "".join(x for x in name.title() if x != "_")

    # autoescape=False: output is source code, not HTML.
    context = Context(
        dict(
            options,
            **{
                base_name: name,
                base_directory: top_dir,
                camel_case_name: camel_case_value,
                "docs_version": get_docs_version(),
                "django_version": django.__version__,
            }),
        autoescape=False,
    )

    # Setup a stub settings environment for template rendering
    if not settings.configured:
        settings.configure()
        django.setup()

    template_dir = self.handle_template(options["template"], base_subdir)
    prefix_length = len(template_dir) + 1

    for root, dirs, files in os.walk(template_dir):
        path_rest = root[prefix_length:]
        relative_dir = path_rest.replace(base_name, name)
        if relative_dir:
            target_dir = path.join(top_dir, relative_dir)
            if not path.exists(target_dir):
                os.mkdir(target_dir)

        # Prune hidden and cache directories in-place so os.walk skips them.
        for dirname in dirs[:]:
            if dirname.startswith(".") or dirname == "__pycache__":
                dirs.remove(dirname)

        for filename in files:
            if filename.endswith((".pyo", ".pyc", ".py.class")):
                # Ignore some files as they cause various breakages.
                continue
            old_path = path.join(root, filename)
            new_path = path.join(top_dir, relative_dir,
                                 filename.replace(base_name, name))
            for old_suffix, new_suffix in self.rewrite_template_suffixes:
                if new_path.endswith(old_suffix):
                    new_path = new_path[:-len(old_suffix)] + new_suffix
                    break  # Only rewrite once

            if path.exists(new_path):
                raise CommandError("%s already exists, overlaying a "
                                   "project or app into an existing "
                                   "directory won't replace conflicting "
                                   "files" % new_path)

            # Only render the Python files, as we don't want to
            # accidentally render Django templates files
            if new_path.endswith(extensions) or filename in extra_files:
                with open(old_path, "r", encoding="utf-8") as template_file:
                    content = template_file.read()
                template = Engine().from_string(content)
                content = template.render(context)
                with open(new_path, "w", encoding="utf-8") as new_file:
                    new_file.write(content)
            else:
                shutil.copyfile(old_path, new_path)

            if self.verbosity >= 2:
                self.stdout.write("Creating %s\n" % new_path)
            try:
                shutil.copymode(old_path, new_path)
                self.make_writeable(new_path)
            except OSError:
                self.stderr.write(
                    "Notice: Couldn't set permission bits on %s. You're "
                    "probably using an uncommon filesystem setup. No "
                    "problem." % new_path, self.style.NOTICE,
                )

    if self.paths_to_remove:
        if self.verbosity >= 2:
            self.stdout.write("Cleaning up temporary files.\n")
        for path_to_remove in self.paths_to_remove:
            if path.isfile(path_to_remove):
                os.remove(path_to_remove)
            else:
                shutil.rmtree(path_to_remove)
def copyfile(src, dst, copymode=True):
    """Copy src to dst, echoing the equivalent shell command.

    copymode -- when true, also replicate src's permission bits onto dst.
    """
    # Python 2 print statement.  `quot` presumably shell-quotes the paths
    # for display -- TODO confirm against its definition elsewhere.
    print 'cp %s %s' % (quot(src), quot(dst))
    shutil.copyfile(src, dst)
    if copymode:
        shutil.copymode(src, dst)
def package(self, items):
    """Stage files into the package dir, write an md5sum manifest, and
    build a .deb with dpkg.

    ``items`` maps a file-type name to a list of {'path', 'name'} dicts;
    the special ``Package.extensionTag`` entry describes per-platform
    filename prefixes/suffixes.
    """
    out = self.pkgdir
    extensions = dict()
    s = items[Package.extensionTag]
    # Collect prefix/suffix rules that apply to the current platform.
    for x in s:
        plat = x['os']
        if plat == sys.platform:
            sfx = ''
            prx = ''
            if 'suffix' in x:
                sfx = x['suffix']
            if 'prefix' in x:
                prx = x['prefix']
            extensions[x['type']] = {'suffix': sfx, 'prefix': prx}
    # Start copy file loop
    for name in items:
        if name == Package.extensionTag:
            continue
        for kvp in items[name]:
            path = kvp['path']
            fname = kvp['name']
            sfx = ''
            pfx = ''
            if name in extensions:
                sfx = extensions[name]['suffix']
                pfx = extensions[name]['prefix']
                # Prepend the platform prefix to the file's basename.
                path = os.path.join(
                    os.path.split(path)[0], pfx + os.path.split(path)[1])
            src = os.path.normpath(self.root + os.path.sep + path)
            destname = self.map[name]
            dest = os.path.normpath(self.pkgdir + os.path.sep + destname)
            fname = os.path.join(dest, kvp['name'])
            destname = os.path.join(destname, kvp['name'])
            # NOTE(review): the suffix is appended to src and to the manifest
            # name but not to fname (the copy target) -- confirm intentional.
            src = src + sfx
            destname = destname + sfx
            # make the "file system" path in pkgdir
            try:
                p = os.path.dirname(fname)
                os.makedirs(p)
            except:
                pass  # best-effort: directory may already exist
            # Copy the src file to dest file name
            shutil.copyfile(src, fname)
            shutil.copymode(src, fname)
            # Generate m5sum
            m = hashlib.md5()
            # NOTE(review): text-mode open for hashing binary data -- fine on
            # Python 2, would corrupt/raise on Python 3.
            f = open(fname, 'r')
            while True:
                data = f.read(128000)
                if len(data) == 0:
                    break
                m.update(data)
            sum = m.hexdigest()
            self.md5sumFile.write(sum + ' ' + destname + '\n')
    # End copy file loop
    self.md5sumFile.close()
    # Build the package
    target = os.path.join(self.outdir, self.name + '.deb')
    ps = subprocess.Popen(['dpkg', '-b', self.pkgdir, target],
                          close_fds=True, stdin=subprocess.PIPE,
                          stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
    ps.wait()
def run(self, app_or_project, name, target=None, **options):
    """Render an anthill app/project template tree into *target* (or ./<name>).

    Mirrors Django's startapp/startproject flow but renders matching files
    through Tornado's ``Template`` instead of the Django template engine.
    """
    self.app_or_project = app_or_project
    self.paths_to_remove = []

    self.validate_name(name, app_or_project)

    # if some directory is given, make sure it's nicely expanded
    if target is None:
        top_dir = path.join(os.getcwd(), name)
        try:
            os.makedirs(top_dir)
        except FileExistsError:
            raise InvalidCommand("'%s' already exists" % top_dir)
        except OSError as e:
            raise InvalidCommand(e)
    else:
        top_dir = os.path.abspath(path.expanduser(target))
        if not os.path.exists(top_dir):
            raise InvalidCommand("Destination directory '%s' does not "
                                 "exist, please create it first." % top_dir)

    extensions = tuple(handle_extensions(options['extensions']))
    extra_files = []
    for file in options['files']:
        extra_files.extend(map(lambda x: x.strip(), file.split(',')))
    self.stdout.write("Rendering %s template files with "
                      "extensions: %s\n"
                      % (app_or_project, ', '.join(extensions)))
    self.stdout.write("Rendering %s template files with "
                      "filenames: %s\n"
                      % (app_or_project, ', '.join(extra_files)))

    base_name = '%s_name' % app_or_project
    base_subdir = '%s_template' % app_or_project
    base_directory = '%s_directory' % app_or_project
    camel_case_name = 'camel_case_%s_name' % app_or_project
    camel_case_value = ''.join(x for x in name.title() if x != '_')

    context = {
        base_name: name,
        base_directory: top_dir,
        camel_case_name: camel_case_value,
        'docs_version': get_docs_version(),
        'anthill_version': anthill.framework.__version__,
    }
    # Command-line options are merged in, overriding on key collision.
    context = dict(context, **options)

    template_dir = self.handle_template(options['template'], base_subdir)
    prefix_length = len(template_dir) + 1

    for root, dirs, files in os.walk(template_dir):
        path_rest = root[prefix_length:]
        relative_dir = path_rest.replace(base_name, name)
        if relative_dir:
            target_dir = path.join(top_dir, relative_dir)
            if not path.exists(target_dir):
                os.mkdir(target_dir)

        # Prune hidden and cache directories in-place so os.walk skips them.
        for dirname in dirs[:]:
            if dirname.startswith('.') or dirname == '__pycache__':
                dirs.remove(dirname)

        for filename in files:
            if filename.endswith(('.pyo', '.pyc', '.py.class')):
                # Ignore some files as they cause various breakages.
                continue
            old_path = path.join(root, filename)
            new_path = path.join(top_dir, relative_dir,
                                 filename.replace(base_name, name))
            for old_suffix, new_suffix in self.rewrite_template_suffixes:
                if new_path.endswith(old_suffix):
                    new_path = new_path[:-len(old_suffix)] + new_suffix
                    break  # Only rewrite once

            if path.exists(new_path):
                raise InvalidCommand("%s already exists, overlaying a "
                                     "project or app into an existing "
                                     "directory won't replace conflicting "
                                     "files" % new_path)

            # Only render the Python files, as we don't want to
            # accidentally render Anthill templates files
            if new_path.endswith(extensions) or filename in extra_files:
                with open(old_path, 'r', encoding='utf-8') as template_file:
                    content = template_file.read()
                # Tornado template: generate() returns bytes, hence to_unicode.
                template = Template(content)
                content = template.generate(**context)
                with open(new_path, 'w', encoding='utf-8') as new_file:
                    new_file.write(to_unicode(content))
            else:
                shutil.copyfile(old_path, new_path)

            self.stdout.write("Creating %s\n" % new_path)
            try:
                shutil.copymode(old_path, new_path)
                self.make_writeable(new_path)
            except OSError:
                self.stderr.write(
                    "Notice: Couldn't set permission bits on %s. You're "
                    "probably using an uncommon filesystem setup. No "
                    "problem." % new_path)

    if self.paths_to_remove:
        self.stdout.write("Cleaning up temporary files.\n")
        for path_to_remove in self.paths_to_remove:
            if path.isfile(path_to_remove):
                os.remove(path_to_remove)
            else:
                shutil.rmtree(path_to_remove)
def sort_file(
    filename: Union[str, Path],
    extension: Optional[str] = None,
    config: Config = DEFAULT_CONFIG,
    file_path: Optional[Path] = None,
    disregard_skip: bool = True,
    ask_to_apply: bool = False,
    show_diff: Union[bool, TextIO] = False,
    write_to_stdout: bool = False,
    **config_kwargs,
) -> bool:
    """Sorts and formats any groups of imports imports within the provided file or Path.
    Returns `True` if the file has been changed, otherwise `False`.

    - **filename**: The name or Path of the file to format.
    - **extension**: The file extension that contains imports. Defaults to filename extension or py.
    - **config**: The config object to use when sorting imports.
    - **file_path**: The disk location where the code string was pulled from.
    - **disregard_skip**: set to `True` if you want to ignore a skip set in config for this file.
    - **ask_to_apply**: If `True`, prompt before applying any changes.
    - **show_diff**: If `True` the changes that need to be done will be printed to stdout, if a
    TextIO stream is provided results will be written to it, otherwise no diff will be computed.
    - **write_to_stdout**: If `True`, write to stdout instead of the input file.
    - ****config_kwargs**: Any config modifications.
    """
    with io.File.read(filename) as source_file:
        actual_file_path = file_path or source_file.path
        config = _config(path=actual_file_path, config=config, **config_kwargs)
        changed: bool = False
        try:
            if write_to_stdout:
                changed = sort_stream(
                    input_stream=source_file.stream,
                    output_stream=sys.stdout,
                    config=config,
                    file_path=actual_file_path,
                    disregard_skip=disregard_skip,
                    extension=extension,
                )
            else:
                # Sort into a sibling ".isorted" temp file first so the
                # original is only replaced after sorting succeeded (and,
                # when asked, after the user approved the change).
                tmp_file = source_file.path.with_suffix(source_file.path.suffix + ".isorted")
                try:
                    with tmp_file.open(
                        "w", encoding=source_file.encoding, newline=""
                    ) as output_stream:
                        # Mirror the original file's permission bits onto
                        # the temp file before it replaces the original.
                        shutil.copymode(filename, tmp_file)
                        changed = sort_stream(
                            input_stream=source_file.stream,
                            output_stream=output_stream,
                            config=config,
                            file_path=actual_file_path,
                            disregard_skip=disregard_skip,
                            extension=extension,
                        )
                    if changed:
                        if show_diff or ask_to_apply:
                            source_file.stream.seek(0)
                            with tmp_file.open(
                                encoding=source_file.encoding, newline=""
                            ) as tmp_out:
                                show_unified_diff(
                                    file_input=source_file.stream.read(),
                                    file_output=tmp_out.read(),
                                    file_path=actual_file_path,
                                    output=None if show_diff is True
                                    else cast(TextIO, show_diff),
                                    color_output=config.color_output,
                                )
                            # Diff-only mode (or a declined prompt) leaves
                            # the original file untouched.
                            if show_diff or (
                                ask_to_apply
                                and not ask_whether_to_apply_changes_to_file(
                                    str(source_file.path)
                                )
                            ):
                                return False
                        source_file.stream.close()
                        tmp_file.replace(source_file.path)
                        if not config.quiet:
                            print(f"Fixing {source_file.path}")
                finally:
                    try:  # Python 3.8+: use `missing_ok=True` instead of try except.
                        tmp_file.unlink()
                    except FileNotFoundError:
                        pass  # pragma: no cover
        except ExistingSyntaxErrors:
            warn(f"{actual_file_path} unable to sort due to existing syntax errors")
        except IntroducedSyntaxErrors:  # pragma: no cover
            warn(f"{actual_file_path} unable to sort as isort introduces new syntax errors")

        return changed
config_dict.write(tmpfile) tmpfile.flush() # Save the old config file if it exists: if not self.dry_run and os.path.exists(install_path): backup_path = weeutil.weeutil.move_with_timestamp(install_path) print "Saved old configuration file as %s" % backup_path # Now install the temporary file (holding the merged config data) # into the proper place: rv = install_data.copy_file(self, tmpfile.name, install_path, **kwargs) # Set the permission bits unless this is a dry run: if not self.dry_run: shutil.copymode(f, install_path) return rv def massage_start_file(self, f, install_dir, **kwargs): outname = os.path.join(install_dir, os.path.basename(f)) sre = re.compile(r"WEEWX_ROOT\s*=") with open(f, 'r') as infile: with tempfile.NamedTemporaryFile("w") as tmpfile: for line in infile: if sre.match(line): tmpfile.writelines("WEEWX_ROOT=%s\n" % self.install_dir) else:
def copyfile(src, dest):
    """Copy *src*'s contents to *dest*, then replicate its permission bits.

    Contents first (shutil.copyfile creates/truncates the target), then
    mode bits (shutil.copymode transfers only the chmod bits, not owner
    or timestamps).
    """
    for transfer in (shutil.copyfile, shutil.copymode):
        transfer(src, dest)
def handle(self, app_or_project, name, target=None, **options):
    """Render the app/project template tree into *target* (or ./<name>).

    Older Django variant: errno-based directory checks, files read/written
    as bytes and only decoded for the files that are actually rendered.
    """
    self.app_or_project = app_or_project
    self.paths_to_remove = []
    self.verbosity = options['verbosity']

    self.validate_name(name, app_or_project)

    # if some directory is given, make sure it's nicely expanded
    if target is None:
        top_dir = path.join(os.getcwd(), name)
        try:
            os.makedirs(top_dir)
        except OSError as e:
            if e.errno == errno.EEXIST:
                message = "'%s' already exists" % top_dir
            else:
                message = e
            raise CommandError(message)
    else:
        top_dir = os.path.abspath(path.expanduser(target))
        if not os.path.exists(top_dir):
            raise CommandError("Destination directory '%s' does not "
                               "exist, please create it first." % top_dir)

    extensions = tuple(handle_extensions(options['extensions']))
    extra_files = []
    for file in options['files']:
        extra_files.extend(map(lambda x: x.strip(), file.split(',')))
    if self.verbosity >= 2:
        self.stdout.write("Rendering %s template files with "
                          "extensions: %s\n"
                          % (app_or_project, ', '.join(extensions)))
        self.stdout.write("Rendering %s template files with "
                          "filenames: %s\n"
                          % (app_or_project, ', '.join(extra_files)))

    base_name = '%s_name' % app_or_project
    base_subdir = '%s_template' % app_or_project
    base_directory = '%s_directory' % app_or_project

    # autoescape=False: output is source code, not HTML.
    context = Context(dict(
        options, **{
            base_name: name,
            base_directory: top_dir,
            'docs_version': get_docs_version(),
            'django_version': django.__version__,
        }), autoescape=False)

    # Setup a stub settings environment for template rendering
    from django.conf import settings
    if not settings.configured:
        settings.configure()

    template_dir = self.handle_template(options['template'], base_subdir)
    prefix_length = len(template_dir) + 1

    for root, dirs, files in os.walk(template_dir):
        path_rest = root[prefix_length:]
        relative_dir = path_rest.replace(base_name, name)
        if relative_dir:
            target_dir = path.join(top_dir, relative_dir)
            if not path.exists(target_dir):
                os.mkdir(target_dir)

        # Prune hidden and cache directories in-place so os.walk skips them.
        for dirname in dirs[:]:
            if dirname.startswith('.') or dirname == '__pycache__':
                dirs.remove(dirname)

        for filename in files:
            if filename.endswith(('.pyo', '.pyc', '.py.class')):
                # Ignore some files as they cause various breakages.
                continue
            old_path = path.join(root, filename)
            new_path = path.join(top_dir, relative_dir,
                                 filename.replace(base_name, name))
            if path.exists(new_path):
                raise CommandError("%s already exists, overlaying a "
                                   "project or app into an existing "
                                   "directory won't replace conflicting "
                                   "files" % new_path)

            # Only render the Python files, as we don't want to
            # accidentally render Django templates files
            with open(old_path, 'rb') as template_file:
                content = template_file.read()
            if filename.endswith(extensions) or filename in extra_files:
                content = content.decode('utf-8')
                template = Engine().from_string(content)
                content = template.render(context)
                content = content.encode('utf-8')
            with open(new_path, 'wb') as new_file:
                new_file.write(content)

            if self.verbosity >= 2:
                self.stdout.write("Creating %s\n" % new_path)
            try:
                shutil.copymode(old_path, new_path)
                self.make_writeable(new_path)
            except OSError:
                self.stderr.write(
                    "Notice: Couldn't set permission bits on %s. You're "
                    "probably using an uncommon filesystem setup. No "
                    "problem." % new_path, self.style.NOTICE)

    if self.paths_to_remove:
        if self.verbosity >= 2:
            self.stdout.write("Cleaning up temporary files.\n")
        for path_to_remove in self.paths_to_remove:
            if path.isfile(path_to_remove):
                os.remove(path_to_remove)
            else:
                shutil.rmtree(path_to_remove,
                              onerror=rmtree_errorhandler)
def save_config(force=False):
    """ Update Setup file with current option values

    Serializes the in-memory option ``database`` into the ``CFG`` ini
    structure and writes it to disk, keeping a .bak copy of the previous
    file and restoring it if the write fails.  Returns True on success
    (or when nothing needed saving), False otherwise.
    """
    global CFG, database, modified

    # Nothing to do unless something changed or the caller forces a write.
    if not (modified or force):
        return True

    for section in database:
        if section in ('servers', 'categories', 'rss'):
            # Sectioned (nested) options: one sub-dict per subsection.
            try:
                CFG[section]
            except KeyError:
                CFG[section] = {}
            for subsec in database[section]:
                if section == 'servers':
                    # Brackets are not legal in ini subsection names.
                    subsec_mod = subsec.replace('[', '{').replace(']', '}')
                else:
                    subsec_mod = subsec
                try:
                    CFG[section][subsec_mod]
                except KeyError:
                    CFG[section][subsec_mod] = {}
                items = database[section][subsec].get_dict()
                CFG[section][subsec_mod] = items
        else:
            # Flat options: each option knows its own (section, keyword).
            for option in database[section]:
                sec, kw = database[section][option].ident()
                sec = sec[-1]
                try:
                    CFG[sec]
                except KeyError:
                    CFG[sec] = {}
                value = database[section][option]()
                # bool is a subclass of int, check first
                if isinstance(value, bool):
                    # convert bool to int when saving so we store 0 or 1
                    CFG[sec][kw] = str(int(value))
                elif isinstance(value, int):
                    CFG[sec][kw] = str(value)
                else:
                    CFG[sec][kw] = value

    res = False
    filename = CFG.filename
    bakname = filename + '.bak'

    # Check if file is writable
    if not sabnzbd.misc.is_writable(filename):
        logging.error(T('Cannot write to INI file %s'), filename)
        return res

    # copy current file to backup
    try:
        shutil.copyfile(filename, bakname)
        shutil.copymode(filename, bakname)
    except:
        # Something wrong with the backup,
        logging.error(T('Cannot create backup file for %s'), bakname)
        logging.info("Traceback: ", exc_info=True)
        return res

    # Write new config file
    try:
        logging.info('Writing settings to INI file %s', filename)
        CFG.write()
        # Restore the original permission bits onto the rewritten file.
        shutil.copymode(bakname, filename)
        modified = False
        res = True
    except:
        logging.error(T('Cannot write to INI file %s'), filename)
        logging.info("Traceback: ", exc_info=True)
        # Best-effort removal of the (possibly partial) new file...
        try:
            sabnzbd.misc.remove_file(filename)
        except:
            pass
        # Restore INI file from backup
        sabnzbd.misc.renamer(bakname, filename)

    return res
def Copy(src, dest):
    """Duplicate *src* at *dest*: copy the contents, then mirror the
    permission bits (copymode transfers chmod bits only)."""
    for step in (copyfile, copymode):
        step(src, dest)
def handle(self, app_or_project, name, target=None, **options):
    """Render a Moose app/project template tree into *target* (or ./<name>).

    Django-derived flow, but files are rendered with ``string.Template``
    substitution instead of a full template engine.
    """
    self.app_or_project = app_or_project
    self.paths_to_remove = []
    self.verbosity = options['verbosity']

    self.validate_name(name, app_or_project)

    # if some directory is given, make sure it's nicely expanded
    if target is None:
        top_dir = path.join(os.getcwd(), name)
        try:
            os.makedirs(top_dir)
        except OSError as e:
            if e.errno == errno.EEXIST:
                message = "'%s' already exists" % top_dir
            else:
                message = e
            raise CommandError(message)
    else:
        top_dir = os.path.abspath(path.expanduser(target))
        if not os.path.exists(top_dir):
            raise CommandError("Destination directory '%s' does not "
                               "exist, please create it first." % top_dir)

    extensions = tuple(handle_extensions(options['extensions']))
    extra_files = []
    for file in options['files']:
        extra_files.extend(map(lambda x: x.strip(), file.split(',')))
    if self.verbosity >= 2:
        self.stdout.write("Rendering %s template files with "
                          "extensions: %s\n"
                          % (app_or_project, ', '.join(extensions)))
        self.stdout.write("Rendering %s template files with "
                          "filenames: %s\n"
                          % (app_or_project, ', '.join(extra_files)))

    base_name = '%s_name' % app_or_project
    base_subdir = '%s_template' % app_or_project
    base_directory = '%s_directory' % app_or_project
    camel_case_name = 'camel_case_%s_name' % app_or_project
    camel_case_value = ''.join(x for x in name.title() if x != '_')

    # Previous Django-style Context construction, kept for reference:
    # context = Context(dict(options, **{
    #     base_name: name,
    #     base_directory: top_dir,
    #     camel_case_name: camel_case_value,
    #     # 'docs_version': get_docs_version(),
    #     'moose_version': moose.__version__,
    #     'unicode_literals': '' if six.PY3 else '# -*- coding: utf-8 -*-\n'
    #     'from __future__ import unicode_literals\n\n',
    # }), autoescape=False)
    context = dict(
        options, **{
            base_name: name,
            base_directory: top_dir,
            camel_case_name: camel_case_value,
            # 'docs_version': get_docs_version(),
            'moose_version': moose.__version__,
            # On Python 2 prepend the coding cookie + unicode_literals import.
            'unicode_literals': '' if six.PY3 else '# -*- coding: utf-8 -*-\n'
            'from __future__ import unicode_literals\n\n',
        })

    # Setup a stub settings environment for template rendering
    if not settings.configured:
        settings.configure()
    if app_or_project == 'app':
        moose.setup()

    template_dir = self.handle_template(options['template'], base_subdir)
    prefix_length = len(template_dir) + 1

    for root, dirs, files in os.walk(template_dir):
        path_rest = root[prefix_length:]
        relative_dir = path_rest.replace(base_name, name)
        if relative_dir:
            target_dir = path.join(top_dir, relative_dir)
            if not path.exists(target_dir):
                os.mkdir(target_dir)

        # Prune hidden and cache directories in-place so os.walk skips them.
        for dirname in dirs[:]:
            if dirname.startswith('.') or dirname == '__pycache__':
                dirs.remove(dirname)

        for filename in files:
            if filename.endswith(('.pyo', '.pyc', '.py.class')):
                # Ignore some files as they cause various breakages.
                continue
            old_path = path.join(root, filename)
            new_path = path.join(top_dir, relative_dir,
                                 filename.replace(base_name, name))
            for old_suffix, new_suffix in self.rewrite_template_suffixes:
                if new_path.endswith(old_suffix):
                    new_path = new_path[:-len(old_suffix)] + new_suffix
                    break  # Only rewrite once

            if path.exists(new_path):
                raise CommandError("%s already exists, overlaying a "
                                   "project or app into an existing "
                                   "directory won't replace conflicting "
                                   "files" % new_path)

            # Only render the Python files, as we don't want to
            # accidentally render Moose templates files
            if new_path.endswith(extensions) or filename in extra_files:
                with io.open(old_path, 'r', encoding='utf-8') as template_file:
                    content = template_file.read()
                # TODO: use Jinja2 instead of implementing template engine self
                # template = Engine().from_string(content)
                # content = template.render(context)
                content = string.Template(content).substitute(**context)
                with io.open(new_path, 'w', encoding='utf-8') as new_file:
                    new_file.write(content)
            else:
                shutil.copyfile(old_path, new_path)

            if self.verbosity >= 2:
                self.stdout.write("Creating %s\n" % new_path)
            try:
                shutil.copymode(old_path, new_path)
                self.make_writeable(new_path)
            except OSError:
                self.stderr.write(
                    "Notice: Couldn't set permission bits on %s. You're "
                    "probably using an uncommon filesystem setup. No "
                    "problem." % new_path, self.style.NOTICE)

    if self.paths_to_remove:
        if self.verbosity >= 2:
            self.stdout.write("Cleaning up temporary files.\n")
        for path_to_remove in self.paths_to_remove:
            if path.isfile(path_to_remove):
                os.remove(path_to_remove)
            else:
                shutil.rmtree(path_to_remove)
def PrepareForTestMac(self):
    """Runs dsymutil if needed.

    Valgrind for Mac OS X requires that debugging information be in a .dSYM
    bundle generated by dsymutil.  It is not currently able to chase DWARF
    data into .o files like gdb does, so executables without .dSYM bundles
    or with the Chromium-specific "fake_dsym" bundles generated by
    build/mac/strip_save_dsym won't give source file and line number
    information in valgrind.

    This function will run dsymutil if the .dSYM bundle is missing or if
    it looks like a fake_dsym.  A non-fake dsym that already exists is
    assumed to be up-to-date.
    """
    test_command = self._args[0]
    dsym_bundle = self._args[0] + '.dSYM'
    dsym_file = os.path.join(dsym_bundle, 'Contents', 'Resources', 'DWARF',
                             os.path.basename(test_command))
    dsym_info_plist = os.path.join(dsym_bundle, 'Contents', 'Info.plist')

    needs_dsymutil = True
    saved_test_command = None

    if os.path.exists(dsym_file) and os.path.exists(dsym_info_plist):
        # Look for the special fake_dsym tag in dsym_info_plist.
        dsym_info_plist_contents = open(dsym_info_plist).read()

        if not re.search('^\s*<key>fake_dsym</key>$', dsym_info_plist_contents,
                         re.MULTILINE):
            # fake_dsym is not set, this is a real .dSYM bundle produced by
            # dsymutil. dsymutil does not need to be run again.
            needs_dsymutil = False
        else:
            # fake_dsym is set. dsym_file is a copy of the original test_command
            # before it was stripped. Copy it back to test_command so that
            # dsymutil has unstripped input to work with. Move the stripped
            # test_command out of the way, it will be restored when this is
            # done.
            saved_test_command = test_command + '.stripped'
            os.rename(test_command, saved_test_command)
            shutil.copyfile(dsym_file, test_command)
            # Keep the executable bit of the stripped binary on the copy.
            shutil.copymode(saved_test_command, test_command)

    if needs_dsymutil:
        if self._options.generate_dsym:
            # Remove the .dSYM bundle if it exists.
            shutil.rmtree(dsym_bundle, True)

            dsymutil_command = ['dsymutil', test_command]

            # dsymutil is crazy slow. Ideally we'd have a timeout here,
            # but common.RunSubprocess' timeout is only checked
            # after each line of output; dsymutil is silent
            # until the end, and is then killed, which is silly.
            common.RunSubprocess(dsymutil_command)

            if saved_test_command:
                os.rename(saved_test_command, test_command)
        else:
            logging.info("No real .dSYM for test_command. Line numbers will "
                         "not be shown. Either tell xcode to generate .dSYM "
                         "file, or use --generate_dsym option to this tool.")
for name in files: fil = os.path.join(root, name) #print('File: %s' %fil) if mimetypes.guess_type(fil)[0] == 'text/plain': open_file = open(fil, 'r') if search_string in open_file.read(): print('Found string in file: %s' % open_file) open_file.close() fh, abs_path = mkstemp() with fdopen(fh, 'w') as new_file: with open(fil) as old_file: for line in old_file: new_file.write( line.replace(search_string, replace_string)) # Copy over permissions from original copymode(fil, abs_path) # Remove Original (backup) move(fil, fil + ".bak") move(abs_path, fil) else: open_file.close() # End Text File # End of Inner Loop # End of outer loop # Rename the world folder os.rename(foundry_data + os.sep + user_input, foundry_data + os.sep + replace_string)
# shutil is a standard-library module providing high-level file and folder
# operations: copy, move, remove, archive, ...
import shutil

print(dir(shutil))
# shutil is more convenient than the low-level os module for such operations.

########## copy files or directories ################################
src = "sorcefile"
dest = "destfile"

shutil.copyfile(src, dest)  # copy file contents only (permissions NOT preserved)
shutil.copy(src, dest)      # copy contents AND permission bits
shutil.copy2(src, dest)     # like copy(), but also preserves metadata (e.g. timestamps)

shutil.copymode(src, dest)  # copy only the permission bits from src to dest (not content)
shutil.copystat(src, dest)  # copy permission bits, timestamps and flags (not content)

# BUG FIX: the original assigned tuples ("filename1", "r") instead of opening
# the files, so copyfileobj() would fail with AttributeError; open real file
# objects, and close them when done.
fo1 = open("filename1", "r")   # open first file in read mode
fo2 = open("filename2", "w")   # open second file in write mode
shutil.copyfileobj(fo1, fo2)   # copy the content of fo1's stream into fo2
fo1.close()
fo2.close()

dirsrc = "directory to copy"
dirdest = "destination directory"
shutil.copytree(dirsrc, dirdest)  # recursively copy dirsrc's tree into dirdest
                                  # (contents copied with copy2 semantics)
shutil.rmtree(dirdest)            # recursively remove the whole directory tree
def Main(argv):
    """Assemble the Dart SDK directory layout under options.sdk_output_dir.

    Builds the tree in a sibling '<SDK>.tmp' directory and only moves it
    into place at the very end, so a failed run never leaves a partial SDK.
    """
    # Pull in all of the gypi files which will be munged into the sdk.
    HOME = dirname(dirname(realpath(__file__)))

    (options, args) = GetOptions()

    SDK = options.sdk_output_dir
    SDK_tmp = '%s.tmp' % SDK

    SNAPSHOT = options.snapshot_location

    # TODO(dgrove) - deal with architectures that are not ia32.

    if exists(SDK):
        rmtree(SDK)

    if exists(SDK_tmp):
        rmtree(SDK_tmp)

    os.makedirs(SDK_tmp)

    # Create and populate sdk/bin.
    BIN = join(SDK_tmp, 'bin')
    os.makedirs(BIN)

    os.makedirs(join(BIN, 'snapshots'))

    # Copy the Dart VM binary and the Windows Dart VM link library
    # into sdk/bin.
    #
    # TODO(dgrove) - deal with architectures that are not ia32.
    build_dir = os.path.dirname(SDK)
    dart_file_extension = ''
    analyzer_file_extension = ''
    if HOST_OS == 'win32':
        dart_file_extension = '.exe'
        analyzer_file_extension = '.bat'
        # The import library only exists (and is only needed) on Windows.
        dart_import_lib_src = join(HOME, build_dir, 'dart.lib')
        dart_import_lib_dest = join(BIN, 'dart.lib')
        copyfile(dart_import_lib_src, dart_import_lib_dest)
    dart_src_binary = join(HOME, build_dir, 'dart' + dart_file_extension)
    dart_dest_binary = join(BIN, 'dart' + dart_file_extension)
    copyfile(dart_src_binary, dart_dest_binary)
    copymode(dart_src_binary, dart_dest_binary)
    # Strip the binaries on platforms where that is supported.
    if HOST_OS == 'linux':
        subprocess.call(['strip', dart_dest_binary])
    elif HOST_OS == 'macos':
        subprocess.call(['strip', '-x', dart_dest_binary])

    # Copy analyzer into sdk/bin
    ANALYZER_HOME = join(HOME, build_dir, 'analyzer')
    dart_analyzer_src_binary = join(ANALYZER_HOME, 'bin',
                                    'dart_analyzer' + analyzer_file_extension)
    dart_analyzer_dest_binary = join(
        BIN, 'dart_analyzer' + analyzer_file_extension)
    copyfile(dart_analyzer_src_binary, dart_analyzer_dest_binary)
    copymode(dart_analyzer_src_binary, dart_analyzer_dest_binary)

    #
    # Create and populate sdk/include.
    #
    INCLUDE = join(SDK_tmp, 'include')
    os.makedirs(INCLUDE)
    copyfile(join(HOME, 'runtime', 'include', 'dart_api.h'),
             join(INCLUDE, 'dart_api.h'))
    copyfile(join(HOME, 'runtime', 'include', 'dart_debugger_api.h'),
             join(INCLUDE, 'dart_debugger_api.h'))

    #
    # Create and populate sdk/lib.
    #
    LIB = join(SDK_tmp, 'lib')
    os.makedirs(LIB)

    #
    # Create and populate lib/{core, crypto, isolate, json, uri, utf, ...}.
    #
    os.makedirs(join(LIB, 'html'))

    for library in [
            join('_internal', 'compiler'),
            join('_internal', 'dartdoc'),
            join('_internal', 'pub', 'resource'),
            'async', 'collection', '_collection_dev', 'core', 'crypto',
            'io', 'isolate',
            join('chrome', 'dart2js'), join('chrome', 'dartium'),
            join('html', 'dart2js'), join('html', 'dartium'),
            join('html', 'html_common'),
            join('indexed_db', 'dart2js'), join('indexed_db', 'dartium'),
            'json', 'math', 'mdv_observe_impl', 'mirrors', 'typed_data',
            join('svg', 'dart2js'), join('svg', 'dartium'),
            'uri', 'utf',
            join('web_audio', 'dart2js'), join('web_audio', 'dartium'),
            join('web_gl', 'dart2js'), join('web_gl', 'dartium'),
            join('web_sql', 'dart2js'), join('web_sql', 'dartium')]:
        copytree(join(HOME, 'sdk', 'lib', library),
                 join(LIB, library),
                 ignore=ignore_patterns('*.svn', 'doc', '*.py', '*.gypi',
                                        '*.sh', '.gitignore'))

    # Copy lib/_internal/libraries.dart.
    copyfile(join(HOME, 'sdk', 'lib', '_internal', 'libraries.dart'),
             join(LIB, '_internal', 'libraries.dart'))

    # Create and copy packages.
    PACKAGES = join(SDK_tmp, 'packages')
    os.makedirs(PACKAGES)

    #
    # Create and populate packages/{args, intl, logging, meta, unittest, ...}
    #
    for library in ['args', 'http', 'intl', 'logging', 'meta', 'oauth2',
                    'pathos', 'serialization', 'unittest', 'yaml',
                    'analyzer_experimental']:
        copytree(join(HOME, 'pkg', library, 'lib'),
                 join(PACKAGES, library),
                 ignore=ignore_patterns('*.svn'))

    # Create and copy tools.
    UTIL = join(SDK_tmp, 'util')
    os.makedirs(UTIL)

    # Create and copy Analyzer library into 'util'
    ANALYZER_DEST = join(UTIL, 'analyzer')
    os.makedirs(ANALYZER_DEST)

    analyzer_src_jar = join(ANALYZER_HOME, 'util', 'analyzer',
                            'dart_analyzer.jar')
    analyzer_dest_jar = join(ANALYZER_DEST, 'dart_analyzer.jar')
    copyfile(analyzer_src_jar, analyzer_dest_jar)

    jarsToCopy = [
        join("args4j", "2.0.12", "args4j-2.0.12.jar"),
        join("guava", "r13", "guava-13.0.1.jar"),
        join("json", "r2_20080312", "json.jar")
    ]
    for jarToCopy in jarsToCopy:
        dest_dir = join(ANALYZER_DEST, os.path.dirname(jarToCopy))
        os.makedirs(dest_dir)
        dest_file = join(ANALYZER_DEST, jarToCopy)
        src_file = join(ANALYZER_HOME, 'util', 'analyzer', jarToCopy)
        copyfile(src_file, dest_file)

    # Create and copy dartanalyzer into 'util'
    DARTANALYZER_SRC = join(HOME, build_dir, 'dartanalyzer')
    DARTANALYZER_DEST = join(UTIL, 'dartanalyzer')
    os.makedirs(DARTANALYZER_DEST)

    jarFiles = glob.glob(join(DARTANALYZER_SRC, '*.jar'))

    for jarFile in jarFiles:
        copyfile(jarFile, join(DARTANALYZER_DEST, os.path.basename(jarFile)))

    # Copy in 7zip for Windows.
    if HOST_OS == 'win32':
        copytree(join(HOME, 'third_party', '7zip'),
                 join(SDK_tmp, 'lib', '_internal', 'pub', 'resource', '7zip'),
                 ignore=ignore_patterns('.svn'))

    # Copy dart2js/dartdoc/pub.
    CopyDartScripts(HOME, SDK_tmp)

    CopySnapshots(SNAPSHOT, SDK_tmp)

    # Write the 'version' file
    version = utils.GetVersion()
    versionFile = open(os.path.join(SDK_tmp, 'version'), 'w')
    versionFile.write(version + '\n')
    versionFile.close()

    # Write the 'revision' file
    revision = utils.GetSVNRevision()

    if revision is not None:
        with open(os.path.join(SDK_tmp, 'revision'), 'w') as f:
            f.write(revision + '\n')
            f.close()  # redundant: the with-statement already closes the file

    Copy(join(HOME, 'README.dart-sdk'), join(SDK_tmp, 'README'))

    # Atomically publish the finished tree as the real SDK directory.
    move(SDK_tmp, SDK)
def build_command(args):
    """
    Builds a zip file from the source_dir or source_file.
    Installs dependencies with pip automatically.

    ``args.build_data`` is a JSON document carrying ``filename``,
    ``runtime``, ``source_path``, ``timestamp`` and optionally ``docker``.
    If the target zip already exists it is reused; otherwise the sources
    (plus pip-installed requirements, when present) are staged into a
    temporary directory and zipped.
    """

    def list_files(top_path):
        """
        Returns a sorted list of all files in a directory, as paths
        relative to ``top_path``.
        """
        results = []
        for root, dirs, files in os.walk(top_path):
            for file_name in files:
                file_path = os.path.join(root, file_name)
                relative_path = os.path.relpath(file_path, top_path)
                results.append(relative_path)
        results.sort()
        return results

    def create_zip_file(source_dir, target_file):
        """
        Creates a zip file from a directory.
        """
        target_file = os.path.abspath(target_file)
        target_dir = os.path.dirname(target_file)
        os.makedirs(target_dir, exist_ok=True)
        # make_archive appends '.zip' itself, so strip the extension first.
        target_base, _ = os.path.splitext(target_file)
        shutil.make_archive(
            target_base,
            format='zip',
            root_dir=source_dir,
        )

    if args.dump_env:
        dump_env('build_command')
    if args.dump_input:
        # Use a context manager so the handle is closed rather than leaked
        # (the original passed an anonymous open() result to print()).
        with open('build_command.args', 'a') as dump_file:
            print(args.filename, args.runtime, args.source_path,
                  file=dump_file)

    build_data = json.loads(args.build_data)
    filename = build_data['filename']
    runtime = build_data['runtime']
    source_path = build_data['source_path']
    timestamp = build_data['timestamp']
    docker = build_data.get('docker')

    absolute_filename = os.path.abspath(filename)
    if os.path.exists(absolute_filename):
        # The package for this content already exists; nothing to do.
        print('Reused: {}'.format(shlex.quote(filename)))
        return

    # Create a temporary directory for building the archive,
    # so no changes will be made to the source directory.
    with tempdir() as temp_dir:
        # Find all source files.
        if os.path.isdir(source_path):
            source_dir = source_path
            source_files = list_files(source_path)
        else:
            source_dir = os.path.dirname(source_path)
            source_files = [os.path.basename(source_path)]

        # Copy them into the temporary directory.
        with cd(source_dir):
            for file_name in source_files:
                target_path = os.path.join(temp_dir, file_name)
                target_dir = os.path.dirname(target_path)
                if not os.path.exists(target_dir):
                    print('mkdir -p {}'.format(shlex.quote(target_dir)))
                    os.makedirs(target_dir)
                print('cp {} {}'.format(shlex.quote(file_name),
                                        shlex.quote(target_path)))
                shutil.copyfile(file_name, target_path)
                # copystat also copies the permission bits, so a separate
                # shutil.copymode() call would be redundant.
                shutil.copystat(file_name, target_path)

        # Install dependencies into the temporary directory.
        if runtime.startswith('python'):
            requirements = os.path.join(temp_dir, 'requirements.txt')
            if os.path.exists(requirements):
                with cd(temp_dir):
                    if runtime.startswith('python3'):
                        pip_command = ['pip3']
                    else:
                        pip_command = ['pip2']
                    pip_command.extend([
                        'install',
                        '--prefix=',
                        '--target=.',
                        '--requirement=requirements.txt',
                    ])
                    if docker:
                        # Run pip inside the Lambda-like container, then
                        # chown the results back to the invoking user so the
                        # host can read/delete them.
                        chown_mask = '{}:{}'.format(os.getuid(), os.getgid())
                        docker_command = [
                            shlex_join(pip_command),
                            '&&',
                            shlex_join(['chown', '-R', chown_mask,
                                        '/var/task']),
                        ]
                        docker_command = [' '.join(docker_command)]
                        check_call(docker_run_command('.', docker_command,
                                                      runtime, shell=True))
                    else:
                        print(pip_command, flush=True)
                        check_call(pip_command)

        # Zip up the temporary directory and write it to the target filename.
        # This will be used by the Lambda function as the source code package.
        create_zip_file(temp_dir, absolute_filename)
        # Pin the archive mtime so the output is reproducible for a given
        # build_data timestamp.
        os.utime(absolute_filename, ns=(timestamp, timestamp))
        print('Created: {}'.format(shlex.quote(filename)))
comment_pattern = re.compile('^#') lines = in_obj.readlines() for line in lines: if search_pattern.search(line): out_obj.write('PASSREQ=YES\n') else: out_obj.write(line) out_obj.close() in_obj.close() action_record = tcs_utils.generate_diff_record( self.__target_file + '.new', self.__target_file) try: shutil.copymode(self.__target_file, self.__target_file + '.new') shutil.copy2(self.__target_file + '.new', self.__target_file) os.unlink(self.__target_file + '.new') except OSError: msg = "Unable to replace %s with new version." % self.__target_file self.logger.info(self.module_name, 'Apply Error: ' + msg) raise tcs_utils.ActionError('%s %s' % (self.module_name, msg)) msg = 'PASSREQ=YES set in %s' % self.__target_file self.logger.notice(self.module_name, 'Apply Performed: ' + msg) return 1, action_record ########################################################################## def undo(self, action_record=None): """ Remove sulogin addition."""
def execute(argv):
    """Create a new project directory from the bundled project template.

    ``argv`` is expected to look like ``[prog, command, project_name]``.
    Template files ending in ``.py-tpl`` are rendered with Django's
    template engine; all other files are copied verbatim.

    :param argv: command-line argument vector; ``argv[1]`` is the command
        name and ``argv[2]`` the project name.
    """
    command, project_name = argv[1], argv[2]
    print(f'command: {command}')
    print(f'project_name: {project_name}')

    base_name = 'project_name'
    top_dir = os.path.join(os.getcwd(), project_name)
    print(f"top_dir: {top_dir}")
    os.makedirs(top_dir)

    # Kebab-case on the command line becomes snake_case in the template
    # context (e.g. 'my-app' -> 'my_app'); behaviorally identical to the
    # previous '_'.join(project_name.split('-')).
    camel_case_value = project_name.replace('-', '_')
    context = Context(
        {
            base_name: project_name,
            'project_directory': top_dir,
            'camel_case_project_name': camel_case_value,
        },
        autoescape=False)

    template_dir = os.path.join(fastproject.__path__[0], 'project_template')
    print(template_dir)
    prefix_length = len(template_dir) + 1

    for root, dirs, files in os.walk(template_dir):
        print(root, dirs, files)
        path_rest = root[prefix_length:]
        relative_dir = path_rest.replace(base_name, camel_case_value)
        if relative_dir:
            target_dir = os.path.join(top_dir, relative_dir)
            os.makedirs(target_dir, exist_ok=True)

        # Prune hidden and cache directories in place so os.walk skips them.
        for dirname in dirs[:]:
            if dirname.startswith('.') or dirname == '__pycache__':
                dirs.remove(dirname)

        for filename in files:
            if filename.endswith(('.pyo', '.pyc', '.py.class')):
                # Ignore compiled artifacts.
                continue
            old_path = os.path.join(root, filename)
            new_path = os.path.join(
                top_dir, relative_dir,
                filename.replace(base_name, project_name))
            for old_suffix, new_suffix in [('.py-tpl', '.py')]:
                if new_path.endswith(old_suffix):
                    new_path = new_path[:-len(old_suffix)] + new_suffix
                    break  # Only rewrite once

            # Only render the Python files, as we don't want to
            # accidentally render Django templates files.
            # (The previous 'or filename in []' clause was dead code and
            # has been removed.)
            if new_path.endswith('.py'):
                with open(old_path, encoding='utf-8') as template_file:
                    content = template_file.read()
                template = Engine().from_string(content)
                content = template.render(context)
                with open(new_path, 'w', encoding='utf-8') as new_file:
                    new_file.write(content)
            else:
                shutil.copyfile(old_path, new_path)

            try:
                shutil.copymode(old_path, new_path)
                make_writeable(new_path)
            except OSError:
                # Deliberate best-effort: some filesystems don't support
                # permission bits; the generated file is still usable.
                pass
print 'Source :' show_file_info('6.5-shutil.copy.py') shutil.copy2('6.5-shutil.copy.py', 'data') print 'Destination :' show_file_info('data/6.5-shutil.copy.py') print ## 6.5.2 Copying File Metadata # importing commands... wooooooooooo # to copy file permissions from one file to another, use copymode() with open('data/6.5-shutil.copy.py', 'wt') as f: f.write('These are the new contents of this file.') os.chmod('data/6.5-shutil.copy.py', 0444) print 'BEFORE:' print commands.getstatus('data/6.5-shutil.copy.py') shutil.copymode('6.5-shutil.copy.py', 'data/6.5-shutil.copy.py') print 'AFTER :' print commands.getstatus('data/6.5-shutil.copy.py') print # copying other metadata can be done with copystat() with open('data/6.5-shutil.copy.py', 'wt') as f: f.write('These are the new contents of this file.') os.chmod('data/6.5-shutil.copy.py', 0444) print 'BEFORE:' show_file_info('data/6.5-shutil.copy.py') shutil.copystat('6.5-shutil.copy.py', 'data/6.5-shutil.copy.py') print 'AFTER :' show_file_info('data/6.5-shutil.copy.py') print
def generate_file(project_dir, infile, context, env, skip_if_file_exists=False):
    """Render filename of infile as name of outfile, handle infile correctly.

    Binary input files are copied over untouched; text files have their
    contents rendered through Jinja2 and are written out with the newline
    convention detected in the original file (or the ``_new_lines``
    override from the context, when configured). Permission bits are
    copied from the input to the output in both cases.

    Precondition: the root template dir must be the current working
    directory when this is called (see ``utils.work_in()``).

    :param project_dir: Absolute path to the resulting generated project.
    :param infile: Input file to generate the file from. Relative to the
        root template dir.
    :param context: Dict for populating the cookiecutter's variables.
    :param env: Jinja2 template execution environment.
    """
    logger.debug('Processing file %s', infile)

    # Render the path to the output file (not including the root project dir).
    rendered_name = env.from_string(infile).render(**context)
    outfile = os.path.join(project_dir, rendered_name)

    # An empty rendered file name leaves only the directory part behind.
    if os.path.isdir(outfile):
        logger.debug('The resulting file name is empty: %s', outfile)
        return

    if skip_if_file_exists and os.path.exists(outfile):
        logger.debug('The resulting file already exists: %s', outfile)
        return

    logger.debug('Created file at %s', outfile)

    logger.debug("Check %s to see if it's a binary", infile)
    if is_binary(infile):
        # Binary content is copied verbatim — never rendered.
        logger.debug('Copying binary %s to %s without rendering', infile, outfile)
        shutil.copyfile(infile, outfile)
        shutil.copymode(infile, outfile)
        return

    # Jinja's get_template wants forward slashes, even on Windows.
    template_name = infile.replace(os.path.sep, '/')

    try:
        template = env.get_template(template_name)
    except TemplateSyntaxError as exc:
        # Disable translation so the printed exception keeps the verbose
        # information about the syntax error location.
        exc.translated = False
        raise
    rendered_file = template.render(**context)

    # Detect the original file's newline so the output matches it.
    # newline='' stops Python from converting line endings on read.
    with open(infile, 'r', encoding='utf-8', newline='') as source:
        source.readline()  # One line is enough to populate 'newlines'.
        newline = source.newlines
        if context['cookiecutter'].get('_new_lines', False):
            # An explicit `_new_lines` context entry wins over detection.
            newline = context['cookiecutter']['_new_lines']
            logger.debug('Overwriting end line character with %s', newline)

    logger.debug('Writing contents to file %s', outfile)
    with open(outfile, 'w', encoding='utf-8', newline=newline) as out:
        out.write(rendered_file)

    # Apply file permissions to output file.
    shutil.copymode(infile, outfile)
def Main():
    """Assemble a Dart SDK directory layout from build outputs.

    Builds the SDK into ``<sdk_output_dir>.tmp`` and moves it to
    ``<sdk_output_dir>`` only at the very end, so a partially built SDK is
    never left at the final location.
    """
    # Pull in all of the gypi files which will be munged into the sdk.
    HOME = dirname(dirname(realpath(__file__)))

    (options, args) = GetOptions()

    SDK = options.sdk_output_dir
    SDK_tmp = '%s.tmp' % SDK

    SNAPSHOT = options.snapshot_location

    # TODO(dgrove) - deal with architectures that are not ia32.

    # Start from a clean slate: remove any previous SDK and staging dir.
    if exists(SDK):
        rmtree(SDK)

    if exists(SDK_tmp):
        rmtree(SDK_tmp)

    os.makedirs(SDK_tmp)

    # Create and populate sdk/bin.
    BIN = join(SDK_tmp, 'bin')
    os.makedirs(BIN)

    os.makedirs(join(BIN, 'snapshots'))

    # Copy the Dart VM binary and the Windows Dart VM link library
    # into sdk/bin.
    #
    # TODO(dgrove) - deal with architectures that are not ia32.
    build_dir = os.path.dirname(SDK)
    dart_file_extension = ''
    if HOST_OS == 'win32':
        dart_file_extension = '.exe'
        dart_import_lib_src = join(HOME, build_dir, 'dart.lib')
        dart_import_lib_dest = join(BIN, 'dart.lib')
        copyfile(dart_import_lib_src, dart_import_lib_dest)
    dart_src_binary = join(HOME, build_dir, 'dart' + dart_file_extension)
    dart_dest_binary = join(BIN, 'dart' + dart_file_extension)
    copyfile(dart_src_binary, dart_dest_binary)
    # copyfile does not carry permissions; preserve the executable bit.
    copymode(dart_src_binary, dart_dest_binary)
    # Strip the binaries on platforms where that is supported.
    if HOST_OS == 'linux':
        subprocess.call(['strip', dart_dest_binary])
    elif HOST_OS == 'macos':
        subprocess.call(['strip', '-x', dart_dest_binary])

    #
    # Create and populate sdk/include.
    #
    INCLUDE = join(SDK_tmp, 'include')
    os.makedirs(INCLUDE)
    copyfile(join(HOME, 'runtime', 'include', 'dart_api.h'),
             join(INCLUDE, 'dart_api.h'))
    copyfile(join(HOME, 'runtime', 'include', 'dart_mirrors_api.h'),
             join(INCLUDE, 'dart_mirrors_api.h'))
    copyfile(join(HOME, 'runtime', 'include', 'dart_native_api.h'),
             join(INCLUDE, 'dart_native_api.h'))
    copyfile(join(HOME, 'runtime', 'include', 'dart_tools_api.h'),
             join(INCLUDE, 'dart_tools_api.h'))

    #
    # Create and populate sdk/lib.
    #
    LIB = join(SDK_tmp, 'lib')
    os.makedirs(LIB)

    #
    # Create and populate lib/{async, core, isolate, ...}.
    #
    os.makedirs(join(LIB, 'html'))

    for library in [
            join('_blink', 'dartium'),
            join('_chrome', 'dart2js'),
            join('_chrome', 'dartium'),
            join('_internal', 'js_runtime'),
            join('_internal', 'sdk_library_metadata'), 'async', 'collection',
            'convert', 'core', 'developer', 'internal', 'io', 'isolate',
            join('html', 'dart2js'),
            join('html', 'dartium'),
            join('html', 'html_common'),
            join('indexed_db', 'dart2js'),
            join('indexed_db', 'dartium'), 'js', 'js_util', 'math', 'mirrors',
            'profiler', 'typed_data',
            join('svg', 'dart2js'),
            join('svg', 'dartium'),
            join('web_audio', 'dart2js'),
            join('web_audio', 'dartium'),
            join('web_gl', 'dart2js'),
            join('web_gl', 'dartium'),
            join('web_sql', 'dart2js'),
            join('web_sql', 'dartium')
    ]:
        copytree(join(HOME, 'sdk', 'lib', library),
                 join(LIB, library),
                 ignore=ignore_patterns('*.svn', 'doc', '*.py', '*.gypi',
                                        '*.sh', '.gitignore'))

    # Copy the platform descriptors.
    for file_name in [
            "dart_client.platform", "dart_server.platform",
            "dart_shared.platform"
    ]:
        copyfile(join(HOME, 'sdk', 'lib', file_name), join(LIB, file_name))

    # Copy libraries.dart to lib/_internal/libraries.dart for backwards
    # compatibility.
    #
    # TODO(sigmund): stop copying libraries.dart. Old versions (<=0.25.1-alpha.4)
    # of the analyzer package do not support the new location of this file. We
    # should be able to remove the old file once we release a newer version of
    # analyzer and popular frameworks have migrated to use it.
    copyfile(
        join(HOME, 'sdk', 'lib', '_internal', 'sdk_library_metadata', 'lib',
             'libraries.dart'), join(LIB, '_internal', 'libraries.dart'))

    # Create and copy tools.
    UTIL = join(SDK_tmp, 'util')
    os.makedirs(UTIL)

    RESOURCE = join(SDK_tmp, 'lib', '_internal', 'pub', 'asset')
    os.makedirs(os.path.dirname(RESOURCE))
    copytree(join(HOME, 'third_party', 'pkg', 'pub', 'lib', 'src', 'asset'),
             join(RESOURCE),
             ignore=ignore_patterns('.svn'))

    # Copy in 7zip for Windows.
    if HOST_OS == 'win32':
        copytree(join(HOME, 'third_party', '7zip'),
                 join(RESOURCE, '7zip'),
                 ignore=ignore_patterns('.svn'))

    # Copy dart2js/pub.
    CopyDartScripts(HOME, SDK_tmp)

    CopySnapshots(SNAPSHOT, SDK_tmp)
    CopyDartdocResources(HOME, SDK_tmp)
    CopyAnalyzerSources(HOME, LIB)
    CopyAnalysisSummaries(SNAPSHOT, LIB)
    CopyDevCompilerSdk(HOME, LIB)

    # Write the 'version' file.
    # Use a context manager so the handle is closed even if the write
    # raises (the original used an unmanaged open()/close() pair).
    version = utils.GetVersion()
    with open(os.path.join(SDK_tmp, 'version'), 'w') as versionFile:
        versionFile.write(version + '\n')

    # Write the 'revision' file.
    revision = utils.GetGitRevision()

    if revision is not None:
        with open(os.path.join(SDK_tmp, 'revision'), 'w') as f:
            f.write('%s\n' % revision)
            # (The redundant f.close() inside the with-block was removed;
            # the context manager closes the file.)

    Copy(join(HOME, 'README.dart-sdk'), join(SDK_tmp, 'README'))
    Copy(join(HOME, 'LICENSE'), join(SDK_tmp, 'LICENSE'))

    Copy(join(HOME, 'sdk', 'api_readme.md'),
         join(SDK_tmp, 'lib', 'api_readme.md'))

    # Atomically publish the finished staging directory.
    move(SDK_tmp, SDK)
def main():
    """Stage an Emscripten SDK checkout into a temp dir, zip it, and
    optionally upload the archive to S3.

    Python 2 script (print statements, `except Exception, e`). The staging
    happens under a fresh tempfile.mkdtemp() directory so the source
    checkout is never modified.
    """
    usage_str = 'Usage: deploy_emsdk.py '
    parser = optparse.OptionParser(usage=usage_str)
    parser.add_option('--emsdk_dir',
                      dest='emsdk_dir',
                      default='',
                      help='Root path of Emscripten SDK.')
    parser.add_option(
        '--deploy_s3',
        dest='deploy_s3',
        action='store_true',
        default=False,
        help=
        'If true, deploys Emsdk packages to S3. If false, only creates local zip/tar.gz files'
    )
    parser.add_option(
        '--delete_temp_files',
        dest='delete_temp_files',
        action='store_true',
        default=False,
        help='If true, all generated local files are deleted after done.')
    parser.add_option(
        '--deploy_update_package',
        dest='deploy_update_package',
        action='store_true',
        default=False,
        help=
        'If true, deploys update zip. If false, deploys portable installer zip.'
    )

    (options, args) = parser.parse_args(sys.argv)

    # --emsdk_dir is mandatory; everything below operates relative to it.
    if not options.emsdk_dir:
        print >> sys.stderr, 'Please specify --emsdk_dir /path/to/emsdk'
        sys.exit(1)

    # Update to latest
    git_pull_emsdk(options.emsdk_dir)

    # Create temp directory to stage to.
    stage_root_dir = tempfile.mkdtemp('_emsdk')
    try:
        stage_dir = os.path.join(stage_root_dir, 'emsdk-portable')
        mkdir_p(stage_dir)
        print 'Staging to "' + stage_dir + '"'

        # Files/directories that make up the portable SDK payload.
        dirs = []
        files = [
            'binaryen-tags.txt', 'emcmdprompt.bat', 'emscripten-tags.txt',
            'emsdk', 'emsdk.bat', 'emsdk_env.bat', 'emsdk_env.sh',
            'emsdk_manifest.json', 'README.md'
        ]
        emsdk_packages = []
        # Windows installers bundle extra tooling (and, for the full
        # portable package, a private Python runtime).
        if WINDOWS:
            dirs += ['bin']
            if not options.deploy_update_package:
                emsdk_packages += ['python-2.7.13.1-64bit']
                dirs += ['python']
        if len(emsdk_packages) > 0:
            print 'Installing ' + str(emsdk_packages)
            # Run the emsdk tool itself to fetch the packages into the checkout.
            run([os.path.join(options.emsdk_dir, 'emsdk' + BAT), 'install'] +
                emsdk_packages,
                cwd=options.emsdk_dir)

        for d in dirs:
            print 'Deploying directory "' + d + '"...'
            shutil.copytree(os.path.join(options.emsdk_dir, d),
                            os.path.join(stage_dir, d))

        for f in files:
            print 'Deploying file "' + f + '"...'
            src = os.path.join(options.emsdk_dir, f)
            dst = os.path.join(stage_dir, f)
            shutil.copyfile(src, dst)
            if not WINDOWS:  # On Windows the file read only bits from DLLs in Program Files are copied, which is not desirable.
                shutil.copymode(src, dst)

        # Zip up
        zip_basename = 'emsdk-portable'
        if WINDOWS:
            zip_basename += '-64bit'
        zip_filename_without_directory = add_zip_suffix(zip_basename)
        zip_filename = os.path.join(stage_root_dir,
                                    zip_filename_without_directory)
        print 'Zipping up "' + zip_filename + '"'
        zip_up_directory(stage_dir, zip_filename)

        print zip_filename + ': ' + str(
            os.path.getsize(zip_filename)) + ' bytes.'

        # Upload to S3
        if options.deploy_s3:
            if options.deploy_update_package:
                # Deploy portable SDK updater package; the archive name is
                # fixed per host OS.
                if WINDOWS:
                    update_zip_name = 'emsdk_windows_update.zip'
                elif OSX:
                    update_zip_name = 'emsdk_osx_update.tar.gz'
                elif LINUX:
                    update_zip_name = 'emsdk_unix_update.tar.gz'
                else:
                    raise Exception('Unknown OS')
                s3_emscripten_deployment_url = 's3://mozilla-games/emscripten/packages/' + update_zip_name
                upload_to_s3(zip_filename, s3_emscripten_deployment_url)
            else:
                # Deploy portable SDK release package
                s3_emscripten_deployment_url = 's3://mozilla-games/emscripten/releases/' + zip_filename_without_directory
                upload_to_s3(zip_filename, s3_emscripten_deployment_url)
    except Exception, e:
        # Best-effort: report the failure but don't re-raise, so staged
        # temp files can still be inspected.
        print >> sys.stderr, str(e)
def generate_files(
        repo_dir,
        context=None,
        output_dir='.',
        overwrite_if_exists=False,
        skip_if_file_exists=False,
        skip_hooks=False,
):
    """Render the templates and saves them to files.

    :param repo_dir: Project template input directory.
    :param context: Dict for populating the template's variables.
    :param output_dir: Where to output the generated project dir into.
    :param overwrite_if_exists: Overwrite the contents of the output directory
        if it exists.
    :param skip_if_file_exists: Skip individual output files that already
        exist instead of overwriting them.
    :param skip_hooks: If True, neither the pre-gen nor post-gen hook is run.
    :return: Absolute path of the generated project directory.
    """
    template_dir = find_template(repo_dir)
    logger.debug('Generating project from %s...', template_dir)
    context = context or OrderedDict([])

    unrendered_dir = os.path.split(template_dir)[1]
    ensure_dir_is_templated(unrendered_dir)
    env = StrictEnvironment(context=context, keep_trailing_newline=True)
    try:
        project_dir, output_directory_created = render_and_create_dir(
            unrendered_dir, context, output_dir, env, overwrite_if_exists)
    except UndefinedError as err:
        msg = "Unable to create project directory '{}'".format(unrendered_dir)
        raise UndefinedVariableInTemplate(msg, err, context)

    # We want the Jinja path and the OS paths to match. Consequently, we'll:
    # + CD to the template folder
    # + Set Jinja's path to '.'
    #
    # In order to build our files to the correct folder(s), we'll use an
    # absolute path for the target folder (project_dir)
    project_dir = os.path.abspath(project_dir)
    logger.debug('Project directory is %s', project_dir)

    # if we created the output directory, then it's ok to remove it
    # if rendering fails
    delete_project_on_failure = output_directory_created

    if not skip_hooks:
        _run_hook_from_repo_dir(repo_dir, 'pre_gen_project', project_dir,
                                context, delete_project_on_failure)
    else:
        logging.info('Skipping pre-gen hooks')

    with work_in(template_dir):
        env.loader = FileSystemLoader('.')

        for root, dirs, files in os.walk('.'):
            # We must separate the two types of dirs into different lists.
            # The reason is that we don't want ``os.walk`` to go through the
            # unrendered directories, since they will just be copied.
            copy_dirs = []
            render_dirs = []

            for d in dirs:
                d_ = os.path.normpath(os.path.join(root, d))
                # We check the full path, because that's how it can be
                # specified in the ``_copy_without_render`` setting, but
                # we store just the dir name
                if is_copy_only_path(d_, context):
                    copy_dirs.append(d)
                else:
                    render_dirs.append(d)

            for copy_dir in copy_dirs:
                indir = os.path.normpath(os.path.join(root, copy_dir))
                outdir = os.path.normpath(os.path.join(project_dir, indir))
                # The *path* of a copy-only dir may still contain template
                # variables, so render the destination path itself.
                outdir = env.from_string(outdir).render(**context)
                logger.debug('Copying dir %s to %s without rendering', indir,
                             outdir)
                shutil.copytree(indir, outdir)

            # We mutate ``dirs``, because we only want to go through these dirs
            # recursively
            dirs[:] = render_dirs
            for d in dirs:
                unrendered_dir = os.path.join(project_dir, root, d)
                try:
                    render_and_create_dir(unrendered_dir, context, output_dir,
                                          env, overwrite_if_exists)
                except UndefinedError as err:
                    # Roll back the whole project only if this call created
                    # the output directory in the first place.
                    if delete_project_on_failure:
                        rmtree(project_dir)
                    _dir = os.path.relpath(unrendered_dir, output_dir)
                    msg = "Unable to create directory '{}'".format(_dir)
                    raise UndefinedVariableInTemplate(msg, err, context)

            for f in files:
                infile = os.path.normpath(os.path.join(root, f))
                if is_copy_only_path(infile, context):
                    # Copy-only files: render the *name* but not the contents.
                    outfile_tmpl = env.from_string(infile)
                    outfile_rendered = outfile_tmpl.render(**context)
                    outfile = os.path.join(project_dir, outfile_rendered)
                    logger.debug('Copying file %s to %s without rendering',
                                 infile, outfile)
                    shutil.copyfile(infile, outfile)
                    shutil.copymode(infile, outfile)
                    continue
                try:
                    generate_file(project_dir, infile, context, env,
                                  skip_if_file_exists)
                except UndefinedError as err:
                    if delete_project_on_failure:
                        rmtree(project_dir)
                    msg = "Unable to create file '{}'".format(infile)
                    raise UndefinedVariableInTemplate(msg, err, context)

    if not skip_hooks:
        _run_hook_from_repo_dir(
            repo_dir,
            'post_gen_project',
            project_dir,
            context,
            delete_project_on_failure,
        )
    else:
        logging.info('Skipping post-gen hooks')

    return project_dir
def csv2ods(csvname,
            odsname,
            encoding='',
            singleFileDirectory=None,
            knownChecksums={},
            verbose=False):
    """Convert a CSV file into an ODS spreadsheet via ooolib2.

    Cell values get special treatment by prefix:
      * ``$...``      -> currency cell
      * ``link:PATH`` -> hyperlink cell (the linked file may be copied into
        ``singleFileDirectory`` and recorded in a MANIFEST file)
      * ``pagebreak`` -> a page-break row style
      * ``title:...`` -> renames the current sheet
    Everything else is written as a string cell. Blank CSV lines produce an
    empty string cell so row numbering stays aligned.

    NOTE(review): ``knownChecksums={}`` is a mutable default argument shared
    across calls; callers appear to pass their own dict — consider a None
    sentinel. Python 2 code (``has_key``, ``unicode``, print statements).
    """
    filesSavedinManifest = {}

    if knownChecksums:
        checksumCache = {}

    if verbose:
        print 'converting from %s to %s' % (csvname, odsname)

    if singleFileDirectory:
        if not os.path.isdir(os.path.join(os.getcwd(), singleFileDirectory)):
            os.mkdir(singleFileDirectory)

    doc = ooolib2.Calc()
    # add a pagebreak style
    style = 'pagebreak'
    style_pagebreak = doc.styles.get_next_style('row')
    style_data = tuple(
        [style, ('style:row-height', doc.styles.property_row_height)])
    doc.styles.style_config[style_data] = style_pagebreak
    # add a currency style
    style = 'currency'
    style_currency = doc.styles.get_next_style('cell')
    style_data = tuple([style])
    doc.styles.style_config[style_data] = style_currency

    row = 1
    csvdir = os.path.dirname(csvname)
    if len(csvdir) == 0:
        csvdir = '.'
    csvfile = open(csvname, 'rb')
    reader = csv.reader(csvfile, delimiter=',', quotechar='"')
    for fields in reader:
        if len(fields) > 0:
            for col in range(len(fields)):
                val = fields[col]
                if encoding != '' and val[
                        0:
                        5] != "link:":  # Only utf8 encode if it's not a filename
                    val = unicode(val, 'utf8')
                if len(val) > 0 and val[0] == '$':
                    # '$' prefix -> currency cell (prefix stripped).
                    doc.set_cell_value(col + 1, row, 'currency', val[1:])
                else:
                    if (len(val) > 0 and val[0:5] == "link:"):
                        val = val[5:]
                        linkname = os.path.basename(
                            val)  # name is just the last component
                        newFile = None
                        if not singleFileDirectory:
                            newFile = val
                        if knownChecksums:
                            # Reuse a previously-copied file when its
                            # checksum is already known.
                            if not checksumCache.has_key(val):
                                checksum = ChecksumFile(val)
                                checksumCache[val] = checksum
                            else:
                                checksum = checksumCache[val]
                            if knownChecksums.has_key(checksum):
                                newFile = knownChecksums[checksum]
                                print "FOUND new file in known: " + newFile
                        if not newFile:
                            # Build a shortened destination name for the
                            # single-file-directory copy.
                            relativeFileWithPath = os.path.basename(val)
                            fileName, fileExtension = os.path.splitext(
                                relativeFileWithPath)
                            newFile = fileName[:
                                               15]  # 15 is an arbitrary choice.
                            newFile = newFile + fileExtension

                            # We'll now test to see if we made this file
                            # before, and if it matched the same file we
                            # now want.  If it doesn't, try to make a
                            # short file name for it.
                            if filesSavedinManifest.has_key(
                                    newFile
                            ) and filesSavedinManifest[newFile] != val:
                                # Disambiguate by prepending a single
                                # letter/digit until a free name is found.
                                testFile = None
                                for cc in list(string.letters) + list(
                                        string.digits):
                                    testFile = cc + newFile
                                    if not filesSavedinManifest.has_key(
                                            testFile):
                                        break
                                    testFile = None
                                if not testFile:
                                    raise Exception(
                                        "too many similar file names for linkage; giving up"
                                    )
                                else:
                                    newFile = testFile
                            if not os.path.exists(csvdir + '/' + val):
                                raise Exception(
                                    "File" + csvdir + '/' + val +
                                    " does not exist in single file directory mode; giving up"
                                )
                            src = os.path.join(csvdir, val)
                            dest = os.path.join(csvdir, singleFileDirectory,
                                                newFile)
                            # Copy contents plus stat info and permissions.
                            shutil.copyfile(src, dest)
                            shutil.copystat(src, dest)
                            shutil.copymode(src, dest)
                            newFile = os.path.join(singleFileDirectory,
                                                   newFile)
                            if knownChecksums:
                                checksumCache[checksum] = newFile
                                knownChecksums[checksum] = newFile

                        linkrel = '../' + newFile  # ../ means remove the name of the *.ods
                        doc.set_cell_value(col + 1, row, 'link',
                                           (linkrel, linkname))
                        linkpath = csvdir + '/' + val
                        if not val in filesSavedinManifest:
                            filesSavedinManifest[newFile] = val
                        if not os.path.exists(linkpath):
                            print "WARNING: link %s DOES NOT EXIST at %s" % (
                                val, linkpath)
                        if verbose:
                            if os.path.exists(linkpath):
                                print 'relative link %s EXISTS at %s' % (
                                    val, linkpath)
                    else:
                        if val == "pagebreak":
                            doc.sheets[doc.sheet_index].set_sheet_config(
                                ('row', row), style_pagebreak)
                        else:
                            if val[0:6] == "title:":
                                doc.sheets[doc.sheet_index].set_name(val[6:])
                            else:
                                doc.set_cell_value(col + 1, row, 'string',
                                                   val)
        else:
            # enter an empty string for blank lines
            doc.set_cell_value(1, row, 'string', '')
        row += 1

    # save manifest file
    if filesSavedinManifest.keys() != []:
        # Append (not overwrite): several spreadsheets can share a MANIFEST.
        manifestFH = open("MANIFEST", "a")
        manifestFH.write("# Files from %s\n" % odsname)
        for file in filesSavedinManifest.keys():
            manifestFH.write("%s\n" % file)
        manifestFH.close()

    # Save spreadsheet file.
    doc.save(odsname)
def handle(self, app_or_project, name, target=None, **options):
    """Render an app/project template into a target directory.

    :param app_or_project: 'app' or 'project'; selects the template subdir
        and the context variable names (e.g. ``app_name``/``project_name``).
    :param name: New app/project name; also substituted into template
        file and directory names.
    :param target: Optional existing destination directory; when omitted, a
        new directory named ``name`` is created under the CWD.
    :raises CommandError: If the destination already exists / is missing,
        or a rendered file would overwrite an existing one.
    """
    self.app_or_project = app_or_project
    self.a_or_an = 'an' if app_or_project == 'app' else 'a'
    self.paths_to_remove = []
    self.verbosity = options['verbosity']

    self.validate_name(name)

    # if some directory is given, make sure it's nicely expanded
    if target is None:
        top_dir = os.path.join(os.getcwd(), name)
        try:
            os.makedirs(top_dir)
        except FileExistsError:
            raise CommandError("'%s' already exists" % top_dir)
        except OSError as e:
            raise CommandError(e)
    else:
        if app_or_project == 'app':
            self.validate_name(os.path.basename(target), 'directory')
        top_dir = os.path.abspath(os.path.expanduser(target))
        if not os.path.exists(top_dir):
            raise CommandError("Destination directory '%s' does not "
                               "exist, please create it first." % top_dir)

    # Extra extensions/filenames that should be template-rendered rather
    # than copied verbatim.
    extensions = tuple(handle_extensions(options['extensions']))
    extra_files = []
    for file in options['files']:
        extra_files.extend(map(lambda x: x.strip(), file.split(',')))
    if self.verbosity >= 2:
        self.stdout.write(
            'Rendering %s template files with extensions: %s' %
            (app_or_project, ', '.join(extensions)))
        self.stdout.write(
            'Rendering %s template files with filenames: %s' %
            (app_or_project, ', '.join(extra_files)))
    base_name = '%s_name' % app_or_project
    base_subdir = '%s_template' % app_or_project
    base_directory = '%s_directory' % app_or_project
    camel_case_name = 'camel_case_%s_name' % app_or_project
    # e.g. 'my_app' -> 'MyApp'
    camel_case_value = ''.join(x for x in name.title() if x != '_')

    context = Context(
        {
            **options,
            base_name: name,
            base_directory: top_dir,
            camel_case_name: camel_case_value,
            'docs_version': get_docs_version(),
            'django_version': django.__version__,
        },
        autoescape=False)

    # Setup a stub settings environment for template rendering
    if not settings.configured:
        settings.configure()
        django.setup()

    template_dir = self.handle_template(options['template'], base_subdir)
    # +1 to also strip the path separator after the template dir.
    prefix_length = len(template_dir) + 1

    for root, dirs, files in os.walk(template_dir):

        path_rest = root[prefix_length:]
        relative_dir = path_rest.replace(base_name, name)
        if relative_dir:
            target_dir = os.path.join(top_dir, relative_dir)
            os.makedirs(target_dir, exist_ok=True)

        # Prune hidden and cache dirs in place so os.walk skips them.
        for dirname in dirs[:]:
            if dirname.startswith('.') or dirname == '__pycache__':
                dirs.remove(dirname)

        for filename in files:
            if filename.endswith(('.pyo', '.pyc', '.py.class')):
                # Ignore some files as they cause various breakages.
                continue
            old_path = os.path.join(root, filename)
            new_path = os.path.join(top_dir, relative_dir,
                                    filename.replace(base_name, name))
            for old_suffix, new_suffix in self.rewrite_template_suffixes:
                if new_path.endswith(old_suffix):
                    new_path = new_path[:-len(old_suffix)] + new_suffix
                    break  # Only rewrite once

            if os.path.exists(new_path):
                raise CommandError(
                    "%s already exists. Overlaying %s %s into an existing "
                    "directory won't replace conflicting files." % (
                        new_path,
                        self.a_or_an,
                        app_or_project,
                    ))

            # Only render the Python files, as we don't want to
            # accidentally render Django template files
            if new_path.endswith(extensions) or filename in extra_files:
                with open(old_path, encoding='utf-8') as template_file:
                    content = template_file.read()
                template = Engine().from_string(content)
                content = template.render(context)
                with open(new_path, 'w', encoding='utf-8') as new_file:
                    new_file.write(content)
            else:
                shutil.copyfile(old_path, new_path)

            if self.verbosity >= 2:
                self.stdout.write('Creating %s' % new_path)
            try:
                shutil.copymode(old_path, new_path)
                self.make_writeable(new_path)
            except OSError:
                # Best-effort only: warn but keep going on filesystems
                # without permission-bit support.
                self.stderr.write(
                    "Notice: Couldn't set permission bits on %s. You're "
                    "probably using an uncommon filesystem setup. No "
                    "problem." % new_path, self.style.NOTICE)

    if self.paths_to_remove:
        if self.verbosity >= 2:
            self.stdout.write('Cleaning up temporary files.')
        for path_to_remove in self.paths_to_remove:
            if os.path.isfile(path_to_remove):
                os.remove(path_to_remove)
            else:
                shutil.rmtree(path_to_remove)