def pyversion_patch(filename):
    '''Find the best pyversion-fixup patch for a given filename and apply it.

    Scans the directory containing *filename* for files matching
    ``patchfile_re`` (base name plus a dotted version suffix), picks the
    highest version not exceeding the running interpreter's version, and
    applies it with the cwd temporarily set to that directory.

    :param filename: path of the file to patch
    :returns: True if a patch was found and applied cleanly, False otherwise
    '''
    # Renamed locals to avoid shadowing the builtins `dir` and `file`.
    dirname, basename = os.path.split(filename)
    best_ver = (0,)
    patchfile = None
    for dirfile in os.listdir(dirname):
        m = patchfile_re.match(dirfile)
        if not m:
            continue
        base, ver = m.groups()
        if base != basename:
            continue
        ver = tuple(int(v) for v in ver.split('.'))
        # Keep the highest patch version this interpreter satisfies.
        if sys.version_info >= ver and ver > best_ver:
            best_ver = ver
            patchfile = dirfile
    if not patchfile:
        return False
    log.info("Applying %s to %s..." % (patchfile, filename))
    cwd = os.getcwd()
    os.chdir(dirname)
    try:
        p = patch.fromfile(patchfile)
        # Propagate failure instead of unconditionally returning True:
        # fromfile() returns False on parse errors and apply() returns
        # False when hunks fail to apply.
        return bool(p) and bool(p.apply())
    finally:
        os.chdir(cwd)
def test_apply_returns_false_on_failure(self):
    """apply() must signal failure for a hunk that cannot be applied."""
    fixtures = [
        'data/failing/non-empty-patch-for-empty-file.diff',
        'data/failing/upload.py',
    ]
    self.tmpcopy(fixtures)
    patchset = patch.fromfile('non-empty-patch-for-empty-file.diff')
    applied = patchset.apply()
    self.assertFalse(applied)
def get_patch_lst_by_cu(path):
    """
    Get dict of patches with compile units as key

    :param path: base path where patches are located
    :returns: Dict of compile units with the following fields:
        - items: items of a patch
        - commit: commit hash
        - patch: file name of patch
    """
    flist = os.listdir(path)
    patch_lst = {}
    sortp.sort_nicely(flist)
    for fitem in flist:
        # os.path.join is robust when `path` lacks a trailing separator
        # (plain concatenation silently produced broken paths).
        fname = os.path.join(path, fitem)
        ppatch = patch.fromfile(fname)
        if isinstance(ppatch, bool):
            # fromfile() returns False when the file is not a parsable patch.
            continue
        # The commit hash is a property of the whole patch file, not of a
        # single item, so compute it once instead of once per item.
        commit = get_hash(ppatch.items[0].header)
        for item in ppatch.items:
            # Drop the first path component (a/ or b/ style prefix).
            cu_name = '/'.join(item.target.decode("utf-8").split('/')[1:])
            patch_lst.setdefault(cu_name, []).append({
                "items": ppatch.items,
                "commit": commit,
                "patch": fitem
            })
    return patch_lst
def patch_siputils(self):
    """Apply the siputils.py patch inside the build directory.

    Restores the original working directory even if parsing or applying
    the patch raises (the original only restored it on success).
    """
    patch_file = os.path.join(config['__Umbrella_path'], "patches",
                              "siputils.py.patch")
    savedpath = os.getcwd()
    os.chdir(self._context["build_path"])
    try:
        pset = patch.fromfile(patch_file)
        pset.apply()
    finally:
        os.chdir(savedpath)
def pyversion_patch(filename):
    '''Find the best pyversion-fixup patch for a given filename and apply it.

    Scans the directory containing *filename* for files matching
    ``patchfile_re``, picks the highest version not exceeding the running
    interpreter's version, and applies it from that directory.

    :param filename: path of the file to patch
    :returns: True if a patch was found and applied cleanly, False otherwise
    '''
    # Renamed locals to avoid shadowing the builtins `dir` and `file`.
    dirname, basename = os.path.split(filename)
    best_ver = (0,)
    patchfile = None
    for dirfile in os.listdir(dirname):
        m = patchfile_re.match(dirfile)
        if not m:
            continue
        base, ver = m.groups()
        if base != basename:
            continue
        ver = tuple(int(v) for v in ver.split('.'))
        # Keep the highest patch version this interpreter satisfies.
        if sys.version_info >= ver and ver > best_ver:
            best_ver = ver
            patchfile = dirfile
    if not patchfile:
        return False
    log.info("Applying %s to %s..." % (patchfile, filename))
    cwd = os.getcwd()
    os.chdir(dirname)
    try:
        p = patch.fromfile(patchfile)
        # Propagate failure instead of unconditionally returning True.
        return bool(p) and bool(p.apply())
    finally:
        os.chdir(cwd)
def prepare_smesh():
    """Prepare the SMESH sources: copy, overwrite CMakeLists, patch, extras."""
    # Start from a clean tree if a previous copy exists.
    if os.path.exists('src/SMESH'):
        shutil.rmtree('src/SMESH')
    shutil.copytree('external/SMESH/src', 'src/SMESH/src')
    # Overwrite the upstream CMakeLists.txt with our own version.
    target = os.path.join('src/SMESH', 'CMakeLists.txt')
    shutil.copyfile('cmake/SMESH/CMakeLists.txt', target)
    # Apply the local patch set on top of the copied sources.
    patch.fromfile('patch/SMESH.patch').apply(strip=0, root='src/SMESH')
    # Bring in the extra sources shipped alongside the repository.
    shutil.copytree('extra/MeshVSLink', 'src/SMESH/src/MeshVSLink',
                    dirs_exist_ok=True)
    shutil.copyfile('extra/MEFISTO2/trte.c', 'src/SMESH/src/MEFISTO2/trte.c')
def apply_refactor(self, call_id, payload):
    """Apply a refactoring diff from the server, if the type is supported."""
    supported = ("AddImport", "OrganizeImports", "Rename", "InlineLocal")
    if payload["refactorType"]["typehint"] not in supported:
        return
    diff_file = payload["diff"]
    patch_set = fromfile(diff_file)
    if not patch_set:
        self.env.logger.warning("Couldn't parse diff_file: {}"
                                .format(diff_file))
        return
    procedure = payload['procedureId']
    self.env.logger.debug("Refactoring get root from: {}"
                          .format(self.refactorings[procedure]))
    root = root_as_str_from_abspath(self.refactorings[procedure])
    self.env.logger.debug("Refactoring set root: {}".format(root))
    if patch_set.apply(0, root):
        file = self.refactorings[procedure]
        # Reload the buffer on the UI thread once the patch is on disk.
        sublime.set_timeout(bind(self.env.editor.reload_file, file), 0)
        self.env.logger.info("Refactoring succeeded, patch file: {}"
                             .format(diff_file))
        self.env.status_message("Refactoring succeeded")
    else:
        self.env.logger.error("Patch refactoring failed, patch file: {}"
                              .format(diff_file))
        self.env.status_message("Refactor failed: {}".format(diff_file))
def Patch (patchDir, codeDir, patchFile):
    """Apply *patchFile* (located in *patchDir*) from inside *codeDir*.

    Restores the original working directory even if parsing or applying
    the patch raises (the original left the process chdir'd on error).
    """
    patch1 = os.path.join(patchDir, patchFile)
    curDir = os.getcwd()
    os.chdir(codeDir)
    try:
        pset = patch.fromfile(patch1)
        pset.apply()
    finally:
        os.chdir(curDir)
def patch_pyqt(configuration, qt_paths):
    """Apply the pluginloader patch matching the configured PyQt version."""
    # TODO: gee golly get this figured out properly and configured etc
    here = pathlib.Path(__file__).resolve().parent
    patch_path = here / 'pluginloader.{}.patch'.format(configuration.pyqt_version)
    patch.fromfile(fspath(patch_path)).apply(strip=1)
def patch_triangle():
    '''Patch the Triangle source with 64-bit modification.'''
    # NOTE(review): Python 2 code (`except Exception, e`, urllib2, md5).
    print("Starting patching process of triangle")
    # Work in a throwaway directory; note the cwd is changed for the caller.
    tmpdir = tempfile.mkdtemp()
    os.chdir(tmpdir)
    print("Downloading triangle")
    try:
        # Download the upstream triangle.zip archive.
        with open("triangle.zip", 'wb') as f:
            response = urllib2.urlopen(URL_TRIANGLE)
            assert (response.getcode() == 200)
            f.write(response.read())
        print("Done")
        zf = zipfile.ZipFile("triangle.zip")
        zf.extract("triangle.c")
        zf.extract("triangle.h")
        print("Checking md5 sum of downloaded file")
        with open("triangle.c", "rb") as f:
            m5 = md5.new(f.read()).digest()
        zf.close()
        # Guard against upstream changes: the patch only applies cleanly to
        # the exact source revision it was written for.
        assert (m5 == MD5_TRI)
        # fromfile() result doubles as the success flag returned below.
        rc = patch.fromfile(PATCH_TRIANGLE)
        SRC_TRI = os.path.join(tmpdir, "triangle.c")
    except Exception, e:
        print("Patching process failed with error:\n" + str(e))
        rc = False
def parse_patch(patch_file):
    """returns a dictionary of {filepath:[lines patched]}"""
    patch_set = patch.fromfile(patch_file)
    # Pass 1: resolve the set of absolute target paths we care about.
    target_files = set()
    for changed_file in patch_set.items:
        relative_path = LEFTOVER_BAD_CHARS.sub('', changed_file.target)
        if is_ignored_file(relative_path):
            continue
        absolute_file_path = os.path.join(ROOT_PATH, relative_path)
        if (os.path.exists(absolute_file_path)
                and not os.path.isdir(absolute_file_path)):
            target_files.add(absolute_file_path)
    # Pass 2: collect target-side line numbers of added lines per hunk.
    target_lines = defaultdict(list)
    for p in patch_set.items:
        cleaned = LEFTOVER_BAD_CHARS.sub('', p.target)
        if os.path.join(ROOT_PATH, cleaned) not in target_files:
            continue
        for hunk in p.hunks:
            added = []
            line_no = hunk.starttgt
            for hline in hunk.text:
                if hline.startswith(REMOVED_LINE):
                    continue
                if hline.startswith(ADDED_LINE):
                    added.append(line_no)
                line_no += 1
            target_lines[cleaned].extend(added)
    return target_lines
def parse_patch(patch_file):
    """
    returns a dictionary of {filepath:[lines patched]}

    Paths are resolved relative to the django checkout; tests, docs and
    files that no longer exist are filtered out.
    """
    patch_set = patch.fromfile(patch_file)
    target_files = set()
    target_files.update([os.path.join(django_path,
                                      re.sub(PATH_FIX, '', p.target))
                         for p in patch_set.items])
    target_files = [p for p in target_files if 'test' not in p]
    target_files = [p for p in target_files if 'docs' not in p]
    target_files = [p for p in target_files if os.path.exists(p)]
    target_lines = defaultdict(list)
    for p in patch_set.items:
        source_file = os.path.join(django_path, re.sub(PATH_FIX, '', p.target))
        if source_file not in target_files:
            # skip files filtered out above
            continue
        # Removed the unused `source_lines` / `last_hunk_offset` locals.
        for hunk in p.hunks:
            patched_lines = []
            line_offset = hunk.starttgt
            for hline in hunk.text:
                if hline.startswith('-'):
                    continue
                if hline.startswith('+'):
                    patched_lines.append(line_offset)
                line_offset += 1
            target_lines[re.sub(PATH_FIX, '', p.target)].extend(patched_lines)
    return target_lines
def _generate_template(template):
    """Render a cookiecutter template, then apply any *.patch files it left."""
    if template['no_input'] is None:
        template['no_input'] = True
    kwargs = {key: value for key, value in template.items() if value is not None}
    ret = cookiecutter(**kwargs)
    output_dir = os.path.relpath(ret, '.')
    context.log.success(f"{template['template']} -> {output_dir}")
    for entry in os.listdir(output_dir):
        if not entry.endswith('.patch'):
            continue
        patch_path = os.path.join(output_dir, entry)
        if not os.path.isfile(patch_path):
            continue
        context.log.notice("Applying patch %s in %s", patch_path, output_dir)
        try:
            pset = patch.fromfile(patch_path)
            pset.apply(root=output_dir)
            context.log.success("Patch %s applied in %s",
                                patch_path, output_dir)
        except Exception as exc:  # pylint:disable=broad-except
            context.log.error(
                "Patch %s has failed to be applied in %s: %s",
                patch_path, output_dir, exc)
def patch(base_path=None, patch_file=None, patch_string=None, strip=0,
          output=None):
    """Applies a diff from file (patch_file) or string (patch_string)
    in base_path directory or current dir if None"""

    class PatchLogHandler(logging.Handler):
        """Route the patch library's log records to the Conan output."""

        def __init__(self):
            logging.Handler.__init__(self, logging.DEBUG)
            self.output = output or ConanOutput(sys.stdout, True)
            self.patchname = patch_file if patch_file else "patch"

        def emit(self, record):
            logstr = self.format(record)
            if record.levelno == logging.WARN:
                self.output.warn("%s: %s" % (self.patchname, logstr))
            else:
                self.output.info("%s: %s" % (self.patchname, logstr))

    patchlog = logging.getLogger("patch")
    if patchlog:
        patchlog.handlers = []
        patchlog.addHandler(PatchLogHandler())

    if not patch_file and not patch_string:
        return
    patchset = (fromfile(patch_file) if patch_file
                else fromstring(patch_string.encode()))
    if not patchset:
        raise ConanException("Failed to parse patch: %s"
                             % (patch_file if patch_file else "string"))
    if not patchset.apply(root=base_path, strip=strip):
        raise ConanException("Failed to apply patch: %s" % patch_file)
def test_autofixed_parent_path(self):
    """Leading parent-path components are compensated, reported as warnings."""
    # [ ] exception vs return codes for error recovery
    # [x] separate return code when patch lib compensated the error
    #     (implemented as warning count)
    patchset = patch.fromfile(join(TESTS, "data/autofix/parent-path.diff"))
    self.assertEqual(patchset.errors, 0)
    self.assertEqual(patchset.warnings, 2)
    self.assertEqual(patchset.items[0].source, b"patch.py")
def main(args):
    '''
    Main entrypoint of the ASAR patcher.

    Args:
        args (...): A set of arguments parsed by the Python argparse module.
    '''
    logging.basicConfig(
        level=logging.INFO,
        format='%(asctime)s - %(process)d - [%(levelname)s] %(message)s',
    )
    logger = logging.getLogger(__name__)

    # Expand our user provided paths.
    path_input = os.path.abspath(os.path.expanduser(args.input))
    path_output = os.path.abspath(os.path.expanduser(args.output))

    # As we'll be packing and unpacking files, we need a temporary location.
    temporary_dir = tempfile.mkdtemp(prefix='nowtv')
    logger.info('Using %s as a temporary directory', temporary_dir)

    # Attempt to extract the encrypted bundle.js, and electron.js
    logger.info('Attempting to extract ASAR')
    asar.unpack(path_input, os.path.join(temporary_dir, 'app'))

    # Decrypt.
    logger.info('Attempting to decrypt bundle.js from ASAR')
    decrypt_file(
        os.path.join(temporary_dir, 'app/dist/src/bundle.js'),
        os.path.join(temporary_dir, 'app/dist/src/bundle.plain.js'),
    )

    # Load and attempt to apply patches. fromfile() returns False on a
    # parse error (previously an AttributeError crash) and apply() returns
    # False on failure (previously ignored) -- log both explicitly.
    for diff in glob.glob('{0}/*.patch'.format(args.patches)):
        logger.info('Attempting to apply patch %s', diff)
        patchset = patch.fromfile(diff)
        if not patchset or not patchset.apply(strip=1, root=temporary_dir):
            logger.error('Failed to apply patch %s', diff)

    # Finally, apply optional patches.
    if args.oi_you_got_a_license_for_that:
        for diff in glob.glob('{0}/optional/*.patch'.format(args.patches)):
            logger.info('Attempting to apply OPTIONAL patch %s', diff)
            patchset = patch.fromfile(diff)
            if not patchset or not patchset.apply(strip=1, root=temporary_dir):
                logger.error('Failed to apply OPTIONAL patch %s', diff)

    # Rebunble into patched ASAR.
    logger.info('Attempting to write patched ASAR to %s', path_output)
    asar.pack(os.path.join(temporary_dir, 'app/'), path_output)
def test_apply_strip(self):
    """apply(strip=N) drops N leading path components before patching."""
    treeroot = join(self.tmpdir, "rootparent")
    shutil.copytree(join(tests_dir, "06nested"), treeroot)
    pto = patch.fromfile(join(tests_dir, "06nested/06nested.patch"))
    # Prepend two bogus components that strip=2 must remove again.
    for item in pto:
        item.source = "nasty/prefix/" + item.source
        item.target = "nasty/prefix/" + item.target
    self.assert_(pto.apply(strip=2, root=treeroot))
def test_apply_strip(self):
    """apply(strip=N) drops N leading path components before patching."""
    treeroot = join(self.tmpdir, 'rootparent')
    shutil.copytree(join(TESTS, '06nested'), treeroot)
    pto = patch.fromfile(join(TESTS, '06nested/06nested.patch'))
    # Prepend two bogus components that strip=2 must remove again.
    for item in pto:
        item.source = b'nasty/prefix/' + item.source
        item.target = b'nasty/prefix/' + item.target
    self.assertTrue(pto.apply(strip=2, root=treeroot))
def _patch_headers(self):
    """Apply every patch file under ./patches to the CBC source tree."""
    import patch
    patches_dir = os.path.join(os.path.dirname(__file__), 'patches')
    for name in os.listdir(patches_dir):
        patch_path = os.path.join(patches_dir, name)
        pset = patch.fromfile(patch_path)
        if not pset.apply(strip=2, root=CBC_DIR):
            print('fail to patch file: ' + patch_path)
def test_autofixed_parent_path(self):
    """Parent-path prefixes are compensated; reported via warning count."""
    # [ ] exception vs return codes for error recovery
    # [x] separate return code when patch lib compensated the error
    #     (implemented as warning count)
    patchset = patch.fromfile(join(tests_dir, "data/autofix/parent-path.diff"))
    self.assertEqual(patchset.errors, 0)
    self.assertEqual(patchset.warnings, 2)
    self.assertEqual(patchset.items[0].source, "patch.py")
def test_revert(self):
    """revert() restores the pre-apply contents of the patched file."""
    self.tmpcopy(["03trail_fname.patch", "03trail_fname.from"])
    pto = patch.fromfile("03trail_fname.patch")
    self.assert_(pto.apply())
    patched = open(self.tmpdir + "/03trail_fname.from").read()
    pristine = open(TESTS + "/03trail_fname.from").read()
    self.assertNotEqual(patched, pristine)
    self.assert_(pto.revert())
    reverted = open(self.tmpdir + "/03trail_fname.from").read()
    self.assertEqual(reverted, open(TESTS + "/03trail_fname.from").read())
def patch(base_path=None, patch_file=None, patch_string=None, strip=0, output=None):
    """Applies a diff from file (patch_file) or string (patch_string)
    in base_path directory or current dir if None"""
    class PatchLogHandler(logging.Handler):
        # Routes the patch library's log records to the Conan output.
        def __init__(self):
            logging.Handler.__init__(self, logging.DEBUG)
            self.output = output or ConanOutput(sys.stdout, True)
            self.patchname = patch_file if patch_file else "patch"

        def emit(self, record):
            logstr = self.format(record)
            if record.levelno == logging.WARN:
                self.output.warn("%s: %s" % (self.patchname, logstr))
            else:
                self.output.info("%s: %s" % (self.patchname, logstr))

    # Replace any existing handlers on the "patch" logger with ours.
    patchlog = logging.getLogger("patch")
    if patchlog:
        patchlog.handlers = []
        patchlog.addHandler(PatchLogHandler())

    if not patch_file and not patch_string:
        return
    if patch_file:
        patchset = fromfile(patch_file)
    else:
        patchset = fromstring(patch_string.encode())

    if not patchset:
        raise ConanException("Failed to parse patch: %s" % (patch_file if patch_file else "string"))

    # account for new and deleted files, upstream dep won't fix them
    items = []
    for p in patchset:
        # Strip the conventional a/ and b/ git prefixes.
        source = p.source.decode("utf-8")
        if source.startswith("a/"):
            source = source[2:]
        target = p.target.decode("utf-8")
        if target.startswith("b/"):
            target = target[2:]
        if "dev/null" in source:
            # New file: materialize it from the added lines of the hunk.
            if base_path:
                target = os.path.join(base_path, target)
            hunks = [s.decode("utf-8") for s in p.hunks[0].text]
            new_file = "".join(hunk[1:] for hunk in hunks)
            save(target, new_file)
        elif "dev/null" in target:
            # Deleted file: remove it from disk directly.
            if base_path:
                source = os.path.join(base_path, source)
            os.unlink(source)
        else:
            # Regular modification; leave it to patchset.apply() below.
            items.append(p)
    patchset.items = items

    if not patchset.apply(root=base_path, strip=strip):
        raise ConanException("Failed to apply patch: %s" % patch_file)
def patch_openssl_props(context):
    """Apply the python_openssl_path patch inside the build directory.

    Restores the original working directory even if parsing or applying
    the patch raises (the original only restored it on success).

    :returns: True (kept for the caller's task-chain convention)
    """
    patch_file = os.path.join(config['__Umbrella_path'], "patches",
                              "python_openssl_path.patch")
    savedpath = os.getcwd()
    tarpath = os.path.join(context["build_path"])
    os.chdir(tarpath)
    try:
        pset = patch.fromfile(patch_file)
        pset.apply()
    finally:
        os.chdir(savedpath)
    return True
def patch_prebuilt():
    """Apply the version-specific prebuilt patch to the CEF binary tree."""
    patch_root = Options.cef_binary
    patch_name = 'prebuilt_{}.{}.{}.patch'.format(Options.cef_version_major,
                                                  Options.cef_version_minor,
                                                  Options.cef_version_patch)
    patch_file = os.path.join(Options.cefpython_dir, 'patches', patch_name)
    if not os.path.exists(patch_file):
        # No patch shipped for this CEF version; nothing to do.
        return
    patch.fromfile(patch_file).apply(root=patch_root)
def main():
    """Download, verify and unpack the msys2 binaries into the conda PREFIX,
    apply any recipe patches, then rearrange files per the recipe metadata."""
    recipe_dir = os.environ["RECIPE_DIR"]
    conda_platform = 'win-32' if os.environ["ARCH"] == '32' else 'win-64'
    prefix = os.environ['PREFIX']
    metadata = MetaData(recipe_dir)
    # Per-platform download info lives in the recipe's `extra` section.
    msys2_tar_xz_url = metadata.get_section(
        'extra')['msys2-binaries'][conda_platform]['url']
    msys2_md5 = metadata.get_section(
        'extra')['msys2-binaries'][conda_platform]['md5']
    mv_srcs_list = metadata.get_section(
        'extra')['msys2-binaries'][conda_platform]['mv-srcs']
    mv_dsts_list = metadata.get_section(
        'extra')['msys2-binaries'][conda_platform]['mv-dsts']
    msys2_tar_xz = get_tar_xz(msys2_tar_xz_url, msys2_md5)
    tar = tarfile.open(msys2_tar_xz, 'r|xz')
    tar.extractall(path=prefix)
    try:
        patches = metadata.get_section(
            'extra')['msys2-binaries'][conda_platform]['patches']
    except:
        # NOTE(review): bare except -- presumably guards a missing
        # 'patches' key (KeyError); consider narrowing.
        patches = []
    if len(patches):
        for patchname in patches:
            patchset = patch.fromfile(join(getenv('RECIPE_DIR'), patchname))
            patchset.apply(1, root=prefix)
    # shutil is a bit funny (like mv) with regards to how it treats
    # the destination depending on whether it is an existing directory or not
    # (i.e. moving into that versus moving as that).
    # Therefore, the rules employed are:
    # 1. If mv_dst ends with a '/' it is a directory that you want mv_src
    #    moved into.
    # 2. If mv_src has a wildcard, mv_dst is a directory that you want mv_src
    #    moved into.
    # In these cases we makedirs(mv_dst) and then call move(mv_src, mv_dst)
    # .. otherwise we makedirs(dirname(mv_dst)) and call move(mv_src, mv_dst)
    # .. however, if no mv_srcs exist we don't makedirs at all.
    for mv_src, mv_dst in zip(mv_srcs_list, mv_dsts_list):
        mv_dst_definitely_dir = False
        mv_srcs = glob(join(prefix, normpath(mv_src)))
        if '*' in mv_src or mv_dst.endswith('/') or len(mv_srcs) > 1:
            mv_dst_definitely_dir = True
        if len(mv_srcs):
            mv_dst = join(prefix, normpath(mv_dst))
            mv_dst_mkdir = mv_dst
            if not mv_dst_definitely_dir:
                mv_dst_mkdir = dirname(mv_dst_mkdir)
            try:
                makedirs(mv_dst_mkdir)
            except:
                # NOTE(review): bare except -- presumably tolerates an
                # already-existing directory; consider exist_ok=True.
                pass
            for mv_src in mv_srcs:
                move(mv_src, mv_dst)
    tar.close()
def test_diffstat(self):
    """diffstat() output must match the known-good summary exactly."""
    output = """\
updatedlg.cpp | 20 ++++++++++++++++++--
updatedlg.h | 1 +
manifest.xml | 15 ++++++++-------
conf.cpp | 23 +++++++++++++++++------
conf.h | 7 ++++---
5 files changed, 48 insertions(+), 18 deletions(-), +1203 bytes"""
    patchset = patch.fromfile(join(TESTS, "01uni_multi/01uni_multi.patch"))
    self.assertEqual(patchset.diffstat(), output, "Output doesn't match")
def get_patches(path):
    """Parse every patch file in *path* into a dict keyed by file-name stem.

    :param path: directory containing the patch files
    :returns: dict mapping the file name minus its last five characters
              (e.g. a ".diff" extension) to the parsed patch set
    """
    patch_lst = {}
    for fitem in os.listdir(path):
        # os.path.join is robust when `path` lacks a trailing separator
        # (plain string concatenation silently produced broken paths).
        fname = os.path.join(path, fitem)
        data = patch.fromfile(fname)
        patch_lst[fitem[:-5]] = data
    return patch_lst
def patchboost(context):
    """Apply the VS15 type_traits and MSVC2017 fixes to the boost tree.

    :returns: True on success, False on OSError (missing path/patch)

    Always restores the original working directory, including on the
    error path (the original left the process chdir'd on failure).
    """
    savedpath = os.getcwd()
    try:
        os.chdir(
            os.path.join("{}/boost_{}".format(
                config["paths"]["build"],
                config["boost_version"].replace(".", "_"))))
        pset = patch.fromfile(
            os.path.join(config["paths"]["build"], "usvfs", "patches",
                         "type_traits_vs15_fix.patch"))
        pset.apply()
        pset = patch.fromfile(
            os.path.join(config['__Umbrella_path'], "Patches",
                         "boost_msvc2017_fix.patch"))
        pset.apply()
        return True
    except OSError:
        return False
    finally:
        os.chdir(savedpath)
def _apply_patch(self):
    """Apply this package's patch file to the work tree, if one exists."""
    patch_path = self.patch_path()
    if not os.path.exists(patch_path):
        # No patch shipped; nothing to do.
        return
    patch_set = patch.fromfile(patch_path)
    if patch_set and patch_set.apply(root=self._work_path):
        return
    raise Exception('Failed to apply patch ' + patch_path)
def test_diffstat(self):
    """diffstat() output must match the known-good summary exactly."""
    output = """\
updatedlg.cpp | 20 ++++++++++++++++++--
updatedlg.h | 1 +
manifest.xml | 15 ++++++++-------
conf.cpp | 23 +++++++++++++++++------
conf.h | 7 ++++---
5 files changed, 48 insertions(+), 18 deletions(-), +1203 bytes"""
    patchset = patch.fromfile(join(tests_dir, "01uni_multi/01uni_multi.patch"))
    self.assertEqual(patchset.diffstat(), output, "Output doesn't match")
def _patch_salt_grains_core_server_id(): import salt.config # must import before salt.grains.core import salt.grains.core import sys import patch pset = patch.fromfile('pkg/salt.grains.core.patch') pset.items[0].target = salt.grains.core.__file__.encode() pset.apply() sys.stderr.write('patching complete\n')
def revert(self):
    """Undo the applied skin patch; return True on success, False if clean."""
    if self.status == STATUS_CLEAN:
        # Nothing to undo.
        return False
    patch_path = os.path.join(data_path, "skins", self.patch)
    patchset = patch.fromfile(patch_path)
    if not patchset.revert(root=self.skin_path):
        # Preserve the original fall-through (implicit None) on failure.
        return
    self.status = STATUS_CLEAN
    self.save_status()
    os.remove(patch_path)
    window.clearProperty(property_name)
    return True
def apply_patches(source_dir, patch_dir):
    """Apply every *.diff and *.patch file in patch_dir to source_dir.

    Exits the process with status 1 on the first patch that fails to apply.
    """
    import patch
    patches = (glob.glob(os.path.join(patch_dir, '*.diff')) +
               glob.glob(os.path.join(patch_dir, '*.patch')))
    for filename in patches:
        print("Applying patch: {!r}".format(filename))
        patchset = patch.fromfile(filename)
        success = patchset.apply(1, root=source_dir)
        if not success:
            # Fixed typo in the error message ("Exitting" -> "Exiting").
            print("Failed to apply patch! Exiting...")
            sys.exit(1)
def test_revert(self):
    """A successful apply() followed by revert() round-trips the file."""
    self.tmpcopy(['03trail_fname.patch', '03trail_fname.from'])
    pto = patch.fromfile('03trail_fname.patch')
    self.assert_(pto.apply())
    tmp_file = self.tmpdir + '/03trail_fname.from'
    ref_file = TESTS + '/03trail_fname.from'
    self.assertNotEqual(open(tmp_file).read(), open(ref_file).read())
    self.assert_(pto.revert())
    self.assertEqual(open(tmp_file).read(), open(ref_file).read())
def make_npm(self):
    """Install the client's npm dependencies and patch the Transcrypt
    Parcel plugin; skip (and clean up package.json) when npm is absent."""
    os.chdir(self.client_dir)
    if self.has_npm:
        printmsg('Installing JavaScript dependencies...')
        # Use the default package.json if the supplied template doesn't have one
        if not os.path.isfile(
                os.path.join(self.template_dir, self.client_source_dir,
                             'package.json')):
            shutil.copy2(
                os.path.join(self.fallback_dir, 'client', 'package.json'), '.')
        self._update_project_name()
        run_cmd(['npm', 'install'])
        printmsg('Patching Transcrypt Parcel Plugin...')
        # Windows needs a different patch variant of the same fix.
        patch_name = 'asset.js.win.patch' if is_windows else 'asset.js.patch'
        try:
            import patch
            # Use the default patch if the supplied template doesn't have one
            if not os.path.isfile(
                    os.path.join(self.template_dir, patch_name)):
                patch_set = patch.fromfile(
                    os.path.join(self.fallback_dir, patch_name))
            else:
                patch_set = patch.fromfile(
                    os.path.join(self.template_dir, patch_name))
            patch_set.apply(root=os.path.join('.', 'node_modules',
                                              'parcel-plugin-transcrypt'))
        except Exception as e:
            # Best-effort: report but keep going without the patched plugin.
            printerr("Transcrypt Parcel Plugin patch failed!")
            printerr(e)
    else:
        printwarn('SKIPPING JavaScript dependencies!')
        # Remove a stale package.json so a later npm run doesn't pick it up.
        if os.path.isfile(os.path.join(self.client_dir, 'package.json')):
            os.remove(os.path.join(self.client_dir, 'package.json'))
def patchboost(context):
    """Apply the boost_python_libname patch to the boost source tree.

    :returns: True on success, False on OSError

    Always restores the original working directory, including on the
    error path (the original left the process chdir'd on failure).
    """
    savedpath = os.getcwd()
    try:
        os.chdir(boost_path)
        pset = patch.fromfile(
            os.path.join(config['__Umbrella_path'], "patches",
                         "boost_python_libname.patch"))
        pset.apply()
        return True
    except OSError:
        return False
    finally:
        os.chdir(savedpath)
def init_patch(context):
    """Apply the pyqt5_configure_init patch inside the build directory.

    :returns: True on success, False on OSError

    Always restores the original working directory, including on the
    error path (the original left the process chdir'd on failure).
    """
    savedpath = os.getcwd()
    try:
        os.chdir(context["build_path"])
        pset = patch.fromfile(
            os.path.join(config['__Umbrella_path'], "patches",
                         "pyqt5_configure_init.patch"))
        pset.apply()
        return True
    except OSError:
        return False
    finally:
        os.chdir(savedpath)
def patch(base_path=None, patch_file=None, patch_string=None):
    """Applies a diff from file (patch_file) or string (patch_string)
    in base_path directory or current dir if None"""
    if not patch_file and not patch_string:
        # Nothing to apply.
        return
    patchset = fromfile(patch_file) if patch_file else fromstring(patch_string)
    patchset.apply(root=base_path)
def patch(base_path=None, patch_file=None, patch_string=None):
    """Applies a diff from file (patch_file) or string (patch_string)
    in base_path directory or current dir if None"""
    if not patch_file and not patch_string:
        # Nothing to apply.
        return
    if patch_file:
        patchset = fromfile(patch_file)
    else:
        patchset = fromstring(patch_string.encode())
    if patchset.apply(root=base_path):
        return
    raise ConanException("Failed to apply patch: %s" % patch_file)
def test_revert(self):
    """apply() then revert() restores the original file bytes."""
    def read_bytes(filename):
        with open(filename, 'rb') as fh:
            return fh.read()
    self.tmpcopy(['03trail_fname.patch', '03trail_fname.from'])
    pto = patch.fromfile('03trail_fname.patch')
    self.assertTrue(pto.apply())
    self.assertNotEqual(read_bytes(self.tmpdir + '/03trail_fname.from'),
                        read_bytes(TESTS + '/03trail_fname.from'))
    self.assertTrue(pto.revert())
    self.assertEqual(read_bytes(self.tmpdir + '/03trail_fname.from'),
                     read_bytes(TESTS + '/03trail_fname.from'))
def patch_info(location):
    """
    Return a list of tuples of (src_path, target_path, patch_text) for each
    patch segment of a patch file at location.
    Raise an exception if the file is not a patch file or cannot be parsed.
    """
    patchset = pythonpatch.fromfile(location)
    if not patchset:
        msg = 'Unable to parse patch file: %(location)s' % locals()
        raise ExtractErrorFailedToExtract(msg)
    for segment in patchset.items:
        # Normalize both endpoints to POSIX-style paths.
        src = fileutils.as_posixpath(segment.source.strip())
        tgt = fileutils.as_posixpath(segment.target.strip())
        text = [line.strip() for line in patch_text(segment) if line]
        yield src, tgt, text
def apply_diffs(strWorkingFolder, strPatchFolder, uiStrip): # Collect all ".diff" files from the patch folder. astrPatches = [] for strDirname, astrDirnames, astrFilenames in os.walk(strPatchFolder): for strFilename in astrFilenames: strDummy, strExt = os.path.splitext(strFilename) if strExt == '.diff': strAbsFilename = os.path.join(strDirname, strFilename) astrPatches.append(strAbsFilename) # Sort the patches alphabetically. astrSortedPatches = sorted(astrPatches) for strPatch in astrSortedPatches: print 'Apply patch "%s"...' % strPatch # Apply the patches. tPatch = patch.fromfile(strPatch) tPatch.diffstat() tPatch.apply(uiStrip, root=strWorkingFolder)
def main():
    """Patch a numpy checkout and build an ATLAS-linked Windows wheel."""
    argc = len(sys.argv)
    # Optional argv: [1] path to the numpy checkout, [2] target bitness.
    numpy_path = sys.argv[1] if argc > 1 else os.getcwd()
    n_bits = sys.argv[2] if argc > 2 else get_bitness()
    if n_bits not in ('32', '64'):
        raise RuntimeError("Number of bits should be 32 or 64")
    os.chdir(abspath(numpy_path))
    # Start from a pristine tree before patching.
    check_call(['git', 'clean', '-fxd'])
    check_call(['git', 'reset', '--hard'])
    patch_file = pjoin(BUILD_STUFF, '1.10.4-init.patch')
    patch_set = patch.fromfile(patch_file)
    patch_set.apply()
    atlas_path = ATLAS_PATH_TEMPLATE.format(repo_path=BUILD_STUFF,
                                            n_bits=n_bits)
    # Point numpy's build at the ATLAS libraries via site.cfg.
    with open('site.cfg', 'wt') as fobj:
        fobj.write(SITE_CFG_TEMPLATE.format(atlas_path=atlas_path,
                                            lib_name=LIB_NAME))
    shutil.copy2(pjoin(BUILD_STUFF, '_distributor_init.py'), 'numpy')
    check_call(['python', 'setup.py', 'bdist_wheel'])
    add_library(atlas_path + r'\\lib\\' + LIB_NAME + '.dll')
def test_svn_detected(self):
    """SVN-style diffs are recognized by the type detection."""
    patchset = patch.fromfile(join(tests_dir, "01uni_multi/01uni_multi.patch"))
    self.assertEqual(patchset.type, patch.SVN)
def test_autofixed_absolute_path(self):
    """Absolute source paths are stripped with warnings, not errors."""
    patchset = patch.fromfile(join(tests_dir, "data/autofix/absolute-path.diff"))
    self.assertEqual(patchset.errors, 0)
    self.assertEqual(patchset.warnings, 2)
    self.assertEqual(patchset.items[0].source, "winnt/tests/run_tests.py")
def apply_patches(patches):
    """Apply a list of patch descriptors.

    Each entry is a mapping with keys:
      - file: path to the patch file (required)
      - strip: number of leading path components to strip (default 0)
      - root: directory to apply the patch in (default None -> cwd)

    :raises RuntimeError: if a patch cannot be parsed or applied
    """
    for entry in patches:
        # Renamed the local from `file` to avoid shadowing the builtin.
        patch_path = entry.get('file')
        strip = entry.get('strip', 0)
        root = entry.get('root', None)
        patchset = patch.fromfile(patch_path)
        # fromfile() returns False on parse failure; the original then
        # crashed with AttributeError.  Apply failures were silently
        # ignored.  Fail loudly for both instead.
        if not patchset:
            raise RuntimeError('Unable to parse patch: %s' % patch_path)
        if not patchset.apply(strip, root):
            raise RuntimeError('Failed to apply patch: %s' % patch_path)
#!/usr/bin/env python
import patch, sys

ps = patch.fromfile(sys.argv[1])

# Dump the first item's header into INFO.
with open('INFO', 'w') as info:
    info.write(''.join(ps.items[0].header))

# Write each item of the patch set to its own single-file diff, named
# after the source path with '/' replaced by '_'.
for item in ps.items:
    out_name = item.source[2:].replace('/', '_')
    out = open(out_name, 'w')
    out.write('--- %s\n' % item.source)
    out.write('+++ %s\n' % item.target)
    for hunk in item.hunks:
        out.write('@@ -%d,%d +%d,%d @@\n' % (
            hunk.startsrc, hunk.linessrc, hunk.starttgt, hunk.linestgt))
        out.write(''.join(hunk.text))
    out.close()
def test_hg_exported(self):
    """`hg export` output is recognized as a Mercurial diff."""
    patchset = patch.fromfile(join(tests_dir, "data/hg-exported.diff"))
    self.assertEqual(patchset.type, patch.HG)
def apply_patch(conf, patch_abspath):
    """Apply *patch_abspath* under the 3rd-party build node; fatal on failure."""
    conf.msg("Applying patch", os.path.basename(patch_abspath))
    patchset = patch.fromfile(patch_abspath)
    root_dir = conf.bldnode.make_node("3rd").abspath()
    if not patchset.apply(root=root_dir):
        conf.fatal("Cannot apply patch %s" % patch_abspath)
def test_git_changed_detected(self):
    """A git-format diff of a changed file is detected as GIT."""
    patchset = patch.fromfile(join(tests_dir, "data/git-changed-file.diff"))
    self.assertEqual(patchset.type, patch.GIT)
def test_apply_returns_false_on_failure(self):
    """apply() returns False when a hunk cannot be applied."""
    self.tmpcopy(["data/failing/non-empty-patch-for-empty-file.diff",
                  "data/failing/upload.py"])
    patchset = patch.fromfile("non-empty-patch-for-empty-file.diff")
    self.assertFalse(patchset.apply())
def test_apply_returns_true_on_success(self):
    """apply() reports success for a clean patch."""
    self.tmpcopy(["03trail_fname.patch", "03trail_fname.from"])
    patchset = patch.fromfile("03trail_fname.patch")
    self.assert_(patchset.apply())
def test_apply_root(self):
    """apply(root=...) patches relative to the given directory."""
    treeroot = join(self.tmpdir, "rootparent")
    shutil.copytree(join(tests_dir, "06nested"), treeroot)
    patchset = patch.fromfile(join(tests_dir, "06nested/06nested.patch"))
    self.assert_(patchset.apply(root=treeroot))
def test_hg_detected(self):
    """A Mercurial added-file diff is detected as HG."""
    patchset = patch.fromfile(join(tests_dir, "data/hg-added-file.diff"))
    self.assertEqual(patchset.type, patch.HG)
def test_autofixed_stripped_trailing_whitespace(self):
    """Trailing-whitespace differences are auto-fixed and counted as warnings."""
    patchset = patch.fromfile(
        join(tests_dir, "data/autofix/stripped-trailing-whitespace.diff"))
    self.assertEqual(patchset.errors, 0)
    self.assertEqual(patchset.warnings, 4)