def clean_diff(self):
    """Validate the diff form field.

    Invoked by django.forms.Form.is_valid() via the clean_<field> hook;
    raises ValidationError when the submitted diff is unacceptable.
    """
    self.the_patch = patch.fromstring(self.cleaned_data['diff'])
    # Check that the submitted diff patches the correct number of files.
    if len(self.the_patch.hunks) != 1:
        raise ValidationError('The patch affects more than one file.')
    # Check that the filename it patches is correct.
    if self.FILE_TO_BE_PATCHED not in self.cleaned_data['diff']:
        raise ValidationError('The patch affects the wrong file.')
    # Now we need to generate a working copy to apply the patch to.
    # We can also use this working copy to commit the patch if it's OK.
    repo = view_helpers.SvnRepository(self.username)
    view_helpers.subproc_check_output(
        ['svn', 'co', repo.file_trunk_url(), self.wcdir])
    # Check that it will apply correctly to the working copy.
    if not self.the_patch._match_file_hunks(self.file_to_patch,
                                            self.the_patch.hunks[0]):
        # Message typo fixed ("lastest" -> "latest").
        raise ValidationError('The patch will not apply correctly to the latest revision.')
    # Check that the resulting file matches what is expected; `with`
    # guarantees the file handles are closed (they previously leaked).
    with open(self.file_to_patch) as source_file:
        self.new_content = ''.join(
            self.the_patch.patch_stream(source_file, self.the_patch.hunks[0]))
    with open(self.NEW_CONTENT) as expected_file:
        expected_content = expected_file.read()
    if self.new_content != expected_content:
        raise ValidationError('The file resulting from patching does not have the correct contents.')
def parse_diff(lines):
    """Extract commit metadata and the unified diff from git-show-style lines.

    Returns a tuple (commit_hash, fingerprint, merge_parents, patch,
    diff_lines) where ``patch`` is the pypatch PatchSet parsed from the
    collected diff lines.
    """
    parents = False
    gpg_fingerprint = False
    sha = None
    body = []
    for text in lines:
        if text.startswith("commit"):
            _, sha = text.split()
        if text.startswith("Merge"):
            parents = text.split()[1:]
        if body:
            # Once the first "diff" header is seen, keep every later line.
            body.append(text)
        elif text.startswith("diff"):
            body = [text]
        if text.startswith("Primary key fingerprint:"):
            # This is bad, kids. Stay in school.
            # ex: 'Primary key fingerprint: 22E6 9398 3D87 4EA0 CF7C 1947 D934 BC84 BD2F FE0E'
            # Collapse the spaced groups into one contiguous hex string.
            gpg_fingerprint = "".join(text.split(":")[1].split())
    parsed = pypatch.fromstring("".join(body))
    return sha, gpg_fingerprint, parents, parsed, body
def patch(base_path=None, patch_file=None, patch_string=None, strip=0, output=None):
    """Applies a diff from file (patch_file) or string (patch_string)
    in base_path directory or current dir if None"""

    class PatchLogHandler(logging.Handler):
        """Forward records from the "patch" logger to the Conan output."""

        def __init__(self):
            logging.Handler.__init__(self, logging.DEBUG)
            self.output = output or ConanOutput(sys.stdout, True)
            self.patchname = patch_file if patch_file else "patch"

        def emit(self, record):
            message = self.format(record)
            if record.levelno == logging.WARN:
                self.output.warn("%s: %s" % (self.patchname, message))
            else:
                self.output.info("%s: %s" % (self.patchname, message))

    # Reset any previously installed handlers so output is not duplicated.
    patchlog = logging.getLogger("patch")
    if patchlog:
        patchlog.handlers = []
        patchlog.addHandler(PatchLogHandler())

    if not patch_file and not patch_string:
        return

    patchset = fromfile(patch_file) if patch_file else fromstring(patch_string.encode())
    if not patchset:
        raise ConanException("Failed to parse patch: %s" % (patch_file if patch_file else "string"))
    if not patchset.apply(root=base_path, strip=strip):
        raise ConanException("Failed to apply patch: %s" % patch_file)
def test_fromstring(self):
    """fromstring() should yield all 5 patches from the multi-file fixture."""
    # `with` closes the handle and, unlike the old try/finally where open()
    # sat inside the try, cannot hit f.close() on an unbound name when
    # open() itself fails.
    with open(join(TESTS, "01uni_multi/01uni_multi.patch"), "rb") as f:
        readstr = f.read()
    pst = patch.fromstring(readstr)
    self.assertEqual(len(pst), 5)
def test_fromstring(self):
    """fromstring() should yield all 5 patches from the multi-file fixture."""
    # `with` closes the handle and, unlike the old try/finally where open()
    # sat inside the try, cannot hit f.close() on an unbound name when
    # open() itself fails.
    with open(join(tests_dir, "01uni_multi/01uni_multi.patch"), "rb") as f:
        readstr = f.read()
    pst = patch.fromstring(readstr)
    self.assertEqual(len(pst), 5)
def patch(base_path=None, patch_file=None, patch_string=None, strip=0, output=None):
    """Applies a diff from file (patch_file) or string (patch_string)
    in base_path directory or current dir if None"""

    class PatchLogHandler(logging.Handler):
        # Routes records emitted by the "patch" library's logger into the
        # Conan output stream so patch progress shows up in the build log.
        def __init__(self):
            logging.Handler.__init__(self, logging.DEBUG)
            self.output = output or ConanOutput(sys.stdout, True)
            self.patchname = patch_file if patch_file else "patch"

        def emit(self, record):
            logstr = self.format(record)
            if record.levelno == logging.WARN:
                self.output.warn("%s: %s" % (self.patchname, logstr))
            else:
                self.output.info("%s: %s" % (self.patchname, logstr))

    # Replace any handlers already installed on the "patch" logger so
    # repeated calls do not produce duplicate output.
    patchlog = logging.getLogger("patch")
    if patchlog:
        patchlog.handlers = []
        patchlog.addHandler(PatchLogHandler())

    if not patch_file and not patch_string:
        return
    if patch_file:
        patchset = fromfile(patch_file)
    else:
        # fromstring() wants bytes, so the string input is encoded first.
        patchset = fromstring(patch_string.encode())

    if not patchset:
        raise ConanException("Failed to parse patch: %s" % (patch_file if patch_file else "string"))

    # account for new and deleted files, upstream dep won't fix them
    items = []
    for p in patchset:
        # p.source/p.target are bytes in this patch-lib version; strip the
        # conventional git "a/"/"b/" prefixes after decoding.
        source = p.source.decode("utf-8")
        if source.startswith("a/"):
            source = source[2:]
        target = p.target.decode("utf-8")
        if target.startswith("b/"):
            target = target[2:]
        if "dev/null" in source:
            # File creation: write the added lines out directly.
            # NOTE(review): only hunks[0] is used — assumes a newly added
            # file always arrives as a single hunk; confirm for large adds.
            if base_path:
                target = os.path.join(base_path, target)
            hunks = [s.decode("utf-8") for s in p.hunks[0].text]
            # hunk[1:] drops the leading '+' marker from each added line.
            new_file = "".join(hunk[1:] for hunk in hunks)
            save(target, new_file)
        elif "dev/null" in target:
            # File deletion: remove the file instead of patching it.
            if base_path:
                source = os.path.join(base_path, source)
            os.unlink(source)
        else:
            # Ordinary modification — leave it for patchset.apply().
            items.append(p)
    patchset.items = items

    if not patchset.apply(root=base_path, strip=strip):
        raise ConanException("Failed to apply patch: %s" % patch_file)
def patch(self, diff):
    """
    Apply patch data from 'diff' to the file at 'path'.

    'diff' must contain unified diff data.
    """
    patch.setdebug()
    patcher = patch.fromstring(diff)
    # fromstring() returns a falsy value when the diff cannot be parsed;
    # calling .apply() on that would raise AttributeError instead of
    # reporting the real problem.
    if not patcher:
        self.error("couldn't parse patch")
    if not patcher.apply():
        self.error("couldn't apply patch")
def apply_patch(self, diff):
    """
    Apply patch data from 'diff' to the file at 'path'.

    'diff' must contain unified diff data.
    """
    # patch.setdebug() stays disabled: broken under Python 3.
    patchset = patch.fromstring(bytes(diff, 'utf-8'))
    if not patchset:
        self.error("couldn't parse patch")
    applied = patchset.apply()
    if not applied:
        self.error("couldn't apply patch")
def patch(base_path=None, patch_file=None, patch_string=None):
    """Applies a diff from file (patch_file) or string (patch_string)
    in base_path directory or current dir if None"""
    if not patch_file and not patch_string:
        return
    if patch_file:
        patchset = fromfile(patch_file)
    else:
        # The patch library expects bytes under Python 3.
        # NOTE(review): assumes patch_string is str, not bytes — confirm callers.
        patchset = fromstring(patch_string.encode())
    # The return value of apply() was previously discarded, so a patch
    # that failed to apply went completely unnoticed; fail loudly instead
    # (ConanException is what this module raises for the same condition).
    if not patchset.apply(root=base_path):
        raise ConanException("Failed to apply patch: %s" % patch_file)
def patch(base_path=None, patch_file=None, patch_string=None):
    """Applies a diff from file (patch_file) or string (patch_string)
    in base_path directory or current dir if None"""
    if not patch_file and not patch_string:
        # Nothing to apply.
        return
    patchset = fromfile(patch_file) if patch_file else fromstring(patch_string.encode())
    applied = patchset.apply(root=base_path)
    if not applied:
        raise ConanException("Failed to apply patch: %s" % patch_file)
PATCH = patch.fromstring(""" --- scripts/postinst 2020-06-07 19:36:36.000000000 -0700 +++ scripts/postinst 2020-06-07 19:39:48.000000000 -0700 @@ -68,63 +68,6 @@ # Log file output where it's located echo "Plex share located at $PLEX_LIBRARY_PATH" - -# Set rights on Plex share -synoshare --setuser Plex RW = plex,admin - -# Add friendly warnings not to place media files in the Plex share in multiple languages -# English, German, French, Spanish, Japanese, Chinese (simplified) -touch "$PLEX_LIBRARY_PATH/Please do not place any media files here." -touch "$PLEX_LIBRARY_PATH/Bitte legen Sie hier keine Mediendateien ab." -touch "$PLEX_LIBRARY_PATH/Veuillez ne placer aucun fichier multimédia ici." -touch "$PLEX_LIBRARY_PATH/Por favor, no coloque ningún archivo multimedia aquí." -touch "$PLEX_LIBRARY_PATH/ここにメディアファイルを置かないでください。" -touch "$PLEX_LIBRARY_PATH/请不要在此处放置任何媒体文件。" - -# Set the ACLs to standard -synoacltool -del $PLEX_LIBRARY_PATH -synoacltool -add $PLEX_LIBRARY_PATH group:administrators:allow:rwxpdDaARWc--:fd-- -synoacltool -add $PLEX_LIBRARY_PATH user:admin:allow:rwxpdDaARWc:fd-- -synoacltool -add $PLEX_LIBRARY_PATH user:plex:allow:rwxpdDaARWcCo:fd-- - -# Verify the Plex share is visible from File Station for those with access permission -synoshare --setbrowse Plex 1 - -# Create temp transcoding and "Plex Media Server" directories if required -if [ ! -d $PLEX_LIBRARY_PATH/tmp_transcoding ]; then - mkdir $PLEX_LIBRARY_PATH/tmp_transcoding -fi - -if [ ! -d "$PLEX_LIBRARY_PATH/Library/Application Support/Plex Media Server" ]; then - mkdir -p "$PLEX_LIBRARY_PATH/Library/Application Support/Plex Media Server" -fi - -# Are Ownership corrections needed? ( We will do this normally when first creating the share. 
) -FixOwner=0; - -# If plex:users is not the current owner of Library, make it so -[ -d $PLEX_LIBRARY_PATH/Library ] && [ "$(stat -c %U $PLEX_LIBRARY_PATH/Library)" != "plex" ] && FixOwner=1 -[ -d $PLEX_LIBRARY_PATH/Library ] && [ "$(stat -c %G $PLEX_LIBRARY_PATH/Library)" != "users" ] && FixOwner=1 - -[ -d $PLEX_LIBRARY_PATH/tmp_transcoding ] && [ "$(stat -c %U $PLEX_LIBRARY_PATH/tmp_transcoding)" != "plex" ] && FixOwner=1 -[ -d $PLEX_LIBRARY_PATH/tmp_transcoding ] && [ "$(stat -c %G $PLEX_LIBRARY_PATH/tmp_transcoding)" != "users" ] && FixOwner=1 - -# Do we need set owner & group? -if [ $FixOwner -eq 1 ]; then - - # Fix tmp_transcoding, Preferences.xml and Plug-in Support first - chown -R plex:users $PLEX_LIBRARY_PATH/tmp_transcoding - chown plex:users "$PLEX_LIBRARY_PATH/Library/Application Support/Plex Media Server/Preferences.xml" - chown -R plex:users "$PLEX_LIBRARY_PATH/Library/Application Support/Plex Media Server/Plug-in Support" - - # Now launch the blanket fix-everything - chown -R plex:users $PLEX_LIBRARY_PATH/Library & - - # Give the chown time to execute before starting. (minimize false errors for large libraries) - sleep 3 -fi - - # To handle TV Butler cards and fix HW Transcoding we need to make a Video group and fix some devices. # Setup udev rule for TV Butler device for that specific vendor ID.""".encode( 'utf8'))
# NOTE(review): this fragment appears to be the interior of a logging
# handler plus a module-level smoke test of the patch library; it uses
# Python 2 syntax (`print logstr`) and its final statement is truncated.
def __init__(self):
    logging.Handler.__init__(self, logging.DEBUG)

def emit(self, record):
    # Echo every formatted record from the "patch" logger to stdout.
    logstr = self.format(record)
    print logstr

# Replace any existing handlers on the "patch" logger with ours.
patchlog = logging.getLogger("patch")
patchlog.handlers = []
patchlog.addHandler(PatchLogHandler())

# Apply a diff that creates a brand-new file in the current directory.
patch = fromstring("""--- /dev/null
+++ b/newfile
@@ -0,0 +0,3 @@
+New file1
+New file2
+New file3
""")
patch.apply(root=os.getcwd(), strip=0)

with open("newfile", "rb") as f:
    newfile = f.read()
# NOTE(review): compares str to bytes — only equal under Python 2.
assert "New file1\nNew file2\nNew file3\n" == newfile

# Now apply the reverse diff, deleting the file again.
# NOTE(review): source is truncated here — this string literal is
# unterminated in the copy under review.
patch = fromstring("""--- a/newfile
+++ /dev/null
@@ -0,3 +0,0 @@
-New file1
-New file2
def execute(self, phase, patch_uri, build_args, **state):
    """Apply one or more patches (from URL, local file, or inline string)
    to the source directory for the given build phase, logging to the
    phase's log file and recording completion in the status file."""
    # Build our environment variables
    env = dict(os.environ)

    # Merge in extra env
    if 'env' in build_args:
        env.update(build_args['env'])

    # We need the actual source path to actually apply a patch
    if 'source' in state:
        src_path = state['source']
        base_src = os.path.basename(src_path)
        status_path = os.path.join(state['patches_dir'], '{}.json'.format(base_src))
        logs_path = os.path.join(state['logs_dir'], base_src)
    else:
        raise Exception('Unable to apply a patch without a source directory')

    # Load the status file
    status = get_status(status_path)

    # Skip the whole phase if the status file says it already completed.
    if phase not in status or not status[phase]:
        os.makedirs(logs_path, exist_ok=True)
        with open(os.path.join(logs_path, '{}_{}.log'.format(base_src, phase)), 'w') as logfile:
            # Accept either a single patch URI or a sequence of them.
            if is_sequence(patch_uri):
                patch_uris = patch_uri
            else:
                patch_uris = [patch_uri]

            errors = False
            for patch_uri in patch_uris:
                try:
                    # Three sources, in priority order: URL, local file,
                    # then raw diff text.
                    if patch_uri.startswith(('http', 'ftp')):
                        cprint(
                            indent('Applying patch from URL to "{}"'.format(base_src), 8),
                            'white',
                            attrs=['bold']
                        )
                        logfile.write('Applying patch from URL "{}" to "{}"\n'.format(patch_uri, base_src))
                        pset = patch.fromurl(patch_uri)
                    elif os.path.exists(patch_uri):
                        cprint(
                            indent('Applying patch from local file to "{}"'.format(base_src), 8),
                            'white',
                            attrs=['bold']
                        )
                        logfile.write(
                            'Applying patch from local file "{}" to "{}"\n'.format(patch_uri, base_src)
                        )
                        pset = patch.fromfile(patch_uri)
                    else:
                        cprint(
                            indent('Applying patch from string to "{}"'.format(base_src), 8),
                            'white',
                            attrs=['bold']
                        )
                        logfile.write('Applying patch from string "{}" to "{}"\n'.format(patch_uri, base_src))
                        # Strings have to be bytes encoded
                        pset = patch.fromstring(patch_uri.encode('utf-8'))
                except:
                    # NOTE(review): bare except silently converts any load
                    # failure (including KeyboardInterrupt) into pset=False;
                    # consider catching Exception and logging the cause.
                    pset = False
                    pass

                if not pset:
                    errors = True
                    cprint(indent('Failed to load patch "{}"'.format(patch_uri), 8), 'red', attrs=['bold'])

                # Patches may declare a custom root; default to the source dir.
                root_folder = build_args.get('patch_root', src_path)
                # Redirect the patch library's own logging into the log file.
                patch.streamhandler = patch.logging.StreamHandler(stream=logfile)
                patch.setdebug()
                if pset and not pset.apply(root=root_folder):
                    errors = True
                    cprint(
                        indent('Failed to apply patch "{}" to "{}"'.format(patch_uri, base_src), 8),
                        'red',
                        attrs=['bold']
                    )

            if errors:
                raise Exception('{} step for {} failed to apply patches'.format(phase, base_src))
            else:
                # Mark this phase done so reruns skip it.
                status = update_status(status_path, {phase: True})
    else:
        cprint(
            indent('{} step for {} complete... Skipping...'.format(phase, base_src), 8),
            'yellow',
            attrs=['bold']
        )
def make_assertions(commit_hash, fingerprint, parents, patch, diff_lines):
    """Validate a parsed commit (from parse_diff) and return a dict of errors.

    Returns None for a commit signed with a known developer fingerprint,
    otherwise a (possibly empty) dict mapping check names to messages.
    """
    errors = {}
    # no POW for merge.
    if len(parents) == 0:
        if fingerprint not in DEVELOPER_FINGERPRINTS:
            errors['genesis'] = "orphan commit is not signed properly"
    # If this is a valid signed commit, skip everything else.
    if fingerprint:
        if fingerprint in DEVELOPER_FINGERPRINTS:
            return None
        errors['fingerprint'] = "Fingerprint: %s not in it %s" % (fingerprint, DEVELOPER_FINGERPRINTS)
    if not (commit_hash and re.match('^[0-9a-f]{40}$', commit_hash)):
        # Fixed: was `error['commit']` (NameError) in the original.
        errors['commit'] = "invalid commit hash %s" % commit_hash
    elif len(parents) == 1:
        # Proof-of-work check: smaller diffs must meet a harder hash target.
        diff_string = ''.join(diff_lines)
        diff_size = len(diff_string) + 1  # `+ 1` to match `wc -c`
        if not (int((30.0 / (diff_size)) * 1461501637330902918203684832716283019655932542975) > int(commit_hash, 16)):
            errors['pow'] = "Needs more work. Diff size: %s" % (diff_size,)
        if len(patch) > 1:
            errors["files"] = "too many files commited"
        if len(patch.removed_files) != 0:
            errors["files-removed"] = "too many files removed"
        if len(patch.added_files) != 1:
            errors["files-added"] = "only one file can be added: %s" % (patch.added_files)
        try:
            diff_string.decode('utf-8')
        except UnicodeDecodeError as e:
            errors['utf8'] = str(e)
        # TODO: implement gpg key distribution
    else:
        if len(diff_lines) > 0:
            # Fixed: the original assigned a (message, args) tuple instead
            # of %-formatting the message.
            errors['merge'] = 'merge commit should have no conflicts or changes: %s' % (commit_hash,)

    # So we know that there is only a single new file that is being validated
    # here. Now extract the data without the unified-diff metadata and hash
    # it to match the filename.
    patch = pypatch.fromstring(''.join(diff_lines))
    for p in patch.items:
        if p.source == 'dev/null':  # no access to the real /dev/null
            original_file = StringIO.StringIO()
        else:
            original_file = open(p.source)
        try:
            new_file = ''.join(list(patch.patch_stream(original_file, p.hunks)))
        finally:
            # Close the handle (previously leaked for real files).
            original_file.close()
        if diff_lines[-1] == '\ No newline at end of file':
            new_file = new_file[:-1]

        data_hash = hashlib.sha256(new_file).hexdigest()
        # Data hash must match the file name in the ./data/ directory.
        if p.target.startswith('b/data/'):
            if p.target.endswith('.json'):
                target_path = ('data/' + data_hash + '.json')
                if p.target[2:] != target_path:
                    errors["name"] = 'Target file %s is not named %s, make sure the path and sha1 hexdigest is correct' % (p.target[2:], target_path)
                schema = Schema({
                    Required('parent_sha1'): Any("", All(str, Length(min=40, max=40))),
                    Required('data'): Any(dict, str)
                })
                # Validate it!
                # NOTE(review): `data` is undefined here — the bdecode step
                # that produced it is commented out upstream, so this line
                # raises NameError when reached; confirm intended behavior.
                schema(data)
            else:
                errors['name'] = "Invalid extension"
        else:
            errors["path"] = 'invalid directory. Should be in "data/"'
    # we need better error messages
    return errors