def process_workingcopy():
    """Run 'hg status' for the working copy and print it, decorating each
    entry with any staging-area information found under the repo root.

    NOTE(review): the body uses both ``self`` and ``options`` but the
    signature declares neither — presumably this is a method and should be
    ``def process_workingcopy(self, options):``; confirm against callers.

    Returns True (always, on the visible paths).
    """
    #command = ['hg', 'status', '--subrepos', '-q', '.']
    # -C includes copy/rename sources; fixup_renames() folds those into
    # single display lines.
    command = ['hg', 'status', '--subrepos', '-q', '-C', '.']
    output = subprocess.Popen(
        command, stdout=subprocess.PIPE).communicate()[0].decode("utf-8")
    lines = fixup_renames(output.split('\n'))
    # decorate entries based on any staging information
    orphaned_tag = '^'
    orphaned_count = 0
    root = find_hg_root()
    stage_path = Stage.StageIO().get_staging_root(root, options)
    if os.path.exists(stage_path):
        # each subdirectory of the staging root is a named staging area
        stage_names = os.listdir(stage_path)
        # if len(stage_names) and len(lines) == 0:
        #     msg = 'ERROR: Orphaned staged entries found in the following areas:\n'
        #     for stage_name in stage_names:
        #         msg += '   [%s]\n' % stage_name
        #     msg += '\nUse "unstage --erase" to clear them.'
        #     self.message = msg
        #     return False
        for stage_name in stage_names:
            # reference_count = 0
            # capture_count = 0
            stage_db_path = os.path.join(stage_path, stage_name)
            stage_db_file = os.path.join(stage_db_path, 'stage.db')
            if not os.path.exists(stage_db_file):
                continue  # odd... should probably print a message
            stage_db = Stage.StageIO().load_stage_db(stage_db_file)
            # (yes, yes, I know...quadratic complexity: I don't care :)
            for key in stage_db:
                staged_entry = stage_db[key]
                found = False
                for i in range(len(lines)):
                    # status lines are "S <path>"; compare path portion only
                    if key == lines[i][2:]:
                        # reference_count += 1 if staged_entry.snapshot is None else 0
                        # capture_count += 1 if staged_entry.snapshot is not None else 0
                        snap = Stage.StageIO().get_staged_entry_tag(
                            stage_db_path, staged_entry, key)
                        # rewrite the line with its staging-area decoration
                        lines[i] = '%s [%s] %s (%s)' % (
                            lines[i][:1], stage_name, key, snap)
                        found = True
                        break
                if not found:
                    snap = Stage.StageIO().get_staged_entry_tag(
                        stage_db_path, staged_entry, key)
                    # if this is a reference, it's orphaned (staged entry
                    # with no matching pending change in the working copy)
                    orphaned = ''
                    if staged_entry.snapshot is None:
                        # reference_count += 1
                        orphaned = orphaned_tag
                        orphaned_count += 1
                    lines.append('%s [%s] %s%s (%s)' %
                                 (staged_entry.state, stage_name, orphaned,
                                  key, snap))
    self.process_lines(lines, options)
    if orphaned_count != 0:
        print(
            '\n(Use the "staged" command to purge %sorphaned references)' %
            orphaned_tag)
    return True
def __init__(self, options):
    """Commit pending working-copy changes (or the contents of a single
    staging area) via 'hg commit', gathering per-file comments for the
    commit message.

    Fixes applied in this revision:
      * two ``'%s…' (args)`` calls were missing the ``%`` operator and
        raised TypeError ("str is not callable") instead of printing;
      * ``staged_entries.keys()[0]`` / ``stage_db.keys()`` subscripting and
        ``dict_keys.sort()`` are Python 2 idioms that fail on Python 3 —
        replaced with ``list(...)[0]`` / ``sorted(...)``;
      * ``pyperclip.copy()`` was handed a *list* from ``readlines()``,
        which fails (silently, inside the bare except) — the file text is
        now passed as a single string.

    :param options: parsed command options (branch, args, stage_name,
        working_dir, log_file, commit_message, ansi settings, ...)
    """
    if not options.branch:
        return
    working_dir = os.getcwd()
    staged_entries = Staged().get_staged_entries(options)
    if len(staged_entries) and len(options.args):
        print("ERROR: You have staged entries pending; those must be committed or cleared.", file=sys.stderr)
        sys.exit(1)
    if len(staged_entries) > 1:
        print("ERROR: You may only commit staged modifications from one area at a time.", file=sys.stderr)
        sys.exit(1)
    if (options.stage_name is not None) and (options.stage_name not in staged_entries):
        print('ERROR: You have specified a staging area ("%s") that does not exist.' % options.stage_name, file=sys.stderr)
        sys.exit(1)
    output = []
    root = options.working_dir
    stage_db = {}
    if len(staged_entries):
        # staged entries are all relative to the root of the working copy
        # so we need to put ourselves there...
        root = find_hg_root()
        if root is None:
            print("ERROR: Could not find the root of the working copy.", file=sys.stderr)
            sys.exit(1)
        # need root before we alter it
        # (at most one staging area exists here; see the > 1 check above)
        stage_name = list(staged_entries)[0]
        stage_path = StageIO().get_staging_root(root, options)
        stage_db_path = os.path.join(stage_path, stage_name)
        stage_db_file = os.path.join(stage_db_path, 'stage.db')
        stage_db = StageIO().load_stage_db(stage_db_file)
        lines = list(stage_db.keys())
        os.chdir(root)
        os.chdir("..")
        root = os.getcwd()
    else:
        command = ['hg', 'status', '-q', '.']
        output = subprocess.Popen(command, stdout=subprocess.PIPE).communicate()[0].decode("utf-8")
        lines = output.split('\n')
    if len(options.args):
        newlines = []
        # they are specifying files to be committed...filter 'lines'
        # based on them
        for item in options.args:
            for i in range(len(lines)):
                if item in lines[i]:
                    newlines.append(lines[i])
                    break
        if len(newlines) == 0:
            print("Your specified filter(s) did not match any pending changes in the working copy.", file=sys.stderr)
            sys.exit(1)
        lines = newlines
    all_comments = {}
    batch_text = ''
    if os.name == 'nt':
        batch_text = '@echo off\n'
        batch_text += 'set FG=%_fg\n'
        batch_text += 'set BG=%_bg\n'
    files_to_commit = ['.']
    if len(staged_entries) or len(options.args):
        files_to_commit = []
    snapshot_backups = False
    for line in lines:
        line = line.strip()
        if not len(line):
            continue
        if len(stage_db):
            # staged keys are bare paths; state comes from the database
            status = stage_db[line].state
            filename = line
        else:
            # status lines are "S <path>"
            status = line[0]
            filename = line[2:]
        staged_entry = None
        if len(stage_db):
            staged_entry = stage_db[filename]
            if staged_entry.state != status:
                # must be the same state; abort
                print('ERROR: Staged version of "%s" has different state (%s != %s).' % (filename, staged_entry.state, status), file=sys.stderr)
                sys.exit(1)
            full_path = os.path.join(root, filename)
        else:
            full_path = os.path.join(options.working_dir, filename)
        if not os.path.exists(full_path):
            print('WARNING: Skipping non-existent file "%s".' % full_path, file=sys.stderr)
            continue
        # if this is a referenced entry, then no additional processing is
        # required; a snapshot, however, requires some more complicated
        # heuristics...
        if len(stage_db) and (staged_entry.snapshot is not None):
            # staged snapshot heuristics:
            # 1. if the timestamps are equal, the snapshot becomes a reference
            snapshot_path = os.path.join(stage_db_path, staged_entry.snapshot)
            snapshot_stat = os.stat(snapshot_path)
            source_stat = os.stat(full_path)
            if int(snapshot_stat.st_mtime) == int(source_stat.st_mtime):
                # the current state of the source version is identical
                # to the snapshot, so that is what will be committed
                # (reference behavior)
                pass
            else:
                # 2. if timestamps differ, then the snapshot becomes the commit target
                #    a. a backup of the source file is made and then it is reverted
                #    b. the source backup uses the snapshot's filename with a ".bak" extension
                #    c. the snapshot contents replace the source file using the snapshot's state
                #    d. committing proceeds as though the staged file is a reference
                #    e. when committing is complete, the source's backup and state are restored
                snapshot_bak_path = os.path.join(stage_db_path, '%s.bak' % staged_entry.snapshot)
                try:
                    shutil.copy2(full_path, snapshot_bak_path)
                except Exception:
                    print('ERROR: Backup of "%s" could created for version in "%s" staging area..' % (filename, list(staged_entries)[0]), file=sys.stderr)
                    sys.exit(1)
                try:
                    shutil.copy2(snapshot_path, full_path)
                except Exception:
                    print('ERROR: Staged version of "%s" in "%s" staging area could placed for comitting.' % (filename, list(staged_entries)[0]), file=sys.stderr)
                    sys.exit(1)
                snapshot_backups = True
        comments = None
        if ((status == 'M') or (status == 'A')):
            if len(stage_db) or len(options.args):
                files_to_commit.append(filename)
            if (options.log_file is None) and (options.commit_message is None) and is_valid(filename):
                try:
                    comments = extract_comments(full_path, display=DISPLAY_COMMENT)
                except Exception as e:
                    os.chdir(working_dir)
                    print(str(e), file=sys.stderr)
                    comments = None
            stage_prefix = ''
            if len(stage_db):
                stage_prefix = '[%s] ' % (options.stage_name if options.stage_name is not None else list(staged_entries)[0])
            if options.ansi_color:
                if options.ansi_color_requires_batch:
                    batch_text += 'echo %s\033[1;33m%s\n' % (stage_prefix, filename)
                else:
                    print('%s\033[1;33m%s' % (stage_prefix, filename))
            else:
                print('%s%s' % (stage_prefix, line))
            if comments:
                all_comments[filename] = comments
                #all_comments.append('[ %s ]' % file)
                #all_comments += comments
    if options.ansi_color:
        if options.ansi_color_requires_batch:
            if os.name == 'nt':
                batch_text += 'color %FG on %BG\n'
            open(options.batch_file_name, 'w').write(batch_text)
            os.system(options.batch_file_name)
    comment_count = len(all_comments)
    if comment_count > 0:
        # stable ordering of the per-file comment sections
        comment_keys = sorted(all_comments)
        with open(options.batch_file_name, 'w') as f:
            for key in comment_keys:
                if comment_count > 1:
                    f.write('[ %s ]\n' % key)
                f.write('\n'.join(all_comments[key]))
                f.write('\n')
        if 'PYHG_COMMENT_EDITOR' in os.environ:
            subprocess.call([os.environ['PYHG_COMMENT_EDITOR'], options.batch_file_name])
        else:
            if os.name == 'nt':
                # probably not the best editor, but it's known
                subprocess.call([r'C:\Windows\System32\notepad.exe', options.batch_file_name])
            else:
                # same here (although vi is MY personal favorite :)
                subprocess.call(['vi', options.batch_file_name])
    if options.log_file is not None:
        # ensure the log file text is wrapped at specific column offsets
        log_lines = wrap_lines(options.log_file, options.wrap_at)
        if options.auth_token is not None:
            log_lines.append('(%s)' % options.auth_token)
        # write it out to the 'batch_file_name' target
        open(options.batch_file_name, 'w').write('\n'.join(log_lines))
        # set a flag to trip the log version of the HG command
        all_comments[options.log_file] = True
    elif options.commit_message is not None:
        # ensure the log file text is wrapped at specific column offsets
        log_lines = wrap_lines(options.commit_message, options.wrap_at)
        if options.auth_token is not None:
            log_lines.append('(%s)' % options.auth_token)
        # write it out to the 'batch_file_name' target
        open(options.batch_file_name, 'w').write('\n'.join(log_lines))
        # set a flag to trip the log version of the HG command
        all_comments[options.commit_message] = True
    try:
        input('Press ENTER when ready to commit (press Ctrl-C to abort):')
    except SyntaxError:
        pass
    if len(stage_db):
        assert len(files_to_commit) == len(stage_db), "Staged files have been missed in the commit!"
    if len(all_comments):
        command = ['hg', 'commit', '-l', options.batch_file_name] + files_to_commit
    else:
        command = ['hg', 'commit'] + files_to_commit
    if len(stage_db):
        output = subprocess.Popen(command, stdout=subprocess.PIPE, cwd=root).communicate()[0].decode("utf-8")
    else:
        output = subprocess.Popen(command, stdout=subprocess.PIPE).communicate()[0].decode("utf-8")
    # echo hg's output, skipping any leading blank lines
    first_line = True
    lines = output.split('\n')
    for line in lines:
        line = line.strip()
        if first_line and len(line) == 0:
            continue
        print(line)
        first_line = False
    if options.push_changes:
        options.args = []
        if options.push_external:
            options.args = ["extern"]
        Push(options)
    # put the comment text on the system clipboard (if available)
    if len(all_comments):
        comment_text = open(options.batch_file_name).read()
        try:
            pyperclip.copy(comment_text)
        except Exception:
            pass
    os.chdir(working_dir)
    # if we committed from staged files, remove the area
    if len(stage_db):
        if snapshot_backups:
            # we need to restore snapshot backups to their proper place
            # in the repository before we blow away the staging area
            for key in stage_db:
                entry = stage_db[key]
                if entry.snapshot is not None:
                    snapshot_bak_path = os.path.join(stage_db_path, '%s.bak' % entry.snapshot)
                    if os.path.exists(snapshot_bak_path):
                        # restore the contents of this file
                        shutil.copy2(snapshot_bak_path, key)
                        # if state is 'M', then the copy itself will set it
                        if entry.state == 'A':
                            # execute an add on the file
                            command = ['hg', 'add', key]
                            output = subprocess.Popen(command, stdout=subprocess.PIPE, cwd=root).communicate()[0].decode("utf-8")
                            if len(output.strip()) != 0:
                                print('ERROR: Failed to restore snapshot backup for entry "%s" in the "%s" staging area.' % (key, list(staged_entries)[0]), file=sys.stderr)
                                sys.exit(1)
        shutil.rmtree(stage_db_path)
def get_staged_entries(self, options):
    """Collect the entries of every (or one named) staging area, pruning
    orphaned references along the way.

    Returns a dict mapping stage-area name -> list of display strings.
    On error (not at the working-copy root) sets ``self.message`` and
    returns ``[]`` — NOTE(review): the error return is a *list* while the
    success return is a *dict*; callers using ``len()`` work with both,
    but lookups would not.

    Side effects: caches the staging root in ``self.stage_path``, creates
    it if missing, rewrites stage databases whose orphaned references were
    pruned, and removes staging areas that end up empty.
    """
    working_dir = os.getcwd()
    root = find_hg_root()
    if root:
        os.chdir(root)
        os.chdir("..")
    if not os.path.exists('.hg'):
        os.chdir(working_dir)
        self.message = 'ERROR: Must be in root of working copy to stage.'
        return []
    command = ['hg', 'status', '-q', '-C', '.']
    output = subprocess.Popen(
        command, stdout=subprocess.PIPE).communicate()[0].decode("utf-8")
    output_lines = fixup_renames(output.split('\n'))
    stage_name = options.stage_name
    self.stage_path = super(Staged, self).get_staging_root(root, options)
    if not os.path.exists(self.stage_path):
        os.mkdir(self.stage_path)
    stage_names = []
    if stage_name is None:
        # gather up all staging area names
        for entry in os.listdir(self.stage_path):
            stage_names.append(entry)
    else:
        stage_names.append(stage_name)
    staged_entries = {}
    for stage_name in stage_names:
        stage_db_path = os.path.join(self.stage_path, stage_name)
        stage_db_file = os.path.join(stage_db_path, 'stage.db')
        if not os.path.exists(stage_db_file):
            continue  # odd... should probably print a message
        stage_db = super(Staged, self).load_stage_db(stage_db_file)
        reference_count = 0
        capture_count = 0
        entries = []
        bad_keys = []
        for key in stage_db:
            staged_entry = stage_db[key]
            # a "reference" has no snapshot; a "capture" does
            reference_count += 1 if staged_entry.snapshot is None else 0
            capture_count += 1 if staged_entry.snapshot is not None else 0
            found = False
            for line in output_lines:
                # NOTE(review): the second half of this test makes ANY
                # snapshot entry match the FIRST status line regardless of
                # key — looks suspect; confirm intent.
                if (key in line) or (staged_entry.snapshot is not None):
                    snapshot_path = None
                    snap = super(Staged, self).get_staged_entry_tag(
                        stage_db_path, staged_entry, key)
                    entries.append('%s (%s)' % (line, snap))
                    found = True
                    break
            if not found:
                if staged_entry.snapshot is None:
                    # orphaned reference: staged but no pending change
                    bad_keys.append(key)
                else:
                    # note: snapshots are independent of the state of their source files
                    snap = super(Staged, self).get_staged_entry_tag(
                        stage_db_path, staged_entry, key)
                    entries.append('%s %s (%s)' % (staged_entry.state, key, snap))
        if len(bad_keys):
            # all 'bad_keys' are references
            for key in bad_keys:
                del stage_db[key]
        if len(stage_db):
            # save the corrected database
            super(Staged, self).save_stage_db(stage_db, stage_db_file)
            staged_entries[stage_name] = entries
        else:
            if (len(output_lines) == 0) and (reference_count != 0) and (capture_count == 0):
                print('WARNING: Purging orphaned staging area "%s".' % stage_name)
            # if (len(output_lines) == 0) and (reference_count != 0) and (capture_count == 0):
            #     # orphaned references found
            #     os.chdir(working_dir)
            #     msg = 'ERROR: Orphaned reference entries found in the following staging areas:\n'
            #     for stage_name in stage_names:
            #         msg += '   [%s]\n' % stage_name
            #     msg += '\nUse "unstage --erase" to clear them.'
            #     self.message = msg
            #     return []
            if os.path.exists(stage_db_path):
                shutil.rmtree(stage_db_path)
    os.chdir(working_dir)
    return staged_entries
def execute(self, options, quiet=False, **kwargs):
    """Switch the working copy to another branch, shelving any pending
    changes first and re-applying any cached micro-branch work for the
    target branch afterwards.

    Fixes applied in this revision:
      * the failure message after a failed restore used two ``%s``
        placeholders but supplied only ``options.branch`` — a guaranteed
        TypeError; the previous ``self.message`` is now interpolated as
        the first placeholder (matching the pattern used in the
        apply-cache failure path below, which prints the prior message);
      * the branch-validation regex is now a raw string (avoids invalid
        escape-sequence warnings; pattern unchanged).

    :param options: parsed command options (branch, args, ...)
    :param quiet:   unused here; kept for interface compatibility
    :returns: True on success, False with ``self.message`` set on failure
    """
    if not options.branch:
        self.message = 'ERROR: Could not determine branch.'
        return False
    working_dir = os.getcwd()
    root = find_hg_root()
    if root:
        os.chdir(root)
        os.chdir("..")
    if not os.path.exists('.hg'):
        os.chdir(working_dir)
        self.message = 'ERROR: Must be in root of working copy to use the switch command.'
        return False
    if len(options.args) == 0:
        os.chdir(working_dir)
        self.message = 'ERROR: A target branch name must be specified.'
        return False
    self.mb_root = find_mb_root()  # this will not return if we can't find a working location
    self.mb_switch = os.path.join(self.mb_root, 'switch')
    if not os.path.exists(self.mb_switch):
        try:
            os.mkdir(self.mb_switch)
        except Exception:
            self.message = 'ERROR: Could not create folder "%s".' % self.mb_switch
            return False
    target_branch = options.args[0]
    # validate the target branch name
    output = subprocess.Popen(['hg', 'branches'],
                              stdout=subprocess.PIPE).communicate()[0].decode("utf-8")
    found = False
    for line in output.split('\n'):
        data = re.search(r'([a-zA-Z0-9\.]+)\s+', line)
        if not data:
            continue
        if target_branch == data.group(1):
            found = True
            break
    if not found:
        os.chdir(working_dir)
        self.message = 'ERROR: The specified target branch "%s" cannot be validated.' % target_branch
        return False
    # are there any pending changes?
    output = subprocess.Popen(['hg', 'status', '--quiet'],
                              stdout=subprocess.PIPE).communicate()[0].decode("utf-8")
    if len(output):
        # shelve the changes
        if not self.shelve(options):
            os.chdir(working_dir)
            return False
    # now the easy part...
    if subprocess.call(['hg', 'update', target_branch]) != 0:
        self.message = 'ERROR: Switching to the target branch "%s" failed.' % target_branch
        # ok, restore the shelved work above, if any
        if not self.restore(options):
            # whoa..everything's going to Hell in a handbasket...
            self.message = '%s\nERROR: Failed to restore the shelved microbranch for current branch "%s".' % (
                self.message, options.branch)
            return False
        # NOTE(review): control falls through here even though the update
        # failed — confirm a 'return False' isn't missing.
    # apply any cached micro-branch work
    options.args[0] = target_branch
    # make sure we overwrite things
    options.overwrite = True
    if not self.restore(options):
        print(self.message, file=sys.stderr)
        self.message = 'ERROR: Failed to apply cached micro-branch to branch "%s".' % target_branch
        return False
    # if restore() is successful, any cached data for the target
    # branch is deleted
    # we're done!
    os.chdir(working_dir)
    return True
def __init__(self, options):
    """Stage working-copy modifications into a named staging area
    ('default' unless ``options.stage_name`` is given), optionally
    capturing snapshot copies of modified files.

    Fixes applied in this revision:
      * the stale-entry purge did ``del status_db[key]`` — but every key
        in ``bad_keys`` is by construction *absent* from ``status_db``
        (that is what made it bad), so this was a guaranteed KeyError;
        the intent (and what the sibling Unstage code does) is
        ``del stage_db[key]`` so the saved database is the corrected one;
      * ``working_dir`` was referenced in the not-at-root error path but
        never assigned (NameError); it is now captured up front, matching
        the other command constructors in this file.

    :param options: parsed command options (branch, args, stage_name,
        snapshot, erase_cache, ...)
    """
    super(Stage, self).__init__()
    working_dir = os.getcwd()

    def generate_snapshot(options, stage_db_path, file_path, entry):
        # Return a StageEntry for 'file_path': a plain reference when not
        # in snapshot mode (or the file is gone), otherwise a snapshot
        # copy stored under a UUID-derived name in the stage database dir.
        # NOTE(review): if 'entry' were ever None this would raise on
        # entry.state; both call sites pass a real StageEntry — confirm.
        if (entry is None) or (entry.snapshot is None):
            if (not options.snapshot) or (not os.path.exists(file_path)):
                return StageEntry(None, entry.state)
        if entry.state != 'M':
            print(
                "ERROR: Only (M)odified files can be captured by snapshot.",
                file=sys.stderr)
            sys.exit(1)
        ss = entry.snapshot
        if ss is None:
            ss = str(uuid.uuid4()).replace('-', '')
        snapshot_file_name = os.path.join(stage_db_path, ss)
        if os.path.exists(snapshot_file_name):
            os.remove(snapshot_file_name)  # we are refreshing the snapshot
        if not os.path.exists(stage_db_path):
            os.mkdir(stage_db_path)
        # use copy2() to make sure the snapshot shares the timestamp of
        # the source file at the time of creation
        shutil.copy2(file_path, snapshot_file_name)
        return StageEntry(ss, entry.state)

    if not options.branch:
        print('ERROR: Could not determine branch.', file=sys.stderr)
        sys.exit(1)
    root = find_hg_root()
    if root:
        os.chdir(root)
        os.chdir("..")
    if not os.path.exists('.hg'):
        os.chdir(working_dir)
        print('ERROR: Must be in root of working copy to stage.', file=sys.stderr)
        sys.exit(1)
    stage_name = "default" if options.stage_name is None else options.stage_name
    stage_db = {}
    stage_path = super(Stage, self).get_staging_root(root, options)
    if not os.path.exists(stage_path):
        os.mkdir(stage_path)
    stage_db_path = os.path.join(stage_path, stage_name)
    stage_db_file = os.path.join(stage_db_path, 'stage.db')
    if os.path.exists(stage_db_file):
        if options.erase_cache:
            print('All staged entries in "%s" cleared.' % stage_name)
            shutil.rmtree(stage_db_path)
        else:
            stage_db = super(Stage, self).load_stage_db(stage_db_file)
    if options.erase_cache:
        return  # nothing else to do
    if (len(options.args) == 0):
        # no filters given: a bare 'stage' refreshes existing snapshots,
        # so confirm with the user first
        capture_count = 0
        for key in stage_db:
            if stage_db[key].snapshot is not None:
                capture_count += 1
        if capture_count:
            try:
                print(
                    'You are about to refresh snapshot entries in the "%s" staging area.'
                    % stage_name)
                input(
                    'Press ENTER if this is the intent (or press Ctrl-C to abort):'
                )
            except SyntaxError:
                pass
    command = ['hg', 'status', '-q', '-C', '.']
    output = subprocess.Popen(
        command, stdout=subprocess.PIPE).communicate()[0].decode("utf-8")
    output_lines = fixup_renames(output.split('\n'))
    lines = []
    if len(options.args):
        newlines = []
        # they are specifying files to be staged...filter 'lines'
        # based on them
        for item in options.args:
            for i in range(len(output_lines)):
                if item in output_lines[i]:
                    newlines.append(output_lines[i][2:])
                    break
        if len(newlines) == 0:
            print(
                "ERROR: Your specified filter(s) did not match any pending changes in the working copy.",
                file=sys.stderr)
            sys.exit(1)
        lines = newlines
    else:
        # strip off the status bit
        for i in range(len(output_lines)):
            lines.append(output_lines[i][2:])
    if len(lines) == 0:
        print("ERROR: No files have been selected for staging.", file=sys.stderr)
        sys.exit(1)
    # filter out duplicate entries
    status_db = {}
    for path in output_lines:
        key = path[2:].strip()
        status_db[key] = StageEntry(None, path[:1])
    added_files = []
    refreshed_files = []
    # all the files in lines[] are to be added to the staging database
    for path in lines:
        if path in stage_db:
            stage_db[path] = generate_snapshot(options, stage_db_path, path,
                                               stage_db[path])
            refreshed_files.append('%s %s' % (stage_db[path].state, path))
        else:
            for l in output_lines:
                if path in l:
                    added_files.append(l)
                    break
            entry = StageEntry()
            if path in status_db:
                entry = status_db[path]
            stage_db[path] = generate_snapshot(options, stage_db_path, path,
                                               entry)
    bad_keys = []
    for key in stage_db:
        if key not in status_db:
            # this file had been staged, but is now no longer modified
            bad_keys.append(key)
    for key in bad_keys:
        if stage_db[key].snapshot is not None:
            # it's a snapshot, delete it as well
            snapshot_file = os.path.join(stage_db_path, stage_db[key].snapshot)
            os.remove(snapshot_file)
        del stage_db[key]
    # save the new database
    super(Stage, self).save_stage_db(stage_db, stage_db_file)
    if len(added_files) or len(refreshed_files):
        s = Info.Status()
        if len(added_files):
            print(
                'The following new %s entries were added to the "%s" staging area:'
                % ('snapshot' if options.snapshot else 'reference', stage_name))
            s.process_lines(added_files, options)
        if len(refreshed_files):
            print(
                'The following snapshot entries were refreshed in the "%s" staging area:'
                % stage_name)
            s.process_lines(refreshed_files, options)
    else:
        print('No unique entries were added to the "%s" staging area.' %
              stage_name)
def __init__(self, options):
    """Remove entries matching the given filters from a staging area
    ('default' unless ``options.stage_name`` is given), deleting snapshot
    files and the whole area when it becomes empty.

    Fixes applied in this revision:
      * ``working_dir`` was referenced in the not-at-root error path but
        never assigned (NameError); captured up front like the other
        command constructors in this file;
      * a key matching more than one filter was appended to ``bad_keys``
        multiple times, making the later ``del stage_db[key]`` raise
        KeyError on the second pass — the filter loop now breaks after
        the first match.

    :param options: parsed command options (branch, args, stage_name,
        erase_cache, ...)
    """
    super(Unstage, self).__init__()
    working_dir = os.getcwd()
    if not options.branch:
        print('ERROR: Could not determine branch.', file=sys.stderr)
        sys.exit(1)
    root = find_hg_root()
    if root:
        os.chdir(root)
        os.chdir("..")
    if not os.path.exists('.hg'):
        os.chdir(working_dir)
        print('ERROR: Must be in root of working copy to stage.', file=sys.stderr)
        sys.exit(1)
    stage_name = "default" if options.stage_name is None else options.stage_name
    stage_path = super(Unstage, self).get_staging_root(root, options)
    if not os.path.exists(stage_path):
        os.mkdir(stage_path)
    stage_db_path = os.path.join(stage_path, stage_name)
    stage_db_file = os.path.join(stage_db_path, 'stage.db')
    if not os.path.exists(stage_db_path):
        if options.erase_cache:
            return  # nothing more to do
        print(
            'ERROR: No modifications are currently staged in "%s" for committing.'
            % stage_name,
            file=sys.stderr)
        sys.exit(1)
    if options.erase_cache:
        print('All entries in the "%s" staging area were cleared.' % stage_name)
        shutil.rmtree(stage_db_path)
        return
    if len(options.args) == 0:
        print('ERROR: No filter(s) specified for unstaging.', file=sys.stderr)
        sys.exit(1)
    stage_db = {}
    if os.path.exists(stage_db_file):
        stage_db = super(Unstage, self).load_stage_db(stage_db_file)
    command = ['hg', 'status', '-q', '-C', '.']
    output = subprocess.Popen(
        command, stdout=subprocess.PIPE).communicate()[0].decode("utf-8")
    output_lines = fixup_renames(output.split('\n'))
    # collect the staged keys selected by the user's filters (each key
    # at most once)
    bad_keys = []
    for key in stage_db:
        for item in options.args:
            if item in key:
                bad_keys.append(key)
                break
    if len(bad_keys) == 0:
        print(
            'ERROR: Your specified filter(s) did not match any entries in the "%s" staging area.'
            % stage_name,
            file=sys.stderr)
        sys.exit(1)
    # remember the matching status lines for the summary below
    unstaged_entries = []
    for key in bad_keys:
        for line in output_lines:
            if key in line:
                unstaged_entries.append(line)
    for key in bad_keys:
        if stage_db[key].snapshot is not None:
            # it's a snapshot, delete it as well
            snapshot_file = os.path.join(stage_db_path, stage_db[key].snapshot)
            os.remove(snapshot_file)
        del stage_db[key]
    if len(stage_db) == 0:
        if os.path.exists(stage_db_path):
            shutil.rmtree(stage_db_path)
    else:
        # save the new database
        super(Unstage, self).save_stage_db(stage_db, stage_db_file)
    if len(unstaged_entries):
        print(
            'The following existing entries were removed from the "%s" staging area:'
            % stage_name)
        s = Info.Status()
        s.process_lines(unstaged_entries, options)
    else:
        print(
            'No unique entries were removed from the "%s" staging area.' %
            stage_name)
def __init__(self, options):
    """Report whether files recorded in a shelved microbranch manifest
    have diverged ("intervening differences") from their current
    working-copy versions.

    Manifest format (written by the shelve command): optional
    'version N' header, one comment line, then '<status>?<file>?<key>'
    entries, where <key> is an md5 hash starting with MANIFEST_VERSION 2
    (a changeset id before that).
    """
    if not options.branch:
        return
    working_dir = os.getcwd()
    root = find_hg_root()
    if root:
        # set working directory to the top of the working copy
        os.chdir(root)
        os.chdir("..")
    if not os.path.exists('.hg'):
        os.chdir(working_dir)
        print("ERROR: Must be in root of working copy to shelf.", file=sys.stderr)
        sys.exit(1)
    shelf_name = 'shelf'
    if len(options.shelf_name):
        shelf_name = options.shelf_name
    shelf_name_unquoted = shelf_name
    # URL-quote so the shelf name is filesystem-safe
    shelf_name = quote(shelf_name, '')
    root = find_mb_root(
    )  # this will not return if we can't find a working location
    manifest_version = 0
    manifest = []
    manifest_name = os.path.join(root, '%s.manifest' % shelf_name)
    manifest_archive = os.path.join(root, '%s.7z' % shelf_name)
    manifest_comment = ''
    if not os.path.exists(manifest_name):
        if shelf_name == 'shelf':
            print('Working copy has no cached default microbranch.')
        else:
            print('Cannot access microbranch "%s".' % shelf_name_unquoted)
    else:
        manifest_version = 0
        manifest_lines = open(manifest_name).readlines()
        if manifest_lines[0].startswith('version '):
            manifest_version = int(manifest_lines[0][8:])
            del manifest_lines[0]
            # NOTE(review): reading index [1] AFTER deleting the version
            # line looks off by one (the comment should now be at [0]),
            # and only one 'del' follows — confirm against the manifest
            # writer in the shelve command.
            if manifest_version >= 1:
                manifest_comment = manifest_lines[1].rstrip()
                del manifest_lines[0]
        else:
            # legacy manifest with no version header: comment is first
            manifest_comment = manifest_lines[0].rstrip()
            del manifest_lines[0]
        conflict_count = 0
        for line in manifest_lines:
            line = line.rstrip()
            if len(line) == 0:
                continue
            status, file_name, previous_key = line.split(
                '?'
            )  # 'previous_key' will be an md5 hash starting with MANIFEST_VERSION 2
            if status == 'M':
                # normalize path separators for the current platform
                if os.name == 'nt':
                    file_name = file_name.replace('/', '\\')
                else:
                    file_name = file_name.replace('\\', '/')
                files_are_equal = False
                # see if the file is unchanged by the merge; in that case, just copy it
                if manifest_version <= 1:
                    # legacy comparison: changeset id plus CRC32 of contents
                    new_key = get_changeset_for(options, file_name)
                    if not new_key:
                        print(
                            "ERROR: Failed to determine changeset for file '%s'!"
                            % file_name,
                            file=sys.stderr)
                        os.chdir(working_dir)
                        sys.exit(1)
                    # NOTE(review): 'working_folder' is not defined in this
                    # function — this path raises NameError if reached;
                    # confirm where it was meant to come from.
                    old_crc32 = crc32(
                        os.path.join(working_folder, file_name))
                    new_crc32 = crc32(file_name)
                    files_are_equal = (previous_key == new_key) and (old_crc32 == new_crc32)
                else:
                    # modern comparison: md5 of current file contents
                    new_key = hashlib.md5(open(file_name, 'rb').read()).hexdigest()
                    files_are_equal = new_key == previous_key
                if not files_are_equal:
                    if conflict_count == 0:
                        print(
                            'Intervening differences detected for the following "%s" microbranch assets:'
                            % (shelf_name_unquoted
                               if shelf_name_unquoted != "shelf" else "default"))
                    print('\t', file_name)
                    conflict_count += 1
        if conflict_count == 0:
            print(
                'No intervening differences detected for "%s" microbranch.' %
                (shelf_name_unquoted
                 if shelf_name_unquoted != "shelf" else "default"))
    os.chdir(working_dir)
def execute(self, options, quiet=False, **kwargs):
    """Shelve pending working-copy changes into a 7-zip "microbranch"
    archive plus a '<shelf>.manifest' index, then (normally) revert the
    working copy.

    :param options: parsed command options (shelf_name, include/exclude
        filters, use_path, seven_zip, comment, no_revert, ide_state, ...)
    :param quiet:   suppress the summary output when True
    :param kwargs:  optional 'mb_root' override of the microbranch root
    :returns: True on success, False with ``self.message`` set on failure
    """
    if not options.branch:
        self.message = 'ERROR: Could not determine branch.'
        return False
    working_dir = os.getcwd()
    root = find_hg_root()
    if root:
        os.chdir(root)
        os.chdir("..")
    # remember the staging metadata folder (relative form) if any staging
    # areas exist, so it can be captured into the archive below
    stage_path = StageIO().get_staging_root(root, options)
    if os.path.exists(stage_path):
        stages = os.listdir(stage_path)
        stage_path = os.path.join(".hg", "stage") if len(stages) != 0 else None
    else:
        stage_path = None
    if not os.path.exists('.hg'):
        os.chdir(working_dir)
        self.message = 'ERROR: Must be in root of working copy to shelf.'
        return False
    command = ['hg', 'status', '-q', '-C']
    if (options.include_filter is None) and (len(options.exclude_filter) == 0):
        command.append(options.use_path)
    output = subprocess.Popen(
        command, stdout=subprocess.PIPE).communicate()[0].decode("utf-8")
    if len(output) > 0:
        lines = fixup_renames(output.split('\n'))
        shelf_name = 'shelf'
        if len(options.shelf_name) != 0:
            shelf_name = options.shelf_name
        shelf_name_unquoted = shelf_name
        # URL-quote so the shelf name is filesystem-safe
        shelf_name = quote(shelf_name, '')
        if 'mb_root' in kwargs:
            root = kwargs['mb_root']
        else:
            root = find_mb_root(
            )  # this will not return if we can't find a working location
        manifest_version = 0
        manifest = []
        manifest_name = os.path.join(root, '%s.manifest' % shelf_name)
        manifest_archive = os.path.join(root, '%s.7z' % shelf_name)
        manifest_comment = ''
        # hex epoch-seconds suffix used to back up any previous shelf
        timestamp = hex(int(time.time()))[2:]
        if os.path.exists(manifest_name):
            # grab the previous comment (manifest layout: optional
            # 'version N' line, then the comment line)
            manifest_lines = open(manifest_name).readlines()
            if manifest_lines[0].startswith('version '):
                manifest_version = int(manifest_lines[0][8:])
                if manifest_version >= 1:
                    manifest_comment = manifest_lines[1].rstrip()
            else:
                manifest_comment = manifest_lines[0].rstrip()
            manifest_lines = None
            try:
                os.rename(manifest_name, '%s.%s' % (manifest_name, timestamp))
            except:
                os.chdir(working_dir)
                self.message = 'ERROR: Could not back up previous shelf.'
                return False
            if os.path.exists(manifest_archive):
                try:
                    os.rename(manifest_archive,
                              '%s.%s' % (manifest_archive, timestamp))
                except:
                    os.chdir(working_dir)
                    self.message = 'ERROR: Could not back up previous shelf.'
                    return False
        if len(options.comment):
            manifest_comment = options.comment
        # 7z reads the file list from '@<shelf>.list' written below
        shelve_command = [
            options.seven_zip, 'a', manifest_archive,
            '@%s.list' % shelf_name
        ]
        # apply include/exclude filters to the status lines
        for line in lines:
            line = line.strip()
            if not len(line):
                continue
            if (options.include_filter is not None) or len(
                    options.exclude_filter):
                if options.include_filter is not None:
                    if options.include_filter in line:
                        manifest.append(line)
                if len(options.exclude_filter):
                    exclude = [
                        f for f in options.exclude_filter if f in line
                    ]
                    if len(exclude) == 0:
                        manifest.append(line)
            else:
                manifest.append(line)
        if options.ide_state:
            # find all the .suo files and add them to the archive
            # (the Visual Studio .suo file maintains a record of all files that
            # were last open in the IDE)
            if os.path.exists('.vs'):
                # VS2017+
                # NOTE(review): this loop rebinds 'root' (the microbranch
                # root above) — harmless on the visible paths since the
                # manifest paths are already computed, but fragile.
                for root, dirs, files in os.walk('.vs'):
                    if '.suo' in files:
                        manifest.append('X %s' % os.path.join(root, '.suo'))
                        options.extra_files.append(
                            os.path.join(root, '.suo'))
            else:
                # +VS2013
                files = glob.glob('*.suo')
                if len(files):
                    options.extra_files += files
                    for file in files:
                        manifest.append('X %s' % file)
        # write the 7z input list: staging metadata, extra files, and the
        # path portion of each (M)odified/(A)dded/renamed entry
        lines_written = 0
        with open('%s.list' % shelf_name, 'w') as f:
            if stage_path is not None:
                f.write('%s\n' % stage_path)  # capture current staging metadata
            if isinstance(options.extra_files,
                          (list, tuple)) and len(options.extra_files):
                # should be a path relative to the root of the working copy
                f.write('%s\n' % '\n'.join(options.extra_files))
            for line in manifest:
                action = line[0]
                if action == 'M' or action == 'A':
                    f.write('%s\n' % line[2:])
                    lines_written += 1
                if action == 'V':
                    # rename; place the current file into the backup in case it holds changes
                    f.write('%s\n' % line[2:].split(',')[1])
                    lines_written += 1
        if lines_written:
            output = subprocess.Popen(
                shelve_command,
                stdout=subprocess.PIPE).communicate()[0].decode("utf-8")
            # 7z prints 'Everything is Ok' on success
            lines = output.split('\n')
            something_went_wrong = True
            for line in lines:
                line = line.rstrip()
                if line == 'Everything is Ok':
                    something_went_wrong = False
                    break
            if something_went_wrong:
                os.chdir(working_dir)
                self.message = 'ERROR: Failed to construct shelf archive:\n%s' % output
                return False
            os.remove('%s.list' % shelf_name)
            # revert what was shelved: per-file when filters were used,
            # otherwise everything (unless --no-revert)
            if (options.include_filter is not None) or len(
                    options.exclude_filter):
                for line in manifest:
                    action = line[0]
                    filename = line[2:]
                    if action == 'V':
                        filename = line[2:].split(',')[1]
                    command = ['hg', 'revert', filename]
                    output = subprocess.Popen(
                        command, stdout=subprocess.PIPE).communicate()[0].decode(
                            "utf-8")
            elif not options.no_revert:
                command = ['hg', 'revert', '--all']
                if options.use_path != '.':
                    command.append(options.use_path)
                output = subprocess.Popen(
                    command,
                    stdout=subprocess.PIPE).communicate()[0].decode("utf-8")
                if stage_path is not None:
                    shutil.rmtree(
                        stage_path)  # remove current staging metadata
            # write the manifest: version, comment, then one
            # '<action>?<file>?<md5>' entry per shelved file
            with open(manifest_name, 'w') as f:
                f.write('version %d\n' % MANIFEST_VERSION)
                f.write('%s\n' % manifest_comment)
                for line in manifest:
                    action = line[0]
                    if os.name == 'nt':
                        file_name = line[2:].replace('/', '\\')
                    else:
                        file_name = line[2:].replace('\\', '/')
                    #timestamp = 0.0
                    changeset = ''
                    if action == 'M':
                        changeset = hashlib.md5(open(file_name,
                                                     'rb').read()).hexdigest()
                    elif action == 'V':
                        # the revert above may have left the renamed file in place
                        # we are nice, and clean it up for them...
                        from_name, to_name = file_name.split(',')
                        if os.path.exists(to_name):
                            try:
                                os.remove(to_name)
                            except:
                                self.message = 'ERROR: Failed to remove renamed file "%s"!' % to_name
                                return False
                        changeset = hashlib.md5(open(from_name,
                                                     'rb').read()).hexdigest()
                    f.write('%s?%s?%s\n' % (action, file_name, changeset))
            # summarize what was shelved (optionally via an ANSI batch file)
            batch_text = ''
            if os.name == 'nt':
                batch_text = '@echo off\n'
                batch_text += 'set FG=%_fg\n'
                batch_text += 'set BG=%_bg\n'
            manifest = colorize_status(manifest)
            if not quiet:
                print('Shelved the following state as microbranch "%s":' %
                      shelf_name_unquoted)
                for line in manifest:
                    if options.ansi_color:
                        if options.ansi_color_requires_batch:
                            batch_text += 'echo %s\n' % line[3]
                        else:
                            print(line[3])
                    else:
                        print(line[0], line[1])
                if options.ansi_color:
                    if options.ansi_color_requires_batch:
                        if os.name == 'nt':
                            batch_text += 'color %FG on %BG\n'
                        open(options.batch_file_name, 'w').write(batch_text)
                        os.system(options.batch_file_name)
            if options.no_revert:
                print(
                    '\nAs requested, changes have been left in the working copy.'
                )
    else:
        if not quiet:
            print('Nothing to shelve.')
    os.chdir(working_dir)
    return True
def execute(self, options, quiet=False, **kwargs):
    """Restore a previously shelved microbranch into the working copy.

    Extracts the shelf's 7z archive into a temp folder, replays the manifest
    (A/M/R/V/X entries) against the working copy via `hg`, optionally merging
    files that changed since shelving, and prints a colorized summary.

    Parameters:
        options: parsed command-line options (shelf_name, seven_zip,
                 overwrite, ansi_color, erase_cache, merge-tool paths, ...).
        quiet:   suppress progress dots and warnings when True.
        kwargs:  may carry 'mb_root' to override microbranch storage root.

    Returns True on success; on failure sets self.message and returns False.
    """
    # Cleanup actions accumulated as we make destructive changes; run (in
    # order) by abort_cleanup() before any mid-restore failure return.
    abort_cleanups = []

    def abort_cleanup(cleanups):
        # Run every registered cleanup action (best effort, in order).
        for cleanup in cleanups:
            cleanup()

    if not options.branch:
        self.message = 'ERROR: Could not determine branch.'
        return False
    # Default shelf name is 'shelf'; a user-supplied name is URL-quoted so it
    # is safe to use as a filename.
    shelf_name = 'shelf'
    if len(options.shelf_name) != 0:
        shelf_name = options.shelf_name
    shelf_name_unquoted = shelf_name
    shelf_name = quote(shelf_name, '')
    working_dir = os.getcwd()
    root = find_hg_root()
    if root:
        # Move to the directory that *contains* the .hg folder.
        os.chdir(root)
        os.chdir("..")
    if not os.path.exists('.hg'):
        os.chdir(working_dir)
        self.message = 'ERROR: Must be in root of working copy to restore.'
        return False
    if 'mb_root' in kwargs:
        self.mb_root = kwargs['mb_root']
    else:
        self.mb_root = find_mb_root(
        )  # this will not return if we can't find a working location
    if not os.path.exists(
            os.path.join(self.mb_root, '%s.manifest' % shelf_name)):
        os.chdir(working_dir)
        self.message = 'ERROR: A valid shelf state could not be found.'
        return False
    # Refuse to restore on top of uncommitted changes: any `hg status -q`
    # output means the working copy is dirty.
    command = ['hg', 'status', '-q', '.']
    output = subprocess.Popen(
        command, stdout=subprocess.PIPE).communicate()[0].decode("utf-8")
    if len(output) != 0:
        os.chdir(working_dir)
        self.message = 'Cannot restore into pending changes.'
        return False
    # Scratch folder in the system temp dir where the archive is unpacked.
    working_folder = os.path.join(tempfile.gettempdir(),
                                  '__%s__' % shelf_name)
    if os.path.exists(working_folder):
        try:
            shutil.rmtree(working_folder)
        except:
            os.chdir(working_dir)
            self.message = 'ERROR: Failed to remove remnants of previous restore atttempt.'
            return False
    manifest_name = os.path.join(self.mb_root, '%s.manifest' % shelf_name)
    manifest_archive = os.path.join(self.mb_root, '%s.7z' % shelf_name)
    if not os.path.exists(manifest_archive):
        os.chdir(working_dir)
        self.message = 'ERROR: The specified microbranch "%s" does not exist.' % shelf_name
        return False
    # Extract the archive with 7-Zip; success is detected by scanning the
    # tool's stdout for its literal "Everything is Ok" marker.
    restore_command = [
        options.seven_zip, 'x',
        '-o%s' % working_folder, manifest_archive
    ]
    output = subprocess.Popen(
        restore_command,
        stdout=subprocess.PIPE).communicate()[0].decode("utf-8")
    lines = output.split('\n')
    something_went_wrong = True
    for line in lines:
        line = line.rstrip()
        if line == 'Everything is Ok':
            something_went_wrong = False
            break
    if something_went_wrong:
        os.chdir(working_dir)
        self.message = 'ERROR: Failed to extract shelf archive:\n%s' % output
        return False
    # does this archive contain any staging areas? it won't be in the
    # archive unless it has staging areas
    shelved_stage = os.path.join(working_folder, '.hg', 'stage')
    stage_path = StageIO().get_staging_root(root, options)
    if os.path.exists(shelved_stage):
        # ok, check to make sure there isn't one lingering
        if os.path.exists(stage_path):
            stage_areas = os.listdir(stage_path)
            if len(stage_areas) != 0:
                # we have to abort; cleanup() will remove the working_folder
                os.chdir(working_dir)
                self.message = 'ERROR: Active staging areas found; cannot overwrite with shelved version'
                return False
            shutil.rmtree(stage_path)
        shutil.copytree(shelved_stage, stage_path)
        abort_cleanups.append(lambda: shutil.rmtree(stage_path))
    else:
        # No staging data in the archive; remember that for the 'M' branch.
        shelved_stage = None
    # Parse the manifest header: optional 'version N' line followed by a
    # one-line comment; the remaining lines are '?'-separated entries.
    manifest_version = 0
    manifest_lines = open(manifest_name).readlines()
    if manifest_lines[0].startswith('version '):
        manifest_version = int(manifest_lines[0][8:])
        del manifest_lines[0]
    if manifest_version >= 1:
        # NOTE(review): reading manifest_lines[1] here looks off-by-one —
        # after the version line was deleted the comment should be at [0]
        # (the list command's reader uses [0]); confirm against the writer.
        manifest_comment = manifest_lines[1].rstrip()
        del manifest_lines[0]
    else:
        manifest_comment = manifest_lines[0].rstrip()
        del manifest_lines[0]
    # From here on we mutate the working copy: arrange to revert and to
    # restore the caller's cwd if we must abort.
    abort_cleanups.append(
        lambda: subprocess.check_output(['hg', 'revert', '--all']))
    abort_cleanups.append(lambda: os.chdir(working_dir))
    # merge_status[file] / add_status[file]: True when the file was actually
    # merged/added; False entries are shown as '?' in the final report.
    merge_status = {}
    add_status = {}
    for line in manifest_lines:
        line = line.rstrip()
        status, file_name, previous_key = line.split(
            '?'
        )  # 'previous_key' will be an md5 hash starting with MANIFEST_VERSION 2
        # Normalize path separators for the current platform.
        if os.name == 'nt':
            file_name = file_name.replace('/', '\\')
        else:
            file_name = file_name.replace('\\', '/')
        if status == 'A':
            # Added file: if it already exists just `hg add` it; otherwise
            # copy it out of the extracted archive first.
            if os.path.exists(file_name):
                output = subprocess.Popen(
                    ['hg', 'add', file_name],
                    stdout=subprocess.PIPE).communicate()[0].decode(
                        "utf-8")
                if not len(output):
                    add_status[file_name] = True
                    if not quiet:
                        print('.', end='')
                else:
                    abort_cleanup(abort_cleanups)
                    self.message = 'ERROR: Failed to restore added file "%s":\n%s\n...aborting restore...' % (
                        file_name, output)
                    return False
            else:
                if not os.path.exists(
                        os.path.join(working_folder, file_name)):
                    # Not in the archive either; record and move on.
                    add_status[file_name] = False
                    if not quiet:
                        print(
                            "WARNING: Added file '%s' no longer exists; skipping..."
                            % file_name,
                            file=sys.stderr)
                else:
                    # Recreate intermediate directories, copy the file back,
                    # then `hg add` it.
                    if not make_path(file_name):
                        abort_cleanup(abort_cleanups)
                        self.message = 'ERROR: Failed to recreate path for added file "%s"; aborting restore...' % file_name
                        return False
                    try:
                        shutil.copyfile(
                            os.path.join(working_folder, file_name),
                            file_name)
                        if not quiet:
                            print('.', end='')
                    except:
                        abort_cleanup(abort_cleanups)
                        self.message = 'ERROR: Failed to restore added file "%s"; aborting restore...' % file_name
                        return False
                    output = subprocess.Popen(
                        ['hg', 'add', file_name],
                        stdout=subprocess.PIPE).communicate()[0].decode(
                            "utf-8")
                    if not len(output):
                        add_status[file_name] = True
                        if not quiet:
                            print('.', end='')
                    else:
                        abort_cleanup(abort_cleanups)
                        self.message = 'ERROR: Failed to restore added file "%s":\n%s\n...aborting restore...' % (
                            file_name, output)
                        return False
        elif status == 'M':
            files_are_equal = False
            # see if the file is unchanged by the merge; in that case, just copy it
            if manifest_version <= 1:
                # Legacy manifests store a changeset id, compared together
                # with a CRC32 of the file contents.
                new_key = get_changeset_for(options, file_name)
                if not new_key:
                    if shelved_stage is not None:
                        shutil.rmtree(stage_path)
                    os.chdir(working_dir)
                    self.message = 'ERROR: Failed to determine changeset for file "%s".' % file_name
                    return False
                old_crc32 = crc32(os.path.join(working_folder, file_name))
                new_crc32 = crc32(file_name)
                files_are_equal = (previous_key == new_key) and (old_crc32
                                                                 == new_crc32)
            else:
                # make sure the target file hasn't changed since we last shelved
                new_key = hashlib.md5(open(file_name,
                                           'rb').read()).hexdigest()
                files_are_equal = new_key == previous_key
            if options.overwrite or files_are_equal:
                try:
                    shutil.copyfile(
                        os.path.join(working_folder, file_name),
                        file_name)
                    if not quiet:
                        print('.', end='')
                except:
                    abort_cleanup(abort_cleanups)
                    self.message = 'ERROR: Failed to restore modified file "%s":\n...aborting restore...' % file_name
                    return False
            else:
                # the file has been altered, so we have to merge...see what we have available
                merge_tool = ''
                if 'PYHG_MERGE_TOOL' in os.environ:
                    merge_tool = os.environ['PYHG_MERGE_TOOL']
                elif os.name == 'nt':
                    if len(options.winmerge):
                        merge_tool = options.winmerge
                    elif len(options.patch):
                        merge_tool = options.patch
                elif len(options.diff):
                    merge_tool = options.diff
                if len(merge_tool):
                    # Hash the archived copy before and after the interactive
                    # merge; a changed hash means the user merged something.
                    previous_key = hashlib.md5(
                        open(os.path.join(working_folder, file_name),
                             'rb').read()).hexdigest()
                    os.system(
                        '%s "%s" "%s"' %
                        (merge_tool,
                         os.path.join(working_folder, file_name),
                         file_name))
                    new_key = hashlib.md5(
                        open(os.path.join(working_folder, file_name),
                             'rb').read()).hexdigest()
                    merge_status[file_name] = (previous_key != new_key)
                    if previous_key != new_key:
                        try:
                            shutil.copyfile(
                                os.path.join(working_folder, file_name),
                                file_name)
                            if not quiet:
                                print('.', end='')
                        except:
                            abort_cleanup(abort_cleanups)
                            self.message = 'ERROR: Failed to restore merged file "%s":\n...aborting restore...' % file_name
                            return False
                    else:
                        print(
                            "WARNING: Skipping '%s'; no merge performed..."
                            % file_name,
                            file=sys.stderr)
                else:
                    if not quiet:
                        print(
                            "WARNING: Skipping '%s'; no merge solution available..."
                            % file_name,
                            file=sys.stderr)
        elif status == 'R':
            # Removed file: replay the removal through hg.
            if os.path.exists(file_name):
                output = subprocess.Popen(
                    ['hg', 'remove', file_name],
                    stdout=subprocess.PIPE).communicate()[0].decode(
                        "utf-8")
                if len(output):
                    abort_cleanup(abort_cleanups)
                    self.message = 'ERROR: Failed to remove file "%s":\n%s\n...aborting restore...' % (
                        file_name, output)
                    return False
                else:
                    if not quiet:
                        print('.', end='')
        elif status == 'V':
            # rename
            output = ''
            from_name, to_name = file_name.split(',')
            # first, perform a 'move' (i.e., rename) on the existing file
            if os.path.exists(from_name):
                output = subprocess.Popen(
                    ['hg', 'mv', from_name, to_name],
                    stdout=subprocess.PIPE).communicate()[0].decode(
                        "utf-8")
            if len(output):
                abort_cleanup(abort_cleanups)
                self.message = 'ERROR: Failed to rename file "%s":\n%s\n...aborting restore...' % (
                    from_name, output)
                return False
            # next, copy the contents of the archived version if it differs
            files_are_equal = False
            if manifest_version <= 1:
                old_crc32 = crc32(os.path.join(working_folder, to_name))
                new_crc32 = crc32(to_name)
                # NOTE(review): 'new_key' is not computed in this branch (the
                # 'M' branch calls get_changeset_for first) — this appears to
                # reuse a stale value from a previous loop iteration, or
                # raise NameError on the first 'V' entry. Confirm and fix.
                files_are_equal = (previous_key == new_key) and (old_crc32
                                                                 == new_crc32)
            else:
                new_key = hashlib.md5(open(to_name,
                                           'rb').read()).hexdigest()
                files_are_equal = new_key == previous_key
            if options.overwrite or files_are_equal:
                try:
                    shutil.copyfile(
                        os.path.join(working_folder, to_name), to_name)
                    if not quiet:
                        print('.', end='')
                except:
                    abort_cleanup(abort_cleanups)
                    self.message = 'ERROR: Failed to restore renamed file "%s":\n...aborting restore...' % to_name
                    return False
            else:
                # the file has been altered, so we have to merge...see what we have available
                # (same merge-tool selection and hash-compare dance as 'M',
                # but operating on the rename target)
                merge_tool = ''
                if 'PYHG_MERGE_TOOL' in os.environ:
                    merge_tool = os.environ['PYHG_MERGE_TOOL']
                elif os.name == 'nt':
                    if len(options.winmerge):
                        merge_tool = options.winmerge
                    elif len(options.patch):
                        merge_tool = options.patch
                elif len(options.diff):
                    merge_tool = options.diff
                if len(merge_tool):
                    previous_key = hashlib.md5(
                        open(os.path.join(working_folder, to_name),
                             'rb').read()).hexdigest()
                    os.system(
                        '%s "%s" "%s"' %
                        (merge_tool,
                         os.path.join(working_folder, to_name), to_name))
                    new_key = hashlib.md5(
                        open(os.path.join(working_folder, to_name),
                             'rb').read()).hexdigest()
                    merge_status[file_name] = (previous_key != new_key)
                    if previous_key != new_key:
                        try:
                            shutil.copyfile(
                                os.path.join(working_folder, to_name),
                                to_name)
                            if not quiet:
                                print('.', end='')
                        except:
                            abort_cleanup(abort_cleanups)
                            self.message = 'ERROR: Failed to restore merged file "%s":\n...aborting restore...' % file_name
                            return False
                    else:
                        print(
                            "WARNING: Skipping '%s'; no merge performed..."
                            % file_name,
                            file=sys.stderr)
                else:
                    if not quiet:
                        print(
                            "WARNING: Skipping '%s'; no merge solution available..."
                            % file_name,
                            file=sys.stderr)
        elif status == 'X':
            # extra file -- just put it back where it was exactly as it was, no additional handling
            if os.path.exists(file_name):
                try:
                    os.remove(file_name)
                except:
                    abort_cleanup(abort_cleanups)
                    self.message = 'ERROR: Failed to remove extra file "%s":\n...aborting restore...' % file_name
                    return False
            # Extract just this one file directly from the archive.
            restore_command = [
                options.seven_zip, 'x', manifest_archive, file_name
            ]
            try:
                output = subprocess.Popen(
                    restore_command,
                    stdout=subprocess.PIPE).communicate()[0].decode(
                        "utf-8")
            except:
                abort_cleanup(abort_cleanups)
                self.message = 'ERROR: Failed to restore extra file "%s":\n...aborting restore...' % file_name
                return False
            something_went_wrong = True
            for line in output.split('\n'):
                line = line.rstrip()
                if line == 'Everything is Ok':
                    something_went_wrong = False
                    break
            if something_went_wrong:
                abort_cleanup(abort_cleanups)
                self.message = 'ERROR: Failed to restore extra file "%s":\n...aborting restore...' % file_name
                return False
            if not quiet:
                print('.', end='')
    # Reporting: rewrite each manifest entry as "<status> <file>"; entries
    # whose add/merge did not actually happen are shown with a '?' status.
    if not quiet:
        for i in range(len(manifest_lines)):
            line = manifest_lines[i].rstrip()
            #status, file_name, timestamp = line.split(':')
            status, file_name, changeset = line.split('?')
            manifest_lines[i] = '%s %s' % (status, file_name)
            if file_name in merge_status:
                if not merge_status[file_name]:
                    manifest_lines[i] = '? %s' % file_name
            elif file_name in add_status:
                if not add_status[file_name]:
                    manifest_lines[i] = '? %s' % file_name
        # ANSI color may need to be routed through a batch file on Windows
        # shells that can't render escapes directly.
        batch_text = ''
        if os.name == 'nt':
            batch_text = '@echo off\n'
            batch_text += 'set FG=%_fg\n'
            batch_text += 'set BG=%_bg\n'
        manifest_lines = colorize_status(manifest_lines)
        print('\nRestored the following state from microbranch "%s":' %
              shelf_name_unquoted)
        for line in manifest_lines:
            if options.ansi_color:
                if options.ansi_color_requires_batch:
                    batch_text += 'echo %s\n' % line[3]
                else:
                    print(line[3])
            else:
                print(line[0], line[1])
        if options.ansi_color:
            if options.ansi_color_requires_batch:
                if os.name == 'nt':
                    batch_text += 'color %FG on %BG\n'
                open(options.batch_file_name, 'w').write(batch_text)
                os.system(options.batch_file_name)
    if options.erase_cache:
        # User asked to delete the shelf after a successful restore.
        print('Removing cached microbranch "%s".' % shelf_name_unquoted)
        try:
            os.remove(manifest_name)
            os.remove(manifest_archive)
        except:
            os.chdir(working_dir)
            self.message = 'ERROR: Failed to remove cached files.'
            return False
    os.chdir(working_dir)
    return True
def execute(self, options, quiet=False, **kwargs):
    """List shelved microbranches, optionally one by name with details.

    Scans the microbranch root for '*.manifest' files and prints each
    microbranch's name and comment; with options.detailed (and a specific
    shelf name) also prints the individual manifest entries.

    Parameters:
        options: parsed command-line options (shelf_name, detailed, args).
        quiet:   suppress all printed output when True.

    Returns True (always; informational command).
    """
    working_dir = os.getcwd()
    root = find_hg_root()
    if root:
        # set working directory to the top of the working copy
        os.chdir(root)
        os.chdir("..")
    root = find_mb_root(
    )  # this will not return if we can't find a working location
    # A specific shelf name is URL-quoted to match the on-disk file name.
    shelf_name = None
    if len(options.shelf_name) != 0:
        shelf_name = quote(options.shelf_name, '')
    import glob
    files = glob.glob(os.path.join(root, '*.manifest'))
    if len(files) == 0:
        if not quiet:
            print('No microbranches are currently shelved.')
    else:
        if not shelf_name:
            if not quiet:
                print('The following microbranches are on the shelf:')
        for file in files:
            # Manifest files are named '<microbranch>.manifest'.
            microbranch_name = os.path.basename(file).split('.')[0]
            if shelf_name and shelf_name != microbranch_name:
                continue
            if shelf_name and options.detailed:
                if not quiet:
                    print(
                        'Microbranch "%s" caches the following changes:'
                        % options.args[0])
            manifest_file = file
            manifest_lines = open(manifest_file).readlines()
            # Strip the optional 'version N' header, then the comment line.
            # NOTE(review): both branches below are identical, and
            # manifest_comment is never assigned when the manifest has no
            # 'version ' header (NameError risk below) — confirm intent.
            if manifest_lines[0].startswith('version '):
                manifest_version = int(manifest_lines[0][8:])
                del manifest_lines[0]
                if manifest_version == 1:
                    manifest_comment = manifest_lines[0].rstrip()
                else:
                    manifest_comment = manifest_lines[0].rstrip()
                del manifest_lines[0]
            if not quiet:
                if len(manifest_comment):
                    print(' "%s" (%s)' % (microbranch_name,
                                          manifest_comment))
                else:
                    print(' "%s"' % microbranch_name)
                if options.detailed:
                    # Entries are '?'-separated: status?file?key.
                    for line in manifest_lines:
                        line = line.rstrip()
                        items = line.split('?')
                        print(' %s -> %s' % (items[0], items[1]))
    os.chdir(working_dir)
    return True
def __init__(self, options):
    """Check a shelved microbranch for potential restore conflicts.

    For each 'M' (modified) entry in the shelf's manifest, compares the
    recorded changeset id and a CRC32 of the shelved file against the
    current working copy; prints any files that would conflict on restore.

    Parameters:
        options: parsed command-line options (branch, args for shelf name).

    Side effects: prints results to stdout/stderr; calls sys.exit(1) on
    fatal errors; restores the caller's cwd before returning.
    """
    if not options.branch:
        return
    working_dir = os.getcwd()
    root = find_hg_root()
    if root:
        # set working directory to the top of the working copy
        os.chdir(root)
        os.chdir("..")
    if not os.path.exists('.hg'):
        os.chdir(working_dir)
        print("ERROR: Must be in root of working copy to shelf.",
              file=sys.stderr)
        sys.exit(1)
    # Default shelf name; an explicit name is URL-quoted (py2/py3 variants).
    shelf_name = 'shelf'
    if len(options.args):
        if sys.version_info[0] > 2:
            shelf_name = urllib.parse.quote(options.args[0], '')
        else:
            shelf_name = urllib.quote(options.args[0], '')
    root = find_mb_root(
    )  # this will not return if we can't find a working location
    manifest_file = os.path.join(root, '%s.manifest' % shelf_name)
    if not os.path.exists(manifest_file):
        if shelf_name == 'shelf':
            print('Working copy has no cached default microbranch.')
        else:
            print('Cannot access microbranch "%s".' % shelf_name)
    else:
        manifest_lines = open(manifest_file).readlines()
        # NOTE(review): only one header line is dropped here; other readers
        # in this file also handle a leading 'version N' line — a versioned
        # manifest would leave its comment line in the entry list. Confirm.
        del manifest_lines[0]  # delete the comment
        conflict_count = 0
        for line in manifest_lines:
            line = line.rstrip()
            if len(line) == 0:
                continue
            # Entries are '?'-separated: status?file?changeset-key.
            status, file_name, old_changeset = line.split('?')
            if status == 'M':
                # Normalize path separators for the current platform.
                if os.name == 'nt':
                    file_name = file_name.replace('/', '\\')
                else:
                    file_name = file_name.replace('\\', '/')
                new_changeset = get_changeset_for(options, file_name)
                # Compare both the recorded changeset and the content CRC;
                # either differing means a potential conflict on restore.
                old_crc32 = crc32(os.path.join(working_dir, file_name))
                new_crc32 = crc32(file_name)
                if not new_changeset:
                    print(
                        "ERROR: Failed to determine changeset for file '%s'!"
                        % file_name,
                        file=sys.stderr)
                    os.chdir(working_dir)
                    sys.exit(1)
                if (old_changeset != new_changeset) or (old_crc32 !=
                                                        new_crc32):
                    if conflict_count == 0:
                        print(
                            'Potential conflicts detected for the following "%s" microbranch assets:'
                            % (shelf_name
                               if shelf_name != "shelf" else "default"))
                    print('\t', file_name)
                    conflict_count += 1
        if conflict_count == 0:
            print('No potential conflicts detected for "%s" microbranch.'
                  % (shelf_name if shelf_name != "shelf" else "default"))
    os.chdir(working_dir)