def import_project(self, directory):
    """Import an existing directory as a new project.

    Copies *directory* into the project's work dir, creates a bare public
    repo, initializes git in the work tree if needed, wires up the
    'public' remote, and registers the project as open.
    """
    project_name = os.path.basename(directory)

    if project_name in self.projects:
        print("A project named '{0}' already exists.".format(project_name))
        sys.exit(1)

    # Create project entry in database
    new_pb_project = Project(project_name, self, None)

    # Copy files
    shutil.copytree(directory, new_pb_project.work_dir)

    # Remember where we started so the cwd can be restored at the end.
    # (Fix: the original assigned this twice with no chdir in between;
    # the duplicate assignment was redundant and has been removed.)
    original_dir = os.getcwd()

    print("Creating public git repo '{0}'".format(new_pb_project.public_dir))
    os.makedirs(new_pb_project.public_dir)
    os.chdir(new_pb_project.public_dir)
    git.init(True)  # bare repository

    os.chdir(new_pb_project.work_dir)

    # Initialize git repo if it doesn't exist
    if not os.path.isdir(".git"):
        # Create first import with initial content
        git.init(False)
        git.add(["."])
        git.commit("Import from {0}.".format(directory))

    # Keep any pre-existing 'public' remote around under another name.
    if 'public' in git.remote_list():
        print("Renaming old public repo to public.orig")
        git.remote_rename('public', 'public.orig')

    print("Adding 'public' remote")
    git.remote_add("public", new_pb_project.public_dir)

    os.chdir(original_dir)

    # Open by default
    self.projects[project_name] = new_pb_project
    new_pb_project.state = "open"
    # save again to persist the 'open' state
    new_pb_project.save()
def test_commit_solution(monkeypatch):
    """Exercise problem.commit_solution() end-to-end with stubbed stdin.

    Cleans up any stale test branch first, feeds canned answers through
    stdin, and always deletes the problem branch afterwards — also on
    failure, where the original exception is re-raised.
    """
    my_stdin = '0\n100\nO(1)\n0\n100\nO(1)\n'
    # Best-effort cleanup of a leftover branch from a previous run.
    # (Fix: narrowed the bare `except:` so KeyboardInterrupt/SystemExit
    # are not swallowed.)
    try:
        git.branch('-D', 'issue/42/foo-bar')
    except Exception:
        pass
    monkeypatch.setattr('sys.stdin', io.StringIO(my_stdin))
    p = problem.new_problem_args(
        'foo-bar', 42, 3,
        'class Solution {\npublic:\nint fooBar() {\nreturn 0;\n}\n};\n')
    try:
        git.add('foo-bar.cpp')
        git.add('foo-bar-test.cpp')
        problem.commit_solution()
    except Exception:
        # Restore the original branch state, then re-raise.
        # (Fix: bare `raise` preserves the original traceback,
        # unlike `raise e`.)
        git.checkout(initial_branch)
        git.branch('-D', p._branch)
        raise
    git.checkout(initial_branch)
    git.branch('-D', p._branch)
def save_favorite(self, directory, name, title=None, page=None):
    """Persist an updated favorite and commit the change.

    Exactly one of *title* or *page* is expected.  Returns True on a
    clean commit, False if git reported errors, and None when neither
    title nor page was given (preserving the original contract).
    """
    self.logger.info(__name__, 'save_favorite', directory, name)
    # (Fix: `not x == None` replaced with `is not None`; files are now
    # closed deterministically via `with`, even if write() raises.)
    if title is not None:
        git.cd(directory + '/favorites/' + name)
        with open('title', 'w+') as favorite:
            favorite.write(title)
        git.add()
        output, errors = git.commit("updated " + name + " with new title " + title)
        if errors != "":
            self.logger.error(__name__, 'save_favorite', directory, title, errors)
            return False
        return True
    elif page is not None:
        git.cd(directory + '/favorites/' + name)
        with open('page', 'w+') as favorite:
            favorite.write(page.serialize())
        with open('type', 'w+') as favorite:
            favorite.write(page.get_type())
        git.add()
        output, errors = git.commit("updated " + name + " with new page " + page.description())
        if errors != "":
            self.logger.error(__name__, 'save_favorite', directory, page, errors)
            return False
        return True
def post(self, filename):
    """Handle an upload: write the request body under DATA_DIR and commit it."""
    target = DATA_DIR + filename
    print(filename + ' upload')
    print(target)
    git.repo(DATA_DIR)
    with open(target, 'wb') as out:
        out.write(self.request.body)
    git.add(target)
    git.commit(filename + ' added')
def add_favorite(self, directory, name, title, existing=''):
    """Create a new favorite (optionally nested under *existing*) and commit it.

    Writes *title* into a 'title' file inside the new favorite directory.
    """
    self.logger.info(__name__, 'add_favorite', directory, title)
    git.cd(directory)
    # Build the path once instead of re-concatenating it three times.
    favorite_dir = directory + '/favorites/' + existing + name
    git.makedir(favorite_dir)
    # (Fix: `with` guarantees the handle is closed even if write() raises.)
    with open(favorite_dir + '/title', 'w+') as favorite:
        favorite.write(title)
    git.add()
    git.commit('added new favorite ' + name + ' with title ' + title)
def run(self):
    """Stage everything under data/, commit, and push to the remote."""
    repo_git = self.repo.git
    repo_git.add('data/*')
    self.repo.index.commit("Data updated: ")
    repo_git.push()
def sync(self, directory):
    """Commit any pending changes in *directory*.

    Returns True when a snapshot commit was made, False when the tree
    was already clean.
    """
    self.logger.info(__name__, 'sync', directory)
    git.cd(directory)
    # Guard clause: nothing to do on a clean tree.
    if not git.changesExist():
        self.logger.info(__name__, 'sync', 'no changes detected')
        return False
    self.logger.warn(__name__, 'sync', 'changes detected, syncing')
    git.add(' -u')
    git.add()
    git.commit('committing repository state before loading favorites')
    return True
def put_in_repo(data):
    """ Puts the login's assignment into their repo.

    Returns (path_to_repo, logins).  On first submission (no issue
    number yet) the repo is seeded from the assignment template and an
    initial commit is made; otherwise the existing repo is validated
    against the 'commits' log to reject duplicate timestamps.
    May recurse once after resetting a stale issue number.
    """
    path_to_subm, timestamp = get_subm(data)
    logins = get_logins(data.login)
    path_to_repo = find_path(logins, data)
    data.git_assign = utils.dirty_assign(data.git_assign)
    issue_num = model.get_issue_number(logins, data.assign)
    if not issue_num:
        # First submission: pick the hw/proj template directory.
        path_to_template = config.TEMPLATE_DIR
        if "hw" in data.assign:
            path_to_template += "hw/"
        else:
            path_to_template += "proj/"
        if data.git_assign not in config.ASSIGN_TO_NAME_MAP:
            path_to_template += data.git_assign + "/"
        else:
            path_to_template += config.ASSIGN_TO_NAME_MAP[data.git_assign] + "/"
        copy_important_files(data, path_to_template, path_to_repo, template=True)
        git_init(path_to_repo)
        git.add(None, path=path_to_repo)
        git.commit("Initial commit", path=path_to_repo)
    else:
        # we want to check that we didnt mess up, and there is actually
        # something here
        original_path = os.getcwd()
        os.chdir(path_to_repo)
        out, err = utils.run("git status")
        if "fatal: Not a git repository" in err:
            # Issue number exists but the repo was never created:
            # clear it and retry from scratch (single-level recursion).
            print("Issue number present, but no files in repository. Resetting issue number...")
            model.remove_issue_number(logins, data.git_assign, issue_num)
            os.chdir(original_path)
            return put_in_repo(data)
        else:
            # we have a partner who submitted (I think)
            if not os.path.exists('commits'):
                raise SubmissionException("Found a git repository that hasn't been committed to yet. Ignoring...")
            with open('commits', 'r') as f:
                out = f.read().strip()
            # Inspect only the most recent "<written-at> : <timestamp>" line.
            last_line = out[out.rfind("\n"):]
            if last_line.find(":") != -1:
                com_time = last_line[last_line.find(":") + 1:].strip()
                if timestamp in com_time:
                    raise SubmissionException("This timestamp ({}) has already been uploaded. \nExiting...".format(timestamp))
        os.chdir(original_path)
    copy_important_files(data, path_to_subm, path_to_repo)
    # Append this submission to the commit log before staging.
    with open(path_to_repo + 'commits', 'a') as f:
        f.write('{} : {}\n'.format(utils.get_timestamp_str(), timestamp))
    git.add(None, path=path_to_repo)
    # Fix ownership/permissions on everything in the repo.
    files = glob.glob(path_to_repo + "*")
    for f in files:
        utils.chmod_own_grp(f)
        utils.chown_staff_master(f)
    return path_to_repo, logins
def add(item, target):
    """
    Add an item to the repository.

    Just call the 'add' method of the repository implementation.

    @param item the item's content to add
    @param target the target location of *item* in the repository.
        Must be a list containing the target file (the last element)
        and the different subfolders leading to it.
    """
    # Thin delegation: the 'git' backend performs the actual add.
    git.add(item, target)
def put(self): """action sent from js. Does all the work!""" # get current directory (to return later) #might not be needed cwd = os.getcwd() try: # make connection to git remote server git = git_cnxn() # obtain filename and msg for commit data = json.loads(self.request.body.decode('utf-8')) filename = urllib.parse.unquote(data['filename']) msg = data['msg'] add_all = data['add_all'] pull = data['pull'] if pull: git.pull() self.write({ 'status': 200, 'statusText': ('Success! ' 'Pulled from {} everything up to date!'.format(git.url)) }) else: git.pull() git.add(filename, add_all) git.commit(msg) git.push() #git_pr() # close connection self.write({ 'status': 200, 'statusText': ('Success! ' 'Changes to {} captured on branch {} at {}'.format( filename, git.branch_nm, git.url)) }) # return to directory os.chdir(cwd) except ErrorPrintToJupyter as e: self.error_and_return(cwd, str(e).replace('\n', '</br> '))
def add_mt_disclaimers(repository, file_info):
    """Append the machine-translation disclaimer to partially-approved
    Japanese files and commit the result.

    Skips files that are fully approved (translated == approved) or not
    present in *file_info*.  Relies on module globals `disclaimer_text`
    and `initial_dir`.
    """
    now = datetime.now()

    os.chdir(repository.github.git_root)

    # Restrict the scan to a single folder when one is configured.
    root_folder = repository.github.single_folder \
        if repository.github.single_folder is not None else repository.github.project_folder

    for file in get_eligible_files(repository, git.ls_files(root_folder), 'en'):
        crowdin_file = get_crowdin_file(repository, file)

        # Map the English source path to its Japanese counterpart.
        target_file = 'ja/' + file[3:] if file[0:3] == 'en/' else file.replace('/en/', '/ja/')

        if not os.path.isfile('%s/%s' % (repository.github.git_root, target_file)):
            # Try the alternate markdown extension before giving up.
            if target_file[-9:] == '.markdown':
                target_file = target_file[:-9] + '.md'
            elif target_file[-3:] == '.md':
                target_file = target_file[:-3] + '.markdown'

            if not os.path.isfile('%s/%s' % (repository.github.git_root, target_file)):
                continue

        # Fully approved files need no disclaimer.
        if crowdin_file not in file_info or file_info[crowdin_file]['translated'] == file_info[crowdin_file]['approved']:
            continue

        # Strip any previously-inserted disclaimer line before re-adding,
        # so the operation stays idempotent.
        new_lines = []

        with open(target_file, 'r') as f:
            new_lines = [
                line for line in f.readlines()
                if line.find('<p class="alert alert-info"><span class="wysiwyg-color-blue120">') == -1
            ]

        content = '%s\n%s' % (''.join(new_lines), disclaimer_text)

        with open(target_file, 'w') as f:
            f.write(content)

        git.add(target_file)

    git.commit('-m', 'Added machine translation disclaimer %s' % now.strftime("%Y-%m-%d %H:%M:%S"))

    os.chdir(initial_dir)
def txn(branch):
    """Initialize hostery tracking on *branch*.

    Rewrites .gitignore so only the gitignore and hostery file are
    tracked, creates an empty hostery JSON file, then commits and
    pushes to *branch*.
    """
    # git ignore everything except hostery file and gitignore.
    command('git rm --cached . -r', verbose=True)
    # (Fix: `with` guarantees the handles are closed even on error.)
    with open('.gitignore', 'w') as git_ignore:
        git_ignore.write('*\n!.gitignore\n!%s' % HOSTERY_FILE)
    git.add('.gitignore')

    # make hostery file (an empty JSON object)
    with open(HOSTERY_FILE, 'w') as hostery_file:
        hostery_file.write(json.dumps({}))

    # add, commit, push.
    git.add(HOSTERY_FILE)
    command('git status', multiline=True, verbose=True)
    git.commit('hostery init')
    git.push(branch)
def _handle_submodule(self, branch, outer_commit, submodule_path, prev_commit, cur_commit,
                      input_work_dir, output_work_dir, start_commit, new_branch):
    """Replay the submodule commits between *prev_commit* and *cur_commit*
    into the output tree.

    Returns a single-element list with the resulting output HEAD, or an
    empty list if any intermediate output generation failed.
    """
    submodule_tree = os.path.join(input_work_dir, submodule_path)
    commits = self._log_commits(prev_commit, cur_commit, submodule_tree)
    for commit in commits:
        # ok ... if any fails just abandon the whole thing - could be better, but ...
        if not self._create_output(branch, outer_commit, input_work_dir, output_work_dir,
                                   start_commit, new_branch,
                                   submodules={submodule_path: commit}):
            return []
        git.add('.', tree=output_work_dir)
        if self.is_modified(tree=output_work_dir):
            self._commit_modified(commit, submodule_tree, output_work_dir, None, add_id=False)
    # After replaying all submodule commits, report the output HEAD so the
    # caller can use it as an extra parent.
    return [git.rev_parse('HEAD', tree=output_work_dir)]
def write_txn(branch):
    """Merge the pending `data` dict into the hostery file and commit.

    When `sync` is set, pulls before reading and pushes after the
    commit.  Returns the merged dict.
    """
    if sync:
        git.pull(branch)

    # (Fix: `with` closes handles deterministically; the original left
    # the read handle open across the update call.)
    with open(HOSTERY_FILE, 'r') as hostery_file:
        all_data = json.loads(hostery_file.read())
    all_data.update(data)

    with open(HOSTERY_FILE, 'w') as hostery_file:
        hostery_file.write(json.dumps(all_data, indent=2))

    git.add(HOSTERY_FILE)
    git.commit('hostery mark')

    if sync:
        git.push(branch)

    return all_data
def generate_html_files(repository, file_info):
    """Convert translated markdown files to HTML with pandoc and commit.

    Returns a dict mapping each generated .html path to the article's
    title.  Relies on module global `initial_dir`.
    """
    now = datetime.now()

    os.chdir(repository.github.git_root)

    # Restrict the scan to a single folder when one is configured.
    root_folder = repository.github.single_folder \
        if repository.github.single_folder is not None else repository.github.project_folder

    titles = {}

    for file in get_eligible_files(repository, git.ls_files(root_folder), 'en'):
        # NOTE(review): crowdin_file is computed but never used below
        # (unlike the sibling disclaimer pass) — confirm whether the
        # lookup was meant to gate the conversion.
        crowdin_file = get_crowdin_file(repository, file)

        # Map the English source path to its Japanese counterpart.
        target_file = 'ja/' + file[3:] if file[0:3] == 'en/' else file.replace('/en/', '/ja/')

        if not os.path.isfile('%s/%s' % (repository.github.git_root, target_file)):
            # Try the alternate markdown extension before giving up.
            if target_file[-9:] == '.markdown':
                target_file = target_file[:-9] + '.md'
            elif target_file[-3:] == '.md':
                target_file = target_file[:-3] + '.markdown'

            if not os.path.isfile('%s/%s' % (repository.github.git_root, target_file)):
                continue

        html_file = target_file[:target_file.rfind('.')] + '.html'

        titles[html_file] = get_title(file)

        _pandoc(target_file, html_file, '--from=gfm', '--to=html')

        git.add(html_file)

    git.commit('-m', 'Updated pandoc conversion %s' % now.strftime("%Y-%m-%d %H:%M:%S"))

    os.chdir(initial_dir)

    return titles
except: git.checkout(b=monit_node) #git.checkout("HEAD", b=monit_node) filename = monit_node + "/" + f['applicationsolution_name'] + "_" + \ f['url'].replace("://", "_").replace("/","_") + ".conf" filepath = telegraf_dir + "/" + filename print(filename) with open(filepath, 'w') as fi: fi.write("[[inputs.url_monitor]]\n" + \ "\tapp = " + '"' + f['applicationsolution_name'] + '"\n' + \ "\taddress = " + '"' + f['url'] + '"\n' + \ "\tresponse_timeout = \"" + response_timeout + '"\n' + \ "\tmethod = " + '"' + f['method'] + '"\n' + \ "\trequire_str = " + "'" + f['require_str'] + "'\n" + \ "\trequire_code = " + "'" + f['require_code'] + "'\n" + \ "\tfailed_count = " + f['failed_count'] + '\n' + \ "\tfailed_timeout = " + f['timeout'] + '\n' + \ "\tfollow_redirects = " + follow_redirects + '\n' + \ "\tbody = " + "'" + f['body'] + "'\n" + \ "\tinsecure_skip_verify = " + insecure_skip_verify + '\n' + \ "\t[inputs.url_monitor.headers]\n\t\t" + h_str + '\n') git.add(filename) try: git.commit(m=filename) except: print(git.status()) git.push("origin", monit_node)
def download_zendesk_articles(repository, domain, language):
    """Fetch article metadata from Zendesk and write out any articles
    whose translations look out of date.

    Returns (articles, article_paths, refresh_articles, refresh_paths).
    Relies on module global `initial_dir` for the on-disk article cache.
    """
    user = init_zendesk(domain)
    logging.info('Authenticated as %s' % user['email'])
    assert (user['verified'])

    # Download current article information and rearrange articles that may have
    # moved categories.
    articles, new_tracked_articles, categories, sections, section_paths = get_zendesk_articles(
        repository, domain, language)
    article_paths = check_renamed_articles(repository, language, articles, section_paths)

    # Warn about paths whose article data is missing.
    for article_id, article_path in sorted(article_paths.items()):
        if article_id not in articles:
            print('Missing data for %s with path %s' % (article_id, article_path))
            continue

    # Save articles and translations that currently exist on zendesk.
    # NOTE(review): indentation reconstructed — this call appears to run
    # once, after the loop above; confirm against the original file.
    save_article_metadata(domain, repository, language, articles, article_paths,
                          categories, sections)

    old_dir = os.getcwd()
    os.chdir(repository.github.git_root)

    # First pass: check if anything appears to be out of date
    candidate_article_ids = [
        article_id
        for article_id, article in sorted(articles.items())
        if requires_update(repository, domain, article, language, article_paths[article_id])
    ]

    # Second pass: anything that looks outdated, make sure its translation
    # metadata is up-to-date
    for article_id in candidate_article_ids:
        update_translated_at(domain, article_id, language, articles[article_id],
                             articles, section_paths)

    with open('%s/articles_%s.json' % (initial_dir, domain), 'w') as f:
        json.dump(articles, f)

    # Third pass: assume metadata is complete, check what is out of date
    refresh_articles = {
        article_id: articles[article_id]
        for article_id in candidate_article_ids
        if requires_update(repository, domain, articles[article_id], language,
                           article_paths[article_id])
    }

    # Cache the articles on disk so we can work on them without having to go
    # back to the API
    with open('%s/articles_%s.json' % (initial_dir, domain), 'w') as f:
        json.dump(articles, f)

    refresh_paths = {
        article_id: article_paths[article_id]
        for article_id in refresh_articles.keys()
    }

    # Write each stale article's HTML body to its tracked path and stage it.
    for article_id, article in refresh_articles.items():
        article_path = refresh_paths[article_id]
        os.makedirs(os.path.dirname(article_path), exist_ok=True)
        with open(article_path, 'w', encoding='utf-8') as f:
            f.write('<h1>%s</h1>\n' % article['title'])
            f.write(remove_nbsp(article['body']))
        git.add(article_path)

    git.commit('-m', 'Recently added articles: %s' % datetime.now())

    os.chdir(old_dir)

    return articles, article_paths, refresh_articles, refresh_paths
data = json.load(f) data["保存时间戳"] = ticks data["保存时间"] = localtime data["本次运行时长"] = data["保存时间戳"] - data["启动时间戳"] data["总游戏时长"] = data["总游戏时长"] + data["本次运行时长"] def secondsToGameStr(sec): m, s = divmod(sec, 60) h, m = divmod(m, 60) return "%02d时%02d分%02d秒" % (h, m, s) data["本次游戏时间"] = secondsToGameStr(data["本次运行时长"]) data["总游戏时间"] = secondsToGameStr(data["总游戏时长"]) # 写入 JSON 数据 with open(curFileList[0] + '\data.json', 'w', encoding='utf8') as f: json.dump(data, f) logStr = "本次游戏时间 " + data["本次游戏时间"] + " 总游戏时间 " + data["总游戏时间"] for log in mddata: logStr = logStr + log git = repo.git git.add('.') git.commit('-m', logStr) git.push()
if prl.name in CHANGELOG_LABELS: found_changelog_label = True break if found_changelog_label: print(NOTICE + "Processing release notes." + ENDC) release_notes = "" for rnf in release_notes_files: release_notes += open(rnf, 'r').read().rstrip() + '\n\n' with open('.release-notes/next-release.md', 'a+') as next_release_notes: next_release_notes.write(release_notes) print(INFO + "Adding git changes." + ENDC) for rnf in release_notes_files: git.rm(rnf) git.add('.release-notes/next-release.md') git.commit('-m', "Updates release notes for PR #" + str(pr_id)) else: print(NOTICE + "Found release notes but no changelog label." + ENDC) for rnf in release_notes_files: git.rm(rnf) git.commit( '-m', "Removes release notes from changelog labelless PR #" + str(pr_id)) push_failures = 0 while True: try: print(INFO + "Pushing changes." + ENDC) git.push() break
def commit(repo):
    """Stage the whole working tree of *repo* and record the first commit."""
    repo.git.add(".")
    repo.git.commit('-m', 'first commit')
if not readme_file: print(ERROR + "Unable to find README. Exiting." + ENDC) sys.exit(1) # open README and update with new version print(INFO + "Updating versions in " + readme_file + " to " + version + ENDC) with open(readme_file, "r+") as readme: text = readme.read() for sub in subs: (find, replace) = sub text = re.sub(find, replace, text) readme.seek(0) readme.write(text) print(INFO + "Adding git changes." + ENDC) git.add(readme_file) if not git.status("-s"): print(INFO + "No changes. Exiting." + ENDC) sys.exit(0) git.commit('-m', f'Update {readme_file} examples to reflect new version {version}') push_failures = 0 while True: try: print(INFO + "Pushing updated README." + ENDC) git.push() break except GitCommandError: push_failures += 1 if push_failures <= 5:
def _move_renamed(old_paths, new_paths):
    """Move files whose tracked path changed and stage both paths.

    For each article id present in both maps with differing paths and an
    existing source file, rename on disk and `git add` old + new paths.
    """
    for article_id, new_path in sorted(new_paths.items()):
        if article_id not in old_paths:
            continue
        old_path = old_paths[article_id]
        if old_path == new_path:
            continue
        if not os.path.exists(old_path):
            continue
        os.makedirs(os.path.dirname(new_path), exist_ok=True)
        os.rename(old_path, new_path)
        # Stage the removal and the addition so git records a rename.
        git.add(old_path)
        git.add(new_path)


def check_renamed_articles(repository, language, articles, section_paths):
    """Detect articles whose canonical path changed and rename them on disk.

    Handles both the English sources ('en/') and the translated tree
    (*language*/), commits the renames, and returns the map of article
    id -> new English path.

    (Refactor: the two previously duplicated rename loops now share the
    `_move_renamed` helper; behavior is unchanged.)
    """
    old_dir = os.getcwd()
    os.chdir(repository.github.git_root)

    old_article_paths = {
        get_article_id(file): file
        for file in git.ls_files('en/').split('\n')
    }
    new_article_paths = {
        str(article['id']): get_article_path(article, 'en', section_paths)
        for article in articles.values()
        if is_tracked_article(article, section_paths)
    }
    old_translated_paths = {
        get_article_id(file): file
        for file in git.ls_files(language + '/').split('\n')
    }
    new_translated_paths = {
        str(article['id']): get_article_path(article, language, section_paths)
        for article in articles.values()
        if is_tracked_article(article, section_paths)
    }

    _move_renamed(old_article_paths, new_article_paths)
    _move_renamed(old_translated_paths, new_translated_paths)

    git.commit('-m', 'Renamed articles: %s' % datetime.now())

    os.chdir(old_dir)

    return new_article_paths
# save files and states json.dump(pack_statuses, open(out_dir+"mx140"+"-"+time.asctime()+".json",'w')) json.dump (pack_json, open('pack.json','w')) nf = open('../mx140/public/js/data.js','w') print >>nf, "var data = "+json.dumps(pack_json)+";" nf.close() cPickle.dump (buffers, open('buffers.cpk','w')) cPickle.dump (most_recent_ids, open('mrids.cpk','w')) cPickle.dump (status_buff, open('status.buff','w')) print ("[saved]: pack.json, buffers.cpk, mrids.cpk, status.buff") # commit to github (requieres sudo git config --global credential.helper store + login para guardar credentials) os.chdir('../mx140') git.add('--all') git.commit('[*-*]/~ :: Monitor Mx140 V1.0 - '+time.asctime()) git.create_simple_git_command('push')() os.chdir('../collector') print "\n\n[git]: ok :: " , time.asctime() # then sleepover... #wait, publish something #current_list, current_keyword, current_asoc_ws must be ready try: current_status = u"El grupo de " + \ rlis[current_list].upper() + \ u" discute sobre "+ \ current_keyword.upper() +\ u" junto a [ " +\ current_asoc_ws[1] + " ] y [ " +\
def create(self, project_name, language):
    """ Create a bare project.

    Makes the work dir, instantiates templates for *language* if any,
    creates a bare public repo, initializes git in the work dir, adds
    the 'public' remote, commits the templates, and registers the
    project as open.
    """
    if project_name in self.projects:
        print("A project named '{0}' already exists.".format(project_name))
        sys.exit(1)
    new_pb_project = Project(project_name, self, language)
    print("Creating project '{0}' ({1}).".format(project_name, language))
    new_pb_project.save()
    # Create bare project
    os.makedirs(new_pb_project.work_dir)
    original_dir = os.getcwd()
    os.chdir(new_pb_project.work_dir)
    if language in self.languages:
        print("I have templates for a '{0}' project.".format(language))
        # Mirror the template tree into the work dir, expanding *.tmpl
        # files through the templating engine.
        for root, dirs, files in os.walk(new_pb_project.templates_dir):
            # print root, dirs, files
            rel_dir = root[len(new_pb_project.templates_dir)+1:]
            for d in dirs:
                o = os.path.join(root, d)
                w = os.path.join(new_pb_project.work_dir, rel_dir, d)
                os.makedirs(w)
                print("{0} -> {1}".format(o, w))
            for f in files:
                dest_file, ext = os.path.splitext(f)
                o = os.path.join(root, f)
                if ext == ".tmpl":
                    w = os.path.join(new_pb_project.work_dir, rel_dir, dest_file)
                    print("{0} -> {1}".format(o, w))
                    t = Template(file=o)
                    t.project = new_pb_project.name
                    # NOTE(review): writes to dest_file relative to the
                    # cwd (the work dir root), not to the computed `w`
                    # path, and the handle is never closed — confirm
                    # whether `open(w, 'w')` was intended.
                    of = open(dest_file, 'w')
                    of.write(str(t))
                else:
                    w = os.path.join(new_pb_project.work_dir, rel_dir, f)
                    print("{0} -> {1}".format(o, w))
                    shutil.copy(o, w)
    else:
        print("No templates available.")
    print("Creating public git repo '{0}'".format(new_pb_project.public_dir))
    os.makedirs(new_pb_project.public_dir)
    # Re-captured on purpose: we are now in the work dir, and the later
    # chdir(original_dir) must return here, not to the starting dir.
    original_dir = os.getcwd()
    os.chdir(new_pb_project.public_dir)
    git.init(True)
    os.chdir(original_dir)
    print("Initializing git repository.")
    git.init(False)
    print("Adding 'public' remote")
    git.remote_add("public", new_pb_project.public_dir)
    if language in self.languages:
        # Commit the templates
        git.add(["."])
        git.commit("Original import")
        # git.push("public")
    os.chdir(original_dir)
    # Open by default
    self.projects[project_name] = new_pb_project
    new_pb_project.state = "open"
    # why save? -> to update the state to 'open'
    new_pb_project.save()
def sync_articles(repository, domain, language, articles, article_paths,
                  refresh_articles=None, refresh_paths=None):
    """Pull translations from Crowdin and write fully-translated,
    machine-translated articles into the *language* tree, then commit.

    When *refresh_articles* is given, only those paths are synced (with
    sources); otherwise all of *article_paths* is synced.  Returns the
    Crowdin file_info map.
    """
    if refresh_articles is not None:
        logging.info('Updating translations for %d articles' % len(refresh_paths))
        new_files, all_files, file_info = update_repository(
            repository, list(refresh_paths.values()), sync_sources=True)
    else:
        logging.info('Downloading latest translations for %d articles' % len(article_paths))
        new_files, all_files, file_info = update_repository(
            repository, list(article_paths.values()), sync_sources=False)

    old_dir = os.getcwd()
    os.chdir(repository.github.git_root)

    for article_id, file in sorted(article_paths.items()):
        article = articles[article_id]

        # Map the English source path to its translated counterpart.
        target_file = language + '/' + file[3:] if file[
            0:3] == 'en/' else file.replace('/en/', '/%s/' % language)

        if not os.path.isfile(target_file):
            continue

        # Human-translated article (language label without 'mt'):
        # discard the downloaded copy and restore the tracked version.
        if language in article['label_names'] and 'mt' not in article[
                'label_names']:
            print(target_file, 'not machine translated')
            os.remove(target_file, )
            git.checkout(target_file)
            continue

        crowdin_file = get_crowdin_file(repository, file)

        if crowdin_file not in file_info:
            continue

        file_metadata = file_info[crowdin_file]

        # Incomplete translation: discard and restore tracked version.
        if file_metadata['phrases'] != file_metadata['translated']:
            print(target_file, 'not fully translated')
            os.remove(target_file)
            git.checkout(target_file)
            continue

        # Rewrite the file only when the disclaimer pass changed it.
        new_title, old_content, new_content = add_disclaimer_zendesk(
            article, target_file, language)

        if old_content != new_content:
            with open(target_file, 'w') as f:
                f.write('<h1>%s</h1>\n' % new_title)
                f.write(new_content)

        git.add(target_file)

    if refresh_articles is not None:
        git.commit('-m', 'Translated new articles: %s' % datetime.now())
    else:
        git.commit('-m', 'Translated existing articles: %s' % datetime.now())

    os.chdir(old_dir)

    return file_info
def _config(self, remote, conf, groups):
    """
    Builds the groups file and project.config file for a project.

    Clones refs/meta/config into a throwaway temp repo, compares the
    md5 of the existing project.config against self.config, and pushes
    an updated configuration only when they differ.  The temp repo is
    always removed.  (Python 2 code — note the print statements.)

    @param remote - gerrit.Remote object
    @param conf - Dict containing git config information
    @param groups - List of groups
    """
    # Nothing to do when no config file is associated with this project.
    if not self.config:
        return
    msg = "Project %s: Configuring." % self.name
    logger.info(msg)
    print msg

    # Unique scratch directory under ~/tmp for the meta/config checkout.
    repo_dir = '~/tmp'
    repo_dir = os.path.expanduser(repo_dir)
    repo_dir = os.path.abspath(repo_dir)
    uuid_dir = str(uuid4())
    repo_dir = os.path.join(repo_dir, uuid_dir)

    # Make Empty directory - We want this to stop and fail on OSError
    logger.debug(
        "Project %s: Creating directory %s" % (self.name, repo_dir)
    )
    os.makedirs(repo_dir)

    # Save the current working directory
    old_cwd = os.getcwd()

    origin = 'origin'

    try:
        # Change cwd to that repo
        os.chdir(repo_dir)

        # Git init empty directory
        git.init()

        # Add remote origin
        ssh_url = 'ssh://%s@%s:%s/%s' % (
            remote.username,
            remote.host,
            remote.port,
            self.name
        )
        git.add_remote(origin, ssh_url)

        # Fetch refs/meta/config for project
        refspec = 'refs/meta/config:refs/remotes/origin/meta/config'
        git.fetch(origin, refspec)

        # Checkout refs/meta/config
        git.checkout_branch('meta/config')

        # Get md5 of existing config (missing file hashes as empty string)
        _file = os.path.join(repo_dir, 'project.config')
        contents = ''
        try:
            with open(_file, 'r') as f:
                contents = f.read()
        except IOError:
            pass
        existing_md5 = hashlib.md5(contents).hexdigest()

        # Get md5 of new config
        with open(self.config, 'r') as f:
            contents = f.read()
        new_md5 = hashlib.md5(contents).hexdigest()

        msg = "Project %s: Md5 comparision\n%s\n%s"
        msg = msg % (self.name, existing_md5, new_md5)
        logger.debug(msg)
        print msg

        # Only alter if checksums do not match
        if existing_md5 != new_md5:
            logger.debug(
                "Project %s: config md5's are different." % self.name
            )

            # Update project.config file
            _file = os.path.join(repo_dir, 'project.config')
            with open(_file, 'w') as f:
                f.write(contents)

            # Update groups file
            group_contents = groups_file_contents(groups)
            _file = os.path.join(repo_dir, 'groups')
            with open(_file, 'w') as f:
                f.write(group_contents)

            # Git config user.email
            git.set_config('user.email', conf['git-config']['email'])

            # Git config user.name
            git.set_config('user.name', conf['git-config']['name'])

            # Add groups and project.config
            git.add(['groups', 'project.config'])

            # Git commit
            git.commit(message='Setting up %s' % self.name)

            # Git push
            git.push(origin, refspecs='meta/config:refs/meta/config')
            logger.info("Project %s: pushed configuration." % self.name)
        else:
            msg = "Project %s: config unchanged." % self.name
            logger.info(msg)
            print msg
    finally:
        # Change to old current working directory
        os.chdir(old_cwd)

        # Attempt to clean up created directory
        shutil.rmtree(repo_dir)
def delete_node(self, directory, fullName):
    """Remove a favorite node from the repository and commit the deletion.

    Destructive: the file is removed via `git rm` and the change is
    committed immediately.
    """
    self.logger.warn(__name__, 'delete_node', directory, fullName, ' (destructive)')
    favorites_dir = directory + '/favorites'
    git.cd(favorites_dir)
    git.rm(fullName)
    git.add(' -u')
    git.commit('deleted ' + fullName)
def _mirror_one(self, branch, input_work_dir, output_reference):
    """Mirror a single input branch into its output branch.

    Replays each unprocessed input commit through self._create_output
    into a temporary output checkout, committing whenever the output
    tree changes, and finally pushes the branch and its notes ref.
    Returns True on success (or nothing to do), False on Abort.
    """
    self.debug('*********** start work for branch %s -> %s' % (
        branch,
        self.output_branch_name(branch),
    ))
    if not self._need_to_mirror(branch, input_work_dir, output_reference):
        return True
    with tempdir.tempdir() as tmpdir:
        output_work_dir = os.path.join(tmpdir, 'output')
        start_commit, new_branch = self._prepare_output_git(
            branch, input_work_dir, output_work_dir, output_reference)
        commits = self._find_commits(branch, start_commit, input_work_dir,
                                     output_work_dir)
        os.chdir(tmpdir)
        committed_anything = False
        last_failure = None
        try:
            # In this case we're already done, the loop below would be skipped
            # completely, but we can't even calculate the (unnecessary)
            # prep_prev_commit and similar, nor do we really have to check out
            # the code to generate nothing. However, if it's a new branch we
            # may have to push it out, so go through the "finally:" segment of
            # the code (and hence have the return statement within the "try:"
            # block.
            # The output tree is correct since self._prepare_output_git() will
            # leave it at the commit it wanted to start generating from (even
            # if there's nothing to generate, it doesn't consider that.)
            if len(commits) == 0:
                return True
            # Parent of the first pending commit: baseline for diffing.
            prep_prev_commit = Commit(
                git.rev_parse(commits[0].tree_id + '^', tree=input_work_dir),
                input_work_dir)
            self._checkout(prep_prev_commit.tree_id, input_work_dir)
            submodules = self._submodule_status(input_work_dir)
            for commit in commits:
                prev_commit = prep_prev_commit
                prep_prev_commit = commit
                # A failed generation is remembered so its range can be
                # summarized (shortlog) in the next successful commit.
                if not self._create_output(branch, commit, input_work_dir,
                                           output_work_dir, start_commit,
                                           new_branch):
                    if last_failure is None:
                        last_failure = commit
                    continue
                if last_failure:
                    last_failure_shortlog = git.shortlog(last_failure, commit)
                else:
                    last_failure_shortlog = None
                git.add('.', tree=output_work_dir)
                prev_submodules = submodules
                submodules = self._submodule_status(input_work_dir)
                if self.is_modified(tree=output_work_dir):
                    parents = [git.rev_parse('HEAD', tree=output_work_dir)]
                    tree_id = git.write_tree(tree=output_work_dir)
                    # Changed submodules contribute extra parents so their
                    # history is linked into the output commit.
                    for s in submodules:
                        if not s in prev_submodules:
                            continue
                        if prev_submodules[s] != submodules[s]:
                            parents += self._handle_submodule(
                                branch, prev_commit, s, prev_submodules[s],
                                submodules[s], input_work_dir,
                                output_work_dir, start_commit, new_branch)
                    self._commit_modified(commit, input_work_dir,
                                          output_work_dir,
                                          last_failure_shortlog, tree_id,
                                          parents)
                    committed_anything = True
                elif new_branch and self.always_commit_new_branch:
                    self._commit_new_branch(commit, output_work_dir)
                    committed_anything = True
                new_branch = False
                last_failure = None
                # Record which input commit produced this output HEAD.
                git.set_note(self.notes_branch, 'HEAD', commit.tree_id,
                             tree=output_work_dir, env=self._commit_env())
        except Abort:
            return False
        finally:
            # if necessary, push to the server from the output_work_dir
            git.set_origin_url(self._output_tree, gitdir=output_work_dir)
            if committed_anything or new_branch:
                git.push(opts=[
                    '-q', 'origin',
                    'HEAD:' + self.output_branch_name(branch)
                ], tree=output_work_dir)
            git.push(opts=[
                '-q', '-f', 'origin', 'refs/notes/' + self.notes_branch
            ], tree=output_work_dir)
        return True
def _config(self, remote, conf, groups):
    """
    Builds the groups file and project.config file for a project.

    Clones refs/meta/config into a throwaway temp repo, compares the
    md5 of the existing project.config against self.config, and pushes
    an updated configuration only when they differ.  The temp repo is
    always removed.  (Python 2 code — note the print statements.)

    @param remote - gerrit.Remote object
    @param conf - Dict containing git config information
    @param groups - List of groups
    """
    # Nothing to do when no config file is associated with this project.
    if not self.config:
        return
    msg = "Project %s: Configuring." % self.name
    logger.info(msg)
    print msg

    # Unique scratch directory under ~/tmp for the meta/config checkout.
    repo_dir = '~/tmp'
    repo_dir = os.path.expanduser(repo_dir)
    repo_dir = os.path.abspath(repo_dir)
    uuid_dir = str(uuid4())
    repo_dir = os.path.join(repo_dir, uuid_dir)

    # Make Empty directory - We want this to stop and fail on OSError
    logger.debug("Project %s: Creating directory %s" % (self.name, repo_dir))
    os.makedirs(repo_dir)

    # Save the current working directory
    old_cwd = os.getcwd()

    origin = 'origin'

    try:
        # Change cwd to that repo
        os.chdir(repo_dir)

        # Git init empty directory
        git.init()

        # Add remote origin
        ssh_url = 'ssh://%s@%s:%s/%s' % (remote.username,
                                         remote.host,
                                         remote.port,
                                         self.name)
        git.add_remote(origin, ssh_url)

        # Fetch refs/meta/config for project
        refspec = 'refs/meta/config:refs/remotes/origin/meta/config'
        git.fetch(origin, refspec)

        # Checkout refs/meta/config
        git.checkout_branch('meta/config')

        # Get md5 of existing config (missing file hashes as empty string)
        _file = os.path.join(repo_dir, 'project.config')
        contents = ''
        try:
            with open(_file, 'r') as f:
                contents = f.read()
        except IOError:
            pass
        existing_md5 = hashlib.md5(contents).hexdigest()

        # Get md5 of new config
        with open(self.config, 'r') as f:
            contents = f.read()
        new_md5 = hashlib.md5(contents).hexdigest()

        msg = "Project %s: Md5 comparision\n%s\n%s"
        msg = msg % (self.name, existing_md5, new_md5)
        logger.debug(msg)
        print msg

        # Only alter if checksums do not match
        if existing_md5 != new_md5:
            logger.debug("Project %s: config md5's are different." % self.name)

            # Update project.config file
            _file = os.path.join(repo_dir, 'project.config')
            with open(_file, 'w') as f:
                f.write(contents)

            # Update groups file
            group_contents = groups_file_contents(groups)
            _file = os.path.join(repo_dir, 'groups')
            with open(_file, 'w') as f:
                f.write(group_contents)

            # Git config user.email
            git.set_config('user.email', conf['git-config']['email'])

            # Git config user.name
            git.set_config('user.name', conf['git-config']['name'])

            # Add groups and project.config
            git.add(['groups', 'project.config'])

            # Git commit
            git.commit(message='Setting up %s' % self.name)

            # Git push
            git.push(origin, refspecs='meta/config:refs/meta/config')
            logger.info("Project %s: pushed configuration." % self.name)
        else:
            msg = "Project %s: config unchanged." % self.name
            logger.info(msg)
            print msg
    finally:
        # Change to old current working directory
        os.chdir(old_cwd)

        # Attempt to clean up created directory
        shutil.rmtree(repo_dir)