def main(token, date_from, date_to):
    """Mine GitHub repositories created in [date_from, date_to] and clone
    those whose description, owner, or name contains the word "tosca"
    (case-insensitive, word-boundary match).

    Args:
        token: GitHub API token handed to GithubMiner.
        date_from: start of the mining window.
        date_to: end of the mining window.
    """
    github_miner = GithubMiner(token=token, date_from=date_from, date_to=date_to)
    # Compile once instead of re-parsing the pattern for every field.
    tosca_pattern = re.compile(r"\btosca\b")
    i = 0
    for repo in github_miner.mine():
        i += 1
        # BUG FIX: repo['description'] can be None for repositories without
        # a description; calling .lower() on None raised AttributeError.
        # Guard owner/name the same way for robustness.
        fields = (repo['description'] or '',
                  repo['owner'] or '',
                  repo['name'] or '')
        if any(tosca_pattern.search(field.lower()) for field in fields):
            clone_repo(repo['owner'], repo['name'])

    print(f'{i} repositories mined')
    print(f'Quota: {github_miner.quota}')
    print(f'Quota will reset at: {github_miner.quota_reset_at}')
    print('---------------')

    # Keep an append-only audit trail of executed queries.
    with open("logs/executed_queries.txt", "a+") as file:
        file.write(f'mined: {i} from: {date_from} to: {date_to} \n')
def __update_code(self):
    """Update the repo working copy and merge the phabricator diff tag.

    Resets the checkout to the remote branch, (re)registers the STAGE
    remote, fetches the diff tag and merges it into the branch.

    Raises:
        Exception: "Merge tag fail" when the merge step fails,
            "Update code fail" for any other failing git command.
    """
    logs.info("BRANCH: %s" % self.branch)
    # BUG FIX: log label typo "USRE_NAME" -> "USER_NAME".
    logs.info("USER_NAME: %s" % self.user_name)
    utils.mkdirs(self.code_path)
    utils.clone_repo(self.repo_path, self.repo_uri)
    os.chdir(self.repo_path)
    cmd_list = [
        'git clean -df',
        'git reset --hard',
        # Stay on the branch if already checked out, otherwise switch to it.
        # Raw string: "\*" is an invalid escape sequence and warns on
        # recent Python versions; the shell still sees a literal backslash.
        r'git branch | grep "\* {branch}" || git checkout {branch}'.format(
            branch=self.branch),
        'git fetch',
        'git reset --hard origin/{branch}'.format(branch=self.branch),
        # Drop a stale STAGE remote first; the "|| echo" keeps the command
        # successful when the remote does not exist yet.
        'git remote remove STAGE || echo "del stage remote"',
        'git remote add STAGE {stage_uri}'.format(
            stage_uri=self.stage_uri),
        'git fetch STAGE tag phabricator/diff/{diff}'.format(
            diff=self.diff),
        'git merge phabricator/diff/{diff}'.format(diff=self.diff),
    ]
    try:
        for cmd in cmd_list:
            utils.call(cmd)
    except Exception as e:
        logs.error(e)
        raise Exception(
            "Merge tag fail" if "git merge" in str(e) else "Update code fail")
def clone_repositories(args, temp_dir):
    """Clone every crate repository (plus the blog/doc repos) into temp_dir.

    Returns the list of unique crate repositories, or an empty list on any
    clone failure or when no crate matched args.specified_crate.
    """
    write_msg('=> Cloning the repositories...')
    repositories = []
    for entry in args.crates:
        crate = entry['crate']
        # Honor crate filtering when a specific crate was requested.
        if args.specified_crate is not None and crate['crate'] != args.specified_crate:
            continue
        repository = crate["repository"]
        if repository in repositories:
            continue
        repositories.append(repository)
        if clone_repo(repository, temp_dir) is False:
            write_error('Cannot clone the "{}" repository...'.format(repository))
            return []
    if not repositories:
        write_msg('No crate "{}" found. Aborting...'.format(args.specified_crate))
        return []
    if args.doc_only is False:
        # Blog and doc repos only matter for full runs; shallow clones
        # suffice since their history is never inspected.
        for extra_repo in (consts.BLOG_REPO, consts.DOC_REPO):
            if clone_repo(extra_repo, temp_dir, depth=1) is False:
                write_error('Cannot clone the "{}" repository...'.format(extra_repo))
                return []
    write_msg('Done!')
    return repositories
def _build_containerd_binaries(self):
    """Fetch the containerd and ctr sources, then build the binaries."""
    # Clone containerd itself into its dedicated folder.
    utils.clone_repo(self.opts.containerd_repo,
                     self.opts.containerd_branch,
                     utils.get_containerd_folder())
    # Clone the ctr client alongside it.
    utils.clone_repo(self.opts.ctr_repo,
                     self.opts.ctr_branch,
                     utils.get_ctr_folder())
    utils.build_containerd_binaries()
def _prepare_ansible(self): utils.clone_repo(self.opts.ansibleRepo, self.opts.ansibleBranch, self.default_ansible_path) # Creating ansible hosts file linux_master_hostname = self.deployer.get_cluster_master_vm_name() windows_minions_hostnames = self.deployer.get_cluster_win_minion_vms_names() hosts_file_content = self.ansible_hosts_template.replace("KUBE_MASTER_PLACEHOLDER", linux_master_hostname) hosts_file_content = hosts_file_content.replace("KUBE_MINIONS_WINDOWS_PLACEHOLDER", "\n".join(windows_minions_hostnames)) self.logging.info("Writing hosts file for ansible inventory.") with open(self.ansible_hosts_path, "w") as f: f.write(hosts_file_content) # This proliferation of args should be set to cli to ansible when called win_hosts_extra_vars = "\nCONTAINER_RUNTIME: \"%s\"" % self.opts.containerRuntime if self.opts.containerRuntime == "containerd": win_hosts_extra_vars += "\nCNIBINS: \"sdnms\"" # Creating hosts_vars for hosts for vm_name in windows_minions_hostnames: vm_username = self.deployer.get_win_vm_username() vm_pass = self.deployer.get_win_vm_password() hosts_var_content = self.ansible_host_var_windows_template.replace("USERNAME_PLACEHOLDER", vm_username).replace("PASS_PLACEHOLDER", vm_pass) filepath = os.path.join(self.ansible_host_var_dir, vm_name) with open(filepath, "w") as f: f.write(hosts_var_content) f.write(win_hosts_extra_vars) # Enable ansible log, json output and set ssh options with open(self.ansible_config_file, "a") as f: log_file = os.path.join(self.opts.log_path, "ansible-deploy.log") log_config = "log_path=%s\n" % log_file json_output = "stdout_callback = json\nbin_ansible_callbacks = True" # This probably goes better in /etc/ansible.cfg (set in dockerfile ) ansible_config = "\n\n[ssh_connection]\nssh_args=-o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null\n" f.write(log_config) f.write(json_output) f.write(ansible_config) full_ansible_tmp_path = os.path.join(self.ansible_playbook_root, "tmp") utils.mkdir_p(full_ansible_tmp_path) 
# Copy prebuilt binaries to ansible tmp for path in glob.glob("%s/*" % utils.get_bins_path()): self.logging.info("Copying %s to %s." % (path, full_ansible_tmp_path)) shutil.copy(path, full_ansible_tmp_path) azure_ccm = "false" # Generate azure.json if needed and populate group vars with necessary paths if self.opts.flannelMode == Terraform_Flannel.FLANNEL_MODE_L2BRIDGE: self._generate_azure_config() azure_ccm = "true" # Set flannel mode in group vars with open(self.ansible_group_vars_file, "a") as f: f.write("FLANNEL_MODE: %s\n" % self.opts.flannelMode) f.write("AZURE_CCM: %s\n" % azure_ccm) f.write("AZURE_CCM_LOCAL_PATH: %s\n" % Terraform_Flannel.AZURE_CCM_LOCAL_PATH)
def _prepare_ansible(self):
    """Prepare the Ansible deployment tree for the OVN/OVS CI run.

    Clones the Ansible repo, renders the inventory from the Linux/Windows
    VM lists, writes per-Windows-host credentials, appends FIP entries to
    the hosts file, configures Ansible logging/SSH options, and copies the
    prebuilt Kubernetes binaries into the playbook tmp directory.
    """
    utils.clone_repo(self.opts.ansibleRepo, self.opts.ansibleBranch, self.default_ansible_path)

    # Render the ansible hosts (inventory) file: first Linux VM is the
    # master, the rest are Linux minions; all Windows VMs are minions.
    linux_master = self._get_linux_vms()[0].get("name")
    linux_minions = [vm.get("name") for vm in self._get_linux_vms()[1:]]
    windows_minions = [vm.get("name") for vm in self._get_windows_vms()]

    hosts_file_content = self.ansible_hosts_template.replace("KUBE_MASTER_PLACEHOLDER", linux_master)
    hosts_file_content = hosts_file_content.replace("KUBE_MINIONS_LINUX_PLACEHOLDER", "\n".join(linux_minions))
    hosts_file_content = hosts_file_content.replace("KUBE_MINIONS_WINDOWS_PLACEHOLDER", "\n".join(windows_minions))

    self.logging.info("Writing hosts file for ansible inventory.")
    with open(self.ansible_hosts_path, "w") as f:
        f.write(hosts_file_content)

    # Write one host_vars file per Windows VM with its admin credentials.
    for vm in self._get_windows_vms():
        vm_name = vm.get("name")
        vm_username = self.ansible_windows_admin  # TODO: make this configurable through opts
        vm_pass = openstack.server_get_password(vm_name, self.opts.keyFile)
        hosts_var_content = self.ansible_host_var_windows_template.replace("USERNAME_PLACEHOLDER", vm_username).replace("PASS_PLACEHOLDER", vm_pass)
        filepath = os.path.join(self.ansible_host_var_dir, vm_name)
        with open(filepath, "w") as f:
            f.write(hosts_var_content)

    # Append "<floating-ip> <vm-name>" entries to the hosts file; the
    # master additionally gets a "kubernetes" alias.
    with open(OVN_OVS_CI.HOSTS_FILE, "a") as f:
        for vm in self._get_all_vms():
            vm_name = vm.get("name")
            if vm_name.find("master") > 0:
                vm_name = vm_name + " kubernetes"
            hosts_entry = ("%s %s\n" % (self._get_vm_fip(vm), vm_name))
            self.logging.info("Adding entry %s to hosts file." % hosts_entry)
            f.write(hosts_entry)

    # Enable ansible logging and relax SSH host key checking
    # (appended to the existing ansible config file).
    with open(self.ansible_config_file, "a") as f:
        log_file = os.path.join(self.opts.log_path, "ansible-deploy.log")
        log_config = "log_path=%s\n" % log_file
        # NOTE(original): this probably belongs in /etc/ansible.cfg
        # (set in the dockerfile) rather than appended here.
        ansible_config = "\n\n[ssh_connection]\nssh_args=-o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null\n"
        f.write(log_config)
        f.write(ansible_config)

    full_ansible_tmp_path = os.path.join(self.ansible_playbook_root, "tmp")
    utils.mkdir_p(full_ansible_tmp_path)

    # Copy the prebuilt Kubernetes Linux binaries into the playbook tmp dir.
    for file in ["kubelet", "kubectl", "kube-apiserver", "kube-controller-manager", "kube-scheduler", "kube-proxy"]:
        full_file_path = os.path.join(utils.get_k8s_folder(), constants.KUBERNETES_LINUX_BINS_LOCATION, file)
        self.logging.info("Copying %s to %s." % (full_file_path, full_ansible_tmp_path))
        shutil.copy(full_file_path, full_ansible_tmp_path)

    # Same for the Windows binaries.
    for file in ["kubelet.exe", "kubectl.exe", "kube-proxy.exe"]:
        full_file_path = os.path.join(utils.get_k8s_folder(), constants.KUBERNETES_WINDOWS_BINS_LOCATION, file)
        self.logging.info("Copying %s to %s." % (full_file_path, full_ansible_tmp_path))
        shutil.copy(full_file_path, full_ansible_tmp_path)
def _build_containerd_shim(self):
    """Build the containerd shim, from the vendored tree unless a
    dedicated shim repository was configured."""
    # No explicit repo configured means we build from vendored sources.
    fromVendor = self.opts.containerd_shim_repo is None
    containerd_shim_path = utils.get_containerd_shim_folder(fromVendor)
    if not fromVendor:
        utils.clone_repo(self.opts.containerd_shim_repo,
                         self.opts.containerd_shim_branch,
                         containerd_shim_path)
    utils.build_containerd_shim(containerd_shim_path, fromVendor)
def update_doc_content_repository(repositories, temp_dir, token, no_push, args):
    """Regenerate the doc-content repository from every crate repository,
    commit the result and (unless no_push) push it, bump its version,
    open a pull request and publish a new minor release.

    Args:
        repositories: repository names previously cloned into temp_dir.
        temp_dir: working directory holding all the clones.
        token: GitHub token used to create the pull request.
        no_push: when True, commit locally but never push/publish.
        args: CLI arguments; args.crates maps crates to repositories.
    """
    if clone_repo(consts.DOC_CONTENT_REPO, temp_dir) is False:
        input('Try to fix the problem then press ENTER to continue...')
    write_msg("Done!")
    repo_path = join(temp_dir, consts.DOC_CONTENT_REPO)
    write_msg("=> Generating documentation for crates...")
    for repo in repositories:
        # Find the crate entry matching this repository.
        current = None
        for crate in args.crates:
            crate = crate['crate']
            if crate['repository'] == repo:
                current = crate
                break
        if current is None:
            # BUG FIX: the "{}" placeholder was never filled in —
            # .format(repo) was missing, so a literal "{}" was printed.
            input(
                'No repository matches "{}", something is weird. (Press ENTER TO CONTINUE)'
                .format(repo))
            continue
        if current.get("doc", True) is False:
            continue
        # NOTE(review): "current" is the whole crate dict, so the message
        # prints the dict; current['crate'] may have been intended — confirm.
        write_msg('==> Generating documentation for "{}"'.format(current))
        path = join(temp_dir, current['repository'])
        command = [
            'bash', '-c',
            'cd {} && make doc && mv vendor.md {}'.format(
                path, join(repo_path, current['crate']))
        ]
        if not exec_command_and_print_error(command):
            input("Fix the error and then press ENTER")
    write_msg('Done!')
    write_msg('Committing "{}" changes...'.format(consts.DOC_CONTENT_REPO))
    commit(consts.DOC_CONTENT_REPO, temp_dir, "Update vendor files")
    if no_push is False:
        push(consts.DOC_CONTENT_REPO, temp_dir, consts.MASTER_TMP_BRANCH)
    # We always make minor releases in here, no need for a more important one
    # considering we don't change the API.
    if update_repo_version(consts.DOC_CONTENT_REPO, consts.DOC_CONTENT_REPO, "",
                           temp_dir, UpdateType.MINOR, False) is False:
        write_error('The update for the "{}" crate failed...'.format(
            consts.DOC_CONTENT_REPO))
        input('Fix the error and then press ENTER')
    commit(consts.DOC_CONTENT_REPO, temp_dir, "Update version")
    if no_push is False:
        push(consts.DOC_CONTENT_REPO, temp_dir, consts.MASTER_TMP_BRANCH)
        create_pull_request(consts.DOC_CONTENT_REPO, consts.MASTER_TMP_BRANCH,
                            "master", token, False)
        input(('All done with the "{}" update: please merge the PR then press '
               'ENTER so the publication can performed...').format(
                   consts.DOC_CONTENT_REPO))
        publish_crate(consts.DOC_CONTENT_REPO, "", temp_dir,
                      consts.DOC_CONTENT_REPO, checkout_branch='master')
        write_msg('Ok all done! We can move forward now!')
    else:
        write_msg(('All with "{}", you still need to publish a new version if '
                   'you want the changes to be taken into account').format(
                       consts.DOC_CONTENT_REPO))
def _setup_kubetest(self):
    """Install kubetest: download a prebuilt binary when a link is
    configured, otherwise build it from the test-infra sources."""
    self.logging.info("Setup Kubetest")
    if self.opts.kubetest_link == "":
        # Clone repository using git and then install. Workaround for:
        # https://github.com/kubernetes/test-infra/issues/14712
        utils.clone_repo("https://github.com/kubernetes/test-infra",
                         "master", "/tmp/test-infra")
        utils.run_shell_cmd(cmd=["go", "install", "./kubetest"],
                            cwd="/tmp/test-infra",
                            env={"GO111MODULE": "on"})
    else:
        # A prebuilt binary was provided: fetch it and mark it executable.
        binary_path = "/usr/bin/kubetest"
        utils.download_file(self.opts.kubetest_link, binary_path)
        os.chmod(binary_path, stat.S_IRWXU | stat.S_IRWXG)
def current_csv(ctx, remove_temp_dir):
    """Print the channel's current CSV for the configured component."""
    config = ctx.obj['config']
    with clone_repo(config, remove_temp_dir) as repo_dir:
        component_path = os.path.join(repo_dir, config['component'])
        catalog = Catalog(component_path, config['channel'])
        click.echo(catalog.current_csv)
def _getKubetest(self):
    """Install kubetest: download a prebuilt binary when a link is
    configured, otherwise build it from the test-infra sources.

    Raises:
        Exception: when building kubetest from source fails.
    """
    self.logging.info("Get Kubetest")
    if self.opts.kubetest_link == "":
        # Clone repository using git and then install
        # Workaround for https://github.com/kubernetes/test-infra/issues/14712
        utils.clone_repo(TESTINFRA_REPO_URL, "master", "/tmp/test-infra")
        # Use os.environ instead of os.putenv: putenv does not update
        # os.environ, so the setting would be invisible to Python code
        # that inspects the environment.
        os.environ["GO111MODULE"] = "on"
        cmd = ["go", "install", "./kubetest"]
        _, err, ret = utils.run_cmd(cmd, stderr=True, cwd="/tmp/test-infra")
        if ret != 0:
            self.logging.error(
                "Failed to get kubetest binary with error: %s" % err)
            # BUG FIX: typo "errorr" in the exception message.
            raise Exception(
                "Failed to get kubetest binary with error: %s" % err)
    else:
        kubetestbin = "/usr/bin/kubetest"
        utils.download_file(self.opts.kubetest_link, kubetestbin)
        os.chmod(kubetestbin, stat.S_IRWXU | stat.S_IRWXG)
def add_bundle(ctx, remove_temp_dir, push, prune_after, bundle_path):
    """Add a bundle to the catalog's channel, commit it, optionally push.

    Exits with status 1 when the bundle already exists, is invalid, the
    prune CSV cannot be found, or the addition produced no git changes.
    """
    config = ctx.obj['config']
    author = git.Actor(config['git-name'], config['git-email'])
    channel = config['channel']
    source_bundle = Bundle(bundle_path)
    with clone_repo(config, remove_temp_dir) as repo_dir:
        path = os.path.join(repo_dir, config['component'])
        catalog = Catalog(path, channel)
        # Reject duplicates and invalid bundles before touching the catalog.
        if catalog.bundle_exists(source_bundle):
            click.echo("Bundle already exists", err=True)
            sys.exit(1)
        if not catalog.is_bundle_valid(source_bundle):
            click.echo("Invalid bundle", err=True)
            sys.exit(1)
        # Optionally prune catalog entries after the given CSV first.
        if prune_after:
            try:
                catalog.prune_after(prune_after)
            except PruneCSVNotFoundError:
                click.echo("Prune CSV not found", err=True)
                sys.exit(1)
        bundle = catalog.add_bundle(source_bundle)
        repo = git.Repo(repo_dir)
        # Stage everything, then bail out if nothing actually changed.
        repo.git.add(A=True)
        if len(repo.index.diff("HEAD")) == 0:
            click.echo("No changes detected", err=True)
            sys.exit(1)
        repo.index.commit("Adds {}".format(bundle.name),
                          committer=author,
                          author=author)
        if push:
            origin = repo.remote(name='origin')
            origin.push()
def _create_deployment(repo_url, branch):
    """Yield a Deployment built from a fresh clone of repo_url@branch.

    Generator used as a context: clones the repo into a dated temp folder,
    assembles a deployment folder (config files + installed dependencies),
    zips it and yields a Deployment; the temp folder is removed when the
    consumer is done, even on error.
    """
    # Create a temp build folder prefixed with today's date.
    prefix = '{0}_'.format(time.strftime("%Y%m%d"))
    temp_build_folder = tempfile.mkdtemp(prefix=prefix)
    try:
        # Clone the git repo into <temp>/repo.
        logger.info('Cloning repo..')
        clone_folder = os.path.join(temp_build_folder, 'repo')
        repo_path = utils.clone_repo(repo_url,
                                     destination=clone_folder,
                                     branch=branch)
        faaspot_folder = os.path.join(repo_path, FAASPOT_FOLDER)
        faaspot_config = os.path.join(faaspot_folder, 'faaspot.yml')
        logger.debug('Repo cloned to: {0}'.format(clone_folder))
        # Prepare the deployment folder.
        logger.debug('Creating deployment folder..')
        deployment_folder = os.path.join(temp_build_folder, 'deploy')
        utils.makedir(deployment_folder)
        logger.debug(
            'Deployment folder created: {0}'.format(deployment_folder))
        # Copy config files from the faaspot folder into the deployment folder.
        logger.info('Copying config files into deployment folder..')
        utils.copy_files(faaspot_folder, deployment_folder)
        # Install the repo's dependencies into the deployment folder.
        logger.info('Installing dependencies..')
        utils.install_libraries(repo_path, deployment_folder)
        # Create a zip from the deployment folder.
        logger.info('Packaging it..')
        deployment_zip = os.path.join(temp_build_folder, 'deploy.zip')
        utils.zip_dir(deployment_folder, deployment_zip)
        logger.info('Zip file created: {0}'.format(deployment_zip))
        yield Deployment(repo_path, faaspot_config, deployment_zip)
    finally:
        # Always clean up the temp build folder, even if the caller raised.
        utils.remove_dir(temp_build_folder)
def __enter__(self):
    """Context-manager entry: clone the repository at self.url.

    Returns:
        Self, so the instance is usable inside the ``with`` block.
    """
    utils.clone_repo(self.url)
    return self
def start(update_type, token, no_push, doc_only, specified_crate, badges_only):
    """Drive a full release: clone repos, bump versions on master and
    crate branches, commit/push/PR, publish crates, tag, rebuild docs
    and update the blog.

    Args:
        update_type: kind of version bump applied on master.
        token: GitHub token used for pull requests.
        no_push: when True, never push or open PRs.
        doc_only: when True, skip version bumps/publishing and only do docs.
        specified_crate: restrict the run to this crate when not None.
        badges_only: when True, only update badges (skips most steps).
    """
    write_msg('=> Creating temporary directory...')
    with TemporaryDirectory() as temp_dir:
        write_msg('Temporary directory created in "{}"'.format(temp_dir))

        # Clone every (unique) crate repository, honoring the crate filter.
        write_msg('=> Cloning the repositories...')
        repositories = []
        for crate in consts.CRATE_LIST:
            if specified_crate is not None and crate['crate'] != specified_crate:
                continue
            if crate["repository"] not in repositories:
                repositories.append(crate["repository"])
                if clone_repo(crate["repository"], temp_dir) is False:
                    write_error('Cannot clone the "{}" repository...'.format(
                        crate["repository"]))
                    return
        if len(repositories) < 1:
            write_msg(
                'No crate "{}" found. Aborting...'.format(specified_crate))
            return
        if doc_only is False:
            # Blog and doc repos are shallow-cloned (history unused).
            if clone_repo(consts.BLOG_REPO, temp_dir, depth=1) is False:
                write_error('Cannot clone the "{}" repository...'.format(
                    consts.BLOG_REPO))
                return
            if clone_repo(consts.DOC_REPO, temp_dir, depth=1) is False:
                write_error('Cannot clone the "{}" repository...'.format(
                    consts.DOC_REPO))
                return
        write_msg('Done!')

        if doc_only is False:
            # Bump versions on the master branches.
            write_msg('=> Updating [master] crates version...')
            for crate in consts.CRATE_LIST:
                if specified_crate is not None and crate['crate'] != specified_crate:
                    continue
                if update_repo_version(crate["repository"], crate["crate"],
                                       crate["path"], temp_dir, update_type,
                                       badges_only) is False:
                    write_error(
                        'The update for the "{}" crate failed...'.format(
                            crate["crate"]))
                    return
            write_msg('Done!')

            if badges_only is False:
                # Commit (and optionally push) the master version bumps.
                write_msg('=> Committing{} to the "{}" branch...'.format(
                    " and pushing" if no_push is False else "",
                    consts.MASTER_TMP_BRANCH))
                for repo in repositories:
                    commit(repo, temp_dir, "Update versions [ci skip]")
                    if no_push is False:
                        push(repo, temp_dir, consts.MASTER_TMP_BRANCH)
                write_msg('Done!')

                if no_push is False:
                    write_msg('=> Creating PRs on master branch...')
                    for repo in repositories:
                        create_pull_request(repo, consts.MASTER_TMP_BRANCH,
                                            "master", token)
                    write_msg('Done!')

                write_msg('=> Building blog post...')
                build_blog_post(repositories, temp_dir, token)
                write_msg('Done!')

        # Switch every repository to its "crate" branch.
        write_msg('=> Checking out "crate" branches')
        for repo in repositories:
            checkout_target_branch(repo, temp_dir, "crate")
        write_msg('Done!')

        if doc_only is False and badges_only is False:
            write_msg('=> Merging "master" branches into "crate" branches...')
            for repo in repositories:
                merging_branches(repo, temp_dir, "master")
            write_msg('Done!')

            # Bump versions on the crate branches.
            write_msg('=> Updating [crate] crates version...')
            for crate in consts.CRATE_LIST:
                if specified_crate is not None and crate['crate'] != specified_crate:
                    continue
                if update_crate_version(crate["repository"], crate["crate"],
                                        crate["path"], temp_dir,
                                        specified_crate) is False:
                    write_error(
                        'The update for the "{}" crate failed...'.format(
                            crate["crate"]))
                    return
            write_msg('Done!')

            write_msg('=> Committing{} to the "{}" branch...'.format(
                " and pushing" if no_push is False else "",
                consts.CRATE_TMP_BRANCH))
            for repo in repositories:
                commit(repo, temp_dir, "Update versions [ci skip]")
                if no_push is False:
                    push(repo, temp_dir, consts.CRATE_TMP_BRANCH)
            write_msg('Done!')

            if no_push is False:
                write_msg('=> Creating PRs on crate branch...')
                for repo in repositories:
                    create_pull_request(repo, consts.CRATE_TMP_BRANCH,
                                        "crate", token)
                write_msg('Done!')

            # Pause for a human check before the irreversible publish step.
            write_msg('+++++++++++++++')
            write_msg('++ IMPORTANT ++')
            write_msg('+++++++++++++++')
            write_msg('Almost everything has been done. Take a deep breath, '
                      'check for opened pull requests and once done, we can '
                      'move forward!')
            write_msg("\n{}\n".format('\n'.join(PULL_REQUESTS)))
            PULL_REQUESTS.append('=============')
            input('Press ENTER to continue...')

            write_msg('=> Publishing crates...')
            for crate in consts.CRATE_LIST:
                if specified_crate is not None and crate['crate'] != specified_crate:
                    continue
                publish_crate(crate["repository"], crate["path"], temp_dir,
                              crate['crate'])
            write_msg('Done!')

            write_msg('=> Creating PR for examples repository')
            create_pull_request("examples", "pending", "master", token)
            write_msg('Done!')

            write_msg("=> Generating tags...")
            for repo in repositories:
                generate_new_tag(repo, temp_dir, specified_crate)
            write_msg('Done!')

        if badges_only is False:
            # Regenerate the documentation repository.
            write_msg('=> Preparing doc repo (too much dark magic in here urg)...')
            cleanup_doc_repo(temp_dir)
            write_msg('Done!')

            write_msg('=> Building docs...')
            for repo in repositories:
                if repo != "sys":
                    # Maybe we should generate docs for sys crates as well?
                    write_msg('-> Building docs for {}...'.format(repo))
                    build_docs(repo, temp_dir)
            end_docs_build(temp_dir)
            write_msg('Done!')

            write_msg('=> Committing{} docs to the "{}" branch...'.format(
                " and pushing" if no_push is False else "",
                consts.CRATE_TMP_BRANCH))
            commit(consts.DOC_REPO, temp_dir, "Regen docs")
            if no_push is False:
                push(consts.DOC_REPO, temp_dir, consts.CRATE_TMP_BRANCH)
                create_pull_request(consts.DOC_REPO, consts.CRATE_TMP_BRANCH,
                                    "gh-pages", token)
                write_msg("New pull request(s):\n\n{}\n".format(
                    '\n'.join(PULL_REQUESTS)))
            write_msg('Done!')

        if doc_only is False:
            # Refresh the badges on the blog repository.
            write_msg('=> Updating blog...')
            if update_badges(consts.BLOG_REPO, temp_dir,
                             specified_crate) is False:
                write_error("Error when trying to update badges...")
            elif no_push is False:
                commit_and_push(consts.BLOG_REPO, temp_dir, "Update versions",
                                consts.MASTER_TMP_BRANCH)
                create_pull_request(consts.BLOG_REPO, consts.MASTER_TMP_BRANCH,
                                    "master", token)
            write_msg('Done!')

        write_msg('Seems like most things are done! Now remains:')
        write_msg(" * Check generated docs for all crates (don't forget to "
                  "enable features!).")
        # Keep the temp dir alive until the operator confirms completion.
        input('Press ENTER to leave (once done, the temporary directory "{}" '
              'will be destroyed)'.format(temp_dir))
def _build_sdn_binaries(self):
    """Clone the SDN repository and build its binaries."""
    target_folder = utils.get_sdn_folder()
    # Fetch the configured branch of the SDN repo, then build.
    utils.clone_repo(self.opts.sdn_repo, self.opts.sdn_branch, target_folder)
    utils.build_sdn_binaries()
def _build_k8s_binaries(self):
    """Clone the Kubernetes repository and build its binaries."""
    target_folder = utils.get_k8s_folder()
    # Fetch the configured branch of the k8s repo, then build.
    utils.clone_repo(self.opts.k8s_repo, self.opts.k8s_branch, target_folder)
    utils.build_k8s_binaries()