def _clone_required_cookbooks(self):
     """ Clone the cookbooks required to install Tomcat """
     script = []
     # Tomcat
     script.append(git.clone_opscode_cookbook("java"))
     script.append(git.clone("git://github.com/abiquo/tomcat.git",
         "/var/chef/cookbooks/tomcat", "ajp"))
     # Monitoring
     script.append(git.clone_opscode_cookbook("build-essential"))
     script.append(git.clone_opscode_cookbook("apt"))
     script.append(git.clone_opscode_cookbook("xml"))
     script.append(git.clone_opscode_cookbook("mysql"))
     script.append(git.clone_opscode_cookbook("php"))
     script.append(git.clone_opscode_cookbook("python"))
     script.append(git.clone_opscode_cookbook("apache2"))
     script.append(
         git.clone("git://github.com/escapestudios/chef-newrelic.git",
         "/var/chef/cookbooks/newrelic"))
     script.append(
         git.clone("git://github.com/boundary/boundary_cookbooks.git",
         "/tmp/boundary"))
     # Use only the bprobe cookbook
     script.append(Statements.exec(
         "mv /tmp/boundary/bprobe /var/chef/cookbooks/"))
     return script
Example #2
	def __init__(self):
		self.log = log.Log('logs/oc-release.log')

		# Setup AMQP connection
		sslkey = os.path.normpath(os.path.join(os.getcwd(), 'keys/ockey.pem'))
		sslcert = os.path.normpath(os.path.join(os.getcwd(), 'keys/CIA-londeroth.org.pem'))

		amqp_params = pika.ConnectionParameters(
                        host='amqp.nosebud.de',
                        port=5671,
                        virtual_host='openclonk',
                        ssl=True,
                        ssl_options={'keyfile': sslkey, 'certfile': sslcert},
                        credentials=pika.credentials.ExternalCredentials(),
                        )
		self.amqp_connection = pika.BlockingConnection(amqp_params)

		if not os.path.exists('openclonk'):
			self.log.write('Openclonk Repository does not exist. Cloning...\n')
			git.clone('git://git.openclonk.org/openclonk')
			self.log.write('Repository created\n')

		os.chdir('openclonk')

		# Register triggers
		self.queue = notifyqueue.NotifyQueue()
		self.pushtrigger = pushtrigger.PushTrigger(self.amqp_connection, self.queue, self.log)
		self.xmltrigger = xmltrigger.XMLTrigger(self.amqp_connection, self.queue, self.log)
Example #3
def get_git(scheme,url,target,overwrite,tag):
    import git

    if os.path.exists(target + '/.git'):
        if not overwrite: return
    else:
        if len(scheme) == 1: giturl = url
        else: giturl = url[4:]
        git.clone(giturl,target)

    fs.goto(target)
    git.fetch()
    out = git.branch()
    for line in out.split('\n'):
        if not line: continue
        if line[0] != '*': continue
        out = line.split()[1]
        break
    #print out,tag
    if out != tag:
        lbranches,rbranches = git.branches()
        if tag in lbranches:
            git.checkout(tag)
        else:
            # git 1.5 does not put remotes/ like 1.6 does
            from exception import CommandFailure
            try:
                git.checkout('origin/'+tag,tag)
            except CommandFailure:
                git.checkout('remotes/origin/'+tag,tag)
    git.pull()
    fs.goback()
    return
Example #4
def prepare_scoreboard_repo(url):
    path = get_github_path(url).split('/')
    scoreboard_owner = path[0]
    scoreboard_name = path[1]
    scoreboard_dir = '.score'
    clone(scoreboard_owner, scoreboard_name, False, scoreboard_dir)
    return scoreboard_dir
Example #5
    def _prepare_output_git(self, branch, input_work_dir, output_work_dir,
                            output_reference):
        start_commit = self.output_branch_name(branch)
        new_branch = False
        try:
            git.clone(
                output_reference,
                output_work_dir,
                options=['--branch',
                         self.output_branch_name(branch), '-q'])
            git.set_origin_url(self._output_tree, output_work_dir)
            try:
                git.commit_env_vars(self.output_branch_name(branch),
                                    tree=output_work_dir)
            except git.GitError:
                shutil.rmtree(output_work_dir)
                self.debug('clone incorrect - new branch?')
                raise Exception('clone incorrect - new branch?')
        except:
            # assume error was due to branch not found
            new_branch = True
            start_commit = self._check_new_branch(branch, input_work_dir,
                                                  output_work_dir,
                                                  output_reference)
            git.reset(['--hard', start_commit], tree=output_work_dir)

        try:
            git.fetch('refs/notes/mirror:refs/notes/mirror',
                      tree=output_work_dir)
        except:
            pass

        return start_commit, new_branch
Example #6
def start_get_hash(config, github, config_file):
    repo_owner = config['repo_owner']
    for team in config['teams']:
        repo_name = config['teams'][team]['repo_name']
        if repo_name == '-':
            continue

        print('[*] Get the commit hash of %s repo.' % repo_name)
        bug_branches = config['teams'][team]['bug_branches']
        clone(repo_owner, repo_name)
        branches = bug_branches if len(bug_branches) > 0 \
            else list_branches(repo_name)
        if "master" in branches:
            branches.remove("master") # Do not consider master branch
        for branch in branches:
            checkout(repo_name, branch)
            hash = get_latest_commit_hash(repo_name, int(time.time()), branch)
            config['teams'][team][branch] = hash
        rmdir(repo_name)

    with open(config_file, 'w') as outfile:
        json.dump(config, outfile, indent=4)

    print('[*] Successfully wrote to %s' % config_file)

    return
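As a rough sketch of the configuration this function expects: the key names below are the ones read by start_get_hash() above, while every value (owner, team names, repository names, branch names) is invented for illustration.

example_config = {
    'repo_owner': 'example-org',               # hypothetical owner
    'teams': {
        'team-alpha': {
            'repo_name': 'alpha-service',      # hypothetical repository
            'bug_branches': ['bug-1', 'bug-2']
        },
        'team-beta': {
            'repo_name': '-',                  # '-' means this team is skipped
            'bug_branches': []                 # empty list -> all branches are listed
        },
    },
}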
Example #7
    def mirror(self):
        """
        Start the mirroring process according to the configuration.
        """
        cwd = os.getcwd()
        result = True
        with tempdir.tempdir() as tmpdir:
            input_work_dir = os.path.join(tmpdir, 'input')

            git.clone(self._input_tree,
                      input_work_dir,
                      options=self.reference + ['-q'])
            git.remote_update(gitdir=input_work_dir)
            git.submodule(['--quiet', 'init'], tree=input_work_dir)
            git.submodule(['--quiet', 'sync'], tree=input_work_dir)
            git.submodule(['--quiet', 'update'], tree=input_work_dir)

            if self.output_tree_pull:
                output_reference = self.output_tree_pull
            else:
                output_reference = os.path.join(tmpdir, 'outref')
                git.clone(self._output_tree,
                          output_reference,
                          options=['-q', '--mirror'])

            _branches = self._glob_branches(input_work_dir)

            for branch in _branches:
                try:
                    if not self._mirror_one(branch, input_work_dir,
                                            output_reference):
                        result = False
                finally:
                    os.chdir(cwd)
        return result
Example #8
def clone_or_pull_repo(k, repo_url, b_updte_repo, b_tag_after_update=True):
    # initialize repository info
    repo = {
        'url': repo_url,
        'name': repo_path.get_repo_name_from_url(repo_url),  # repository name
    }

    # get project path
    repo_path_in_section = repo_path.get_local_repo_path(repo['name'])

    repo['path'] = os.path.abspath(repo_path_in_section)

    # just in case
    dir_backup = os.getcwd()

    # even if b_updte_repo is False,
    # if not cloned yet, do it now.
    if not os.path.exists(repo_path_in_section):
        print('clone_or_pull_repo(%2d) : clone %s' % (k, repo['url']))
        git.clone(repo['url'], id=config['Admin']['id'])
    else:
        if b_updte_repo:
            print('clone_or_pull_repo(%2d) : pull %s' % (k, repo['url']))
            fetch_and_reset(repo_path_in_section)

    # tag with time stamp after clone or pull
    tag_all_remote_branches(b_tag_after_update,
                            os.path.abspath(repo_path_in_section), repo)

    # just in case
    os.chdir(dir_backup)

    return repo
Example #9
def verify_injection(team, config_file):
    config = load_config(config_file)
    timeout = config["exploit_timeout"]["injection_phase"]
    repo_owner = config['repo_owner']
    repo_name = config['teams'][team]['repo_name']
    bug_branches = config['teams'][team]['bug_branches']
    clone(repo_owner, repo_name)
    branches = bug_branches if len(bug_branches) > 0 \
        else list_branches(repo_name)
    if "master" in branches:
        branches.remove("master")  # master branch is not verification target

    for branch in branches:
        checkout(repo_name, branch)
        exploit_dir = get_exploit_dir(repo_name, branch, config, team)
        bug_branch_result, _ = \
            verify_exploit(exploit_dir, repo_name, branch, timeout, config)

        checkout(repo_name, "master")
        master_result, _ = \
            verify_exploit(exploit_dir, repo_name, "master", timeout, config)

        rmdir(exploit_dir)

        if master_result == False and bug_branch_result == True:
            print('[*] Successfully verified branch "%s".' % branch)
        elif bug_branch_result == True:
            print ('[*] Exploit for branch "%s" works, but it also works on ' \
                   'master branch, which indicates some error.' %  branch)
            sys.exit()
        else:
            print('[*] Failed to verify exploit in branch "%s".' % branch)
            sys.exit()

    rmdir(repo_name)
Example #10
def get_next_commit(last_commit, defender, branch, config):
    repo_name = config['teams'][defender]['repo_name']
    rmdir(repo_name)
    clone(config['repo_owner'], repo_name)
    next_commit_hash = get_next_commit_hash(repo_name, branch, last_commit)
    rmdir(repo_name)
    print next_commit_hash
    if next_commit_hash == '':
        return None
    else:
        return next_commit_hash
Example #11
def verify_service(team, branch, service_port, host_port, config_file):
    config = load_config(config_file)
    repo_owner = config['repo_owner']
    repo_name = config['teams'][team]['repo_name']
    container_name = "%s-%s" % (repo_name, branch)
    clone(repo_owner, repo_name)
    docker_cleanup(container_name)
    checkout(repo_name, branch)
    setup(repo_name, container_name, int(service_port), int(host_port))
    check_liveness(container_name, int(host_port))
    docker_cleanup(container_name)
    rmdir(repo_name)
    sys.exit()
Example #12
def build_repo(repository, ref, docker_repo, docker_tag, namespace, push, registry):
    docker_repo = '{0}/{1}'.format(namespace or 'library', docker_repo)
    img_id = None
    if '{0}@{1}'.format(repository, ref) not in processed.keys():
        logger.info('Cloning {0} (ref: {1})'.format(repository, ref))
        dst_folder = git.clone(repository, ref)
        if not 'Dockerfile' in os.listdir(dst_folder):
            raise RuntimeError('Dockerfile not found in cloned repository')
        logger.info('Building using dockerfile...')
        img_id, logs = client.build(path=dst_folder, quiet=True)
        rmtree(dst_folder, True)
    else:
        img_id = processed['{0}@{1}'.format(repository, ref)]
    logger.info('Committing to {0}:{1}'.format(docker_repo,
        docker_tag or 'latest'))
    client.tag(img_id, docker_repo, docker_tag)
    if push:
        logger.info('Pushing result to registry {0}'.format(
            registry or "default"))
        if registry is not None:
            docker_repo = '{0}/{1}'.format(registry, docker_repo)
            logger.info('Also tagging {0}'.format(docker_repo))
            client.tag(img_id, docker_repo, docker_tag)
        client.push(docker_repo)
    return img_id
Example #13
def build_repo(repository, ref, docker_repo, docker_tag, namespace, push, registry):
    docker_repo = '{0}/{1}'.format(namespace or 'library', docker_repo)
    img_id = None
    dst_folder = None
    if '{0}@{1}'.format(repository, ref) not in processed.keys():
        logger.info('Cloning {0} (ref: {1})'.format(repository, ref))
        if repository not in processed:
            rep, dst_folder = git.clone(repository, ref)
            processed[repository] = rep
            processed_folders.append(dst_folder)
        else:
            dst_folder = git.checkout(processed[repository], ref)
        if not 'Dockerfile' in os.listdir(dst_folder):
            raise RuntimeError('Dockerfile not found in cloned repository')
        logger.info('Building using dockerfile...')
        img_id, logs = client.build(path=dst_folder, quiet=True)
    else:
        img_id = processed['{0}@{1}'.format(repository, ref)]
    logger.info('Committing to {0}:{1}'.format(docker_repo,
        docker_tag or 'latest'))
    client.tag(img_id, docker_repo, docker_tag)
    if push:
        logger.info('Pushing result to registry {0}'.format(
            registry or "default"))
        if registry is not None:
            docker_repo = '{0}/{1}'.format(registry, docker_repo)
            logger.info('Also tagging {0}'.format(docker_repo))
            client.tag(img_id, docker_repo, docker_tag)
        client.push(docker_repo)
    return img_id
Example #14
 def testCloneModifyPushToBranch(self):
     new_repo = clone(self.repo, utils.get_temporary_location())
     branch = new_repo.createBranch('work')
     new_repo.checkout(branch)
     utils.commit_change(new_repo)
     new_repo.push(self.repo, fromBranch=branch, toBranch='work')
     self.assertTrue(self.repo.getBranchByName('work').getHead() == new_repo.getHead())
Example #15
def api():
    """Setups a project database using django on the remote server."""
    wsgi_file = os.path.join(env.config.ApiServer.document_root, "django.wsgi")
    repo = "[email protected]:klooff/klooff-server.git"

    if not files.exists(wsgi_file):
        print "Can't file the project code at %s." % env.config.ApiServer.document_root
        query = "Do you want to clone it from github?"
        if prompt(query):
            git.clone(repo, env.config.ApiServer.document_root, env.config.branch)
            run("chmod 777 %s" % os.path.join(env.config.ApiServer.document_root, "media", "user_uploads"))
        else:
            print "Aborting."
            sys.exit(1)

    virtualenv()
    project.configure_api()
Example #16
def build_repo(repository, ref, docker_repo, docker_tag, namespace, push,
               registry, repos_folder, logger):
    ''' Builds one line of a library file.
        repository:     URL of the git repository that needs to be built
        ref:            Git reference (or commit ID) that needs to be built
        docker_repo:    Name of the docker repository where the image will
                        end up.
        docker_tag:     Tag for the image in the docker repository.
        namespace:      Namespace for the docker repository.
        push:           If the image should be pushed at the end of the build
        registry:       URL to private registry where image should be pushed
        repos_folder:   Directory where repositories should be cloned
        logger:         Logger instance
    '''
    dst_folder = None
    img_id = None
    commit_id = None
    if repos_folder:
        # Repositories are stored in a fixed location and can be reused
        dst_folder = os.path.join(repos_folder, docker_repo + _random_suffix())
    docker_repo = '{0}/{1}'.format(namespace or 'library', docker_repo)

    if '{0}@{1}'.format(repository, ref) not in processed.keys():
        # Not already built
        rep = None
        logger.info('Cloning {0} (ref: {1})'.format(repository, ref))
        if repository not in processed:  # Repository not cloned yet
            rep, dst_folder = git.clone(repository, ref, dst_folder)
            processed[repository] = rep
            processed_folders.append(dst_folder)
        else:
            rep = processed[repository]
            if ref in rep.refs:
                # The ref already exists, we just need to checkout
                dst_folder = git.checkout(rep, ref)
            else:  # ref is not present, try pulling it from the remote origin
                rep, dst_folder = git.pull(repository, rep, ref)
        if not 'Dockerfile' in os.listdir(dst_folder):
            raise RuntimeError('Dockerfile not found in cloned repository')
        commit_id = rep.head()
        logger.info('Building using dockerfile...')
        img_id, logs = client.build(path=dst_folder, quiet=True)
    else:
        logger.info('This ref has already been built, reusing image ID')
        img_id = processed['{0}@{1}'.format(repository, ref)]
        if ref.startswith('refs/'):
            commit_id = processed[repository].ref(ref)
        else:
            commit_id = ref
    logger.info('Committing to {0}:{1}'.format(docker_repo,
                docker_tag or 'latest'))
    client.tag(img_id, docker_repo, docker_tag)
    if push:
        logger.info('Pushing result to registry {0}'.format(
            registry or "default"))
        push_repo(img_id, docker_repo, registry=registry, logger=logger)
    return img_id, commit_id
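A hedged invocation sketch based only on the parameter list documented in the docstring above; the repository URL, image names, folder and logger are all made-up values.

import logging

img_id, commit_id = build_repo(
    repository='https://github.com/example/app.git',  # hypothetical repo URL
    ref='refs/heads/master',                           # hypothetical ref
    docker_repo='app',
    docker_tag='latest',
    namespace='example',
    push=False,                                        # skip pushing to a registry
    registry=None,
    repos_folder='/tmp/build-repos',                   # hypothetical clone location
    logger=logging.getLogger('build'))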
Example #17
    def _git_clone(self,url,submodules=False):
        import fs,git
        target = fs.projects()+'/gaudi'
        if os.path.exists(target):
            log.info('Directory already exists, skipping clone to: %s'%target)
            return

        fs.goto(fs.projects(),True)
        git.clone(url,'gaudi')
        fs.goback()

        if submodules:
            fs.goto(os.path.join(fs.projects(),'gaudi'),True)
            git.submodule('init')
            git.submodule('update')
            fs.goback()

        return
Example #18
def build_repo(repository, ref, docker_repo, docker_tag, namespace, push,
               registry, repos_folder, logger):
    ''' Builds one line of a library file.
        repository:     URL of the git repository that needs to be built
        ref:            Git reference (or commit ID) that needs to be built
        docker_repo:    Name of the docker repository where the image will
                        end up.
        docker_tag:     Tag for the image in the docker repository.
        namespace:      Namespace for the docker repository.
        push:           If the image should be pushed at the end of the build
        registry:       URL to private registry where image should be pushed
        repos_folder:   Directory where repositories should be cloned
        logger:         Logger instance
    '''
    dst_folder = None
    img_id = None
    commit_id = None
    if repos_folder:
        # Repositories are stored in a fixed location and can be reused
        dst_folder = os.path.join(repos_folder, docker_repo + _random_suffix())
    docker_repo = '{0}/{1}'.format(namespace or 'library', docker_repo)

    if '{0}@{1}'.format(repository, ref) not in processed.keys():
        # Not already built
        rep = None
        logger.info('Cloning {0} (ref: {1})'.format(repository, ref))
        if repository not in processed:  # Repository not cloned yet
            rep, dst_folder = git.clone(repository, ref, dst_folder)
            processed[repository] = rep
            processed_folders.append(dst_folder)
        else:
            rep = processed[repository]
            if ref in rep.refs:
                # The ref already exists, we just need to checkout
                dst_folder = git.checkout(rep, ref)
            else:  # ref is not present, try pulling it from the remote origin
                rep, dst_folder = git.pull(repository, rep, ref)
        if not 'Dockerfile' in os.listdir(dst_folder):
            raise RuntimeError('Dockerfile not found in cloned repository')
        commit_id = rep.head()
        logger.info('Building using dockerfile...')
        img_id, logs = client.build(path=dst_folder, quiet=True)
    else:
        logger.info('This ref has already been built, reusing image ID')
        img_id = processed['{0}@{1}'.format(repository, ref)]
        if ref.startswith('refs/'):
            commit_id = processed[repository].ref(ref)
        else:
            commit_id = ref
    logger.info('Committing to {0}:{1}'.format(docker_repo, docker_tag
                                               or 'latest'))
    client.tag(img_id, docker_repo, docker_tag)
    if push:
        logger.info('Pushing result to registry {0}'.format(registry
                                                            or "default"))
        push_repo(img_id, docker_repo, registry=registry, logger=logger)
    return img_id, commit_id
Example #19
def clone(git_params,
          repo_path,
          logfile,
          workDir=None,
          clone_once=False,
          **kwargs):

    git = Git(git_params, repo_path, clone_once, logfile)

    git.clone(**kwargs)

    if git.repo_cloned:

        git.checkout()

        git.apply_patch(workDir)

        git.log()
Example #20
def api():
    """Setups a project database using django on the remote server."""
    wsgi_file = os.path.join(env.config.ApiServer.document_root, "django.wsgi")
    repo = "[email protected]:klooff/klooff-server.git"

    if not files.exists(wsgi_file):
        print "Can't file the project code at %s." % env.config.ApiServer.document_root
        query = "Do you want to clone it from github?"
        if prompt(query):
            git.clone(repo, env.config.ApiServer.document_root,
                      env.config.branch)
            run("chmod 777 %s" % os.path.join(
                env.config.ApiServer.document_root, "media", "user_uploads"))
        else:
            print "Aborting."
            sys.exit(1)

    virtualenv()
    project.configure_api()
Example #21
 def testCloneModifyPushToBranch(self):
     new_repo = clone(self.repo, utils.get_temporary_location())
     prev_branch = self.repo.getCurrentBranch()
     self.repo.checkout(self.repo.createBranch('temp'))
     branch = new_repo.createBranch('work')
     new_repo.checkout(branch)
     utils.commit_change(new_repo)
     new_repo.push(self.repo, fromBranch=branch, toBranch='work')
     self.repo.checkout(prev_branch)
     self.assertTrue(self.repo.getBranchByName('work').getHead() == new_repo.getHead())
Example #22
    def _clone_or_checkout(self, url, ref, dst_folder, rep):
        if rep:
            try:
                # The ref already exists, we just need to checkout
                dst_folder = git.checkout(rep, ref)
            except git.GitException:
                # ref is not present, try pulling it from the remote origin
                rep, dst_folder = git.pull(url, rep, ref)
            return rep, dst_folder

        if dst_folder:
            rmtree(dst_folder)
        return git.clone(url, ref, dst_folder)
Example #24
    def _check_new_branch(self, branch, input_work_dir, output_work_dir,
                          output_reference):
        # try to find merge-base in input tree,
        # assumes master branch is always mirrored
        #
        # this handles the case of having created a new branch,
        # and asking for that to be mirrored into the prune tree.
        base_id = git.merge_base('origin/' + self.master,
                                 'origin/' + branch,
                                 tree=input_work_dir)
        git.clone(output_reference, output_work_dir, options=['-q'])
        git.set_origin_url(self._output_tree, output_work_dir)

        # try to find the merge-base or its parent/grandparent/... in the
        # output tree - since it should have been branched from the master
        # (or in any case we look at the merge-base between master and it)
        # this should exist - HOWEVER: some commits might not be mirrored,
        # so look for the *parent [with a reasonable limit]
        for offset in range(0, self.max_parents):
            search = git.rev_parse(rev='%s~%d' % (base_id, offset),
                                   tree=input_work_dir)
            self.debug('search for %s~%d=%s:' % (base_id, offset, search))
            grep = '%s: %s' % (self._commit_id_prefix, search)
            out_commits = git.log(options=[
                '--grep', grep, '--format=format:%H',
                'origin/' + self.output_branch_name(self.master)
            ],
                                  tree=output_work_dir)
            out_commits = out_commits.split()
            if not out_commits or len(out_commits) > 1:
                self.debug('not found')
                continue
            start_commit = out_commits[0]
            self.debug('found at %s' % start_commit)
            return start_commit
        raise Exception('failed to find parent/grandparent/...')
Example #25
def checkout(llvm_commit, clang_commit, compilerrt_commit):
    cwd = os.path.abspath(os.getcwd())

    if not os.path.exists('llvm'):
        git.clone(llvm_git_repo, ['-n'])
    os.chdir('llvm')
    git.checkout(llvm_commit)

    os.chdir(os.path.join(cwd, 'llvm', 'tools'))
    if not os.path.exists('clang'):
        git.clone(clang_git_repo, ['-n'])
    os.chdir('clang')
    git.checkout(clang_commit)

    os.chdir(os.path.join(cwd, 'llvm', 'projects'))
    if not os.path.exists('compiler-rt'):
        git.clone(compilerrt_git_repo, ['-n'])
    os.chdir('compiler-rt')
    git.checkout(compilerrt_commit)

    os.chdir(cwd)
Example #26
    def _check_new_branch(self, branch, input_work_dir, output_work_dir,
                          output_reference):
        # This handles the case of having created a new branch,
        # and asking for that to be mirrored into the prune tree.
        #
        # Try to find a starting point in the input tree. We assume
        # this will basically always succeed, since the master branch
        # is always mirrored, we _should_ find something (but can fail
        # if somebody created a branch without a merge-base.)

        # unfortunately we now need to do this first, to sort out which
        # branches we already know in the output
        git.clone(output_reference, output_work_dir, options=['-q'])

        self.debug("trying to find starting point for new branch %s" % branch)
        candidate_branches = []
        for other in self._branches:
            if other == branch:
                continue
            out_branch = 'origin/' + self.output_branch_name(other)
            try:
                git.rev_parse(out_branch, tree=output_work_dir)
            except:
                self.debug("    branch %s doesn't exist in output %s (yet)" %
                           (out_branch, output_work_dir))
                continue
            candidate_branches.append(other)

        potential_merge_bases = []
        for other in candidate_branches:
            try:
                base = git.merge_base('origin/' + other,
                                      'origin/' + branch,
                                      tree=input_work_dir)
                potential_merge_bases.append(base)
                self.debug("    base to %s is %s" % (other, base))
            except git.GitError:
                self.debug("    no base to %s" % (other, ))
        bases = git.independent_commits(potential_merge_bases,
                                        tree=input_work_dir)
        self.debug("found starting points %s" % (", ".join(bases)))
        assert len(bases) == 1, "No single merge base found: %r" % bases
        base_id = bases[0]

        base_branch = None
        for other in candidate_branches:
            if git.merge_base('origin/' + other, base_id,
                              tree=input_work_dir) == base_id:
                base_branch = 'origin/' + self.output_branch_name(other)
                break
        assert base_branch, "This shouldn't happen, found no base branch?!"

        # try to find the merge-base or its parent/grandparent/... in the
        # output tree - since it should have been branched from the master
        # (or in any case we look at the merge-base between master and it)
        # this should exist - HOWEVER: some commits might not be mirrored,
        # so look for the *parent [with a reasonable limit]
        for offset in range(0, self.max_parents):
            search = git.rev_parse(rev='%s~%d' % (base_id, offset),
                                   tree=input_work_dir)
            self.debug('search for %s~%d=%s:' % (base_id, offset, search))
            grep = '%s: %s' % (self._commit_id_prefix, search)
            out_commits = git.log(
                options=['--grep', grep, '--format=format:%H', base_branch],
                tree=output_work_dir)
            out_commits = out_commits.split()
            if not out_commits or len(out_commits) > 1:
                self.debug('not found')
                continue
            start_commit = out_commits[0]
            self.debug('found at %s' % start_commit)
            return start_commit
        raise Exception('failed to find parent/grandparent/...')
Example #27
def install(idp_fqdn=FQDN):
    """
    Install an identity server available at *idp_fqdn*.
    """
    # DEPS
    lasso.install()
    mysql.install()
    ssl.install()
    git.install()
    apache.install()
    apache.add_mod_rewrite()
    apache.add_mod_ssl()
    apache.add_mod_wsgi()
    venv.install_virtualenv()
    runcmd('apt-get install -y python-ldap')
    runcmd('apt-get install -y python-mysqldb --force-yes')

    # DB
    db_name = idp_fqdn.replace('.', '_')
    mysql.setup_db(db_name)

    # WEB
    ssl.create_certificats(idp_fqdn)
    extra = {'venv': venv.get_path(idp_fqdn), }
    apache.add_vhost(idp_fqdn, 'idp_vhost.txt', extra)

    # SOURCE
    git.clone('git://git.auf.org/authentic2', idp_fqdn)
    git.checkout(idp_fqdn, 'master')

    # VIRTUALENV
    venv.mkenv(idp_fqdn)
    bin_pip = venv.get_bin_pip(idp_fqdn)
    with cd(git.home(idp_fqdn)):
        git.sudo("%s install -r requirements.txt" % bin_pip)
        git.sudo("%s install django-auth-ldap" % bin_pip)

    # WSGI
    data = {
        'project_path': git.home(idp_fqdn),
        'venv': venv.get_path(idp_fqdn),
        }
    filename = os.path.join(TPL_DIR, 'idp_wsgi.txt')
    destination = os.path.join(venv.get_bin_path(idp_fqdn), 'idp_wsgi.py')
    upload_template(
        filename,
        destination,
        context=data,
        use_sudo=True,)
    runcmd('chown %s:%s %s' % (git.GIT_USER, git.GIT_GROUP, destination,))
    runcmd('chmod 644 %s' % (destination, ))

    # LOG file
    log_file = os.path.join(git.home(idp_fqdn), 'log.log')
    runcmd('touch %s' % log_file)
    runcmd('chmod g+w %s' % log_file)
    runcmd('chown %s:www-data %s' % (git.GIT_USER, log_file))

    # CONF
    data.update({
        'db_name': db_name,
        'db_user': db_name,
        'db_password': db_name,
        })
    filename = os.path.join(TPL_DIR, 'idp_local_settings.txt')
    destination = os.path.join(
        git.home(idp_fqdn),
        'aufcustom',
        'local_settings.py')
    upload_template(
        filename,
        destination,
        context=data,
        use_sudo=True,)
    runcmd('chown %s:%s %s' % (git.GIT_USER, git.GIT_GROUP, destination,))

    # manage.py
    data.update({
        'venv': venv.get_path(idp_fqdn),
        })
    filename = os.path.join(TPL_DIR, 'idp_manage.txt')
    destination = os.path.join(
        git.home(idp_fqdn),
        'manage.py')
    upload_template(
        filename,
        destination,
        context=data,
        use_sudo=True,)
    runcmd('chown %s:%s %s' % (git.GIT_USER, git.GIT_GROUP, destination,))
    runcmd('chmod +x %s' % (destination,))
    git.sudo('%s syncdb --migrate --noinput' % (destination,))
    git.sudo('%s collectstatic --noinput' % (destination,))

    apache.restart()
Example #28
                logging.warning(traceback.format_exc())
            finally:
                heapq.heappush(h, (time.time() + tasks[task], task))
                if TRACE:
                    print int(time.time()), " add task again: ", int(time.time()) + tasks[task], task
        else:
            # if TRACE:
            #    print time.time(), " miss task: ", exec_time, task
            heapq.heappush(h, (exec_time, task))
        time.sleep(1)


if __name__ == "__main__":
    init_db()
    logging.info("Please wait: cloning %s to %s ..." % (config.GIT_REMOTE_PATH, config.GIT_WORK_DIR))
    git.clone()
    git_update_remote_heads()
    logging.info("Please wait: Initial branch merging ...")
    git_merge_updated(limit=5)
    logging.info("Please wait: Initial jira task information upload ...")
    jira_get_statuses_resolutions_priorities()
    jira_update_new()
    loop = threading.Thread(target=main_loop)
    loop.start()
    web_.init_web(dbcon, jira_priority_map)


#            import utils.memory
#            print utils.memory.stacksize()
#            print utils.memory.memory()
#            print utils.memory.resident()
Example #29
def verify_issue(defender,
                 repo_name,
                 issue_no,
                 config,
                 github,
                 target_commit=None):
    timeout = config["exploit_timeout"]["exercise_phase"]
    repo_owner = config['repo_owner']
    title, submitter, create_time, content = \
        get_github_issue(repo_owner, repo_name, issue_no, github)

    # Issue convention: "exploit-[branch_name]"
    target_branch = title[8:]

    clone(repo_owner, repo_name)

    # Write the fetched issue content to temp file
    tmpfile = "/tmp/gitctf_%s.issue" % random_string(6)
    tmpdir = "/tmp/gitctf_%s.dir" % random_string(6)

    with open(tmpfile, "w") as f:
        f.write(content)

    # Decrypt the exploit
    mkdir(tmpdir)

    team = defender
    decrypt_exploit(tmpfile, config, team, tmpdir, submitter)
    rmfile(tmpfile)

    # Now iterate through branches and verify exploit
    # zchn: not sure about this, was: branches = list_branches(repo_name)
    bug_branches = config['teams'][team]['bug_branches']
    branches = bug_branches + ['master'] if len(bug_branches) > 0 \
        else list_branches(repo_name)

    candidates = []
    if (target_branch in branches) and (target_commit is None):
        # Iterate through branches and collect candidates
        commit = get_latest_commit_hash(repo_name, create_time, target_branch)
        candidates.append((target_branch, commit))

    verified_branch = None
    verified_commit = None

    log = 'About %s (exploit-service branch)\n' % title

    for (branch, commit) in candidates:
        if branch in title:
            result, log = verify_exploit(tmpdir, repo_name, commit, timeout, \
                    config, log=log)
        else:
            result, _ = verify_exploit(tmpdir, repo_name, commit, timeout, \
                    config)

        if result:
            verified_branch = branch
            verified_commit = commit
            break

    rmdir(tmpdir)
    rmdir(repo_name)

    if verified_branch is None:
        print("[*] The exploit did not work against branch '%s'" % \
                target_branch)
    else:
        print("[*] The exploit has been verified against branch '%s'" %
              verified_branch)

    return (verified_branch, verified_commit, submitter, log)
Example #30
import os

from git import clone
from prompt import interact
from render import render_all
from helper import schema_path

template = 'https://github.com/deepankarm/pod-template.git'


if __name__ == '__main__':
    try:
        repo_dir = clone(url=template)
        schema = schema_path(repo_dir=repo_dir)
        user_context = interact(filepath=schema)
        if not user_context:
            print('Something wrong')
        render_all(context=user_context,
                   project_directory=os.path.join(repo_dir, user_context['project_dir']))
    except KeyboardInterrupt:
        print('\nUser interrupted. Nothing to do. Bye!')
Example #31

# Notification function
def alert(result):
    print(result)
    '''
    Email notifications, or alerts sent through other messaging tools,
    can be added below.
    '''


# Start the timer task
def start_scheduler(seconds):
    scheduler = BlockingScheduler()
    print('Croner is working. Press Ctrl+{0} to exit'.format(
        'c/Enter' if os.name == 'nt' else 'C'))
    test_task()
    scheduler.add_job(test_task,
                      'interval',
                      seconds=int(seconds),
                      id='test_job')
    try:
        scheduler.start()
    except (KeyboardInterrupt, SystemExit):
        scheduler.shutdown()


if __name__ == '__main__':
    git.clone()
    start_scheduler(sys.argv[1])
Example #32
    def _sync(self, remote):
        """
        Pushes all normal branches from a source repo to gerrit.

        @param remote - gerrit.Remote object

        """
        # Only sync if source repo is provided.
        if not self.source:
            return

        # Only sync if heads and/or tags are specified
        if not self.heads and not self.tags:
            return

        msg = "Project %s: syncing with repo %s." % (self.name, self.source)
        logger.info(msg)
        print msg

        repo_dir = '~/tmp'
        repo_dir = os.path.expanduser(repo_dir)
        repo_dir = os.path.abspath(repo_dir)

        # Make Empty directory - We want this to stop and fail on OSError
        if not os.path.isdir(repo_dir):
            os.makedirs(repo_dir)
            logger.debug("Project %s: Created directory %s" %
                         (self.name, repo_dir))

        # Save the current working directory
        old_cwd = os.getcwd()

        try:
            # Change cwd to that repo
            os.chdir(repo_dir)

            uuid_dir = str(uuid4())
            repo_dir = os.path.join(repo_dir, uuid_dir)

            # Do a git clone --bare <source_repo>
            git.clone(self.source, name=uuid_dir, bare=True)

            # Change to bare cloned directory
            os.chdir(uuid_dir)

            # Add remote named gerrit
            ssh_url = 'ssh://%s@%s:%s/%s' % (remote.username, remote.host,
                                             remote.port, self.name)
            git.add_remote('gerrit', ssh_url)

            # Push heads
            if self.heads:
                kwargs = {'all_': True}
                if self.force:
                    kwargs['force'] = True
                git.push('gerrit', **kwargs)

            # Push tags
            if self.tags:
                kwargs = {'tags': True}
                if self.force:
                    kwargs['force'] = True
                git.push('gerrit', **kwargs)

            ref_kwargs = self.ref_kwargs()

            # Grab origin refs
            origin_refset = git.remote_refs('origin', **ref_kwargs)

            # Grab gerrit refs
            gerrit_refset = git.remote_refs('gerrit', **ref_kwargs)

            # Find refs that should be removed.
            prune_refset = gerrit_refset - origin_refset
            if self.preserve_prefix:
                msg = "Project %s: Preserving refs with prefixes of %s" \
                      % (self.name, self.preserve_prefix)
                logger.debug(msg)
                print msg
                heads_prefix = "refs/heads/%s" % self.preserve_prefix
                tags_prefix = "refs/tags/%s" % self.preserve_prefix
                keep = lambda ref: not ref.startswith(heads_prefix) and \
                    not ref.startswith(tags_prefix)
                prune_refset = filter(keep, prune_refset)

            # Prefix each ref in refset with ':' to delete
            colonize = lambda ref: ':%s' % ref
            prune_refset = map(colonize, prune_refset)

            # Remove branches no longer needed
            if prune_refset:
                git.push('gerrit', refspecs=prune_refset)

        finally:
            # Change to old current working directory
            os.chdir(old_cwd)

            # Attempt to clean up created directory
            shutil.rmtree(repo_dir)
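The pruning step above relies on git's deletion refspec syntax: a refspec whose source side is empty (':refs/heads/name') deletes that ref on the remote. A minimal sketch with invented ref names, reusing the same git.push() call shape as the code above:

prune_refset = [':refs/heads/stale-branch', ':refs/tags/old-tag']  # illustrative refs
# Equivalent to `git push gerrit :refs/heads/stale-branch :refs/tags/old-tag`
git.push('gerrit', refspecs=prune_refset)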
Example #33
    def _sync(self, remote):
        """
        Pushes all normal branches from a source repo to gerrit.

        @param remote - gerrit.Remote object

        """
        # Only sync if source repo is provided.
        if not self.source:
            return

        # Only sync if heads and/or tags are specified
        if not self.heads and not self.tags:
            return

        msg = "Project %s: syncing with repo %s." % (self.name, self.source)
        logger.info(msg)
        print msg

        repo_dir = '~/tmp'
        repo_dir = os.path.expanduser(repo_dir)
        repo_dir = os.path.abspath(repo_dir)

        # Make Empty directory - We want this to stop and fail on OSError
        if not os.path.isdir(repo_dir):
            os.makedirs(repo_dir)
            logger.debug(
                "Project %s: Created directory %s" % (self.name, repo_dir)
            )

        # Save the current working directory
        old_cwd = os.getcwd()

        try:
            # Change cwd to that repo
            os.chdir(repo_dir)

            uuid_dir = str(uuid4())
            repo_dir = os.path.join(repo_dir, uuid_dir)

            # Do a git clone --bare <source_repo>
            git.clone(self.source, name=uuid_dir, bare=True)

            # Change to bare cloned directory
            os.chdir(uuid_dir)

            # Add remote named gerrit
            ssh_url = 'ssh://%s@%s:%s/%s' % (
                remote.username,
                remote.host,
                remote.port,
                self.name
            )
            git.add_remote('gerrit', ssh_url)

            # Push heads
            if self.heads:
                kwargs = {'all_': True}
                if self.force:
                    kwargs['force'] = True
                git.push('gerrit', **kwargs)

            # Push tags
            if self.tags:
                kwargs = {'tags': True}
                if self.force:
                    kwargs['force'] = True
                git.push('gerrit', **kwargs)

            ref_kwargs = self.ref_kwargs()

            # Grab origin refs
            origin_refset = git.remote_refs('origin', **ref_kwargs)

            # Grab gerrit refs
            gerrit_refset = git.remote_refs('gerrit', **ref_kwargs)

            # Find refs that should be removed.
            prune_refset = gerrit_refset - origin_refset
            if self.preserve_prefix == PRESERVE_ALL_BRANCHES:
                msg = "Project %s: Preserving all refs" % self.name
                logger.debug(msg)
                print msg
                prune_refset = set([])
            elif not self.preserve_prefix is None:
                msg = "Project %s: Preserving refs with prefixes of %s" \
                      % (self.name, self.preserve_prefix)
                logger.debug(msg)
                print msg
                heads_prefix = "refs/heads/%s" % self.preserve_prefix
                tags_prefix = "refs/tags/%s" % self.preserve_prefix
                keep = lambda ref: not ref.startswith(heads_prefix) and \
                    not ref.startswith(tags_prefix)
                prune_refset = filter(keep, prune_refset)

            # Prefix each ref in refset with ':' to delete
            colonize = lambda ref: ':%s' % ref
            prune_refset = map(colonize, prune_refset)

            # Remove branches no longer needed
            if prune_refset:
                git.push('gerrit', refspecs=prune_refset)

        finally:
            # Change to old current working directory
            os.chdir(old_cwd)

            # Attempt to clean up created directory
            shutil.rmtree(repo_dir)
Example #34
from apps import app
from git import verify_new_branch, clone, push, merge
from k8s import apply_cluster, verify_existed
from efs import efs_create_directory
import sys

emissor = sys.argv[1]
ambiente = sys.argv[2]
new_branch = sys.argv[3]


def create_object(emissor, ambiente, new_branch):
    app_object = app(name_app = emissor, ambiente = ambiente, new_branch = new_branch)
    return app_object

if __name__ == "__main__":
    verify_new_branch(new_branch, ambiente)
    clone(new_branch, ambiente)
    app_object = create_object(emissor, ambiente, new_branch)
    NEW_COMMIT = verify_existed(app_object.name_deployment, app_object.name_ingress, app_object.name_app, app_object.ambiente)
    NEW_PUSH = push(NEW_COMMIT, app_object.ambiente, app_object.new_branch)
    merge(app_object.ambiente, NEW_PUSH, app_object.new_branch)
    efs_create_directory(app_object.name_app, app_object.ambiente)
    apply_cluster(app_object.name_app, app_object.name_deployment, app_object.name_ingress, app_object.ambiente)
Example #35
parser.add_argument('url')
args = parser.parse_known_args()

name = str(args[0].url).rsplit('/', maxsplit=1)[-1]
if name[-4:] == '.git':
    name = name[:-4]
path = Path.cwd() / name

print(path)

identities = configparser.ConfigParser()
identities.read(identities_file_path, encoding='utf_8')

result = find_identity(path, identities)
if result.identity_key is None:
    print(Colors.yellow + 'Warning:' + Colors.default +
          ' No identity matches the current path, so no identity will be set.')
    returned = git.clone(args[0].url, args[1])
else:
    line = 'Selected ' + Colors.bold + result.identity_key + Colors.default + ' based on '
    if result.keyword is not None:
        line += 'keyword "%s".' % result.keyword
    elif result.path is not None:
        line += 'path "%s" with weakness %s.' % (result.path, result.weakness)
    print(line)

    returned = git.clone(args[0].url, args[1] + ['--config', 'user.name=%s' % result.identity['name'],
                                                 '--config', 'user.email=%s' % result.identity['email']])

exit(returned.returncode)