Example #1
    def _clone(
        self,
        workspace,
        ssh_pub_key,
        ssh_priv_key,
        ssh_user='******',
        ssh_pass='',
    ):
        ws = util.create_workspace(workspace, self.alias)
        keypair = Keypair(
            username=ssh_user,
            pubkey=ssh_pub_key,
            privkey=ssh_priv_key,
            passphrase=ssh_pass,
        )

        try:
            cb = RemoteCallbacks(credentials=keypair)
            repo = clone_repository(
                url=self.url,
                path=ws,
                callbacks=cb,
            )
            try:
                repo.checkout(self.refs)
                print(f'Cloned {repo} to {ws}')
            except Exception as err:
                raise errors.CannotFetchRef(
                    f'Cannot fetch ref: {self.refs}'
                ) from err
        except errors.CannotFetchRef:
            # Re-raise the more specific error instead of wrapping it again.
            raise
        except Exception as err:
            raise errors.CannotCloneRepository(
                f'Cannot clone repository: {err}'
            ) from err
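The errors module used in Example #1 is not shown in the snippet; a minimal sketch of what such an errors.py might contain, assuming the two custom exceptions are plain Exception subclasses:

class CannotFetchRef(Exception):
    """Hypothetical: raised when the requested ref cannot be checked out."""


class CannotCloneRepository(Exception):
    """Hypothetical: raised when the repository cannot be cloned at all."""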
Example #2
    def clone_repo(self, repo_from, repo_to):
        self.repo_from = repo_from
        self.repo_to = repo_to
        keypair = Keypair("git", self.public_key, self.private_key, "")
        callbacks = RemoteCallbacks(credentials=keypair)

        clone_repository(self.repo_from, self.repo_to, callbacks=callbacks)
Example #3
 def __init__(self, settings):
     self.repo = "https://github.com/" + settings[
         'nixbot.hydra_jobsets_repo']
     self.repo_path = settings['nixbot.repo_dir']
     self.user = settings['nixbot.bot_name']
     self.token = settings['nixbot.github_token']
     self.creds = RemoteCallbacks(UserPass(self.user, self.token))
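Example #3 only constructs the callbacks; how they are used is outside the snippet. A minimal sketch, assuming a hypothetical method on the same object that clones the jobsets repo over HTTPS with the token-based credentials:

import pygit2

def clone_jobsets(self):
    # Hypothetical helper: self.creds wraps UserPass(user, token), which
    # authenticates the HTTPS remote during the clone.
    return pygit2.clone_repository(self.repo, self.repo_path, callbacks=self.creds)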
Example #4
def find_version(repo: Repository) -> str:
    keypair = construct_keypair()
    repo.remotes['origin'].fetch(
        ['template'], callbacks=RemoteCallbacks(credentials=keypair))
    # Return the commit message for the HEAD of the "template" branch.
    return repo[repo.references.get(
        'refs/remotes/origin/template').target].message
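construct_keypair is referenced in Examples #4 and #6 but not defined there; one plausible implementation, assuming a standard OpenSSH key pair under ~/.ssh (the username and paths are placeholders):

from pathlib import Path
from pygit2 import Keypair

def construct_keypair(username='git', passphrase=''):
    # Assumed layout: the default id_rsa key pair in the user's home directory.
    ssh_dir = Path.home() / '.ssh'
    return Keypair(username, str(ssh_dir / 'id_rsa.pub'),
                   str(ssh_dir / 'id_rsa'), passphrase)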
Example #5
def push_changes(repo_path: Path):
    """Push the repository at repo_path to the remote named origin."""
    repo = Repository(repo_path)
    remote = repo.remotes['origin']
    creds = UserPass('Technical27', GITHUB_TOKEN)
    callback = RemoteCallbacks(credentials=creds)
    remote.connect(callbacks=callback)
    remote.push(['refs/heads/master:refs/heads/master'], callbacks=callback)
Example #6
 def _fetch_remote_template(self):
     # First try to pull it from the remote origin/TEMPLATE_BRANCH
     keypair = construct_keypair()
     self.repo.remotes['origin'].fetch(
         [TEMPLATE_BRANCH], callbacks=RemoteCallbacks(credentials=keypair))
     self.repo.references.create(
         f'refs/heads/{TEMPLATE_BRANCH}',
         self.repo.references.get(
             f'refs/remotes/origin/{TEMPLATE_BRANCH}').target)
Example #7
def push(repo, ref="refs/heads/master", remote_name="origin"):
    print("Pushing...")
    ssh_rsa_dir = str(Path.home()) + "/.ssh/"
    for remote in repo.remotes:
        if remote.name == remote_name:
            remote.credentials = Keypair("git", ssh_rsa_dir + "id_rsa.pub",
                                         ssh_rsa_dir + "id_rsa", "")
            callbacks = RemoteCallbacks(credentials=remote.credentials)
            remote.push([ref], callbacks=callbacks)
Example #8
def clone_code(repo_url, clone_path, revision, branch=None):
    """branch 为 None, 默认用远端的 default branch"""
    cred = _get_credit(repo_url)
    cbs = RemoteCallbacks(cred, None)
    repo = clone_repository(repo_url,
                            clone_path,
                            bare=False,
                            checkout_branch=branch,
                            callbacks=cbs)
    repo.checkout('HEAD')
    obj = repo.revparse_single(revision)
    repo.checkout_tree(obj.tree)
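_get_credit is defined elsewhere; a sketch of what such a helper could return, assuming token authentication for HTTPS remotes and an SSH key pair otherwise (the token and key paths are placeholders):

from pygit2 import Keypair, UserPass

def _get_credit(repo_url, token='<token>', ssh_dir='/root/.ssh'):
    # Hypothetical helper: pick the credential type from the URL scheme.
    if repo_url.startswith('https://'):
        return UserPass('git', token)
    return Keypair('git', ssh_dir + '/id_rsa.pub', ssh_dir + '/id_rsa', '')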
Example #9
 def git_push(self, repo):
     if not pygit2.features & pygit2.GIT_FEATURE_SSH:
         print("libgit2/pygit2 cant use SSH")
     if repo in self.ahead:
         repo_obj = self.repos[repo]['repo']
         origin = repo_obj.remotes['origin']
         print(repo_obj.remotes["origin"].url)
         credentials = KeypairFromAgent(self.push_user)
         origin.credentials = credentials
         callbacks = RemoteCallbacks(credentials=credentials)
         try:
             origin.push([repo_obj.head.name], callbacks=callbacks)
             return True
         except GitError as e:
             return e
     return False
Example #10
 def clone_from_github(self) -> bool:
     try:
         callbacks = RemoteCallbacks(
             pygit2.UserPass(self.api_key, 'x-oauth-basic'))
         clone_repository(url='https://github.com/{}'.format(
             self.repo_name),
                          path=self.old_repo,
                          callbacks=callbacks,
                          checkout_branch=self.branch)
         return os.path.exists('{}/.git'.format(self.old_repo))
     except (GitError, ValueError) as error:
         print(error)
         return False
Example #11
def pull(job):
    keybucket=os.environ['KEYS_BUCKET']
    outputbucket=job['data']['outputArtifacts'][0]['location']['s3Location']['bucketName']
    outputKey=job['data']['outputArtifacts'][0]['location']['s3Location']['objectKey']

    repo_name = job['data']['pipelineContext']['pipelineName'] 
    remote_url = job['data']['actionConfiguration']['configuration']['GitUrl']
    branch = job['data']['actionConfiguration']['configuration']['Branch']
    repo_path = '/tmp/%s' % repo_name
    creds = RemoteCallbacks( credentials=get_keys(keybucket), )

    try:
        repository_path = discover_repository(repo_path)
        repo = Repository(repository_path)
        logger.info('found existing repo, using that...')
    except Exception:
        logger.info('creating new repo for %s in %s' % (remote_url, repo_path))
        repo = create_repo(repo_path, remote_url, creds)

    pull_repo(repo,remote_url,branch,creds)
    zipfile = zip_repo(repo_path, repo_name)
    push_s3(zipfile,repo_name,outputbucket,outputKey)

    commit = repo.head.get_object()
    revision = str(commit.id)
    commit_message = commit.message
    created = commit.commit_time

    currentRevision = {
        'revision': revision,
        'changeIdentifier': '???',
        'created': created,
        'revisionSummary': commit_message
    }

    if cleanup:
        logger.info('Cleanup Lambda container...')
        shutil.rmtree(repo_path)
        os.remove(zipfile)
        os.remove('/tmp/id_rsa')
        os.remove('/tmp/id_rsa.pub')
    
    return currentRevision
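get_keys, create_repo, pull_repo, zip_repo, and push_s3 are helpers defined elsewhere in the same Lambda. A sketch of get_keys that matches the /tmp/id_rsa paths removed during cleanup, assuming the deploy key pair is stored as objects in the key bucket (object names are placeholders):

import boto3
from pygit2 import Keypair

s3 = boto3.client('s3')

def get_keys(keybucket, key_name='id_rsa', pubkey_name='id_rsa.pub'):
    # Hypothetical helper: download the deploy key pair from S3 into /tmp
    # (the same paths deleted by the cleanup step) and wrap it in a Keypair.
    s3.download_file(keybucket, key_name, '/tmp/id_rsa')
    s3.download_file(keybucket, pubkey_name, '/tmp/id_rsa.pub')
    return Keypair('git', '/tmp/id_rsa.pub', '/tmp/id_rsa', '')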
Example #12
def clone_repo(repo, creds, branch, commit, s3bucket, key):
  """
  Clone git repo, zip and upload to S3
  """
  # create folder
  tempfolder = tempfile.TemporaryDirectory()
  tempfolder_name = os.path.realpath(tempfolder.name)
  logger.info("Temp dir for clone: %s" % (tempfolder_name))
  repo = clone_repository(
    url=repo,
    path=tempfolder_name,
    checkout_branch=branch,
    callbacks=RemoteCallbacks(credentials=creds)
  )
  logger.info("Cloned")
  # switch to detached head for commit we want
  repo.checkout_tree(repo.get(commit))
  logger.info("Switched to deatched head for: %s" % (commit))
  # zip it up
  zipfile = tempfile.NamedTemporaryFile(suffix=".zip")
  zipfile_name = os.path.realpath(zipfile.name)
  logger.info("Zipping up to: %s" % (zipfile_name))
  zf = ZipFile(zipfile_name, "w")
  for dirname, subdirs, files in os.walk(tempfolder_name):
    try:
      subdirs.remove('.git')
    except ValueError:
      pass
    zdirname = dirname[len(tempfolder_name)+1:]
    zf.write(dirname, zdirname)
    for filename in files:
      zf.write(os.path.join(dirname, filename), os.path.join(zdirname, filename))
  zf.close()
  logger.info("Zip complete")
  # upload to s3
  logger.info("Uploading to S3")
  s3_file_id = commit
  s3_key = "%s/%s.zip" % (key, s3_file_id)
  s3.upload_file(zipfile_name, s3bucket, s3_key)
  s3_key = "%s/latest.zip" % (key)
  s3.upload_file(zipfile_name, s3bucket, s3_key)
  logger.info("Upload complete.")
  return s3_key
Example #13
def lambda_handler(event, context):
    keybucket = event['context']['key-bucket']
    outputbucket = event['context']['output-bucket']
    pubkey = event['context']['public-key']
    # Source IP ranges to allow requests from; if the source IP is in one of these, the request is not checked for an API key
    ipranges = []
    for i in event['context']['allowed-ips'].split(','):
        ipranges.append(ip_network(u'%s' % i))
    # API keys; it is recommended to use a different API key for each repo that uses this function
    apikeys = event['context']['api-secrets'].split(',')
    ip = ip_address(event['context']['source-ip'])
    secure = False
    for net in ipranges:
        if ip in net:
            secure = True
    if 'X-Gitlab-Token' in event['params']['header'].keys():
        if event['params']['header']['X-Gitlab-Token'] in apikeys:
            secure = True
    if 'X-Git-Token' in event['params']['header'].keys():
        if event['params']['header']['X-Git-Token'] in apikeys:
            secure = True
    if 'X-Hub-Signature' in event['params']['header'].keys():
        for k in apikeys:
            if 'use-sha256' in event['context']:
                k1 = hmac.new(str(k), str(event['context']['raw-body']),
                              hashlib.sha256).hexdigest()
                k2 = str(event['params']['header']['X-Hub-Signature'].replace(
                    'sha256=', ''))
            else:
                k1 = hmac.new(str(k), str(event['context']['raw-body']),
                              hashlib.sha1).hexdigest()
                k2 = str(event['params']['header']['X-Hub-Signature'].replace(
                    'sha1=', ''))
            if k1 == k2:
                secure = True
    # TODO: Add the ability to clone TFS repo using SSH keys
    try:
        # GitHub
        full_name = event['body-json']['repository']['full_name']
    except KeyError:
        try:
            # BitBucket #14
            full_name = event['body-json']['repository']['fullName']
        except KeyError:
            try:
                # GitLab
                full_name = event['body-json']['repository'][
                    'path_with_namespace']
            except KeyError:
                try:
                    # GitLab 8.5+
                    full_name = event['body-json']['project'][
                        'path_with_namespace']
                except KeyError:
                    try:
                        # BitBucket server
                        full_name = event['body-json']['repository']['name']
                    except KeyError:
                        # BitBucket pull-request
                        full_name = event['body-json']['pullRequest'][
                            'fromRef']['repository']['name']
    if not secure:
        logger.error('Source IP %s is not allowed' %
                     event['context']['source-ip'])
        raise Exception('Source IP %s is not allowed' %
                        event['context']['source-ip'])

    # GitHub publish event
    if ('action' in event['body-json']
            and event['body-json']['action'] == 'published'):
        branch_name = 'tags/%s' % event['body-json']['release']['tag_name']
        repo_name = full_name + '/release'
    else:
        repo_name = full_name
        try:
            # branch names should contain [name] only, tag names - "tags/[name]"
            branch_name = event['body-json']['ref'].replace(
                'refs/heads/', '').replace('refs/tags/', 'tags/')
        except:
            branch_name = 'master'
    try:
        # GitLab
        remote_url = event['body-json']['project']['git_ssh_url']
    except Exception:
        try:
            remote_url = 'git@' + event['body-json']['repository'][
                'links']['html']['href'].replace('https://', '').replace(
                    '/', ':', 1) + '.git'
        except:
            try:
                # GitHub
                remote_url = event['body-json']['repository']['ssh_url']
            except:
                # Bitbucket
                try:
                    for i, url in enumerate(event['body-json']['repository']
                                            ['links']['clone']):
                        if url['name'] == 'ssh':
                            ssh_index = i
                    remote_url = event['body-json']['repository']['links'][
                        'clone'][ssh_index]['href']
                except:
                    # BitBucket pull-request
                    for i, url in enumerate(
                            event['body-json']['pullRequest']['fromRef']
                        ['repository']['links']['clone']):
                        if url['name'] == 'ssh':
                            ssh_index = i

                    remote_url = event['body-json']['pullRequest']['fromRef'][
                        'repository']['links']['clone'][ssh_index]['href']
    repo_path = '/tmp/%s' % repo_name
    creds = RemoteCallbacks(credentials=get_keys(keybucket, pubkey), )
    try:
        repository_path = discover_repository(repo_path)
        repo = Repository(repository_path)
        logger.info('found existing repo, using that...')
    except Exception:
        logger.info('creating new repo for %s in %s' % (remote_url, repo_path))
        repo = create_repo(repo_path, remote_url, creds)
    pull_repo(repo, branch_name, remote_url, creds)
    zipfile = zip_repo(repo_path, repo_name)
    push_s3(zipfile, repo_name, outputbucket)
    if cleanup:
        logger.info('Cleanup Lambda container...')
        shutil.rmtree(repo_path)
        os.remove(zipfile)
        os.remove('/tmp/id_rsa')
        os.remove('/tmp/id_rsa.pub')
    return 'Successfully updated %s' % repo_name
Example #14
def merge_push(pr, base, settings):
    MAIN_REPO = "https://github.com/" + settings['nixbot.repo']
    PR_REPO = "https://github.com/" + settings['nixbot.pr_repo']
    REPO_PATH = settings['nixbot.repo_dir']

    user = settings['nixbot.bot_name']
    token = settings['nixbot.github_token']

    creds = RemoteCallbacks(UserPass(user, token))

    path = os.path.join(REPO_PATH, "nixpkgs.git")

    try:
        log.info('Cloning {} to {}'.format(MAIN_REPO, path))
        repo = pygit2.clone_repository(MAIN_REPO,
                                       path,
                                       bare=False,
                                       callbacks=creds)
    except ValueError:
        repo = pygit2.Repository(path)

    log.info('Fetching base repository including PRs')
    repo.remotes.add_fetch('origin',
                           "+refs/pull/*/head:refs/remotes/origin/pr/*")
    repo.remotes['origin'].fetch(callbacks=creds)

    try:
        repo.create_remote('pr', PR_REPO)
    except Exception:
        # The 'pr' remote may already exist from a previous run.
        pass

    log.info('Checking out and resetting to PR base branch')
    repo.checkout('refs/heads/{}'.format(base))
    repo.reset(
        repo.lookup_reference('refs/remotes/origin/{}'.format(base)).target,
        pygit2.GIT_RESET_HARD)
    base = repo.lookup_branch(base)

    log.info('Merging PR {} to base branch'.format(pr))
    origin_pr = repo.lookup_reference('refs/remotes/origin/pr/{}'.format(pr))
    repo.merge(origin_pr.target)

    log.info('Committing merge')
    author = Signature(settings['nixbot.bot_name'], '*****@*****.**')
    tree = repo.index.write_tree()
    repo.create_commit(base.name, author, author, 'Merge PR #{}'.format(pr),
                       tree, [repo.head.target, origin_pr.target])

    repo.state_cleanup()

    remote = repo.remotes['pr']
    pr_branch = 'pr-{}'.format(pr)
    log.info('Pushing merge to {} branch at {}'.format(pr_branch, PR_REPO))
    repo.create_branch(pr_branch, repo.head.get_object(), True)
    repo.checkout('refs/heads/{}'.format(pr_branch))
    remote.push(['+refs/heads/{b}:refs/heads/{b}'.format(b=pr_branch)],
                callbacks=creds)

    repo.state_cleanup()

    return repo
Example #15
def auth(username=None, pubkey=None, privkey=None, password=None):
    credentials = Keypair(username=username, pubkey=pubkey, privkey=privkey, passphrase=password)
    return RemoteCallbacks(credentials=credentials)
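A usage sketch for the auth helper in Example #15; the key paths and repository URL are placeholders:

from pygit2 import clone_repository

callbacks = auth(username='git',
                 pubkey='/home/user/.ssh/id_rsa.pub',
                 privkey='/home/user/.ssh/id_rsa',
                 password='')
repo = clone_repository('git@example.com:group/project.git', '/tmp/project',
                        callbacks=callbacks)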
Example #16
def lambda_handler(event, context):
    keybucket = event['context']['key-bucket']
    outputbucket = event['context']['output-bucket']
    pubkey = event['context']['public-key']
    ### Source IP ranges to allow requests from; if the source IP is in one of these, the request is not checked for an API key
    ipranges = []
    for i in event['context']['allowed-ips'].split(','):
        ipranges.append(ip_network(u'%s' % i))
    ### API keys; it is recommended to use a different API key for each repo that uses this function
    apikeys = event['context']['api-secrets'].split(',')
    ip = ip_address(event['context']['source-ip'])
    secure = False
    for net in ipranges:
        if ip in net:
            secure = True
    if 'X-Gitlab-Token' in event['params']['header'].keys():
        if event['params']['header']['X-Gitlab-Token'] in apikeys:
            secure = True
    if 'X-Git-Token' in event['params']['header'].keys():
        if event['params']['header']['X-Git-Token'] in apikeys:
            secure = True
    if 'X-Hub-Signature' in event['params']['header'].keys():
        for k in apikeys:
            if hmac.new(
                    str(k), str(event['context']['raw-body']),
                    hashlib.sha1).hexdigest() == str(
                        event['params']['header']['X-Hub-Signature'].replace(
                            'sha1=', '')):
                secure = True
    if not secure:
        logger.error('Source IP %s is not allowed' %
                     event['context']['source-ip'])
        raise Exception('Source IP %s is not allowed' %
                        event['context']['source-ip'])
    try:
        repo_name = event['body-json']['project']['path_with_namespace']
    except:
        repo_name = event['body-json']['repository']['full_name']
    try:
        remote_url = event['body-json']['project']['git_ssh_url']
    except:
        try:
            remote_url = 'git@' + event['body-json']['repository'][
                'links']['html']['href'].replace('https://', '').replace(
                    '/', ':', 1) + '.git'
        except:
            remote_url = event['body-json']['repository']['ssh_url']
    repo_path = '/tmp/%s' % repo_name
    creds = RemoteCallbacks(credentials=get_keys(keybucket, pubkey), )
    try:
        repository_path = discover_repository(repo_path)
        repo = Repository(repository_path)
        logger.info('found existing repo, using that...')
    except:
        logger.info('creating new repo for %s in %s' % (remote_url, repo_path))
        repo = create_repo(repo_path, remote_url, creds)
    pull_repo(repo, remote_url, creds)
    zipfile = zip_repo(repo_path, repo_name)
    push_s3(zipfile, repo_name, outputbucket)
    if cleanup:
        logger.info('Cleanup Lambda container...')
        shutil.rmtree(repo_path)
        os.remove(zipfile)
        os.remove('/tmp/id_rsa')
        os.remove('/tmp/id_rsa.pub')
    return 'Successfully updated %s' % repo_name
Example #17
def job_runner(job):
    job = Job.loads(job)
    db = make_db()

    def log(line):
        db(db.jobs.id == job.id).update(logs=db.jobs.logs.coalesce('') +
                                        '{0}\n'.format(line))

    log('[JOB #{0}] Started'.format(job.id))

    db(db.jobs.id == job.id).update(status=JobStatus.STARTED)
    db.commit()

    container = None
    repo_path = None

    try:
        repo_name = str(uuid4())
        repo_path = os.path.join(os.getcwd(), 'repos', repo_name)
        repos_path = os.path.dirname(repo_path)

        if config.REPO_HOST_PATH:
            repo_host_path = os.path.join(config.REPO_HOST_PATH, repo_name)

        else:
            repo_host_path = repo_path

        if not os.path.exists(repos_path):
            os.makedirs(repos_path)

        log('[JOB #{0}] git clone {1}'.format(job.id, job.ssh_url))
        repo = clone_repository(
            job.ssh_url,
            repo_path,
            callbacks=RemoteCallbacks(credentials=Keypair(
                config.SSH_USERNAME, config.SSH_PUBKEY, config.SSH_PRIVKEY,
                config.SSH_PASSPHRASE),
                                      certificate=lambda *_: True))

        log('[JOB #{0}] git checkout {1}'.format(job.id, job.commit_id))
        commit = repo.get(job.commit_id)
        repo.checkout_tree(commit.tree)

        cfg_path = os.path.join(repo_path, '.microci.json')
        cfg = {
            'dockerimg': config.DOCKER_IMAGE,
            'command': '/bin/sh microci.sh'
        }

        if os.path.exists(cfg_path):
            log('[JOB #{0}] Load .microci.json'.format(job.id))
            try:
                with open(cfg_path) as f:
                    cfg.update(json.load(f))

            except Exception:
                log('[JOB #{0}] {1}'.format(job.id, traceback.format_exc()))

        log('[JOB #{0}] docker run {1}'.format(job.id, cfg['dockerimg']))
        client = DockerClient(base_url=config.DOCKER_URL)
        container = client.containers.run(
            cfg['dockerimg'],
            command=cfg['command'],
            working_dir='/repo',
            volumes=['{0}:/repo:rw'.format(repo_host_path)],
            detach=True)

        for line in container.logs(stdout=True, stderr=True, stream=True):
            log(line.decode().rstrip('\n'))

        retcode = container.wait()
        success = retcode == 0

        log('[JOB #{0}] Returned {1}'.format(job.id, retcode))

        status = JobStatus.SUCCEED if success else JobStatus.FAILED
        db(db.jobs.id == job.id).update(status=status)

    except Exception:
        log('[JOB #{0}] {1}'.format(job.id, traceback.format_exc()))
        db(db.jobs.id == job.id).update(status=JobStatus.ERRORED)

    db.commit()

    if container is not None:
        log('[JOB #{0}] Remove container'.format(job.id))
        container.remove()

    if repo_path is not None and os.path.exists(repo_path):
        log('[JOB #{0}] Remove repository'.format(job.id))
        rmtree(repo_path)

    db.close()
def create(event, context):
    """
    Place your code to handle Create events here
    """
    logger.info(event)
    physical_resource_id = 'myResourceId'
    response_data = {}
    source_url = event['ResourceProperties']['SourceRepoUrl']
    source_branch = event['ResourceProperties']['SourceRepoBranch']
    source_bucket = event['ResourceProperties']['SourceS3Bucket']
    source_key = event['ResourceProperties']['SourceS3Key']
    s3_zip_filename = source_key.split('/')[-1]
    dest_url = event['ResourceProperties']['DestRepoUrl']
    repo_name = event['ResourceProperties']['DestRepoName']
    username = event['ResourceProperties']['DestRepoName']
    if len(username) >= 64:
        raise Exception('Username must be shorter than 64 characters')
    user_id, codecommit_username, password = get_codecommit_credentials(
        username, repo_name)
    try:
        creds = RemoteCallbacks(
            credentials=UserPass(codecommit_username, password))
        if source_url != "":
            repo = pull_repo(source_url, source_branch)
            # Uncomment the next line if you want to update your ci files to a minimal default
            # setup_ci_config(repo)
        else:
            # Fetch source from S3
            repo = create_repo('/tmp/s3source')
            r = requests.get('https://' + s3_region_url() + '/' +
                             source_bucket + '/' + source_key,
                             stream=True)
            if r.status_code == 200:
                with open('/tmp/' + s3_zip_filename, 'wb') as f:
                    for chunk in r:
                        f.write(chunk)
            else:
                raise Exception("cannot fetch zip, s3 returned %s: %s" %
                                (r.status_code, r.reason))
            zip = zipfile.ZipFile('/tmp/' + s3_zip_filename)
            zip.extractall(path='/tmp/s3source')
            author = Signature('Template Validation Pipeline Clone',
                               '*****@*****.**')
            tree = repo.TreeBuilder().write()
            repo.create_commit('HEAD', author, author, 'initial commit', tree,
                               [])
            index = repo.index
            index.add_all()
            index.write()
            tree = index.write_tree()
            repo.create_commit('refs/heads/%s' % source_branch, author, author,
                               'initial commit', tree,
                               [repo.head.get_object().hex])
        while not push_repo(repo, dest_url, creds, source_branch):
            logger.info("waiting for git credential propagation...")
            sleep(5)
    except Exception:
        logger.error("Unhandled exception: ", exc_info=1)
        raise
    delete_codecommit_credentials(user_id, username)
    return physical_resource_id, response_data