Example #1
def CloneRepo(repo, username, password):
    # NOTE: username and password are accepted but not used here; callers are
    # expected to embed credentials in the repo URL (see Examples #2 and #7).
    # new_repo_path is assumed to be defined at module level.
    try:
        git.exec_command('clone', repo, 'repo', cwd=new_repo_path)
    except Exception as e:
        print(e)
        exit(100)  # exit code 100 means the clone failed
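CloneRepo above accepts username and password but never uses them; a minimal sketch of how a caller might embed the credentials in the URL first, following the same pattern as Examples #2 and #7 (the helper name, host, and repository path are placeholders):

import re

def build_authenticated_url(repo_url, username, password):
    # Hypothetical helper: insert "user:password@" right after the scheme,
    # the same re.sub pattern used in Example #7.
    return re.sub('//', '//' + username + ':' + password + '@', repo_url, count=1)

# Hypothetical usage:
# CloneRepo(build_authenticated_url('https://example.com/org/repo.git', user, pwd), user, pwd)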
Example #2
def git_clone_zip(user, password, repo_url, file, repo):
    git.exec_command('clone',
                     'https://' + user + ':' + password + '@' + repo_url,
                     cwd='/tmp/')  # clone the repository into /tmp
    src = os.path.realpath('/tmp/' + repo + '/')  # resolve the real path of the cloned repository
    shutil.make_archive('/tmp/' + file, 'zip', src)  # create the zip archive
Example #3
def CreateBranch(branch_name):
    try:
        git.exec_command('checkout', '-B', branch_name, cwd="{}/repo".format(new_repo_path))
    except Exception as e:
        print(e)
        exit(200)  # exit code 200 means the branch could not be created
Example #4
def CommitChanges(branch_name):
    try:
        git.exec_command('add', '.', cwd="{}/repo".format(new_repo_path))
        # commit_env is assumed to be a module-level mapping with the
        # GIT_AUTHOR_* / GIT_COMMITTER_* variables set (see Example #9).
        git.exec_command('commit', '-m', 'Updating Instance type via automation',
                         cwd="{}/repo".format(new_repo_path), env=commit_env)
    except Exception as e:
        print(e)
        exit(300)  # exit code 300 means the commit failed
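CommitChanges above relies on a module-level commit_env mapping; a minimal sketch of how it might be built, adapted from Examples #9 and #16 (copying os.environ instead of mutating it, and the identity values, are assumptions here):

import os

# Assumed module-level setup; name and e-mail values are placeholders.
commit_env = os.environ.copy()
commit_env['GIT_AUTHOR_NAME'] = 'automation-user'
commit_env['GIT_AUTHOR_EMAIL'] = 'automation@example.com'
commit_env['GIT_COMMITTER_NAME'] = 'automation-user'
commit_env['GIT_COMMITTER_EMAIL'] = 'automation@example.com'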
Example #5
def build_blog_from_git(main_dir, blog_dir):
    if os.path.isdir(blog_dir):
        shutil.rmtree(blog_dir)  # just fetch from the repo (including submodule dependencies)
    stdout, stderr = git.exec_command('clone',
                                      os.environ["URL_TO_GIT_REPO_HTTPS"],
                                      blog_dir,
                                      cwd=main_dir)
    _logger.info('Git stdout: {}, stderr: {}'.format(stdout.decode("utf-8"),
                                                     stderr.decode("utf-8")))
    os.chdir(blog_dir)

    stdout, stderr = git.exec_command(
        'clone',
        os.environ["URL_TO_GIT_REPO_THEME_HTTPS"],
        blog_dir + "/" + os.environ["THEME_NAME"],
        cwd=blog_dir)
    _logger.info('Git theme stdout: {}, stderr: {}'.format(
        stdout.decode("utf-8"), stderr.decode("utf-8")))

    settings = read_settings("publishconf.py")
    pelican = Pelican(settings)
    pelican.run()

    upload_recursively(blog_dir + "/output", os.environ["BUCKET_NAME"])
Example #6
def set_up_repo():
    # Lambda seems to reuse resources sometimes and the clone already exists.
    if not os.path.isdir(GIT_WORKING_DIRECTORY):
        git.exec_command('clone', GITHUB_REPO_URL_WITH_CREDENTIALS)
    git.exec_command('pull',
                     GITHUB_REPO_URL_WITH_CREDENTIALS,
                     cwd=GIT_WORKING_DIRECTORY)
    return f'{LAMBDA_WORKING_DIRECTORY}/{GITHUB_REPO_NAME}'
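set_up_repo above depends on several module-level constants that are not shown; a plausible sketch of how they could be defined (every concrete value below is an assumption, not taken from the original module):

import os

LAMBDA_WORKING_DIRECTORY = '/tmp'
GITHUB_REPO_NAME = 'my-repo'  # placeholder repository name
GIT_WORKING_DIRECTORY = f'{LAMBDA_WORKING_DIRECTORY}/{GITHUB_REPO_NAME}'
# Token-in-URL pattern as in Example #16; the org name and token source are placeholders.
GITHUB_REPO_URL_WITH_CREDENTIALS = (
    'https://' + os.environ['GITHUB_TOKEN'] + ':x-oauth-basic@github.com/'
    + 'my-org/' + GITHUB_REPO_NAME + '.git'
)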
Example #7
def pull_source_repo(repo_name, repo_url, branch, user, pwd, path):

    auth_repo_url = re.sub('//', '//' + user + ':' + pwd + '@', repo_url)
    os.mkdir(path)
    try:
        git.exec_command('clone',
                         '--single-branch',
                         '-b',
                         branch,
                         auth_repo_url,
                         cwd=path)
    except Exception:
        raise
Example #8
    def test_exec_command(self, PopenMock, PipeMock):
        branch_name = 'js/my_new_branch'

        PopenMock.return_value.communicate.return_value = ('output', 'error')
        PopenMock.return_value.returncode = 0
        PipeMock.return_value = StringIO()

        git.exec_command('checkout', '-b', branch_name)

        PopenMock.assert_called_with(['git', 'checkout', '-b', branch_name],
                                     stdout=PipeMock,
                                     stderr=PipeMock,
                                     cwd='/tmp',
                                     env=os.environ)
Example #9
def generate_tfvars(baseline_repo_name, org_name, project_name,
                    terraform_vars_map):
    #authorization = getAuthorization()
    print(f"Generating new tfvars file and adding to: {baseline_repo_name}")
    new_repo_path = f"/tmp/{baseline_repo_name}"
    os.mkdir(new_repo_path)

    #Prevents Git credentials from getting logged
    logging.getLogger("git").setLevel(logging.WARNING)
    repo_url = f"https://{pat}@dev.azure.com/{org_name}/{project_name}/_git/{baseline_repo_name}"

    git_user_name = "pl-css-admin"
    git_user_email = "*****@*****.**"

    commit_env = os.environ
    commit_env['GIT_AUTHOR_NAME'] = 'PLUser'
    commit_env['GIT_AUTHOR_EMAIL'] = '*****@*****.**'
    commit_env['GIT_COMMITTER_NAME'] = 'PLUser'
    commit_env['GIT_COMMITTER_EMAIL'] = '*****@*****.**'

    git.exec_command('clone', repo_url, new_repo_path)

    #Write the tfvars file to the baseline repo
    with open(new_repo_path + "/terraform.auto.tfvars", "w") as f:
        f.write("## tfvars ##\n\n")
        for k, v in terraform_vars_map.items():
            f.write(f"{k}    =   \"{v}\" \n")

    #Commit and push the tfvars file
    git.exec_command('add', 'terraform.auto.tfvars', cwd=new_repo_path)
    git.exec_command('commit', '-m', 'added auto.tfvars', cwd=new_repo_path, env=commit_env)
    git.exec_command('push', cwd=new_repo_path)
Example #10
def clone_and_process(filename, url, path):
    import git

    download_scc()

    os.chdir('/tmp')

    os.system('rm -rf /tmp/scc-tmp-path')
    git.exec_command('clone', '--depth=1', url, 'scc-tmp-path', cwd='/tmp')

    os.system('./scc -f json -o /tmp/' + filename + ' scc-tmp-path')

    with open('/tmp/' + filename, 'rb') as f:
        s3.upload_fileobj(f, bucket_name, filename)

    os.system('rm -rf /tmp/scc-tmp-path')
Example #11
    def git_exec(self, *params, cwd=None):
        if cwd is None:
            cwd = self.path_repo
        stdout, stderr = git.exec_command(*params, cwd=cwd)
        self.exec_stderr = stderr.decode()
        self.exec_stdout = stdout.decode()
        return self.exec_stdout
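A brief usage sketch for the git_exec wrapper above; the instance name and the alternate repository path are hypothetical, and the surrounding class (which sets self.path_repo) is not shown in the example:

# `helper` stands for an instance of the class that defines git_exec above.
current_branch = helper.git_exec('rev-parse', '--abbrev-ref', 'HEAD').strip()
status = helper.git_exec('status', '--short', cwd='/tmp/another-repo')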
Example #12
def create_baseline_subdirectory(org, project_name, baseline_repo_name,
                                 subdir_name):

    #Make tmp directory to checkout Git repo
    local_repo_path = f"/tmp/{baseline_repo_name}"
    os.mkdir(local_repo_path)

    #Prevents Git credentials from getting logged
    logging.getLogger("git").setLevel(logging.WARNING)
    # `pat` (a personal access token for Azure DevOps) is assumed to be defined at module level
    baseline_repo_url = f"https://{pat}@dev.azure.com/{org}/{project_name}/_git/{baseline_repo_name}"

    git_user_name = "pl-css-admin"
    git_user_email = "*****@*****.**"

    commit_env = os.environ
    commit_env['GIT_AUTHOR_NAME'] = 'PLUser'
    commit_env['GIT_AUTHOR_EMAIL'] = '*****@*****.**'
    commit_env['GIT_COMMITTER_NAME'] = 'PLUser'
    commit_env['GIT_COMMITTER_EMAIL'] = '*****@*****.**'

    git.exec_command('clone', baseline_repo_url, local_repo_path)

    #Define subdir path
    subdir_path = f"{local_repo_path}/{subdir_name}"
    # define the access rights
    access_rights = 0o755

    try:
        os.mkdir(subdir_path, access_rights)
    except FileExistsError:
        print(
            f"The baseline sub-directory: {subdir_name} already exists, skipping changes"
        )

    else:
        print(
            f"Successfully created the account baseline sub-directory: {subdir_name}"
        )

        #Creating test file to commit
        with open(subdir_path + "/terraform.auto.tfvars", "w") as f:
            f.write("## Autogenerated terraform.auto.tfvars ##\n\n")

        #Commit and push the tfvars file
        print("Committing changes")
        git.exec_command('add', '-A', cwd=local_repo_path)
        git.exec_command('commit',
                         '-m',
                         'added auto.tfvars',
                         cwd=local_repo_path,
                         env=commit_env)
        git.exec_command('push', cwd=local_repo_path)
Example #13
def push_to_cc(repo_name, branch, cc_user, cc_pass, sr_url, path):

    cc = boto3.client('codecommit')
    try:
        cc.create_repository(repositoryName=repo_name,
                             repositoryDescription='Sync\'d from ' + sr_url)
    except Exception as e:
        if "RepositoryNameExistsException" in str(e):
            logger.info("Repository " + repo_name +
                        " already exists in CodeCommit.")
        else:
            return 1

    codecommit_repo_url = 'https://' + cc_user + ':' + cc_pass + '@git-codecommit.' + aws_region + '.amazonaws.com/v1/repos/' + repo_name

    git.exec_command('remote',
                     'add',
                     'codecommit',
                     codecommit_repo_url,
                     cwd=path + '/' + repo_name)
    git.exec_command('push', 'codecommit', branch, cwd=path + '/' + repo_name)

    return 0
Example #14
def commit_and_push_changes(json_filename):
    # Leaving this in here because it's useful to test on separate branches sometimes.
    # now = datetime.datetime.now().isoformat()
    # for char in ['-', ':', '.', 'T']:
    #     now = now.replace(char, '_')
    # branch_name = f'{now}_sync'
    # git.exec_command('checkout', f'-b{branch_name}', cwd=GIT_WORKING_DIRECTORY)
    git_diff = git.exec_command('diff', cwd=GIT_WORKING_DIRECTORY)
    if git_diff != (b'', b''):
        git.exec_command('add', json_filename, cwd=GIT_WORKING_DIRECTORY)
        git.exec_command('commit',
                         '-m',
                         'Syncing new repository changes.',
                         cwd=GIT_WORKING_DIRECTORY)
        git.exec_command('push',
                         GITHUB_REPO_URL_WITH_CREDENTIALS,
                         cwd=GIT_WORKING_DIRECTORY)
Example #15
def PushBranch(origin, branch_name):
    try:
        git.exec_command("push", "-u", origin, branch_name, cwd="{}/repo".format(new_repo_path))
    except Exception as e:
        print(e)
        exit(300)  # exit code 300 means the push failed
Example #16
def lambda_handler(event, context):
    # Logging
    logger = logging.getLogger()
    # logger.setLevel(logging.INFO)
    logger.setLevel(logging.DEBUG)
    logging.getLogger("botocore").setLevel(logging.ERROR)

    session = boto3.Session()
    s3_client = session.client('s3')

    logger.debug(f'event: {event}')
    logger.debug(f'context: {context}')
    product_name = event['ResourceProperties']['ProductName']
    logger.debug(f'product_name: {product_name}')
    git_url = event['ResourceProperties']['GitHubRepoUrl']
    git_url = git_url.split("https://")[1]
    logger.debug(f'git_url: {git_url}')
    developer_token = event['ResourceProperties']['GitHubDeveloperToken']
    logger.debug(f'developer_token: {developer_token}')
    repo_name = git_url.split('/')[-1]
    logger.debug(f'repo_name: {repo_name}')

    if 'Create' in event['RequestType']:
        # Ensure bucket exists
        try:
            key = 'deploy.zip'
            source_bucket = event['ResourceProperties']['SourceBucket']
            logger.debug(f'source_bucket: {source_bucket}')

            response = s3_client.head_bucket(Bucket=source_bucket)
            print(f'response: {response}')
            print(
                f'response: {response["ResponseMetadata"]["HTTPStatusCode"]}')
        except Exception as e:
            logger.error(
                f'Something happened attempting to verify the bucket exists.  It probably failed to create...'
            )
            raise e

        # Download deploy.zip to local container
        try:
            logger.info(f'Downloading {key} from {source_bucket}')
            downloaded_file = s3_client.download_file(source_bucket, key,
                                                      '/tmp/deploy.zip')
        except Exception as e:
            logger.error(f'Failed downloading {key} from {source_bucket}')
            logger.error(e)
            raise e

        # Unzip local deploy.zip
        try:
            directory_contents = os.listdir('/tmp')
            logger.debug(
                f'/tmp directory_contents before unzip: {directory_contents}')

            logger.info(f'Unzipping /tmp/deploy.zip')
            with zipfile.ZipFile('/tmp/deploy.zip', 'r') as zip_ref:
                zip_ref.extractall('/tmp')
            directory_contents = os.listdir('/tmp')
            logger.debug(
                f'/tmp directory_contents after unzip: {directory_contents}')
            directory_contents = os.listdir('/tmp/tmp')
            logger.debug(
                f'/tmp/tmp directory_contents after unzip: {directory_contents}'
            )
        except Exception as e:
            logger.error(f'Failed unzipping deploy.zip')
            logger.error(e)
            raise e

        # Clone repo locally
        try:
            logger.info(f'Cloning repo locally...')
            response = git.exec_command(
                'clone', 'https://' + developer_token + ':x-oauth-basic@' +
                str(git_url) + '.git')
            logger.debug(f'response: {response}')
            directory_contents = os.listdir('/tmp')
            logger.debug(f'/tmp directory_contents: {directory_contents}')
            directory_contents = os.listdir('/tmp/' + repo_name)
            logger.debug(
                f'/tmp/{repo_name} directory_contents: {directory_contents}')
            if 'iac' in directory_contents:
                logger.info(
                    f'Removing previous iac folder from {repo_name}...')
                shutil.rmtree('/tmp/' + repo_name + '/iac')
                directory_contents = os.listdir('/tmp/' + repo_name)
                logger.debug(
                    f'/tmp/{repo_name} directory_contents after iac folder removal: {directory_contents}'
                )
        except Exception as e:
            logger.error(f'Failed cloning the repo')
            logger.error(f'e: {e}')

        # Copy files from s3 copy to git repo folder
        try:
            logger.info(f'Copying files from s3 extract to git repo folder...')
            shutil.copytree('/tmp/tmp/iac', '/tmp/' + repo_name + '/iac')
            directory_contents = os.listdir('/tmp/' + repo_name + '/iac')
            logger.debug(
                f'/tmp/{repo_name}/iac directory_contents: {directory_contents}'
            )
        except Exception as e:
            logger.error(
                f'Failed copying files from s3 copied folder to git clone folder'
            )
            logger.error(f'e: {e}')

        # Add files to git folder
        try:
            logger.info(f'Adding files to git repo...')
            response = git.exec_command('add', '.', cwd='/tmp/' + repo_name)
            logger.debug(f'response: {response}')
            directory_contents = os.listdir('/tmp/' + repo_name)
            logger.debug(
                f'/tmp/{repo_name} directory_contents: {directory_contents}')
        except Exception as e:
            logger.error(f'Failed adding files to git repo')
            logger.error(f'e: {e}')

        # Commit files to git repo
        try:
            logger.debug(f'Committing files to git repo...')
            commit_env = os.environ
            commit_env['GIT_AUTHOR_NAME'] = 'Cloud_Transformation'
            commit_env['GIT_AUTHOR_EMAIL'] = '*****@*****.**'
            commit_env[
                'GIT_COMMITTER_NAME'] = 'Cloud_Transformation_AWS_Lambda'
            commit_env['GIT_COMMITTER_EMAIL'] = '*****@*****.**'
            response = git.exec_command('commit',
                                        '-am',
                                        'CodePipeline Initial Commit',
                                        cwd='/tmp/' + repo_name,
                                        env=commit_env)
            logger.debug(f'response: {response}')
        except Exception as e:
            logger.info(f'Branch already up-to-date.  Nothing to commit.')
            logger.debug(f'e: {e}')

        # Push source files to git repo
        try:
            logger.info(f'Pushing source files to {repo_name}...')
            response = git.exec_command('push', cwd='/tmp/' + repo_name)
            logger.debug(f'response: {response}')
        except Exception as e:
            logger.error(f'Failed pushing source files to {repo_name}')
            logger.error(e)
            raise e

        # Cleanup
        try:
            logger.info(f'Starting cleanup...')
            logger.debug(f'Removing /tmp/{repo_name}')
            shutil.rmtree('/tmp/' + repo_name)
            logger.debug(f'Removing /tmp/deploy.zip')
            os.remove('/tmp/deploy.zip')
            logger.debug(f'Removing /tmp/iac')
            shutil.rmtree('/tmp/iac')
            directory_contents = os.listdir('/tmp')
            logger.debug(f'/tmp directory_contents: {directory_contents}')
        except Exception as e:
            logger.error(f'Failed cleaning up')
            logger.error(f'e: {e}')

    # Send Signal
    response_data = dict()
    response_data['SUCCESS'] = "SUCCESS - This worked"
    logger.debug(f'response_data: {response_data}')

    cfnresponse.send(event, context, cfnresponse.SUCCESS, response_data,
                     "CustomResourcePhysicalID")
Example #17
def lambda_handler(event, context):
    logger.debug(f'event: {event}')
    logger.debug(f'context: {context}')

    # Variables
    git_dynamo_table = 'git_repo_table_name'

    # Setup Connections
    session = boto3.Session()
    iam_client = session.client('iam')
    dynamo_client = session.client('dynamodb')
    ce_client = session.client('ce')
    sts_client = session.client('sts')

    # Verify DynamoTable Exists
    try:
        logger.debug(f'Verifying DynamoTable {git_dynamo_table} exists')
        response = dynamo_client.describe_table(TableName=git_dynamo_table)
        if response:
            logger.debug(f'DynamoTable {git_dynamo_table} found')
        else:
            logger.error(
                f'DynamoTable {git_dynamo_table} not found.  Verify it exists.'
            )
            exit(1)
    except Exception as e:
        logger.error(f'Unable to find DynamoTable {git_dynamo_table}')
        logger.error(e)
        raise e

    # Pull Contents of DynamoTable
    # Git Repos
    try:
        response = get_values_from_dynamo_column(dynamo_client,
                                                 git_dynamo_table, 'git_url')
        logger.debug(f'response: {response}')
    except Exception as e:
        logger.error(f'Failed getting information from DynamoTable')
        logger.error(e)
        raise e
    # Account IDs
    try:
        account_ids = get_values_from_dynamo_column(dynamo_client,
                                                    "Environments",
                                                    "AccountId")
        logger.info(f'AccountIds found in Environments dynamo: {account_ids}')
        accounts_list = []
        for account_id in account_ids:
            accounts_list.append(account_id['AccountId']['S'])
        logger.info(f'accounts_list: {accounts_list}')

    except Exception as e:
        logger.error(f'Failed while getting information from dynamo table')
        logger.debug(e)
        raise e

    # Check git repos for CommitId changes
    try:
        changed_repos = []
        for resp in response:
            logger.debug(f'resp: {resp}')
            git_url = resp['git_url']['S']
            git_url = git_url.split("https://")[1]
            logger.debug(f'git_url: {git_url}')
            developer_token = resp['developer_token']['S']
            logger.debug(f'developer_token: {developer_token}')
            repo_name = resp['repo_name']['S']
            logger.debug(f'repo_name: {repo_name}')
            stored_commit_id = resp['commit_id']['S']
            logger.debug(f'stored_commit_id: {stored_commit_id}')

            # Clone repo locally
            if 'xxx_Repo_Name' in repo_name:
                response = git.exec_command(
                    'clone', 'https://' + developer_token + ':x-oauth-basic@' +
                    str(git_url) + '.git')
                logger.debug(f'response: {response}')
                directory_contents = os.listdir('/tmp')
                logger.debug(f'/tmp directory_contents: {directory_contents}')
                directory_contents = os.listdir('/tmp/' + repo_name)
                logger.debug(f'directory_contents: {directory_contents}')

                # # Pull Metrics for Account
                # response = get_account_metrics(
                #     iam_client,
                #     ce_client,
                #     start_range='2020-02-01',
                #     end_range='2020-05-01',
                #     granularity='MONTHLY',
                #     metric='UnblendedCost'
                # )
                # logger.debug(f'response: {response}')

                # Get Date Ranges for Metrics
                current_date = datetime.datetime.now()
                # Year
                current_year = '%04d' % current_date.year
                previous_year = int(current_year) - 1
                logger.debug(f'previous_year: {previous_year}')

                # Month (roll back one month; wrap January to December of the previous year)
                current_month = '%02d' % current_date.month
                if current_date.month == 1:
                    previous_month = '12'
                    daily_start_year = str(previous_year)
                else:
                    previous_month = '%02d' % (current_date.month - 1)
                    daily_start_year = current_year
                logger.debug(f'previous_month: {previous_month}')

                # Day (near month-end this day may not exist in the previous month)
                current_day = '%02d' % current_date.day

                start_range = str(previous_year) + '-' + current_month + '-01'
                daily_start_range = daily_start_year + '-' + previous_month + '-' + current_day
                monthly_start_range = str(previous_year) + '-' + current_month + '-01'
                end_range = current_year + '-' + current_month + '-' + current_day
                logger.debug(f'start_range: {start_range}')
                logger.debug(f'end_range: {end_range}')
                logger.debug(f'daily_start_range: {daily_start_range}')
                logger.debug(f'monthly_start_range: {monthly_start_range}')

                # Pull Metrics for Accounts
                base_table = '{"graphs": [ '
                for account in accounts_list:
                    logger.info(f'Starting actions in {account}')
                    # Get credentials for account_id (Turn into a function?)
                    try:
                        assumed_credentials = sts_client.assume_role(
                            RoleArn=('arn:aws:iam::' + account + ':role/' +
                                     ROLE_TO_ASSUME),
                            RoleSessionName='AssumedRole')
                        logger.debug(
                            f'assumed_credentials: {assumed_credentials}')
                    except Exception as e:
                        logger.error(
                            f'Failed getting credentials for {account}')
                        logger.error(e)
                        raise e

                    # Setup connection in specified account
                    try:
                        iam_client = session.client(
                            'iam',
                            aws_access_key_id=assumed_credentials[
                                "Credentials"]['AccessKeyId'],
                            aws_secret_access_key=assumed_credentials[
                                "Credentials"]['SecretAccessKey'],
                            aws_session_token=assumed_credentials[
                                "Credentials"]['SessionToken'],
                        )
                        logger.debug(f'iam_client: {iam_client}')

                        ce_client = session.client(
                            'ce',
                            aws_access_key_id=assumed_credentials[
                                "Credentials"]['AccessKeyId'],
                            aws_secret_access_key=assumed_credentials[
                                "Credentials"]['SecretAccessKey'],
                            aws_session_token=assumed_credentials[
                                "Credentials"]['SessionToken'],
                        )
                        logger.debug(f'ce_client: {ce_client}')
                    except Exception as e:
                        logger.error(f'Failed creating session connection')
                        logger.error(e)
                        raise e

                    # Total Charge for Account
                    response = get_account_monthly_cost_metrics(
                        iam_client,
                        ce_client,
                        start_range=monthly_start_range,
                        end_range=end_range,
                        granularity='MONTHLY',
                        metric='UnblendedCost')
                    logger.debug(f'response: {response}')
                    base_table += response

                    # Daily Billing for by Team Tag
                    response = get_account_team_cost_metrics(
                        iam_client,
                        ce_client,
                        start_range=daily_start_range,
                        end_range=end_range,
                        granularity='DAILY',
                        metric='UnblendedCost')
                    logger.debug(f'response: {response}')
                    base_table += response

                base_table = base_table[:-1]
                base_table += ']}'

                # Write response to file
                fp = open('/tmp/' + repo_name + '/data/test.json', "w")
                # fp.write(str(response))
                fp.write(str(base_table))
                fp.close()
                fp = open('/tmp/' + repo_name + '/data/test.json', "r")
                metrics_contents = fp.read()
                fp.close()
                logger.debug(f'metrics_contents: {metrics_contents}')

                # Add file to git folder
                logger.debug(f'Adding files to git repo')
                response = git.exec_command('add',
                                            '.',
                                            cwd='/tmp/' + repo_name)
                logger.debug(f'response: {response}')
                directory_contents = os.listdir('/tmp/' + repo_name)
                logger.debug(
                    f'/tmp/{repo_name} directory_contents: {directory_contents}'
                )

                # Commit files to git repo
                logger.debug(f'Committing files to git repo')
                commit_env = os.environ
                commit_env['GIT_AUTHOR_NAME'] = 'info'
                commit_env['GIT_AUTHOR_EMAIL'] = '*****@*****.**'
                commit_env['GIT_COMMITTER_NAME'] = 'info_AWS_Lambda'
                commit_env['GIT_COMMITTER_EMAIL'] = '*****@*****.**'
                logger.debug(f'Pushing files to git repo')
                response = git.exec_command('commit',
                                            '-am',
                                            'Updated Metrics',
                                            cwd='/tmp/' + repo_name,
                                            env=commit_env)
                logger.debug(f'response: {response}')

                # Push files to git repo
                response = git.exec_command('push', cwd='/tmp/' + repo_name)
                logger.debug(f'response: {response}')

    except Exception as e:
        logger.error('Something failed during the git operations')
        logger.error(e)