Example #1
    def testWorkflowIsTriggered(self):
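        # run_workflow_triggers() should see that the upstream refs differ from
        # the previously recorded refs and report them as new, which is what
        # triggers a workflow run.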
        commands = git_patrol.GitPatrolCommands()

        previous_uuid = uuid.uuid4()
        current_uuid = uuid.uuid4()
        mock_record_git_poll = AsyncioMock(return_value=current_uuid)
        mock_db = MockGitPatrolDb(record_git_poll=mock_record_git_poll)

        loop = asyncio.get_event_loop()

        upstream_url = 'file://' + self._upstream_dir
        ref_filters = []
        utc_datetime = datetime.datetime.utcnow()
        current_uuid, current_refs, new_refs = loop.run_until_complete(
            git_patrol.run_workflow_triggers(commands, mock_db, 'upstream',
                                             upstream_url, ref_filters,
                                             utc_datetime, previous_uuid,
                                             {'refs/heads/master': 'none'}))

        mock_record_git_poll.inner_mock.assert_called_with(
            utc_datetime, upstream_url, 'upstream', previous_uuid, self._refs,
            ref_filters)

        # The git commit hashes are always unique across test runs, thus the
        # acrobatics here to extract the HEADs and tag names only.
        record_git_poll_args, _ = mock_record_git_poll.inner_mock.call_args
        self.assertCountEqual(
            ['refs/heads/master', 'refs/tags/r0001', 'refs/tags/r0002'],
            list(record_git_poll_args[4].keys()))

        self.assertDictEqual(current_refs, self._refs)
        self.assertDictEqual(new_refs, self._refs)
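
The AsyncioMock and MockGitPatrolDb helpers used throughout these tests are not shown in the excerpts. A minimal sketch consistent with how they are used above (awaitable mocks whose assertions go through inner_mock, plus a stand-in database object that holds them) might look like the following; the real test module may implement them differently.

import unittest.mock


class AsyncioMock:
    """Awaitable wrapper around MagicMock (assumed shape, inferred from usage)."""

    def __init__(self, **kwargs):
        # Assertions such as assert_called_with() and call_args_list are made
        # against this inner MagicMock.
        self.inner_mock = unittest.mock.MagicMock(**kwargs)

    async def __call__(self, *args, **kwargs):
        return self.inner_mock(*args, **kwargs)


class MockGitPatrolDb:
    """Fake database exposing only the coroutines a given test needs."""

    def __init__(self, record_git_poll=None, record_cloud_build=None):
        self.record_git_poll = record_git_poll or AsyncioMock()
        self.record_cloud_build = record_cloud_build or AsyncioMock()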
Example #2
    def testFetchGitRefsSuccess(self):
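        # An empty filter list means every ref advertised by the upstream
        # repository should be returned unchanged.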
        commands = git_patrol.GitPatrolCommands()

        upstream_url = 'file://' + self._upstream_dir
        ref_filters = []
        refs = asyncio.get_event_loop().run_until_complete(
            git_patrol.fetch_git_refs(commands, upstream_url, ref_filters))
        self.assertDictEqual(refs, self._refs)
Example #3
    def testFetchGitRefsFilteredSuccess(self):
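        # Only refs matching the 'refs/tags/*' filter should be returned;
        # refs/heads/master must be filtered out.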
        commands = git_patrol.GitPatrolCommands()

        upstream_url = 'file://' + self._upstream_dir
        ref_filters = ['refs/tags/*']
        refs = asyncio.get_event_loop().run_until_complete(
            git_patrol.fetch_git_refs(commands, upstream_url, ref_filters))
        self.assertDictEqual(refs, {
            k: v
            for k, v in self._refs.items() if k.startswith('refs/tags/')
        })
Example #4
def main():
    # Parse command line flags.
    parser = argparse.ArgumentParser()
    parser.add_argument(
        '--poll_interval',
        type=int,
        default=7200,
        help='Time between repository poll attempts in seconds.')
    parser.add_argument(
        '--config_path',
        required=True,
        help='Path to configuration file and Cloud Build source archives.')
    parser.add_argument(
        '--config',
        default='gitpatrol.yaml',
        help='Name of configuration file within the --config_path folder.')
    parser.add_argument('--db_host',
                        default='localhost',
                        help='Hostname or IP address of the database server.')
    parser.add_argument('--db_port',
                        type=int,
                        default=5432,
                        help='Destination port on the database server.')
    parser.add_argument(
        '--db_user',
        help='The name of the database role used for authentication.')
    parser.add_argument('--db_password',
                        help='The password used for authentication.')
    parser.add_argument(
        '--db_name',
        help='Name of the database to access on the database server.')
    args = parser.parse_args()

    # Use actual subprocess commands in production.
    commands = git_patrol.GitPatrolCommands()

    # Read and parse the configuration file.
    # TODO(brianorr): Parse the YAML into a well-defined Python object to easily
    # handle parse errors, etc.
    with open(os.path.join(args.config_path, args.config), 'r') as f:
        raw_config = f.read()
    git_patrol_config = yaml.safe_load(raw_config)
    git_patrol_targets = git_patrol_config['targets']
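    # Based on the target configuration used in the tests, the YAML is assumed
    # to look roughly like:
    #
    #   targets:
    #   - alias: upstream
    #     workflows:
    #     - alias: first
    #       config: first.yaml
    #       sources: first.tar.gz
    #       substitutions:
    #         _VAR0: val0
    #
    # Only the top-level 'targets' list is consumed directly here; each entry is
    # handed to git_patrol.target_loop() below.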

    # Connect to the persistent state database.
    loop = asyncio.get_event_loop()
    db_pool = None
    for i in range(DB_CONNECT_ATTEMPTS):
        try:
            db_pool = loop.run_until_complete(
                asyncpg.create_pool(host=args.db_host,
                                    port=args.db_port,
                                    user=args.db_user,
                                    password=args.db_password,
                                    database=args.db_name))
            if db_pool:
                break
        except asyncpg.exceptions.InvalidPasswordError as e:
            logging.error('Bad database login: %s', e)
            break
        except asyncpg.exceptions.InvalidCatalogNameError as e:
            logging.error('Unknown database: %s', e)
            break
        except OSError as e:
            logging.warning('OSError while connecting: %s', e)

        # Retry non-fatal errors after a brief timeout.
        if i < (DB_CONNECT_ATTEMPTS - 1):
            logging.warning('Connect error. Retry in %d seconds...',
                            DB_CONNECT_WAIT_SECS)
            time.sleep(DB_CONNECT_WAIT_SECS)

    if not db_pool:
        return
    db = git_patrol_db.GitPatrolDb(db_pool)

    # Create a polling loop coroutine for each target repository. Provide an
    # initial time offset for each coroutine so they don't all hammer the remote
    # server(s) at once.
    target_loops = [
        git_patrol.target_loop(commands=commands,
                               loop=loop,
                               db=db,
                               config_path=args.config_path,
                               target_config=target_config,
                               offset=idx * args.poll_interval /
                               len(git_patrol_targets),
                               interval=args.poll_interval)
        for idx, target_config in enumerate(git_patrol_targets)
    ]

    # Use asyncio.gather() to submit all coroutines to the event loop as
    # recommended by @gvanrossum in the GitHub issue comments at
    # https://github.com/python/asyncio/issues/477#issuecomment-269038238
    try:
        loop.run_until_complete(asyncio.gather(*target_loops))
    except KeyboardInterrupt:
        logging.warning('Received interrupt: shutting down')
    finally:
        loop.close()
Example #5
    def testRunOneWorkflowSuccess(self):
        cloud_build_uuid = '7d1bb5a7-545f-4c30-b640-f5461036e2e7'

        cloud_build_json = [
            ('{ "createTime": "2018-11-01T20:49:31.802340417Z", '
             '"id": "7d1bb5a7-545f-4c30-b640-f5461036e2e7", '
             '"startTime": "2018-11-01T20:50:24.132599935Z", '
             '"status": "QUEUED" }').encode(),
            ('{ "createTime": "2018-11-01T20:49:31.802340417Z", '
             '"finishTime": "2018-11-01T22:44:36.303015Z", '
             '"id": "7d1bb5a7-545f-4c30-b640-f5461036e2e7", '
             '"startTime": "2018-11-01T20:50:24.132599935Z", '
             '"status": "SUCCESS" }').encode()
        ]

        # Queue up three different stdout strings for the gcloud mock to return,
        # one for each of the different commands we expect the client to call.
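        # The 'count' keyword is assumed to be the zero-based call index passed
        # in by the _MakeFakeCommand helper; it selects which canned 'describe'
        # JSON payload to return.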
        def gcloud_builds_stdout(*args, count):
            if args[1] == 'submit':
                return ('7d1bb5a7-545f-4c30-b640-f5461036e2e7 '
                        '2018-11-01T20:49:31+00:00 '
                        '1H54M12S '
                        '- '
                        '- '
                        'QUEUED').encode()
            if args[1] == 'log':
                return ''.encode()
            if args[1] == 'describe':
                return cloud_build_json[count]
            raise ValueError('Unexpected gcloud command: {}'.format(args[1]))

        commands = git_patrol.GitPatrolCommands()
        commands.gcloud = unittest.mock.MagicMock()
        commands.gcloud.side_effect = _MakeFakeCommand(
            stdout_fn=gcloud_builds_stdout)

        # The "record_cloud_build()" method returns the journal_id of the created
        # entry. This must be the value of parent_id for the next entry.
        journal_ids = [1, 2]
        mock_record_cloud_build = AsyncioMock(side_effect=journal_ids)
        mock_db = MockGitPatrolDb(record_cloud_build=mock_record_cloud_build)

        target_config = yaml.safe_load("""
        alias: upstream
        workflows:
        - alias: first
          config: first.yaml
          sources: first.tar.gz
          substitutions:
            _VAR0: val0
            _VAR1: val1
        """)
        workflow = target_config['workflows'][0]
        substitutions = workflow['substitutions']
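        # Join the substitutions into the comma-separated KEY=value form that
        # 'gcloud builds submit --substitutions' expects.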
        substitution_list = (','.join('{!s}={!s}'.format(k, v)
                                      for (k, v) in substitutions.items()))

        config_path = '/some/path'
        git_poll_uuid = uuid.uuid4()
        git_ref = ('refs/tags/r0002', 'deadbeef')

        workflow_success = asyncio.get_event_loop().run_until_complete(
            git_patrol.run_workflow_body(commands, mock_db, config_path,
                                         target_config, git_poll_uuid,
                                         git_ref))
        self.assertTrue(workflow_success)

        commands.gcloud.assert_any_call(
            'builds', 'submit', '--async',
            '--config={}'.format(os.path.join(config_path,
                                              workflow['config'])),
            '--substitutions=TAG_NAME={},{}'.format(
                git_ref[0].replace('refs/tags/', ''), substitution_list),
            os.path.join(config_path, workflow['sources']))

        commands.gcloud.assert_any_call('builds', 'log', '--stream',
                                        '--no-user-output-enabled',
                                        cloud_build_uuid)

        commands.gcloud.assert_any_call('builds', 'describe', '--format=json',
                                        cloud_build_uuid)

        # We know the method will be called with only positional arguments so we
        # can unpack call_args_list to discard the unused kwargs.
        record_cloud_build_args = [
            args
            for (args, _) in mock_record_cloud_build.inner_mock.call_args_list
        ]

        # There should be two calls to "record_cloud_build()".
        self.assertEqual(len(record_cloud_build_args), 2)

        # The first call should have parent_id set to "0", indicating this is the
        # first entry. The second call should have parent_id set to "1", indicating
        # this entry has a parent.
        self.assertEqual(record_cloud_build_args[0][0], 0)
        self.assertEqual(record_cloud_build_args[1][0], 1)

        # The recorded Cloud Build JSON status should reflect what we passed via the
        # fake gcloud commands.
        self.assertEqual(
            record_cloud_build_args[0][5].items(),
            json.loads(cloud_build_json[0].decode('utf-8', 'ignore')).items())
        self.assertEqual(
            record_cloud_build_args[1][5].items(),
            json.loads(cloud_build_json[1].decode('utf-8', 'ignore')).items())