Beispiel #1
0
def main(argv):
    """Interactively apply schema migrations to, or wipe, a CIDB database."""
    options = GetParser().parse_args(argv)

    logging.getLogger('sqlalchemy.engine').setLevel(logging.INFO)

    # Refuse to run from a dirty or purely-local checkout, so that only
    # committed migrations that exist on a remote branch ever get applied.
    cwd = os.getcwd()
    if git.RunGit(cwd, ['status', '-s']).output:
        cros_build_lib.Die('You appear to have uncommitted files. Aborting!')

    if not git.RunGit(cwd, ['branch', '-r', '--contains']).output:
        cros_build_lib.Die(
            'You appear to be on a local branch of chromite. Aborting!')

    # Build the scary confirmation prompt for the requested command.
    if options.command == MIGRATE:
        positive_confirmation = 'please modify my database'
        warn = (
            'This option will apply schema changes to your existing database. '
            'You should not run this against the production database unless '
            'your changes are thoroughly tested, and those tests included '
            'in cidb_integration_test.py (including tests that old data is '
            'sanely migrated forward). Database corruption could otherwise '
            'result. Are you sure you want to proceed? If so, type "%s" '
            'now.\n') % positive_confirmation
    elif options.command == WIPE:
        positive_confirmation = 'please delete my data'
        warn = (
            'This operation will wipe (i.e. DELETE!) the entire contents of '
            'the database pointed at by %s. Are you sure you want to proceed? '
            'If so, type "%s" now.\n') % (os.path.join(
                options.cred_dir, 'host.txt'), positive_confirmation)
    else:
        cros_build_lib.Die('No command or unsupported command. Exiting.')

    # Require the user to type the exact confirmation phrase back.
    print(warn)
    if cros_build_lib.GetInput(
            '(%s)?: ' % positive_confirmation) != positive_confirmation:
        cros_build_lib.Die('You changed your mind. Aborting.')

    if options.command == MIGRATE:
        print('OK, applying migrations...')
        db = cidb.CIDBConnection(options.cred_dir)
        db.ApplySchemaMigrations(maxVersion=options.migrate_version)
    elif options.command == WIPE:
        print('OK, wiping database...')
        db = cidb.CIDBConnection(options.cred_dir)
        db.DropDatabase()
        print('Done.')
  def LocalCIDBConnection(self, cidb_user):
    """Build a CIDBConnection pointed at the local mysqld instance.

    Args:
      cidb_user: The mysql user to connect as.

    Returns:
      The created CIDBConnection object.
    """
    # Start from a clean credentials directory under the test tempdir.
    creds_dir = os.path.join(self.tempdir, 'local_cidb_creds')
    osutils.RmDir(creds_dir, ignore_missing=True)
    osutils.SafeMakedirs(creds_dir)

    # Write out the connection parameters the CIDBConnection expects.
    for filename, content in (('host.txt', self.mysqld_host),
                              ('port.txt', str(self.mysqld_port)),
                              ('user.txt', cidb_user)):
      osutils.WriteFile(os.path.join(creds_dir, filename), content)

    # Copy a password file only for users that have stored credentials.
    if cidb_user in self.CIDB_CREDS_DIR:
      password_path = os.path.join(self.CIDB_CREDS_DIR[cidb_user],
                                   'password.txt')
      shutil.copy(password_path, creds_dir)

    return cidb.CIDBConnection(
        creds_dir, query_retry_args=cidb.SqlConnectionRetryArgs(4, 1, 1.1))
Beispiel #3
0
    def Run(self):
        """Run cros uprevchrome.

        Raises:
          Exception if the PFQ build_id is not valid.
          Exception if UprevChrome raises exceptions.
        """
        self.options.Freeze()

        # Delay import so sqlalchemy isn't pulled in until we need it.
        from chromite.lib import cidb

        cidb_creds = self.options.cred_dir
        if cidb_creds is None:
            try:
                cidb_creds = cros_cidbcreds.CheckAndGetCIDBCreds()
            # Narrowed from a bare `except:` — the original re-raised, but
            # also logged a misleading "failed to download" message for
            # KeyboardInterrupt/SystemExit; let those propagate untouched.
            except Exception:
                logging.error('Failed to download CIDB creds from gs.\n'
                              'Can try obtaining your credentials at '
                              'go/cros-cidb-admin and manually passing it in '
                              'with --cred-dir.')
                raise

        db = cidb.CIDBConnection(cidb_creds)

        build_number = self.ValidatePFQBuild(self.options.pfq_build, db)

        # Work dir is deleted on exit only when --wipe was requested;
        # otherwise it is left behind for inspection.
        with osutils.TempDir(prefix='uprevchrome_',
                             delete=self.options.wipe) as work_dir:
            self.UprevChrome(work_dir, self.options.pfq_build, build_number)
            logging.info('Used working directory: %s', work_dir)
Beispiel #4
0
def main(argv):
    """Collect build entries from CIDB/Milo and write them out.

    Args:
      argv: Command line arguments.
    """
    # Parse command line arguments.
    parser = GetParser()
    options = parser.parse_args(argv)

    # Set up clients.
    credentials = options.cred_dir or cros_cidbcreds.CheckAndGetCIDBCreds()
    db = cidb.CIDBConnection(credentials)
    topology.FetchTopologyFromCIDB(db)
    milo_client = milo.MiloClient(options.service_acct_json,
                                  host=options.milo_host)

    builds = []

    # Add explicitly requested builds.
    if options.build_ids:
        for build_id in options.build_ids:
            builds.append(
                MakeBuildEntry(db,
                               milo_client,
                               build_id,
                               no_suites=options.no_suites))

    # Search for builds by build config: each master plus all its slaves.
    if options.build_config:
        masters = db.GetBuildHistory(options.build_config,
                                     options.num_builds,
                                     final=True)
        for master in masters:
            builds.append(
                MakeBuildEntry(db,
                               milo_client,
                               master['id'],
                               master,
                               no_suites=options.no_suites))
            statuses = db.GetSlaveStatuses(master['id'])
            for slave in statuses:
                builds.append(
                    MakeBuildEntry(db,
                                   milo_client,
                                   slave['id'],
                                   slave,
                                   no_suites=options.no_suites))

    # Unless told otherwise, drop builds that produced no suites.
    if not options.allow_empty and not options.no_suites:
        builds = [b for b in builds if len(b.get('suite_ids', []))]

    # Output results.
    # BUG FIX: the original `with open(...) if options.output else
    # sys.stdout as f:` invoked __exit__ on sys.stdout, closing the
    # process's stdout when no --output was given. Close only what we open.
    f = open(options.output, 'w') if options.output else sys.stdout
    try:
        if options.json:
            output = {
                'builds': builds,
            }
            json.dump(output, f)
        else:
            for b in builds:
                f.write(StringifyBuildEntry(b))
                f.write('\n')
    finally:
        if f is not sys.stdout:
            f.close()
def main(argv):
  """Gather and summarize CL stats for a CQ/PFQ master over a date range."""
  options = GetParser().parse_args(argv)

  if not _CheckOptions(options):
    sys.exit(1)

  # Fetch credentials if the caller didn't supply a directory.
  credentials = options.cred_dir or cros_cidbcreds.CheckAndGetCIDBCreds()

  db = cidb.CIDBConnection(credentials)

  # The end date defaults to today.
  end_date = options.end_date or datetime.datetime.now().date()

  # Determine the start date to use, which is required.
  if options.start_date:
    start_date = options.start_date
  else:
    assert options.past_month or options.past_week or options.past_day
    # Database search results include both the starting and ending days,
    # so subtract one less than the period length: e.g. the week ending
    # 2014-04-21 starts on 2014-04-15 (end_date - 6 days).
    if options.past_month:
      start_date = end_date - datetime.timedelta(days=29)
    elif options.past_week:
      start_date = end_date - datetime.timedelta(days=6)
    else:
      start_date = end_date

  master_config = (constants.CQ_MASTER if options.build_type == 'cq'
                   else constants.PFQ_MASTER)

  cl_stats_engine = CLStatsEngine(db)
  cl_stats_engine.Gather(start_date, end_date, master_config)
  summary = cl_stats_engine.Summarize(options.build_type,
                                      options.bad_patch_candidates)

  if options.report_file:
    with open(options.report_file, "w") as f:
      logging.info("Writing report to %s", options.report_file)
      GenerateReport(f, summary)
Beispiel #6
0
    def Run(self):
        """Run cros buildresult."""
        self.options.Freeze()

        commandline.RunInsideChroot(self)

        # Resolve credentials, downloading them if none were supplied.
        credentials = (self.options.cred_dir or
                       cros_cidbcreds.CheckAndGetCIDBCreds(
                           force_update=self.options.force_update))

        # Delay import so sqlalchemy isn't pulled in until we need it.
        from chromite.lib import cidb

        db = cidb.CIDBConnection(credentials)

        build_statuses = FetchBuildStatuses(db, self.options)

        # Drop builds that don't exist in CIDB or aren't finished yet.
        if build_statuses:
            build_statuses = [b for b in build_statuses
                              if IsBuildStatusFinished(b)]

        if not build_statuses:
            # Distinct exit code so automated scripts know to retry later.
            logging.error('No build found. Perhaps not started?')
            return 2

        # Fixup all of the builds we have.
        build_statuses = [FixUpBuildStatus(db, b) for b in build_statuses]

        # Produce our final result in the requested format.
        formatter = ReportJson if self.options.report == 'json' else Report
        print(formatter(build_statuses))
def main(argv):
    """Generate a build-alerts summary and send it to Sheriff-o-Matic."""
    options = GetParser().parse_args(argv)

    # Determine which hosts to connect to.
    db = cidb.CIDBConnection(options.cred_dir)
    topology.FetchTopologyFromCIDB(db)

    if options.json_file:
        # Use the specified alerts.
        logging.info('Using JSON file %s', options.json_file)
        with open(options.json_file) as f:
            summary_json = f.read()
            print(summary_json)
    else:
        # Parse "master,builder,..." build specs; fall back to the tree's
        # default build set when none were given.
        builds = [tuple(spec.split(',')) for spec in options.builds]
        if not builds:
            builds = constants.SOM_BUILDS[options.som_tree]

        # Generate the set of alerts to send.
        logdog_client = logdog.LogdogClient(options.service_acct_json,
                                            host=options.logdog_host)
        milo_client = milo.MiloClient(options.service_acct_json,
                                      host=options.milo_host)
        summary_json = GenerateAlertsSummary(
            db,
            builds=builds,
            logdog_client=logdog_client,
            milo_client=milo_client,
            allow_experimental=options.allow_experimental)

        # Optionally persist the generated summary.
        if options.output_json:
            with open(options.output_json, 'w') as f:
                logging.info('Writing JSON file %s', options.output_json)
                f.write(summary_json)

    # Authenticate and send the alerts.
    som_client = som.SheriffOMaticClient(options.service_acct_json,
                                         insecure=options.som_insecure,
                                         host=options.som_host)
    som_client.SendAlerts(summary_json, tree=options.som_tree)
Beispiel #8
0
    def Run(self):
        """Run cros uprevchrome.

        Raises:
          Exception if the PFQ build_id is not valid.
          Exception if UprevChrome raises exceptions.
        """
        self.options.Freeze()

        # Delay import so sqlalchemy isn't pulled in until we need it.
        from chromite.lib import cidb

        cidb_creds = self.options.cred_dir
        if cidb_creds is None:
            try:
                cidb_creds = cros_cidbcreds.CheckAndGetCIDBCreds()
            # Narrowed from a bare `except:` — the original re-raised, but
            # also logged a misleading "failed to download" message for
            # KeyboardInterrupt/SystemExit; let those propagate untouched.
            except Exception:
                logging.error('Failed to download CIDB creds from gs.\n'
                              'Can try obtaining your credentials at '
                              'go/cros-cidb-admin and manually passing it in '
                              'with --cred-dir.')
                raise

        db = cidb.CIDBConnection(cidb_creds)

        build_number = self.ValidatePFQBuild(self.options.pfq_build, db)

        # Put the work dir under the chroot's tmp when running outside the
        # chroot — presumably so the path stays valid inside it; confirm.
        chroot_tmp = os.path.join(constants.SOURCE_ROOT,
                                  constants.DEFAULT_CHROOT_DIR, 'tmp')
        tmp_override = None if cros_build_lib.IsInsideChroot() else chroot_tmp
        work_dir = tempfile.mkdtemp(prefix='uprevchrome_', dir=tmp_override)

        try:
            self.UprevChrome(work_dir, self.options.pfq_build, build_number)
        finally:
            # Remove the work dir only when --wipe was requested; otherwise
            # leave it behind for inspection.
            if self.options.wipe:
                osutils.RmDir(work_dir)
                logging.info('Removed work_dir %s', work_dir)
            else:
                logging.info('Leaving working directory at %s', work_dir)
def main(argv):
    """Gather and summarize CL stats over the requested date range."""
    options = GetParser().parse_args(argv)

    if not _CheckOptions(options):
        sys.exit(1)

    db = cidb.CIDBConnection(options.cred_dir)

    # The end date defaults to today.
    end_date = options.end_date or datetime.datetime.now().date()

    # Determine the start date to use, which is required.
    if options.start_date:
        start_date = options.start_date
    else:
        assert options.past_month or options.past_week or options.past_day
        if options.past_month:
            lookback = datetime.timedelta(days=30)
        elif options.past_week:
            lookback = datetime.timedelta(days=7)
        else:
            lookback = datetime.timedelta(days=1)
        start_date = end_date - lookback

    master_config = (constants.CQ_MASTER if options.build_type == 'cq'
                     else constants.PFQ_MASTER)

    cl_stats_engine = CLStatsEngine(db)
    cl_stats_engine.Gather(start_date,
                           end_date,
                           master_config,
                           starting_build_number=options.starting_build,
                           ending_build_number=options.ending_build)
    cl_stats_engine.Summarize(options.build_type, options.bad_patch_candidates)
    def InitializeClients(self):
        """Ensure CIDB and Buildbucket clients exist for this process.

        Returns:
          A boolean indicating the client statuses.
        """
        # Clients must be rebuilt after a fork, so compare the stored pid.
        pid_changed = self.process_id != os.getpid()

        if self._IsCIDBClientMissing() or pid_changed:
            self.process_id = os.getpid()
            if self.cidb_creds:
                self.cidb_conn = cidb.CIDBConnection(
                    self.cidb_creds,
                    for_service=(self.for_service
                                 if self.for_service else False))
            elif not cidb.CIDBConnectionFactory.IsCIDBSetup():
                self.cidb_conn = None
            else:
                self.cidb_conn = (
                    cidb.CIDBConnectionFactory.GetCIDBConnectionForBuilder())

        if self._IsBuildbucketClientMissing() or pid_changed:
            self.bb_client = buildbucket_v2.BuildbucketV2()

        return not (self._IsCIDBClientMissing()
                    or self._IsBuildbucketClientMissing())
 def testValidatePFQBuild(self):
     """Test ValidatePFQBuild."""
     # Exercise ValidatePFQBuild through the mocked command instance; the
     # mock setup presumably stubs the CIDB lookup so that build id 100
     # resolves to build number '100' — verify against SetupCommandMock.
     self.SetupCommandMock(self.cmd_args)
     db = cidb.CIDBConnection('cred_dir')
     self.assertEqual('100', self.cmd_mock.inst.ValidatePFQBuild(100, db))
Beispiel #12
0
    def Run(self):
        """Run cros build: report timing/success statistics for CI builds."""
        self.options.Freeze()

        commandline.RunInsideChroot(self)

        credentials = self.options.cred_dir
        if not credentials:
            credentials = cros_cidbcreds.CheckAndGetCIDBCreds()

        # Delay import so sqlalchemy isn't pulled in until we need it.
        from chromite.lib import cidb

        db = cidb.CIDBConnection(credentials)

        # Timeframe for discovering builds, if options.build_id not used.
        start_date, end_date = self.OptionsToStartEndDates(self.options)

        # Trending is sufficiently different to be handled on its own.
        if not self.options.trending and self.options.report != 'success':
            assert not self.options.csv, (
                '--csv can only be used with --trending or --report success.')

        # Data about a single build (optional).
        focus_build = None

        # BUG FIX: builds_statuses/description were unbound (NameError at
        # the "No Builds Found" check) when none of build_id, build_config
        # or build_type was supplied; start empty so that case is handled
        # by the existing error path.
        builds_statuses = []
        description = 'No build selection options given'

        if self.options.build_id:
            logging.info('Gathering data for %s', self.options.build_id)
            focus_status = build_time_stats.BuildIdToBuildStatus(
                db, self.options.build_id)
            focus_build = build_time_stats.GetBuildTimings(focus_status)

            # Compare the focus build against peers with the same config.
            build_config = focus_status['build_config']
            builds_statuses = build_time_stats.BuildConfigToStatuses(
                db, build_config, start_date, end_date)
            description = 'Focus %d - %s' % (self.options.build_id,
                                             build_config)

        elif self.options.build_config:
            builds_statuses = build_time_stats.BuildConfigToStatuses(
                db, self.options.build_config, start_date, end_date)
            description = 'Config %s' % self.options.build_config

        elif self.options.build_type:
            builds_statuses = build_time_stats.MasterConfigToStatuses(
                db, BUILD_TYPE_MAP[self.options.build_type], start_date,
                end_date)
            description = 'Type %s' % self.options.build_type

        if not builds_statuses:
            logging.critical('No Builds Found For: %s', description)
            return 1

        if self.options.report == 'success':
            # Calculate per-build success rates and per-stage success rates.
            build_success_rates = build_time_stats.GetBuildSuccessRates(
                builds_statuses)
            stage_success_rates = (
                build_time_stats.GetStageSuccessRates(builds_statuses)
                if self.options.stages else {})
            if self.options.csv:
                build_time_stats.SuccessReportCsv(sys.stdout,
                                                  build_success_rates,
                                                  stage_success_rates)
            else:
                build_time_stats.SuccessReport(sys.stdout, description,
                                               build_success_rates,
                                               stage_success_rates)
            return 0

        # Compute per-build timing.
        builds_timings = [
            build_time_stats.GetBuildTimings(status)
            for status in builds_statuses
        ]

        if not builds_timings:
            logging.critical('No timing results For: %s', description)
            return 1

        # Report results.
        if self.options.report == 'standard':
            build_time_stats.Report(sys.stdout, description, focus_build,
                                    builds_timings, self.options.stages,
                                    self.options.trending, self.options.csv)
        elif self.options.report == 'stability':
            build_time_stats.StabilityReport(sys.stdout, description,
                                             builds_timings)