Usage examples for utils.get_application_id()

Example #1
def cleanup_pubsub_topics(project_names):
  """Delete old pubsub topics and subscriptions."""
  client = pubsub.PubSubClient()
  application_id = utils.get_application_id()

  expected_topics = set()
  for platform in PUBSUB_PLATFORMS:
    expected_topics.update(
        [untrusted.queue_name(project, platform) for project in project_names])

  pubsub_config = local_config.Config('pubsub.queues')
  unmanaged_queues = [queue['name'] for queue in pubsub_config.get('resources')]

  for topic in client.list_topics(pubsub.project_name(application_id)):
    _, name = pubsub.parse_name(topic)

    if (not name.startswith(tasks.JOBS_PREFIX) and
        not name.startswith(tasks.HIGH_END_JOBS_PREFIX)):
      # A topic created by another service; ignore it.
      continue

    if name in unmanaged_queues:
      continue

    if name in expected_topics:
      continue

    for subscription in client.list_topic_subscriptions(topic):
      client.delete_subscription(subscription)

    client.delete_topic(topic)
Example #2
def init():
    """Explicitly (re-)initialize _client(). This is useful for testing."""
    # We discard the project from the service account credentials, as it may be
    # different from the Datastore project we wish to connect to.
    creds = credentials.get_default()[0]
    _local.client = datastore.Client(project=utils.get_application_id(),
                                     credentials=creds)
Example #3
def get_regular_task(queue=None):
    """Get a regular task."""
    if not queue:
        queue = regular_queue()

    pubsub_client = pubsub.PubSubClient()
    application_id = utils.get_application_id()
    while True:
        messages = pubsub_client.pull_from_subscription(
            pubsub.subscription_name(application_id, queue), max_messages=1)

        if not messages:
            return None

        try:
            task = PubSubTask(messages[0])
        except KeyError:
            logs.log_error('Received an invalid task, discarding...')
            messages[0].ack()
            continue

        # Check that this task should be run now (past the ETA). Otherwise we defer
        # its execution.
        if not task.defer():
            return task
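
A note on the loop above: task.defer() presumably hands a not-yet-due task back to the queue and reports whether it did so, so the while True loop only returns a task whose ETA has passed and otherwise keeps pulling until the subscription is empty.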
Example #4
def add_task(command, argument, job_type, queue=None, wait_time=None):
    """Add a new task to the job queue."""
    # Old testcases may pass in queue=None explicitly,
    # so we must check this here.
    if not queue:
        queue = default_queue()

    if wait_time is None:
        wait_time = random.randint(1, TASK_CREATION_WAIT_INTERVAL)

    if job_type != 'none':
        job = data_types.Job.query(data_types.Job.name == job_type).get()
        if not job:
            raise Error(f'Job {job_type} not found.')

        if job.is_external():
            external_tasks.add_external_task(command, argument, job)
            return

    # Add the task.
    eta = utils.utcnow() + datetime.timedelta(seconds=wait_time)
    task = Task(command, argument, job_type, eta=eta)
    pubsub_client = pubsub.PubSubClient()
    pubsub_client.publish(pubsub.topic_name(utils.get_application_id(), queue),
                          [task.to_pubsub_message()])
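
A hypothetical call site for add_task; the command, argument, and job name below are placeholders, not values from the source:

# Schedule with the default randomized delay.
add_task('minimize', '12345', 'libfuzzer_asan_job')

# Schedule with an explicit delay in seconds.
add_task('regression', '12345', 'libfuzzer_asan_job', wait_time=60)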
Example #5
def set_service_account_roles(service_account):
  """Set roles for service account."""
  project_id = utils.get_application_id()
  resource_manager = _create_client('cloudresourcemanager')

  request = resource_manager.projects().getIamPolicy(
      resource=project_id, body={})
  policy = request.execute()

  # Set logging and metrics permissions.
  policy_changed = False
  policy_changed |= _add_service_account_role(policy, 'roles/logging.logWriter',
                                              service_account['email'])
  policy_changed |= _add_service_account_role(
      policy, 'roles/prodx.metricPublisher', service_account['email'])
  policy_changed |= _add_service_account_role(
      policy, 'roles/monitoring.metricWriter', service_account['email'])

  if not policy_changed:
    return

  request = resource_manager.projects().setIamPolicy(
      resource=project_id, body={
          'policy': policy,
      })
  request.execute()
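
_add_service_account_role is used above but not shown. A plausible sketch, assuming it appends the member to the matching role binding and reports whether the policy changed (an illustration, not the actual implementation):

def _add_service_account_role(policy, role, email):
  """Add a service account to a role binding; return True if policy changed."""
  member = 'serviceAccount:' + email
  bindings = policy.setdefault('bindings', [])
  for binding in bindings:
    if binding['role'] == role:
      if member in binding['members']:
        return False

      binding['members'].append(member)
      return True

  # No existing binding for this role; create one.
  bindings.append({'role': role, 'members': [member]})
  return True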
Example #6
def get_data_bundle_bucket_name(data_bundle_name):
  """Return data bundle bucket name on GCS."""
  domain = local_config.ProjectConfig().get('bucket_domain_suffix')
  if not domain:
    domain = '%s.appspot.com' % utils.get_application_id()

  return '%s-corpus.%s' % (data_bundle_name, domain)
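
For a bundle named 'foo' in a project 'my-project' with no bucket_domain_suffix configured (hypothetical values), this returns 'foo-corpus.my-project.appspot.com'.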
Example #7
def auth_domain():
  """Get the auth domain."""
  domain = local_config.ProjectConfig().get('firebase.auth_domain')
  if domain:
    return domain

  return utils.get_application_id() + '.firebaseapp.com'
Example #8
def _create_storage_client_new():
  """Create a storage client."""
  creds, project = credentials.get_default()
  if not project:
    project = utils.get_application_id()

  return gcs.Client(project=project, credentials=creds)
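
credentials.get_default() appears to mirror google.auth.default(), which returns a (credentials, project) tuple; the detected project can be None (for example, with plain user credentials), hence the fallback to the application ID.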
Example #9
    def _table_name(self):
        """Return the table name for the query."""
        app_id = utils.get_application_id()

        dataset = dataset_name(self.fuzzer_or_engine_name)

        return '`%s`.%s.%s' % (app_id, dataset, self.base_table)
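
With an application ID 'my-project', dataset 'libfuzzer_stats', and base table 'TestcaseRun' (hypothetical values), this yields `my-project`.libfuzzer_stats.TestcaseRun, the fully qualified, backquoted form that BigQuery standard SQL expects.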
Example #10
    def run(self):
        """Run the flusher thread."""
        create_time_series = _retry_wrap(
            _monitoring_v3_client.create_time_series)
        project_path = _monitoring_v3_client.project_path(
            utils.get_application_id())

        while True:
            try:
                if self.stop_event.wait(FLUSH_INTERVAL_SECONDS):
                    return

                time_series = []
                end_time = time.time()
                for (metric, labels, start_time,
                     value) in _metrics_store.iter_values():
                    if (metric.metric_kind ==
                            monitoring_v3.enums.MetricDescriptor.MetricKind.GAUGE):
                        start_time = end_time

                    series = monitoring_v3.types.TimeSeries()
                    metric.monitoring_v3_time_series(series, labels,
                                                     start_time, end_time,
                                                     value)
                    time_series.append(series)

                    if len(time_series) == MAX_TIME_SERIES_PER_CALL:
                        create_time_series(project_path, time_series)
                        time_series = []

                if time_series:
                    create_time_series(project_path, time_series)
            except Exception:
                logs.log_error('Failed to flush metrics.')
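
Two details above are driven by the Cloud Monitoring API: a GAUGE point's interval start must equal its end, which is why start_time is overwritten with end_time, and writes are batched because the API caps the number of time series per create_time_series call (hence MAX_TIME_SERIES_PER_CALL, presumably set to the documented limit of 200).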
Example #11
  def get(self):
    """Handle a get request."""
    self.render(
        'login.html', {
            'apiKey': local_config.ProjectConfig().get('firebase.api_key'),
            'projectId': utils.get_application_id(),
            'dest': self.request.get('dest'),
        })
Example #12
    def _load_data(self, bigquery, fuzzer):
        """Load yesterday's stats into BigQuery."""
        project_id = utils.get_application_id()

        yesterday = (self._utc_now().date() - datetime.timedelta(days=1))
        date_string = yesterday.strftime('%Y%m%d')
        timestamp = utils.utc_date_to_timestamp(yesterday)

        dataset_id = fuzzer_stats.dataset_name(fuzzer)
        if not self._create_dataset_if_needed(bigquery, dataset_id):
            return

        for kind in STATS_KINDS:
            kind_name = kind.__name__
            table_id = kind_name
            if not self._create_table_if_needed(bigquery, dataset_id,
                                                table_id):
                continue

            if kind == fuzzer_stats.TestcaseRun:
                schema = fuzzer_stats_schema.get(fuzzer)
            else:
                schema = kind.SCHEMA

            gcs_path = fuzzer_stats.get_gcs_stats_path(kind_name, fuzzer,
                                                       timestamp)
            load = {
                'destinationTable': {
                    'projectId': project_id,
                    'tableId': table_id + '$' + date_string,
                    'datasetId': dataset_id,
                },
                'schemaUpdateOptions': [
                    'ALLOW_FIELD_ADDITION',
                ],
                'sourceFormat': 'NEWLINE_DELIMITED_JSON',
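                # Note: gcs_path is expected to begin with '/', so 'gs:/' +
                # gcs_path below forms a valid 'gs://' URI rather than a typo.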
                'sourceUris': ['gs:/' + gcs_path + '*.json'],
                'writeDisposition': 'WRITE_TRUNCATE',
            }
            if schema is not None:
                load['schema'] = schema

            job_body = {
                'configuration': {
                    'load': load,
                },
            }

            logs.log("Uploading job to BigQuery.", job_body=job_body)
            request = bigquery.jobs().insert(projectId=project_id,
                                             body=job_body)
            response = request.execute()

            # We cannot really check the response here, as the query might still
            # be running, but having a BigQuery jobId in the log would make our
            # life simpler if we ever have to manually check the status of the
            # query.
            # See https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs/query.
            logs.log('Response from BigQuery: %s' % response)
Example #13
    def get(self):
        """Handle a get request."""
        resource_manager = googleapiclient.discovery.build(
            'cloudresourcemanager', 'v1')
        project_id = utils.get_application_id()
        policy = resource_manager.projects().getIamPolicy(resource=project_id,
                                                          body={}).execute()

        admins = admins_from_iam_policy(policy)
        update_admins(admins)
Example #14
def _client():
    """Get or initialize the NDB client."""
    global _ndb_client
    global _initial_pid

    if not _ndb_client:
        with _ndb_client_lock:
            if not _ndb_client:
                _ndb_client = ndb.Client(project=utils.get_application_id())
                _initial_pid = os.getpid()

    return _ndb_client
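
For context, a minimal sketch of the module-level state this double-checked locking pattern assumes (names match the snippet; ndb is assumed to be google.cloud.ndb, and the fork-safety guard built on _initial_pid is an assumption about how the surrounding module uses it):

import os
import threading

_ndb_client = None
_ndb_client_lock = threading.Lock()
_initial_pid = None


def _check_fork():
    # Hypothetical guard: gRPC channels are not fork-safe, so a client created
    # before a fork() should not be reused from the child process.
    if _ndb_client and os.getpid() != _initial_pid:
        raise RuntimeError('NDB client was created in a different process.')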
Example #15
def _client():
    """Get or initialize the NDB client."""
    global _ndb_client

    if not _ndb_client:
        with _ndb_client_lock:
            if not _ndb_client:
                _ndb_client = ndb.Client(project=utils.get_application_id())
                # TODO(ochang): Remove hack once migration to Python 3 is done.
                _ndb_client.host = utils.newstr_to_native_str(_ndb_client.host)

    return _ndb_client
Example #16
    def _create_dataset_if_needed(self, bigquery, dataset_id):
        """Create a new dataset if necessary."""
        project_id = utils.get_application_id()
        dataset_body = {
            'datasetReference': {
                'datasetId': dataset_id,
                'projectId': project_id,
            },
        }
        dataset_insert = bigquery.datasets().insert(projectId=project_id,
                                                    body=dataset_body)

        return self._execute_insert_request(dataset_insert)
Example #17
def create_pubsub_topics(project):
  """Create pubsub topics for tasks."""
  client = pubsub.PubSubClient()
  application_id = utils.get_application_id()

  for platform in PUBSUB_PLATFORMS:
    name = untrusted.queue_name(project, platform)

    topic_name = pubsub.topic_name(application_id, name)
    if client.get_topic(topic_name) is None:
      client.create_topic(topic_name)

    subscription_name = pubsub.subscription_name(application_id, name)
    if client.get_subscription(subscription_name) is None:
      client.create_subscription(subscription_name, topic_name)
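
pubsub.topic_name and pubsub.subscription_name presumably build the full resource paths, 'projects/<project>/topics/<name>' and 'projects/<project>/subscriptions/<name>', the forms the Pub/Sub API expects.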
Example #18
def _ndb_entity_to_cloud_entity(ndb_entity):
    """Convert ndb.Model to cloud entity to prepare for put()."""
    if ndb_entity is None:
        return None

    project_id = utils.get_application_id()
    unindexed_properties = _unindexed_properties(ndb_entity.__class__)

    ndb_entity._prepare_for_put()
    ndb_entity._pre_put_hook()

    if ndb_entity.key:
        # Existing key.
        cloud_entity = datastore.Entity(
            key=_ndb_key_to_cloud_key(ndb_entity.key),
            exclude_from_indexes=unindexed_properties)
    else:
        # Auto-generate key.
        base_key = datastore.Key(ndb_entity._get_kind(), project=project_id)
        generated_key = _retry_wrap(_client().allocate_ids)(base_key, 1)[0]
        cloud_entity = datastore.Entity(
            key=generated_key, exclude_from_indexes=unindexed_properties)

        ndb_entity.key = _cloud_key_to_ndb_key(generated_key)

    for key, value in ndb_entity.to_dict().iteritems():
        ndb_property = getattr(ndb_entity.__class__, key)
        if type(ndb_property) in UNSUPPORTED_PROPERTY_TYPES:
            raise NdbPatcherException('Unsupported property type: ' +
                                      ndb_property.__name__)

        if (isinstance(value, str)
                and type(ndb_property) is not ndb.BlobProperty):
            # All 'str' values are written as byte strings by Cloud Datastore,
            # but ndb entities can have 'str' values for StringProperty or
            # TextProperty, so check the type of the property.
            value = unicode(value)
        elif type(value) is datetime.date:
            value = datetime.datetime.combine(value,
                                              datetime.datetime.min.time())
        elif type(value) is datetime.time:
            value = datetime.datetime.combine(datetime.date(1970, 1, 1), value)
        elif isinstance(value, ndb.Key):
            value = _ndb_key_to_cloud_key(value)

        cloud_entity[key] = value

    return cloud_entity
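
Note that this snippet predates the Python 3 migration flagged in the TODOs elsewhere on this page: dict.iteritems() and the unicode built-in exist only on Python 2 (the Python 3 equivalents are items() and str).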
Example #19
def add_task(command, argument, job_type, queue=None, wait_time=None):
    """Add a new task to the job queue."""
    # Old testcases may pass in queue=None explicitly,
    # so we must check this here.
    if not queue:
        queue = default_queue()

    if wait_time is None:
        wait_time = random.randint(1, TASK_CREATION_WAIT_INTERVAL)

    # Add the task.
    eta = utils.utcnow() + datetime.timedelta(seconds=wait_time)
    task = Task(command, argument, job_type, eta=eta)
    pubsub_client = pubsub.PubSubClient()
    pubsub_client.publish(pubsub.topic_name(utils.get_application_id(), queue),
                          [task.to_pubsub_message()])
Example #20
  def _create_table_if_needed(self, bigquery, dataset_id, table_id):
    """Create a new table if needed."""
    project_id = utils.get_application_id()
    table_body = {
        'tableReference': {
            'datasetId': dataset_id,
            'projectId': project_id,
            'tableId': table_id,
        },
        'timePartitioning': {
            'type': 'DAY',
        },
    }

    table_insert = bigquery.tables().insert(
        projectId=project_id, datasetId=dataset_id, body=table_body)
    return self._execute_insert_request(table_insert)
Example #21
def _send_email_to_uploader(testcase_id, to_email, content):
    """Send email to uploader when all the testcase tasks are finished."""
    # Based on https://cloud.google.com/appengine/docs/standard/go/mail/.
    sender = 'noreply@{app_id}.appspotmail.com'.format(
        app_id=utils.get_application_id())

    subject = 'Your testcase upload %d analysis is complete.' % testcase_id
    body = ('%s\n\n'
            'If you suspect that the result above is incorrect, '
            'try re-doing that job on the testcase report page.'
            ) % content.strip()

    try:
        mail.send_mail(sender=sender, to=to_email, subject=subject, body=body)
    except Exception:
        logs.log_error(
            'Failed to send email that testcase %d is fully processed.' %
            testcase_id)
Example #22
  def create_bucket(self, name, object_lifecycle, cors):
    """Create a new bucket."""
    project_id = utils.get_application_id()
    request_body = {'name': name}
    if object_lifecycle:
      request_body['lifecycle'] = object_lifecycle

    if cors:
      request_body['cors'] = cors

    client = create_discovery_storage_client()
    try:
      client.buckets().insert(project=project_id, body=request_body).execute()
    except HttpError as e:
      logs.log_warn('Failed to create bucket %s: %s' % (name, e))
      raise

    return True
Example #23
def setup_metrics(non_dry_run):
    """Set up metrics."""
    client = monitoring_v3.MetricServiceClient()
    project_name = utils.get_application_id()
    project_path = client.project_path(project_name)

    for name in dir(monitoring_metrics):
        metric = getattr(monitoring_metrics, name)
        if not isinstance(metric, monitor.Metric):
            continue

        descriptor = monitoring_v3.types.MetricDescriptor()
        metric.monitoring_v3_metric_descriptor(descriptor)

        if non_dry_run:
            print('Creating metric', descriptor)
            client.create_metric_descriptor(project_path, descriptor)
        else:
            print('Skip creating metric', descriptor, '(dry-run mode)')
Example #24
    def setUp(self):
        helpers.patch_environ(self)
        helpers.patch(self, [
            'bot.tasks.minimize_task._run_libfuzzer_testcase',
            'bot.tasks.minimize_task._run_libfuzzer_tool',
        ])

        test_utils.setup_pubsub(utils.get_application_id())

        environment.set_value('APP_ARGS', '%TESTCASE% fuzz_target')
        environment.set_value('APP_DIR', '/libfuzzer')
        environment.set_value('APP_NAME', '')
        environment.set_value('APP_PATH', '')
        environment.set_value('BOT_TMPDIR', '/bot_tmpdir')
        environment.set_value('CRASH_STACKTRACES_DIR', '/crash_stacks')
        environment.set_value('FUZZER_DIR', '/fuzzer_dir')
        environment.set_value('INPUT_DIR', '/input_dir')
        environment.set_value('JOB_NAME', 'libfuzzer_asan_test')
        environment.set_value('USER_PROFILE_IN_MEMORY', True)
Example #25
def _client():
    """Get or initialize the NDB client."""
    global _ndb_client
    global _initial_pid

    if not _ndb_client:
        with _ndb_client_lock:
            if not _ndb_client:
                _ndb_client = ndb.Client(project=utils.get_application_id())
                _initial_pid = os.getpid()

                # TODO(ochang): Remove hack once migration to Python 3 is done.
                if sys.version_info.major == 2:
                    # NDB doesn't like newstrs. On Python 3, keeping this breaks
                    # because the bytes get propagated down to a DNS resolution on
                    # "b'datastore.googleapis.com'", which doesn't work.
                    _ndb_client.host = utils.newstr_to_native_str(
                        _ndb_client.host)

    return _ndb_client
Example #26
  def _update_schema_if_needed(self, bigquery, dataset_id, table_id, schema):
    """Update the table's schema if needed."""
    if not schema:
      return

    project_id = utils.get_application_id()
    table = bigquery.tables().get(
        datasetId=dataset_id, tableId=table_id, projectId=project_id).execute()

    if 'schema' in table and table['schema'] == schema:
      return

    body = {
        'schema': schema,
    }

    logs.log('Updating schema for %s:%s' % (dataset_id, table_id))
    bigquery.tables().patch(
        datasetId=dataset_id, tableId=table_id, projectId=project_id,
        body=body).execute()
Example #27
  def get(self):
    """Handle a cron job."""
    backup_bucket = local_config.Config(
        local_config.PROJECT_PATH).get('backup.bucket')
    if not backup_bucket:
      logs.log('No backup bucket is set, skipping.')
      return

    kinds = [
        kind for kind in ndb.Model._kind_map  # pylint: disable=protected-access
        if (not kind.startswith('_') and kind not in EXCLUDED_MODELS)
    ]

    app_id = utils.get_application_id()
    timestamp = datetime.datetime.utcnow().strftime('%Y-%m-%d-%H:%M:%S')
    output_url_prefix = (
        'gs://{backup_bucket}/datastore-backups/{timestamp}'.format(
            backup_bucket=backup_bucket, timestamp=timestamp))
    body = {
        'output_url_prefix': output_url_prefix,
        'entity_filter': {
            'kinds': kinds
        }
    }

    try:
      request = _datastore_client().projects().export(
          projectId=app_id, body=body)
      response = request.execute()

      message = 'Datastore export succeeded.'
      status_code = 200
      logs.log(message, response=response)
    except googleapiclient.errors.HttpError as e:
      message = 'Datastore export failed.'
      status_code = e.resp.status
      logs.log_error(message, error=str(e))

    self.response.headers['Content-Type'] = 'text/plain'
    self.response.out.write(message)
    self.response.set_status(status_code)
Example #28
def _validate_iap_jwt(iap_jwt):
    """Validate JWT assertion."""
    project_id = utils.get_application_id()
    expected_audience = '/projects/{}/apps/{}'.format(
        _project_number_from_id(project_id), project_id)

    try:
        key_id = jwt.get_unverified_header(iap_jwt).get('kid')
        if not key_id:
            raise AuthError('No key ID.')

        key = _get_iap_key(key_id)
        decoded_jwt = jwt.decode(iap_jwt,
                                 key,
                                 algorithms=['ES256'],
                                 issuer='https://cloud.google.com/iap',
                                 audience=expected_audience)
        return decoded_jwt['email']
    except (jwt.exceptions.InvalidTokenError,
            requests.exceptions.RequestException) as e:
        raise AuthError('JWT assertion decode error: ' + str(e))
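
A hypothetical caller, assuming a request object with a headers mapping; IAP delivers the signed assertion in the X-Goog-IAP-JWT-Assertion request header:

iap_jwt = request.headers.get('X-Goog-IAP-JWT-Assertion')
if iap_jwt:
    email = _validate_iap_jwt(iap_jwt)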
Example #29
def _initialize_monitored_resource():
  """Monitored resources."""
  global _monitored_resource
  _monitored_resource = monitoring_v3.types.MonitoredResource()

  # TODO(ochang): Use generic_node when that is available.
  _monitored_resource.type = 'gce_instance'

  # The project ID must be the same as the one we write metrics to, not the ID
  # where the instance lives.
  _monitored_resource.labels['project_id'] = utils.get_application_id()

  # Use bot name here instead, as that's more useful to us.
  _monitored_resource.labels['instance_id'] = environment.get_value('BOT_NAME')

  if compute_metadata.is_gce():
    # Returned in the form projects/{id}/zones/{zone}
    zone = compute_metadata.get('instance/zone').split('/')[-1]
    _monitored_resource.labels['zone'] = zone
  else:
    # Default zone for instances not on GCE.
    _monitored_resource.labels['zone'] = 'us-central1-f'