Code Example #1
def ParseScopeFlagsForUpdate(ref, args, request, crawler):
    """Python hook that parses the crawl scope args into the update request.

  Args:
    ref: The crawler resource reference.
    args: The parsed args namespace.
    request: The update crawler request.
    crawler: CachedResult, The cached crawler result.
  Returns:
    Request with crawl scope set appropriately.
  """
    del ref
    client = crawlers.CrawlersClient()
    messages = client.messages

    if _IsChangeBucketsSpecified(args):
        buckets = _GetBucketsPatch(args, crawler, messages)
        # Infer the crawl scope so that the user can update buckets of an existing
        # bucket-scoped crawler without needing to explicitly specify
        # `--crawl-scope=bucket` again.
        crawl_scope = 'bucket'
    else:
        buckets = None
        crawl_scope = args.crawl_scope

    return _SetScopeInRequest(crawl_scope, buckets, request, messages)
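The helpers _IsChangeBucketsSpecified and _GetBucketsPatch are referenced here but not shown. As a rough sketch of the first one, assuming the update command exposes hypothetical --buckets, --add-buckets, --remove-buckets, and --clear-buckets flags:

def _IsChangeBucketsSpecified(args):
    # Sketch only; the flag names are an assumption. If any bucket flag is
    # present, the bucket list is being modified, which is what lets the
    # hook above infer crawl_scope='bucket' without --crawl-scope.
    return (args.IsSpecified('buckets') or
            args.IsSpecified('add_buckets') or
            args.IsSpecified('remove_buckets') or
            args.IsSpecified('clear_buckets'))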
Code Example #2
def ValidateAndParseFlagsForUpdate(ref, args, request):
    """Python hook that validates and parses crawler config flags.

  Normally all the functions called here would be provided directly as
  modify_request_hooks in the update command YAML file. However, this would
  require setting read_modify_update: True to obtain the existing crawler
  beforehand, incurring an extra GET API call that may be unnecessary depending
  on what fields need to be updated.

  Args:
    ref: The crawler resource reference.
    args: The parsed args namespace.
    request: The update crawler request.
  Returns:
    Request with scope and scheduling flags set appropriately.
  Raises:
    InvalidCrawlScopeError: If the crawl scope configuration is not valid.
    InvalidRunOptionError: If the scheduling configuration is not valid.
  """
    client = crawlers.CrawlersClient()
    # Call client.Get(ref) lazily and cache the result, so we only make one API
    # call to get the existing crawler (and only if necessary).
    crawler = repeated.CachedResult.FromFunc(client.Get, ref)

    request = ValidateScopeFlagsForUpdate(ref, args, request, crawler)
    request = ValidateSchedulingFlagsForUpdate(ref, args, request, crawler)
    request = ParseScopeFlagsForUpdate(ref, args, request, crawler)
    request = ParseSchedulingFlagsForUpdate(ref, args, request)
    request = ParseBundleSpecsFlagsForUpdate(ref, args, request, crawler)
    return request
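The lazy caching used above comes from repeated.CachedResult. A minimal sketch of the behavior the hooks rely on (illustrative only, not the actual implementation in googlecloudsdk.command_lib.util.args.repeated):

class CachedResult(object):
    """Caches the result of a zero-argument thunk after the first Get()."""

    def __init__(self, thunk):
        self._thunk = thunk
        self._result = None

    @classmethod
    def FromFunc(cls, func, *args):
        # Defer the call: client.Get(ref) only happens if Get() is invoked.
        return cls(lambda: func(*args))

    def Get(self):
        if self._result is None:
            self._result = self._thunk()
        return self._result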
Code Example #3
def ParseSchedulingFlagsForUpdate(ref, args, request):
    """Python hook that parses the scheduling args into the update request."""
    del ref
    client = crawlers.CrawlersClient()
    messages = client.messages
    # Infer the run schedule so that the user can update the schedule of an
    # existing scheduled crawler without needing to explicitly specify
    # `--run-option=scheduled` again.
    run_option = ('scheduled'
                  if args.IsSpecified('run_schedule') else args.run_option)
    return _SetRunOptionInRequest(run_option, args.run_schedule, request,
                                  messages)
Code Example #4
File: util.py Project: bopopescu/Assignment
def _SetRunOptionInRequest(run_option, run_schedule, request, messages):
    """Returns request with the run option set."""

    if run_option == 'manual':
        arg_utils.SetFieldInMessage(
            request, 'googleCloudDatacatalogV1alpha3Crawler.config.adHocRun',
            messages.GoogleCloudDatacatalogV1alpha3AdhocRun())
    elif run_option == 'scheduled':
        scheduled_run_option = arg_utils.ChoiceToEnum(
            run_schedule, (messages.GoogleCloudDatacatalogV1alpha3ScheduledRun.
                           ScheduledRunOptionValueValuesEnum))
        arg_utils.SetFieldInMessage(
            request,
            'googleCloudDatacatalogV1alpha3Crawler.config.scheduledRun.scheduledRunOption',
            scheduled_run_option)
    return request
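arg_utils.ChoiceToEnum converts the lowercase CLI choice (for example 'daily') into the matching proto enum value. A self-contained sketch of that mapping, using a stand-in enum.Enum rather than the generated ScheduledRunOptionValueValuesEnum:

import enum

class ScheduledRunOption(enum.Enum):
    # Stand-in for the generated ScheduledRunOptionValueValuesEnum.
    SCHEDULED_RUN_OPTION_UNSPECIFIED = 0
    DAILY = 1
    WEEKLY = 2

def ChoiceToEnum(choice, enum_type):
    # gcloud choices use lowercase-with-dashes; enum names use UPPER_SNAKE_CASE.
    return enum_type[choice.replace('-', '_').upper()]

assert ChoiceToEnum('daily', ScheduledRunOption) is ScheduledRunOption.DAILY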
Code Example #5
def ParseScopeFlagsForCreate(ref, args, request):
    """Python hook that parses the crawl scope args into the create request.

  Args:
    ref: The crawler resource reference.
    args: The parsed args namespace.
    request: The create crawler request.
  Returns:
    Request with crawl scope set appropriately.
  """
    del ref
    client = crawlers.CrawlersClient()
    messages = client.messages
    if args.IsSpecified('buckets'):
        buckets = [
            messages.GoogleCloudDatacatalogV1alpha3BucketSpec(bucket=b)
            for b in args.buckets
        ]
    else:
        buckets = None
    return _SetScopeInRequest(args.crawl_scope, buckets, request, messages)
Code Example #6
File: util.py Project: bopopescu/Assignment
def _SetScopeInRequest(crawl_scope, buckets, request, messages):
    """Returns request with the crawl scope set."""

    if crawl_scope == 'bucket' and buckets is not None:
        arg_utils.SetFieldInMessage(
            request,
            'googleCloudDatacatalogV1alpha3Crawler.config.bucketScope.buckets',
            buckets)
    elif crawl_scope == 'project':
        arg_utils.SetFieldInMessage(
            request,
            'googleCloudDatacatalogV1alpha3Crawler.config.projectScope',
            messages.GoogleCloudDatacatalogV1alpha3ParentProjectScope())
    elif crawl_scope == 'organization':
        arg_utils.SetFieldInMessage(
            request,
            'googleCloudDatacatalogV1alpha3Crawler.config.organizationScope',
            messages.GoogleCloudDatacatalogV1alpha3ParentOrganizationScope())

    return request
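arg_utils.SetFieldInMessage writes a value through a dotted field path on the request message. A rough, self-contained illustration of the traversal using plain attribute objects (the real helper also instantiates missing sub-messages along the path):

from types import SimpleNamespace

def SetFieldInMessage(message, field_path, value):
    # Walk every path component except the last, then set the leaf field.
    parts = field_path.split('.')
    for name in parts[:-1]:
        message = getattr(message, name)
    setattr(message, parts[-1], value)

request = SimpleNamespace(config=SimpleNamespace(projectScope=None))
SetFieldInMessage(request, 'config.projectScope', 'PARENT_PROJECT')
assert request.config.projectScope == 'PARENT_PROJECT'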
Code Example #7
def ParseSchedulingFlagsForCreate(ref, args, request):
    """Python hook that parses the scheduling args into the create request."""
    del ref
    client = crawlers.CrawlersClient()
    messages = client.messages
    return _SetRunOptionInRequest(args.run_option, args.run_schedule, request,
                                  messages)
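All of these hooks follow the same contract: the declarative command framework calls each hook with (ref, args, request) and threads the returned request into the next one, exactly as ValidateAndParseFlagsForUpdate does by hand above. A schematic of that chaining (framework internals simplified):

def ApplyModifyRequestHooks(hooks, ref, args, request):
    # Schematic only: mirrors the manual chaining in
    # ValidateAndParseFlagsForUpdate, where each hook returns the request
    # for the next hook to modify.
    for hook in hooks:
        request = hook(ref, args, request)
    return request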