Example #1
def _ConstructField(self, local_field_names, optional, output_array):
    for qualified_field_name in local_field_names:
        field_ns, raw_field_name = field_lib.SplitFieldName(
            qualified_field_name)
        std_field_name, increment = SeparateFieldIncrement(raw_field_name)
        # Field will look local if undefined, but we'll catch the error later
        # because we do explicit existence checks and it will fail.
        # TODO(berkoben) refactor so validation happens in an order that
        # prevents this logic lint
        field_ns = self.local_namespace.GetQualifiedNamespace(
            field_ns, std_field_name)
        output_array.append(
            OptWrapper(field=FieldParts(namespace=field_ns,
                                        field=std_field_name,
                                        increment=increment),
                       optional=optional))
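
Example #2 below performs the same increment split inline with FIELD_INCREMENT_STRIPPER_REGEX. Under that assumption, a minimal sketch of what SeparateFieldIncrement might look like; the pattern here is illustrative, not the project's actual regex:

import re

# Illustrative pattern: a base field name followed by an optional numeric
# suffix such as '_1' or '_1_2'. The project's real regex may differ.
_FIELD_INCREMENT_REGEX = re.compile(r'(.*?)((?:_\d+)*)$')

def SeparateFieldIncrement(raw_field_name):
    """Splits 'supply_air_flowrate_sensor_1' into ('supply_air_flowrate_sensor', '_1')."""
    match = _FIELD_INCREMENT_REGEX.match(raw_field_name)
    if not match:  # Defensive: this pattern always matches, possibly with empty groups.
        return raw_field_name, ''
    return match.group(1), match.group(2)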
Example #2
def _ConstructField(self, local_field_names, optional, output_array):
    for field in local_field_names:
        field_ns, field_name = field_lib.SplitFieldName(field)
        increment = ''
        match = FIELD_INCREMENT_STRIPPER_REGEX.match(field_name)
        if match:
            field_name = match.group(1)
            increment = match.group(2)
        # Field will look local if undefined, but we'll catch the error later
        # because we do explicit existence checks and it will fail.
        # TODO(berkoben) refactor so validation happens in an order that
        # prevents this logic lint
        field_ns = self.local_namespace.GetQualifiedNamespace(
            field_ns, field_name)
        output_array.append(
            OptWrapper(field=FieldParts(namespace=field_ns,
                                        field=field_name,
                                        increment=increment),
                       optional=optional))
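
Both variants append OptWrapper entries whose FieldParts carry the namespace, the base field name, and the increment. The keyword arguments suggest simple record containers; a hypothetical stand-in (namedtuples here, which may not match the project's actual definitions) and one element the loop could produce:

from collections import namedtuple

# Hypothetical containers inferred from the keyword arguments used above.
FieldParts = namedtuple('FieldParts', ['namespace', 'field', 'increment'])
OptWrapper = namedtuple('OptWrapper', ['field', 'optional'])

# An element the loop might append for 'HVAC/supply_air_flowrate_sensor_1'
# (namespace and field name are illustrative only):
example = OptWrapper(
    field=FieldParts(namespace='HVAC',
                     field='supply_air_flowrate_sensor',
                     increment='_1'),
    optional=False)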
Example #3
def RunInteractive(filter_text, modified_base, modified_client):
  """Runs interactive mode when presubmit is run as a standalone application.

  This will run all files in the ontology as if they were new.

  Args:
    filter_text: command-line filter arguments. The only available argument is
      'match:<value>', which performs a simple string 'contains' check on the
      finding output and prints only matching findings.
    modified_base: paths to original versions of changed files in validation.
    modified_client: the list of modified files to validate.

  Returns:
    zero.
  """
  print('Analyzing...')
  start_time = time.time()
  findings, universe = _ValidateConfigInner([], modified_base, modified_client,
                                            True)

  PrintFindings(findings, filter_text)

  end_time = time.time()
  print('Elapsed time: {0} seconds.\n'.format(str(end_time - start_time)))

  etu = universe.entity_type_universe
  print('Enter one or more fully qualified names <NS/TYPE> separated by spaces')
  print('Prefix command with "findings:" to get findings for each type')
  while True:
    user_input = str(input('what type do you want to see? <NS/type>: '))
    input_split = user_input.strip().split(':')
    type_input = user_input
    include_findings = False
    if len(input_split) == 2:
      mode = input_split[0].strip()
      if mode == 'findings':
        include_findings = True
      type_input = input_split[1].strip()
    type_name_list = type_input.split(' ')
    first_type = None
    first_type_name = ''
    type_dict = {}
    for fqn in type_name_list:
      ns, etn = field_lib.SplitFieldName(fqn)
      et = etu.GetEntityType(ns, etn)
      if not et:
        print('no type for ' + fqn)
        continue
      if not first_type:
        first_type = et
        first_type_name = ns + '/' + etn
      else:
        type_dict[ns + '/' + etn] = et

      _PrintType(ns, et)
      if include_findings:
        print('  findings:\n')
        findings = et.GetFindings()
        for finding in findings:
          print('\t' + str(finding))

    if not first_type or not type_dict:
      continue
    first_field_set = (
        set(first_type.inherited_field_names)
        | set(first_type.local_field_names))
    print('Checking fields against ' + first_type_name)
    for name in type_dict:
      et = type_dict[name]
      field_set = (set(et.inherited_field_names) | set(et.local_field_names))
      outer = first_field_set.symmetric_difference(field_set)

      if not outer:
        print('\n' + name + ' is matching')
        continue

      missing_from_first = sorted(field_set.difference(first_field_set))
      missing_from_other = sorted(first_field_set.difference(field_set))
      print('\n' + name)
      print('\tIs missing:\n\t\t' + str(list(missing_from_other)))
      print('\tHas added:\n\t\t' + str(list(missing_from_first)))

    print('\n')

  return 0
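
A minimal sketch of driving this function standalone, assuming the surrounding presubmit module exposes RunInteractive directly; the filter value, file lists, and type names below are illustrative only:

if __name__ == '__main__':
  # Only findings whose text contains 'missing' are printed, per the
  # 'match:<value>' filter described in the docstring.
  RunInteractive('match:missing', modified_base=[], modified_client=[])

At the prompt, an input such as 'findings: HVAC/AHU_1 HVAC/AHU_2' prints each type with its findings, then diffs every additional type's field set against the first type entered (HVAC/AHU_1).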