Example #1
File: importer.py  Project: rmistry/luci-py
def legacy_json_config_to_proto(config_json):
    """Converts legacy JSON config to config_pb2.GroupImporterConfig message.

    TODO(vadimsh): Remove once all instances of auth service use protobuf configs.
    """
    try:
        config = json.loads(config_json)
    except ValueError as ex:
        logging.error('Invalid JSON: %s', ex)
        return None
    msg = config_pb2.GroupImporterConfig()
    for item in config:
        fmt = item.get('format', 'tarball')
        if fmt == 'tarball':
            entry = msg.tarball.add()
        elif fmt == 'plainlist':
            entry = msg.plainlist.add()
        else:
            logging.error('Unrecognized format: %s', fmt)
            continue
        entry.url = item.get('url') or ''
        entry.oauth_scopes.extend(item.get('oauth_scopes') or [])
        if 'domain' in item:
            entry.domain = item['domain']
        if fmt == 'tarball':
            entry.systems.extend(item.get('systems') or [])
            entry.groups.extend(item.get('groups') or [])
        elif fmt == 'plainlist':
            entry.group = item.get('group') or ''
        else:
            assert False, 'Not reachable'
    return msg
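A minimal usage sketch for the function above (hypothetical input values; assumes it runs alongside the importer module, so config_pb2 and logging are already available):

# Legacy config: a JSON list of entries, one dict per import source.
legacy_json = """
[
  {"format": "tarball", "url": "http://example.com/groups.tar.gz",
   "oauth_scopes": ["scope1"], "systems": ["s1"], "groups": ["s1/g1"]},
  {"format": "plainlist", "url": "http://example.com/list.txt", "group": "g3"}
]
"""
msg = legacy_json_config_to_proto(legacy_json)
if msg is None:
    # The input was not valid JSON; the error has already been logged.
    print('conversion failed')
else:
    # msg is a config_pb2.GroupImporterConfig with one tarball and one plainlist entry.
    print(msg)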
Example #2
  def test_load_config_happy(self):
    self.assertIsNone(importer.load_config())

    put_config("""
      tarball {
        url: "http://example.com/tarball"
        oauth_scopes: "scope1"
        oauth_scopes: "scope2"
        domain: "zzz1.example.com"
        systems: "s1"
        groups: "s1/g1"
        groups: "s1/g2"
      }

      tarball_upload {
        name: "tarball upload"
        authorized_uploader: "*****@*****.**"
        authorized_uploader: "*****@*****.**"
        domain: "zzz2.example.com"
        systems: "s2"
        groups: "s2/g1"
        groups: "s2/g2"
      }

      plainlist {
        url: "http://example.com/plainlist"
        oauth_scopes: "scope1"
        oauth_scopes: "scope2"
        domain: "zzz3.example.com"
        group: "g3"
      }
    """)

    cfg = importer.load_config()
    self.assertEqual(config_pb2.GroupImporterConfig(
      tarball=[config_pb2.GroupImporterConfig.TarballEntry(
        url='http://example.com/tarball',
        oauth_scopes=['scope1', 'scope2'],
        domain='zzz1.example.com',
        systems=['s1'],
        groups=['s1/g1', 's1/g2'],
      )],
      tarball_upload=[config_pb2.GroupImporterConfig.TarballUploadEntry(
        name='tarball upload',
        authorized_uploader=['*****@*****.**', '*****@*****.**'],
        domain='zzz2.example.com',
        systems=['s2'],
        groups=['s2/g1', 's2/g2'],
      )],
      plainlist=[config_pb2.GroupImporterConfig.PlainlistEntry(
        url='http://example.com/plainlist',
        oauth_scopes=['scope1', 'scope2'],
        domain='zzz3.example.com',
        group='g3',
      )]
    ), cfg)
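The put_config helper used by this test stores the text-format config that load_config later parses. The same text-to-proto round trip can be reproduced directly with the standard protobuf text_format module; a standalone sketch, assuming config_pb2 is importable:

from google.protobuf import text_format

# config_pb2 is assumed to come from the luci-py auth components.
cfg = config_pb2.GroupImporterConfig()
text_format.Merge("""
  plainlist {
    url: "http://example.com/plainlist"
    group: "g3"
  }
""", cfg)
# Repeated fields on the parsed message behave like lists.
assert cfg.plainlist[0].url == 'http://example.com/plainlist'
assert cfg.plainlist[0].group == 'g3'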
Example #3
File: importer.py  Project: rmistry/luci-py
def validate_config(text):
    """Deserializes text to config_pb2.GroupImporterConfig and validates it.

    Raises:
      ValueError if config is not valid.
    """
    msg = config_pb2.GroupImporterConfig()
    try:
        protobuf.text_format.Merge(text, msg)
    except protobuf.text_format.ParseError as ex:
        raise ValueError('Config is badly formatted: %s' % ex)
    validate_config_proto(msg)
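A quick sketch of how validate_config surfaces parse errors as ValueError (hypothetical input):

try:
    validate_config('this is not a valid text-format proto')
except ValueError as ex:
    # text_format.ParseError (and any validation failure) is reported as ValueError.
    print('Config rejected: %s' % ex)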
Example #4
def load_config():
    """Reads and parses the config, returns it as GroupImporterConfig or None.

    Raises BundleImportError if the config can't be parsed or doesn't pass
    the validation. Missing config is not an error (the function just returns
    None).
    """
    config_text = read_config()
    if not config_text:
        return None
    config = config_pb2.GroupImporterConfig()
    try:
        protobuf.text_format.Merge(config_text, config)
    except protobuf.text_format.ParseError as ex:
        raise BundleImportError('Bad config format: %s' % ex)
    try:
        validate_config_proto(config)
    except ValueError as ex:
        raise BundleImportError('Bad config structure: %s' % ex)
    return config
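Note the division of labor here: a missing config is reported as None rather than as an exception, so callers such as the cron task in the next example can treat "not configured" as a quiet no-op, while a config that exists but fails to parse or validate is escalated as BundleImportError.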
Example #5
File: importer.py  Project: rmistry/luci-py
def import_external_groups():
    """Refetches all external groups.

    Runs as a cron task. Raises BundleImportError in case of import errors.
    """
    # Missing config is not an error.
    config_text = read_config()
    if not config_text:
        logging.info('Not configured')
        return
    config = config_pb2.GroupImporterConfig()
    try:
        protobuf.text_format.Merge(config_text, config)
    except protobuf.text_format.ParseError as ex:
        raise BundleImportError('Bad config format: %s' % ex)
    try:
        validate_config_proto(config)
    except ValueError as ex:
        raise BundleImportError('Bad config structure: %s' % ex)

    # Fetch all files specified in config in parallel.
    entries = list(config.tarball) + list(config.plainlist)
    futures = [fetch_file_async(e.url, e.oauth_scopes) for e in entries]

    # {system name -> group name -> list of identities}
    bundles = {}
    for e, future in zip(entries, futures):
        # Unpack tarball into {system name -> group name -> list of identities}.
        if isinstance(e, config_pb2.GroupImporterConfig.TarballEntry):
            fetched = load_tarball(future.get_result(), e.systems, e.groups,
                                   e.domain)
            assert not (set(fetched) & set(bundles)), (fetched.keys(),
                                                       bundles.keys())
            bundles.update(fetched)
            continue

        # Add plainlist group to 'external/*' bundle.
        if isinstance(e, config_pb2.GroupImporterConfig.PlainlistEntry):
            group = load_group_file(future.get_result(), e.domain)
            name = 'external/%s' % e.group
            if 'external' not in bundles:
                bundles['external'] = {}
            assert name not in bundles['external'], name
            bundles['external'][name] = group
            continue

        assert False, 'Unreachable'

    # Nothing to process?
    if not bundles:
        return

    @ndb.transactional
    def snapshot_groups():
        """Fetches all existing groups and AuthDB revision number."""
        groups = model.AuthGroup.query(ancestor=model.root_key()).fetch_async()
        return auth.get_auth_db_revision(), groups.get_result()

    @ndb.transactional
    def apply_import(revision, entities_to_put, entities_to_delete, ts):
        """Transactionally puts and deletes a bunch of entities."""
        # DB changed between transactions, retry.
        if auth.get_auth_db_revision() != revision:
            return False
        # Apply mutations, bump revision number.
        for e in entities_to_put:
            e.record_revision(modified_by=model.get_service_self_identity(),
                              modified_ts=ts,
                              comment='External group import')
        for e in entities_to_delete:
            e.record_deletion(modified_by=model.get_service_self_identity(),
                              modified_ts=ts,
                              comment='External group import')
        futures = []
        futures.extend(ndb.put_multi_async(entities_to_put))
        futures.extend(
            ndb.delete_multi_async(e.key for e in entities_to_delete))
        for f in futures:
            f.check_success()
        auth.replicate_auth_db()
        return True

    # Try to apply the change until success or deadline. Split transaction into
    # two (assuming AuthDB changes infrequently) to avoid reading and writing too
    # much stuff from within a single transaction (and to avoid keeping the
    # transaction open while calculating the diff).
    while True:
        # Use same timestamp everywhere to reflect that groups were imported
        # atomically within a single transaction.
        ts = utils.utcnow()
        entities_to_put = []
        entities_to_delete = []
        revision, existing_groups = snapshot_groups()
        for system, groups in bundles.iteritems():
            to_put, to_delete = prepare_import(system, existing_groups, groups,
                                               ts)
            entities_to_put.extend(to_put)
            entities_to_delete.extend(to_delete)
        if not entities_to_put and not entities_to_delete:
            break
        if apply_import(revision, entities_to_put, entities_to_delete, ts):
            break
    logging.info('Groups updated: %d',
                 len(entities_to_put) + len(entities_to_delete))
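The snapshot_groups / apply_import split is an optimistic-concurrency pattern: the diff against existing groups is computed outside the write transaction, and apply_import re-checks the AuthDB revision before writing, returning False if another change landed in between. The outer while loop then re-snapshots and recomputes the diff, so the import either commits against an unchanged revision or retries with fresh data.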