def test_get_bucket(self):
    """get_bucket returns the owning project and the parsed bucket config."""
    bucket_text = MASTER_TRYSERVER_CHROMIUM_LINUX_CONFIG_TEXT
    config.Bucket(
        id='master.tryserver.chromium.linux',
        project_id='chromium',
        revision='deadbeef',
        config_content=bucket_text,
        config_content_binary=text_to_binary(bucket_text),
    ).put()

    project, cfg = config.get_bucket('master.tryserver.chromium.linux')

    self.assertEqual(project, 'chromium')
    expected = project_config_pb2.Bucket(
        name='master.tryserver.chromium.linux',
        acls=[
            project_config_pb2.Acl(
                role=project_config_pb2.Acl.READER, group='all'),
            project_config_pb2.Acl(
                role=project_config_pb2.Acl.SCHEDULER, group='tryjob-access'),
        ])
    self.assertEqual(cfg, expected)

    # A bucket that was never stored yields None for the project.
    self.assertIsNone(config.get_bucket('non.existing')[0])
def test_get_bucket(self):
    """A stored bucket comes back with its revision and shortened config."""
    config.put_bucket('chromium', 'deadbeef', LUCI_CHROMIUM_TRY)

    revision, bucket_cfg = config.get_bucket('chromium/try')

    self.assertEqual(revision, 'deadbeef')
    self.assertEqual(bucket_cfg, short_bucket_cfg(LUCI_CHROMIUM_TRY))

    # An unknown bucket id yields None for the revision.
    self.assertIsNone(config.get_bucket('chromium/nonexistent')[0])
def _reject_swarming_bucket(bucket_id):
    """Raises InvalidInputError if |bucket_id| refers to a Swarming bucket.

    Assumes the caller has already passed a permission check, so a missing
    bucket config is a programming error, not user error.
    """
    config.validate_bucket_id(bucket_id)
    _, bucket_cfg = config.get_bucket(bucket_id)
    assert bucket_cfg, 'permission check should have failed'
    if config.is_swarming_config(bucket_cfg):
        raise errors.InvalidInputError(
            'Invalid operation on a Swarming bucket')
def get_bucket(self, request):
    """Returns bucket information."""
    # convert_bucket performs the access check and raises on denial.
    bucket_id = convert_bucket(request.bucket)
    project_id, _ = config.parse_bucket_id(bucket_id)
    revision, bucket_cfg = config.get_bucket(bucket_id)
    assert bucket_cfg  # access check above would have failed otherwise.
    content = protobuf.text_format.MessageToString(bucket_cfg)
    return BucketMessage(
        name=request.bucket,
        project_id=project_id,
        config_file_content=content,
        config_file_rev=revision,
        config_file_url=config.get_buildbucket_cfg_url(project_id),
    )
def test_cron_update_buckets_with_broken_configs(self, get_project_configs):
    """A broken project config must not wipe previously stored buckets."""
    config.put_bucket('chromium', 'deadbeef', MASTER_TRYSERVER_CHROMIUM_LINUX)

    broken = config_component.ConfigFormatError('broken!')
    get_project_configs.return_value = {
        'chromium': ('new!', None, broken),
    }

    config.cron_update_buckets()

    # We must not delete buckets defined in a project that currently have
    # a broken config.
    bucket_id = 'chromium/%s' % MASTER_TRYSERVER_CHROMIUM_LINUX.name
    _, stored = config.get_bucket(bucket_id)
    self.assertEqual(stored, MASTER_TRYSERVER_CHROMIUM_LINUX)
def set_next_build_number(self, request):
    """Sets the build number that will be used for the next build."""
    bucket_id = api.convert_bucket(request.bucket)
    if not user.can_set_next_number_async(bucket_id).get_result():
        raise endpoints.ForbiddenException('access denied')

    # The builder must exist in the bucket's Swarming config.
    _, bucket_cfg = config.get_bucket(bucket_id)
    known_builders = (b.name for b in bucket_cfg.swarming.builders)
    if request.builder not in known_builders:
        raise endpoints.BadRequestException(
            'builder "%s" not found in bucket "%s"' %
            (request.builder, bucket_id))

    project, bucket_name = config.parse_bucket_id(bucket_id)
    builder_id = build_pb2.BuilderID(
        project=project, bucket=bucket_name, builder=request.builder)
    seq_name = sequence.builder_seq_name(builder_id)
    try:
        sequence.set_next(seq_name, request.next_number)
    except ValueError as ex:
        raise endpoints.BadRequestException(str(ex))
    return message_types.VoidMessage()
import petl as etl import cx_Oracle import boto3 from passyunk.parser import PassyunkParser from config import get_dsn, get_bucket month = '2018_09' parser = PassyunkParser() # Input locations loc = r'C:/Projects/etl/data/usps/' csbyst = '/pa.txt' zip4 = '/pa' # Output params s3_bucket = get_bucket() dsn = get_dsn('ais') connection = cx_Oracle.Connection(dsn) zip4_write_table_name = 'USPS_ZIP4S' cityzip_write_table_name = 'USPS_CITYZIP' alias_write_table_name = 'USPS_ALIAS' address_standardization_report_table_name = 'USPS_ZIP4_ADDRESS_CHECK' alias_outfile_path = alias_write_table_name.lower() + '.csv' cityzip_outfile_path = cityzip_write_table_name.lower() + '.csv' zip4_outfile_path = zip4_write_table_name.lower() + '.csv' temp_zip4_outfile_path = 'T_' + zip4_outfile_path ##################################### # Meta: zip3s = ['190', '191', '192'] zips = ['19019',
import petl as etl import cx_Oracle import boto3 from passyunk.parser import PassyunkParser from config import get_dsn, get_bucket month = '2020_03' parser = PassyunkParser() # Input locations loc = r'C:/Projects/etl/data/usps/' csbyst = '/pa.txt' zip4 = '/pa' # Output params s3_bucket = get_bucket() dsn = get_dsn('ais') connection = cx_Oracle.Connection(dsn) zip4_write_table_name = 'USPS_ZIP4S' cityzip_write_table_name = 'USPS_CITYZIP' alias_write_table_name = 'USPS_ALIAS' address_standardization_report_table_name = 'USPS_ZIP4_ADDRESS_CHECK' alias_outfile_path = alias_write_table_name.lower() + '.csv' cityzip_outfile_path = cityzip_write_table_name.lower() + '.csv' zip4_outfile_path = zip4_write_table_name.lower() + '.csv' temp_zip4_outfile_path = 'T_' + zip4_outfile_path ##################################### # Meta: zip3s = ['190', '191', '192'] zips = [