class Shows(core.Stack):
    """CDK stack for the "shows" service.

    Provisions DynamoDB tables, an SNS updates topic, Lambda layers and
    functions (packaged from the local ``LAYERS_DIR``/``LAMBDAS_DIR``
    folders), and an HTTP API Gateway mapped to a custom domain.
    """

    def __init__(self, app: core.App, id: str, domain_name: str, **kwargs) -> None:
        """Build the stack.

        :param app: parent CDK app.
        :param id: stack construct id.
        :param domain_name: custom domain to attach the HTTP API to.
        """
        super().__init__(app, id, **kwargs)
        self.domain_name = domain_name
        # Populated by _create_layers / _create_lambdas: name -> construct.
        self.layers = {}
        self.lambdas = {}
        # Order matters: the lambdas config references the tables and topic,
        # the functions reference layers + config, the gateway references
        # the functions.
        self._create_tables()
        self._create_topic()
        self._create_lambdas_config()
        self._create_layers()
        self._create_lambdas()
        self._create_gateway()

    def _create_tables(self):
        """Create the shows and episodes DynamoDB tables and their indexes."""
        self.shows_table = Table(
            self,
            "shows_table",
            table_name="shows",
            partition_key=Attribute(name="id", type=AttributeType.STRING),
            billing_mode=BillingMode.PAY_PER_REQUEST,
        )
        # GSI to look a show up by its TVMaze id.
        self.shows_table.add_global_secondary_index(partition_key=Attribute(
            name="tvmaze_id", type=AttributeType.NUMBER), index_name="tvmaze_id")
        self.episodes_table = Table(
            self,
            "episodes_table",
            table_name="shows-eps",
            partition_key=Attribute(name="show_id", type=AttributeType.STRING),
            sort_key=Attribute(name="id", type=AttributeType.STRING),
            billing_mode=BillingMode.PAY_PER_REQUEST,
        )
        # NOTE(review): this LSI's sort key "id" is the same attribute as the
        # table's own sort key — confirm it is actually needed.
        self.episodes_table.add_local_secondary_index(sort_key=Attribute(
            name="id", type=AttributeType.STRING), index_name="episode_id")
        # GSI to look an episode up by its TVMaze id.
        self.episodes_table.add_global_secondary_index(partition_key=Attribute(
            name="tvmaze_id", type=AttributeType.NUMBER), index_name="tvmaze_id")

    def _create_topic(self):
        """Create the SNS topic used to fan out show updates."""
        self.show_updates_topic = Topic(
            self,
            "shows_updates",
            topic_name="shows-updates",
        )

    def _create_lambdas_config(self):
        """Define per-function config: layers, env vars, IAM policies,
        timeout (seconds) and memory (MB).

        Keys follow the "<parent_folder>-<lambda_folder>" naming produced by
        _create_lambdas when walking LAMBDAS_DIR.
        """
        self.lambdas_config = {
            "api-shows_by_id": {
                "layers": ["utils", "databases", "api"],
                "variables": {
                    "SHOWS_DATABASE_NAME": self.shows_table.table_name,
                    "LOG_LEVEL": "INFO",
                },
                "policies": [
                    PolicyStatement(actions=["dynamodb:GetItem"],
                                    resources=[self.shows_table.table_arn])
                ],
                "timeout": 3,
                "memory": 128
            },
            "api-shows": {
                "layers": ["utils", "databases", "api"],
                "variables": {
                    "SHOWS_DATABASE_NAME": self.shows_table.table_name,
                    "LOG_LEVEL": "INFO",
                },
                "policies": [
                    # Query restricted to the tvmaze_id GSI; writes hit the table.
                    PolicyStatement(
                        actions=["dynamodb:Query"],
                        resources=[
                            f"{self.shows_table.table_arn}/index/tvmaze_id"
                        ]),
                    PolicyStatement(actions=["dynamodb:UpdateItem"],
                                    resources=[self.shows_table.table_arn]),
                ],
                "timeout": 10,
                "memory": 128
            },
            "api-episodes": {
                "layers": ["utils", "databases", "api"],
                "variables": {
                    "SHOWS_DATABASE_NAME": self.shows_table.table_name,
                    "SHOW_EPISODES_DATABASE_NAME": self.episodes_table.table_name,
                    "LOG_LEVEL": "INFO",
                },
                "policies": [
                    PolicyStatement(actions=["dynamodb:GetItem"],
                                    resources=[self.shows_table.table_arn]),
                    PolicyStatement(
                        actions=["dynamodb:Query"],
                        resources=[
                            f"{self.episodes_table.table_arn}/index/tvmaze_id"
                        ]),
                    PolicyStatement(actions=["dynamodb:UpdateItem"],
                                    resources=[self.episodes_table.table_arn]),
                    PolicyStatement(actions=["dynamodb:GetItem"],
                                    resources=[self.episodes_table.table_arn]),
                ],
                "timeout": 10,
                "memory": 128
            },
            "api-episodes_by_id": {
                "layers": ["utils", "databases", "api"],
                "variables": {
                    "SHOW_EPISODES_DATABASE_NAME": self.episodes_table.table_name,
                    "LOG_LEVEL": "INFO",
                },
                "policies": [
                    PolicyStatement(actions=["dynamodb:Query"],
                                    resources=[self.episodes_table.table_arn]),
                ],
                "timeout": 3,
                "memory": 128
            },
            "cron-update_eps": {
                "layers": ["utils", "databases", "api", "publishers"],
                "variables": {
                    "SHOWS_DATABASE_NAME": self.shows_table.table_name,
                    "LOG_LEVEL": "INFO",
                    "UPDATES_TOPIC_ARN": self.show_updates_topic.topic_arn,
                },
                "policies": [
                    PolicyStatement(
                        actions=["dynamodb:Query"],
                        resources=[
                            f"{self.shows_table.table_arn}/index/tvmaze_id"
                        ],
                    ),
                    PolicyStatement(
                        actions=["sns:Publish"],
                        resources=[self.show_updates_topic.topic_arn],
                    )
                ],
                "timeout": 60,
                "memory": 1024
            },
        }

    def _create_layers(self):
        """Package each folder under LAYERS_DIR into a Lambda LayerVersion.

        Copies the layer source into BUILD_FOLDER, pip-installs any
        requirements.txt into the layer's site-packages path, and registers
        the resulting LayerVersion in self.layers.
        """
        # Start from a clean build directory on every synth.
        if os.path.isdir(BUILD_FOLDER):
            shutil.rmtree(BUILD_FOLDER)
        os.mkdir(BUILD_FOLDER)
        for layer in os.listdir(LAYERS_DIR):
            layer_folder = os.path.join(LAYERS_DIR, layer)
            build_folder = os.path.join(BUILD_FOLDER, layer)
            shutil.copytree(layer_folder, build_folder)
            requirements_path = os.path.join(build_folder, "requirements.txt")
            if os.path.isfile(requirements_path):
                # Lambda layers expect python/lib/python3.8/site-packages.
                packages_folder = os.path.join(build_folder, "python", "lib",
                                               "python3.8", "site-packages")
                # print(f"Installing layer requirements to target: {os.path.abspath(packages_folder)}")
                subprocess.check_output([
                    "pip", "install", "-r", requirements_path, "-t",
                    packages_folder
                ])
                clean_pycache()
            self.layers[layer] = LayerVersion(
                self,
                layer,
                layer_version_name=f"shows-{layer}",
                code=Code.from_asset(path=build_folder),
                compatible_runtimes=[Runtime.PYTHON_3_8],
            )

    def _create_lambdas(self):
        """Create one Function per __init__.py found under LAMBDAS_DIR,
        wired to its entry in self.lambdas_config, plus the daily cron rule.
        """
        for root, dirs, files in os.walk(LAMBDAS_DIR):
            for f in files:
                if f != "__init__.py":
                    continue
                # Function name is "<parent_folder>-<lambda_folder>", matching
                # the keys of self.lambdas_config.
                parent_folder = os.path.basename(os.path.dirname(root))
                lambda_folder = os.path.basename(root)
                name = f"{parent_folder}-{lambda_folder}"
                lambda_config = self.lambdas_config[name]
                layers = []
                for layer_name in lambda_config["layers"]:
                    layers.append(self.layers[layer_name])
                # Dedicated role per function with only its declared policies.
                lambda_role = Role(self, f"{name}_role",
                                   assumed_by=ServicePrincipal(
                                       service="lambda.amazonaws.com"))
                for policy in lambda_config["policies"]:
                    lambda_role.add_to_policy(policy)
                lambda_role.add_managed_policy(
                    ManagedPolicy.from_aws_managed_policy_name(
                        "service-role/AWSLambdaBasicExecutionRole"))
                self.lambdas[name] = Function(
                    self,
                    name,
                    code=Code.from_asset(root),
                    handler="__init__.handle",
                    runtime=Runtime.PYTHON_3_8,
                    layers=layers,
                    function_name=name,
                    environment=lambda_config["variables"],
                    role=lambda_role,
                    timeout=Duration.seconds(lambda_config["timeout"]),
                    memory_size=lambda_config["memory"],
                )
        # Nightly trigger (02:10 UTC per Schedule.cron) for the updater.
        Rule(self, "update_eps",
             schedule=Schedule.cron(hour="2", minute="10"),
             targets=[LambdaFunction(self.lambdas["cron-update_eps"])])

    def _create_gateway(self):
        """Create the HTTP API, its routes/integrations, and the live stage
        mapped to the custom domain (DNS-validated ACM certificate).
        """
        cert = Certificate(self, "certificate",
                           domain_name=self.domain_name,
                           validation_method=ValidationMethod.DNS)
        domain_name = DomainName(
            self,
            "domain_name",
            certificate=cert,
            domain_name=self.domain_name,
        )
        http_api = HttpApi(
            self,
            "shows_gateway",
            create_default_stage=False,
            api_name="shows",
            cors_preflight=CorsPreflightOptions(
                allow_methods=[HttpMethod.GET, HttpMethod.POST],
                allow_origins=["https://moshan.tv", "https://beta.moshan.tv"],
                allow_headers=["authorization", "content-type"]))
        # route-id -> method/path/backing lambda.
        routes = {
            "get_shows": {
                "method": "GET",
                "route": "/shows",
                "target_lambda": self.lambdas["api-shows"]
            },
            "post_shows": {
                "method": "POST",
                "route": "/shows",
                "target_lambda": self.lambdas["api-shows"]
            },
            "get_shows_by_id": {
                "method": "GET",
                "route": "/shows/{id}",
                "target_lambda": self.lambdas["api-shows_by_id"]
            },
            # NOTE(review): GET is /episodes while POST is nested under
            # /shows/{id}/episodes — confirm this asymmetry is intentional.
            "get_episodes": {
                "method": "GET",
                "route": "/episodes",
                "target_lambda": self.lambdas["api-episodes"]
            },
            "post_episodes": {
                "method": "POST",
                "route": "/shows/{id}/episodes",
                "target_lambda": self.lambdas["api-episodes"]
            },
            "get_episodes_by_id": {
                "method": "GET",
                "route": "/shows/{id}/episodes/{episode_id}",
                "target_lambda": self.lambdas["api-episodes_by_id"]
            },
        }
        for r in routes:
            integration = HttpIntegration(
                self,
                f"{r}_integration",
                http_api=http_api,
                integration_type=HttpIntegrationType.LAMBDA_PROXY,
                integration_uri=routes[r]["target_lambda"].function_arn,
                method=getattr(HttpMethod, routes[r]["method"]),
                payload_format_version=PayloadFormatVersion.VERSION_2_0,
            )
            CfnRoute(
                self,
                r,
                api_id=http_api.http_api_id,
                route_key=f"{routes[r]['method']} {routes[r]['route']}",
                # authorization_type="AWS_IAM", # TODO: add back when: https://github.com/aws/aws-cdk/pull/14853 gets merged (set this manually for now)
                target="integrations/" + integration.integration_id)
            # Allow API Gateway to invoke the backing lambda.
            routes[r]["target_lambda"].add_permission(
                f"{r}_apigateway_invoke",
                principal=ServicePrincipal("apigateway.amazonaws.com"),
                source_arn=
                f"arn:aws:execute-api:{self.region}:{self.account}:{http_api.http_api_id}/*"
            )
        HttpStage(self, "live",
                  http_api=http_api,
                  auto_deploy=True,
                  stage_name="live",
                  domain_mapping=DomainMappingOptions(
                      domain_name=domain_name,
                  ))
class Anime(core.Stack):
    """CDK stack for the "anime" service.

    Provisions an S3 bucket for AniDB title dumps, DynamoDB tables, an SQS
    queue (with DLQ), Lambda layers/functions, and an HTTP API Gateway with
    a Cognito JWT authorizer and a MyAnimeList HTTP proxy route.
    """

    def __init__(self, app: core.App, id: str, mal_client_id: str,
                 anidb_client: str, domain_name: str, **kwargs) -> None:
        """Build the stack.

        :param app: parent CDK app.
        :param id: stack construct id.
        :param mal_client_id: MyAnimeList API client id (passed to lambdas).
        :param anidb_client: AniDB client identifier (passed to lambdas).
        :param domain_name: custom domain to attach the HTTP API to.
        """
        super().__init__(app, id, **kwargs)
        self.mal_client_id = mal_client_id
        self.anidb_client = anidb_client
        self.domain_name = domain_name
        # Populated by _create_layers / _create_lambdas: name -> construct.
        self.layers = {}
        self.lambdas = {}
        # Order matters: later steps reference resources created earlier.
        self._create_buckets()
        self._create_tables()
        self._create_queues()
        self._create_lambdas_config()
        self._create_layers()
        self._create_lambdas()
        self._create_gateway()

    def _create_buckets(self):
        """Create the private bucket holding AniDB title dumps.

        Objects expire after 3 days; the bucket is destroyed with the stack.
        """
        self.anidb_titles_bucket = Bucket(
            self,
            "anidb_titles_bucket",
            block_public_access=BlockPublicAccess(
                block_public_acls=True,
                block_public_policy=True,
            ),
            removal_policy=core.RemovalPolicy.DESTROY,
            lifecycle_rules=[
                LifecycleRule(expiration=Duration.days(3)),
            ]
        )

    def _create_tables(self):
        """Create the anime, episodes and params DynamoDB tables + indexes."""
        self.anime_table = Table(
            self,
            "anime_items",
            partition_key=Attribute(name="id", type=AttributeType.STRING),
            billing_mode=BillingMode.PAY_PER_REQUEST,
        )
        # Lookup by MyAnimeList id.
        self.anime_table.add_global_secondary_index(
            partition_key=Attribute(name="mal_id", type=AttributeType.NUMBER),
            index_name="mal_id"
        )
        # Query shows airing on a given weekday (used by the episodes cron).
        self.anime_table.add_global_secondary_index(
            partition_key=Attribute(name="broadcast_day", type=AttributeType.STRING),
            index_name="broadcast_day"
        )
        self.anime_episodes = Table(
            self,
            "anime_episodes",
            partition_key=Attribute(name="anime_id", type=AttributeType.STRING),
            sort_key=Attribute(name="episode_number", type=AttributeType.NUMBER),
            billing_mode=BillingMode.PAY_PER_REQUEST,
        )
        # Alternate sort by episode id within an anime.
        self.anime_episodes.add_local_secondary_index(
            sort_key=Attribute(name="id", type=AttributeType.STRING),
            index_name="episode_id"
        )
        # Lookup by AniDB id.
        self.anime_episodes.add_global_secondary_index(
            partition_key=Attribute(name="anidb_id", type=AttributeType.NUMBER),
            index_name="anidb_id"
        )
        # Small key/value table for runtime parameters.
        self.anime_params = Table(
            self,
            "anime_params",
            partition_key=Attribute(name="name", type=AttributeType.STRING),
            billing_mode=BillingMode.PAY_PER_REQUEST,
        )

    def _create_queues(self):
        """Create the post-anime work queue with a 5-retry dead-letter queue
        and 20s long polling.
        """
        post_anime_dl = Queue(self, "post_anime_dl")
        self.post_anime_queue = Queue(
            self,
            "anime",
            dead_letter_queue=DeadLetterQueue(max_receive_count=5, queue=post_anime_dl),
            receive_message_wait_time=Duration.seconds(20)
        )

    def _create_lambdas_config(self):
        """Define per-function config: layers, env vars, IAM policies,
        optional reserved concurrency, timeout (seconds) and memory (MB).

        Keys follow the "<parent_folder>-<lambda_folder>" naming produced by
        _create_lambdas when walking LAMBDAS_DIR.
        """
        self.lambdas_config = {
            "api-anime_by_id": {
                "layers": ["utils", "databases"],
                "variables": {
                    "ANIME_DATABASE_NAME": self.anime_table.table_name,
                    "LOG_LEVEL": "INFO",
                },
                "policies": [
                    PolicyStatement(
                        actions=["dynamodb:GetItem"],
                        resources=[self.anime_table.table_arn]
                    )
                ],
                "timeout": 3,
                "memory": 128
            },
            "api-anime_episodes": {
                "layers": ["utils", "databases"],
                "variables": {
                    "ANIME_EPISODES_DATABASE_NAME": self.anime_episodes.table_name,
                    "LOG_LEVEL": "INFO",
                },
                "policies": [
                    PolicyStatement(
                        actions=["dynamodb:Query"],
                        resources=[self.anime_episodes.table_arn]
                    ),
                    PolicyStatement(
                        actions=["dynamodb:Query"],
                        resources=[f"{self.anime_episodes.table_arn}/index/anidb_id"]
                    )
                ],
                "timeout": 3,
                "memory": 512
            },
            "api-anime_episode": {
                "layers": ["utils", "databases"],
                "variables": {
                    "ANIME_EPISODES_DATABASE_NAME": self.anime_episodes.table_name,
                    "LOG_LEVEL": "INFO",
                },
                "policies": [
                    PolicyStatement(
                        actions=["dynamodb:Query"],
                        resources=[f"{self.anime_episodes.table_arn}/index/episode_id"]
                    )
                ],
                "timeout": 3,
                "memory": 512
            },
            "api-anime": {
                "layers": ["utils", "databases", "api"],
                "variables": {
                    "ANIME_DATABASE_NAME": self.anime_table.table_name,
                    "POST_ANIME_SQS_QUEUE_URL": self.post_anime_queue.queue_url,
                    "LOG_LEVEL": "INFO",
                },
                "policies": [
                    PolicyStatement(
                        actions=["dynamodb:Query"],
                        resources=[f"{self.anime_table.table_arn}/index/mal_id"]
                    ),
                    PolicyStatement(
                        actions=["sqs:SendMessage"],
                        resources=[self.post_anime_queue.queue_arn]
                    ),
                ],
                "timeout": 10,
                "memory": 128
            },
            "crons-titles_updater": {
                "layers": ["utils", "databases", "api"],
                "variables": {
                    "ANIDB_TITLES_BUCKET": self.anidb_titles_bucket.bucket_name,
                    "LOG_LEVEL": "INFO",
                },
                # Serialized: only one title-dump refresh at a time.
                "concurrent_executions": 1,
                "policies": [
                    PolicyStatement(
                        actions=["s3:ListBucket"],
                        resources=[self.anidb_titles_bucket.bucket_arn]
                    ),
                    PolicyStatement(
                        actions=["s3:GetObject", "s3:PutObject"],
                        resources=[self.anidb_titles_bucket.arn_for_objects("*")]
                    )
                ],
                "timeout": 120,
                "memory": 128
            },
            "crons-episodes_updater": {
                "layers": ["utils", "databases"],
                "variables": {
                    "LOG_LEVEL": "DEBUG",
                    "POST_ANIME_SQS_QUEUE_URL": self.post_anime_queue.queue_url,
                    "ANIME_DATABASE_NAME": self.anime_table.table_name,
                },
                "concurrent_executions": 1,
                "policies": [
                    PolicyStatement(
                        actions=["dynamodb:Query"],
                        resources=[f"{self.anime_table.table_arn}/index/broadcast_day"]
                    ),
                    PolicyStatement(
                        actions=["sqs:SendMessage"],
                        resources=[self.post_anime_queue.queue_arn]
                    ),
                ],
                "timeout": 120,
                "memory": 128
            },
            "sqs_handlers-post_anime": {
                "layers": ["utils", "databases", "api"],
                "variables": {
                    "ANIME_DATABASE_NAME": self.anime_table.table_name,
                    "ANIME_EPISODES_DATABASE_NAME": self.anime_episodes.table_name,
                    "ANIME_PARAMS_DATABASE_NAME": self.anime_params.table_name,
                    "MAL_CLIENT_ID": self.mal_client_id,
                    "ANIDB_TITLES_BUCKET": self.anidb_titles_bucket.bucket_name,
                    "ANIDB_CLIENT": self.anidb_client,
                    "LOG_LEVEL": "INFO",
                },
                "concurrent_executions": 1,
                "policies": [
                    PolicyStatement(
                        actions=["dynamodb:Query"],
                        resources=[f"{self.anime_table.table_arn}/index/mal_id"]
                    ),
                    PolicyStatement(
                        actions=["dynamodb:UpdateItem"],
                        resources=[self.anime_table.table_arn]
                    ),
                    PolicyStatement(
                        actions=["dynamodb:BatchWriteItem"],
                        resources=[self.anime_episodes.table_arn]
                    ),
                    PolicyStatement(
                        actions=["dynamodb:UpdateItem", "dynamodb:GetItem"],
                        resources=[self.anime_params.table_arn]
                    ),
                    PolicyStatement(
                        actions=["s3:ListBucket"],
                        resources=[self.anidb_titles_bucket.bucket_arn]
                    ),
                    PolicyStatement(
                        actions=["s3:GetObject"],
                        resources=[self.anidb_titles_bucket.arn_for_objects("*")]
                    )
                ],
                "timeout": 60,
                "memory": 2048
            },
        }

    def _create_layers(self):
        """Package each folder under LAYERS_DIR into a Lambda LayerVersion.

        Copies the layer source into BUILD_FOLDER, pip-installs any
        requirements.txt into the layer's site-packages path, and registers
        the resulting LayerVersion in self.layers.
        """
        # Start from a clean build directory on every synth.
        if os.path.isdir(BUILD_FOLDER):
            shutil.rmtree(BUILD_FOLDER)
        os.mkdir(BUILD_FOLDER)
        for layer in os.listdir(LAYERS_DIR):
            layer_folder = os.path.join(LAYERS_DIR, layer)
            build_folder = os.path.join(BUILD_FOLDER, layer)
            shutil.copytree(layer_folder, build_folder)
            requirements_path = os.path.join(build_folder, "requirements.txt")
            if os.path.isfile(requirements_path):
                # Lambda layers expect python/lib/python3.8/site-packages.
                packages_folder = os.path.join(build_folder, "python", "lib",
                                               "python3.8", "site-packages")
                # print(f"Installing layer requirements to target: {os.path.abspath(packages_folder)}")
                subprocess.check_output(["pip", "install", "-r",
                                         requirements_path, "-t",
                                         packages_folder])
                clean_pycache()
            self.layers[layer] = LayerVersion(
                self,
                layer,
                layer_version_name=f"anime-{layer}",
                code=Code.from_asset(path=build_folder),
                compatible_runtimes=[Runtime.PYTHON_3_8],
            )

    def _create_lambdas(self):
        """Create one Function per __init__.py under LAMBDAS_DIR, wire the
        SQS handler to its queue, and schedule the two cron rules.
        """
        # Remove stray __pycache__ dirs before Code.from_asset packages them.
        clean_pycache()
        for root, dirs, files in os.walk(LAMBDAS_DIR):
            for f in files:
                if f != "__init__.py":
                    continue
                # Function name is "<parent_folder>-<lambda_folder>", matching
                # the keys of self.lambdas_config.
                parent_folder = os.path.basename(os.path.dirname(root))
                lambda_folder = os.path.basename(root)
                name = f"{parent_folder}-{lambda_folder}"
                lambda_config = self.lambdas_config[name]
                layers = []
                for layer_name in lambda_config["layers"]:
                    layers.append(self.layers[layer_name])
                # Dedicated role per function with only its declared policies.
                lambda_role = Role(
                    self,
                    f"{name}_role",
                    assumed_by=ServicePrincipal(service="lambda.amazonaws.com")
                )
                for policy in lambda_config["policies"]:
                    lambda_role.add_to_policy(policy)
                lambda_role.add_managed_policy(
                    ManagedPolicy.from_aws_managed_policy_name("service-role/AWSLambdaBasicExecutionRole"))
                lambda_args = {
                    "code": Code.from_asset(root),
                    "handler": "__init__.handle",
                    "runtime": Runtime.PYTHON_3_8,
                    "layers": layers,
                    "function_name": name,
                    "environment": lambda_config["variables"],
                    "role": lambda_role,
                    "timeout": Duration.seconds(lambda_config["timeout"]),
                    "memory_size": lambda_config["memory"],
                }
                # Optional reserved concurrency (crons/SQS handler run as
                # singletons).
                if "concurrent_executions" in lambda_config:
                    lambda_args["reserved_concurrent_executions"] = lambda_config["concurrent_executions"]
                self.lambdas[name] = Function(self, name, **lambda_args)
        self.lambdas["sqs_handlers-post_anime"].add_event_source(SqsEventSource(self.post_anime_queue))
        # Nightly triggers (times per Schedule.cron, 02:10 and 04:10).
        Rule(
            self,
            "titles_updater",
            schedule=Schedule.cron(hour="2", minute="10"),
            targets=[LambdaFunction(self.lambdas["crons-titles_updater"])]
        )
        Rule(
            self,
            "episodes_updater",
            schedule=Schedule.cron(hour="4", minute="10"),
            targets=[LambdaFunction(self.lambdas["crons-episodes_updater"])]
        )

    def _create_gateway(self):
        """Create the HTTP API with a Cognito JWT authorizer, lambda-backed
        routes, a MyAnimeList HTTP proxy route, and a throttled live stage
        mapped to the custom domain.
        """
        cert = Certificate(
            self,
            "certificate",
            domain_name=self.domain_name,
            validation_method=ValidationMethod.DNS
        )
        domain_name = DomainName(
            self,
            "domain",
            domain_name=self.domain_name,
            certificate=cert,
            security_policy=SecurityPolicy.TLS_1_2
        )
        http_api = HttpApi(
            self,
            "anime_gateway",
            create_default_stage=False,
            api_name="anime",
            cors_preflight=CorsPreflightOptions(
                allow_methods=[HttpMethod.GET, HttpMethod.POST],
                allow_origins=["https://moshan.tv", "https://beta.moshan.tv"],
                allow_headers=["authorization", "content-type", "x-mal-client-id"]
            )
        )
        # JWT authorizer backed by the Cognito user pool client below.
        authorizer = CfnAuthorizer(
            self,
            "cognito",
            api_id=http_api.http_api_id,
            authorizer_type="JWT",
            identity_source=["$request.header.Authorization"],
            name="cognito",
            jwt_configuration=CfnAuthorizer.JWTConfigurationProperty(
                audience=["68v5rahd0sdvrmf7fgbq2o1a9u"],
                issuer="https://cognito-idp.eu-west-1.amazonaws.com/eu-west-1_sJ3Y4kSv6"
            )
        )
        # route-id -> method/path/backing lambda.
        routes = {
            "get_anime": {
                "method": "GET",
                "route": "/anime",
                "target_lambda": self.lambdas["api-anime"]
            },
            "post_anime": {
                "method": "POST",
                "route": "/anime",
                "target_lambda": self.lambdas["api-anime"]
            },
            "get_anime_by_id": {
                "method": "GET",
                "route": "/anime/{id}",
                "target_lambda": self.lambdas["api-anime_by_id"]
            },
            "get_anime_episodes": {
                "method": "GET",
                "route": "/anime/{id}/episodes",
                "target_lambda": self.lambdas["api-anime_episodes"]
            },
            "post_anime_episode": {
                "method": "POST",
                "route": "/anime/{id}/episodes",
                "target_lambda": self.lambdas["api-anime_episodes"]
            },
            "get_anime_episode": {
                "method": "GET",
                "route": "/anime/{id}/episodes/{episode_id}",
                "target_lambda": self.lambdas["api-anime_episode"]
            },
        }
        for r in routes:
            integration = HttpIntegration(
                self,
                f"{r}_integration",
                http_api=http_api,
                integration_type=HttpIntegrationType.LAMBDA_PROXY,
                integration_uri=routes[r]["target_lambda"].function_arn,
                method=getattr(HttpMethod, routes[r]["method"]),
                payload_format_version=PayloadFormatVersion.VERSION_2_0,
            )
            CfnRoute(
                self,
                r,
                api_id=http_api.http_api_id,
                route_key=f"{routes[r]['method']} {routes[r]['route']}",
                authorization_type="JWT",
                authorizer_id=authorizer.ref,
                target="integrations/" + integration.integration_id
            )
            # Allow API Gateway to invoke the backing lambda.
            routes[r]["target_lambda"].add_permission(
                f"{r}_apigateway_invoke",
                principal=ServicePrincipal("apigateway.amazonaws.com"),
                source_arn=f"arn:aws:execute-api:{self.region}:{self.account}:{http_api.http_api_id}/*"
            )
        # Authenticated HTTP proxy to the MyAnimeList v2 API.
        mal_proxy_integration = HttpIntegration(
            self,
            "mal_proxy_integration",
            http_api=http_api,
            integration_type=HttpIntegrationType.HTTP_PROXY,
            integration_uri="https://api.myanimelist.net/v2/{proxy}",
            method=HttpMethod.ANY,
            payload_format_version=PayloadFormatVersion.VERSION_1_0,
        )
        CfnRoute(
            self,
            "mal_proxy_route",
            api_id=http_api.http_api_id,
            route_key="GET /mal_proxy/{proxy+}",
            authorization_type="JWT",
            authorizer_id=authorizer.ref,
            target="integrations/" + mal_proxy_integration.integration_id,
        )
        # Live stage with basic throttling.
        stage = CfnStage(
            self,
            "live",
            api_id=http_api.http_api_id,
            auto_deploy=True,
            default_route_settings=CfnStage.RouteSettingsProperty(
                throttling_burst_limit=10,
                throttling_rate_limit=5
            ),
            stage_name="live"
        )
        HttpApiMapping(
            self,
            "mapping",
            api=http_api,
            domain_name=domain_name,
            stage=stage
        )
def __init__(self, scope: core.Construct, construct_id: str, db_context: str, **kwargs) -> None:
    """Create a DynamoDB table configured from the CDK context entry *db_context*.

    Reads a dict from ``self.node.try_get_context(db_context)`` with keys
    such as ``db_table``, ``db_table_pk``/``db_table_pk_type``,
    ``db_table_sk``/``db_table_sk_type``, capacity/billing settings, and
    flags for autoscaling, a reverse GSI and an LSI. The created table is
    exposed as ``self.table``.

    Bug fixes versus the previous version: ``read_capacity``/``write_capacity``
    (on the table and the reverse GSI) and the GSI autoscaling calls are only
    applied when billing is PROVISIONED — the CDK DynamoDB construct rejects
    them under PAY_PER_REQUEST.

    :param scope: parent construct.
    :param construct_id: construct id for this stack.
    :param db_context: name of the CDK context key holding the table config.
    """
    super().__init__(scope, construct_id, **kwargs)

    # Table configuration comes from the CDK context (cdk.json / --context).
    db = dict(self.node.try_get_context(db_context))

    # Shorten the repeated context lookups into plain locals.
    provisioned = db["db_billing_mode"] == "provisioned"
    billing_mode = BillingMode.PROVISIONED if provisioned else BillingMode.PAY_PER_REQUEST
    pk = db["db_table_pk"]
    pk_type = AttributeType.STRING if db[
        "db_table_pk_type"] == "string" else AttributeType.NUMBER
    # Empty string in the context means "no sort key".
    sk = None if db["db_table_sk"] == "" else db["db_table_sk"]
    sk_type = AttributeType.STRING if db[
        "db_table_sk_type"] == "string" else AttributeType.NUMBER
    gsi_projection_type = ProjectionType.ALL if db[
        "db_gsi_projection"] == "all" else ProjectionType.KEYS_ONLY
    lsi_projection_type = ProjectionType.ALL if db[
        "db_lsi_projection"] == "all" else ProjectionType.KEYS_ONLY

    # Build the Table kwargs once instead of duplicating the whole call for
    # the with/without-sort-key cases.
    table_kwargs = {
        "table_name": db["db_table"],
        "partition_key": Attribute(name=pk, type=pk_type),
        "encryption": _ddb.TableEncryption.AWS_MANAGED,
        "point_in_time_recovery": True,
        "removal_policy": core.RemovalPolicy.DESTROY,
        "billing_mode": billing_mode,
    }
    if sk:
        table_kwargs["sort_key"] = Attribute(name=sk, type=sk_type)
    if provisioned:
        # Capacities are only valid with PROVISIONED billing; passing them
        # with PAY_PER_REQUEST makes the CDK construct raise at synth time.
        table_kwargs["read_capacity"] = db["db_min_read_capacity"]
        table_kwargs["write_capacity"] = db["db_min_write_capacity"]
    table = Table(self, db["db_table"], **table_kwargs)

    # Add read/write autoscaling at the configured target utilization.
    if provisioned and db["db_enable_autoscaling"]:
        read_scaling = table.auto_scale_read_capacity(
            min_capacity=db["db_min_read_capacity"],
            max_capacity=db["db_max_read_capacity"],
        )
        read_scaling.scale_on_utilization(
            target_utilization_percent=db["db_target_utilization"],
        )
        write_scaling = table.auto_scale_write_capacity(
            min_capacity=db["db_min_write_capacity"],
            max_capacity=db["db_max_write_capacity"],
        )
        write_scaling.scale_on_utilization(
            target_utilization_percent=db["db_target_utilization"],
        )

    # Optional reverse-lookup GSI (sk as partition key, pk as sort key),
    # projecting keys-only or all per the context.
    if db["db_reverse_index"] and sk:
        gsi_kwargs = {
            "partition_key": Attribute(name=sk, type=sk_type),
            "sort_key": Attribute(name=pk, type=pk_type),
            "index_name": 'reverseIndex',
            "projection_type": gsi_projection_type,
        }
        if provisioned:
            gsi_kwargs["read_capacity"] = db["db_min_read_capacity"]
            gsi_kwargs["write_capacity"] = db["db_min_write_capacity"]
        table.add_global_secondary_index(**gsi_kwargs)
        # GSI autoscaling, like the table's, only applies to provisioned
        # billing; kept consistent with the db_enable_autoscaling flag.
        if provisioned and db["db_enable_autoscaling"]:
            table.auto_scale_global_secondary_index_read_capacity(
                index_name='reverseIndex',
                min_capacity=db["db_min_read_capacity"],
                max_capacity=db["db_max_read_capacity"],
            )
            table.auto_scale_global_secondary_index_write_capacity(
                index_name='reverseIndex',
                min_capacity=db["db_min_write_capacity"],
                max_capacity=db["db_max_write_capacity"],
            )
    else:
        print("No Reverse indexes created")

    # Optional LSI on a fixed "LSISK" string attribute.
    if db["db_add_lsi"]:
        table.add_local_secondary_index(
            index_name='LSI1',
            projection_type=lsi_projection_type,
            sort_key=Attribute(name='LSISK', type=AttributeType.STRING),
        )
    self.table = table