def generate_version(self, connection: Connection, schema: SchemaEvent, name: ResourceName,
                     template: ResourceTemplate) -> List[ResourceVersion]:
    """Build one ResourceVersion per Canal mode from the parent's default version.

    The parent class produces a single base version; for every mode in
    ``self._mode`` that base is deep-copied, renamed after the mode, and given a
    mode-specific field schema before its config is serialized back to YAML.

    :param connection: connection the versions belong to
    :param schema: schema event driving field generation
    :param name: resource name owning the versions
    :param template: template whose full name prefixes each version
    :return: list of mode-specific ResourceVersion objects
    """
    base = super(CanalResourceGenerator, self).generate_version(
        connection, schema, name, template)[0]
    result = []
    for canal_mode in self._mode:
        mode_version = deepcopy(base)
        mode_version.name = canal_mode
        mode_version.full_name = get_full_name(template.full_name, canal_mode)
        # A fresh default config per mode, so mutations below don't leak across modes.
        mode_config = super(CanalResourceGenerator, self).generate_default_version_config(
            connection, schema, name, template)
        if canal_mode == CanalMode.update:
            mode_fields = self._generate_update_fields(schema, mode_config)
        elif canal_mode == CanalMode.upsert:
            mode_fields = self._generate_upsert_fields()
        else:
            # Modes other than update/upsert get no extra fields.
            mode_fields = []
        self._handle_time_field(mode_fields)
        mode_config.schema = [ResourceVersion.field2schema(f) for f in mode_fields]
        mode_version.config = dump_yaml(attr.asdict(mode_config))
        result.append(mode_version)
    return result
def _generate_transform(self, resource_names: List[ResourceName]) -> DBRes:
    """Upsert one Transform per resource name and report update/insert counts.

    For each resource name, resolves the sink target, looks up the default
    source and sink versions, builds the Flink execution config, and upserts a
    Transform row. Aborts with an API error on the first missing version.

    :param resource_names: resource names to generate transforms for
    :return: DBRes whose msg summarizes how many rows were updated vs inserted,
             or an api_error on a missing source/sink version
    """
    connector = self.target
    inserted = updated = 0
    for resource_name in resource_names:
        t_database, t_table = connector.get_transform_target_full_name(
            resource_name=resource_name, connector=connector)
        transform_name = connector.get_transform_name_format(resource_name=resource_name)
        source_version = self.get_source_default_version(resource_name)
        if source_version is None:
            return DBRes.api_error(
                msg="Not found resource source table {}".format(resource_name.full_name))
        sink_version = self.get_sink_default_version(t_database, t_table)
        if sink_version is None:
            # NOTE(review): message reports the *source* full_name for a missing
            # sink table — confirm this is intentional.
            return DBRes.api_error(
                msg="Not found resource sink table {}".format(resource_name.full_name))
        require = self.get_source_name(source_version) + ',' + self.get_source_name(sink_version)
        execution = dict(planner='blink',
                         type=self.get_flink_execution_type(),
                         parallelism=connector.system_execution_parallelism)
        execution['restart-strategy'] = connector.system_execution_restart_strategy
        transform = Transform(name=transform_name,
                              sql=self.build_sql(sink_version, source_version, connector),
                              require=require,
                              connector_id=connector.id,
                              yaml=dump_yaml(dict(execution=execution)))
        # upsert returns a truthy flag when the row was newly inserted.
        transform, was_inserted = self.dao.upsert_transform(transform)
        inserted += was_inserted
        updated += not was_inserted
    return DBRes(msg='update: {}\ninserted: {}'.format(updated, inserted))
def generate_schema_event(self, schema: SchemaContent, connection: Connection) -> SchemaEvent:
    """Convert a parsed SchemaContent into a persistable SchemaEvent row.

    Fields are serialized to YAML via ``attr.asdict`` so the event can be
    stored as a flat string column.

    :param schema: parsed schema content (name, fields, comment, ...)
    :param connection: connection the schema was read from
    :return: a new, unsaved SchemaEvent
    """
    serialized_fields = dump_yaml([attr.asdict(field) for field in schema.fields])
    # NOTE(review): both `info` and `comment` are populated from schema.comment —
    # confirm `info` should not carry different data.
    return SchemaEvent(
        name=schema.name,
        info=schema.comment,
        database=schema.database,
        connection_id=connection.id,
        comment=schema.comment,
        primary_key=schema.primary_key,
        fields=serialized_fields,
        partitionable=schema.partitionable,
    )
def generate_version(self, connection: Connection, schema: SchemaEvent, name: ResourceName,
                     template: ResourceTemplate) -> List[ResourceVersion]:
    """Create the single system-owned 'latest' version for a template.

    :param connection: connection the version belongs to
    :param schema: schema event to link (may be None — then no schema_version_id)
    :param name: resource name owning the version
    :param template: template whose full name prefixes the version
    :return: a one-element list holding the generated ResourceVersion
    """
    config = self.generate_default_version_config(connection, schema, name, template)
    latest = ResourceVersion.latest_name()
    version = ResourceVersion(
        name=latest,
        full_name=get_full_name(template.full_name, latest),
        is_system=True,
        is_latest=True,
        connection_id=connection.id,
        resource_name_id=name.id,
        template_id=template.id,
        schema_version_id=schema.id if schema else None,
        config=dump_yaml(attr.asdict(config)),
    )
    return [version]
def _create_config(require: str, config: Optional[str], args: dict) -> str:
    """Assemble a YAML execution config string.

    Renders the optional base config template with ``args``, then merges in the
    tables and catalogs resolved from the ``require`` spec plus all registered
    functions, and dumps the result back to YAML.

    :param require: comma-style requirement spec (may be empty/None)
    :param config: optional YAML config template text
    :param args: template arguments for rendering ``config``
    :return: the merged config serialized as a YAML string
    """
    require = require.strip() if require else ''
    tables, catalogs = [], []
    if require:
        tables.extend(DBDao.get_require_table(require))
        catalogs.extend(DBDao.get_require_catalog(require))
    # NOTE(review): yaml.FullLoader can construct arbitrary Python objects —
    # make sure `config` never comes from untrusted input.
    base_config = yaml.load(handle_template(config, args), yaml.FullLoader) if config else dict()
    if base_config is None:
        # An empty/whitespace-only template parses to None.
        base_config = dict()
    existing_tables = base_config.get('tables')
    if existing_tables:
        existing_tables.extend(tables)
    else:
        # Key absent or falsy (e.g. explicit null) — replace it outright.
        base_config['tables'] = tables
    base_config['functions'] = DBDao.get_require_functions()
    existing_catalogs = base_config.get('catalogs')
    if existing_catalogs:
        existing_catalogs.extend(catalogs)
    else:
        base_config['catalogs'] = catalogs
    return dump_yaml(base_config)
def update_version(self, version: ResourceVersion):
    """Refresh a version's serialized cache, persist it, then update the status cache.

    :param version: the version to re-cache and save
    """
    cache_payload = version.generate_version_cache()
    version.cache = dump_yaml(cache_payload)
    self.dao.save(version)
    # Saved state changed — refresh the shared status cache afterwards.
    self.status.update_cache()