def _generate_transform(self, resource_names: List[ResourceName]) -> DBRes:
    """Create or update one Transform per resource name on the target connector.

    For each resource, resolves the default source and sink table versions,
    builds the transform SQL and a Flink execution YAML, and upserts the
    Transform row via the DAO.  Aborts with an api_error on the FIRST resource
    whose source or sink default version is missing (earlier upserts are kept).

    :param resource_names: resources to generate transforms for
    :return: DBRes whose msg reports updated/inserted counts, or an api_error
    """
    updated = inserted = 0
    connector = self.target
    for resource_name in resource_names:
        # Target (database, table) the transform writes into.
        t_database, t_table = connector.get_transform_target_full_name(resource_name=resource_name, connector=connector)
        name = connector.get_transform_name_format(resource_name=resource_name)
        source_version = self.get_source_default_version(resource_name)
        if source_version is None:
            return DBRes.api_error(msg="Not found resource source table {}".format(resource_name.full_name))
        sink_version = self.get_sink_default_version(t_database, t_table)
        if sink_version is None:
            return DBRes.api_error(msg="Not found resource sink table {}".format(resource_name.full_name))
        # Comma-separated dependency list: source first, then sink.
        require = self.get_source_name(source_version) + ',' + self.get_source_name(sink_version)
        execution = dict(planner='blink', type=self.get_flink_execution_type(), parallelism=connector.system_execution_parallelism)
        execution['restart-strategy'] = connector.system_execution_restart_strategy
        transform = Transform(name=name, sql=self.build_sql(sink_version, source_version, connector), require=require, connector_id=connector.id, yaml=dump_yaml(dict(execution=execution)))
        # NOTE(review): upsert_transform presumably returns (obj, inserted_flag);
        # `i` truthy means a new row was inserted — confirm against the DAO.
        transform, i = self.dao.upsert_transform(transform)
        inserted += i
        updated += not i
    msg = 'update: {}\ninserted: {}'.format(updated, inserted)
    return DBRes(msg=msg)
async def post(self, name: str):
    """Force-terminate the terminal called *name*; 404 when it is unknown."""
    manager = self.terminal_manager
    if name not in manager.terminals:
        raise tornado.web.HTTPError(404, "Terminal not found: %r" % name)
    await manager.terminate(name, force=True)
    self.write_res(DBRes())
def get_require_name(cls, *args, session: Session, **kwargs) -> DBRes:
    """Collect every name a transform may declare as a requirement.

    Returns active hive connection names, active resource-version full names,
    default-version template names, and default-template resource names,
    concatenated in that order.
    """
    active = and_(Connection.is_active == true(), ResourceName.is_active == true())

    # Full names of every active resource version.
    version_query = session.query(ResourceVersion.full_name).join(
        ResourceVersion.connection).join(ResourceVersion.resource_name)
    version_data = [row[0] for row in version_query.filter(active).all()]

    # Template full names of the default version of each active resource.
    default_query = session.query(ResourceVersion).join(
        ResourceVersion.connection).join(ResourceVersion.resource_name)
    default_version_data = [
        version.template.full_name
        for version in default_query.filter(
            and_(Connection.is_active == true(), ResourceName.is_active == true(),
                 ResourceVersion.is_default == true())).all()
    ]

    # Resource full names owning a default template.
    template_query = session.query(ResourceTemplate).join(
        ResourceTemplate.connection).join(ResourceTemplate.resource_name)
    resource_data = [
        template.resource_name.full_name
        for template in template_query.filter(
            and_(Connection.is_active == true(), ResourceName.is_active == true(),
                 ResourceTemplate.is_default == true())).all()
    ]

    # Names of active hive connections.
    hive_data = [
        row[0] for row in session.query(Connection.name).filter(
            Connection.is_active == true(), Connection.type == 'hive').all()
    ]
    return DBRes(data=hive_data + version_data + default_version_data + resource_data)
def clean(cls, model: str, pk: int, *args, session: Session, base: Type[DBT], **kwargs) -> DBRes:
    """Deep-clean the object identified by (base, pk) and its dependents.

    Connector: snapshot it, delete it (committing so the FK rows go away),
    clean both of its connections, then re-add a fresh Connector from the
    snapshot.  Connection: refuse when any connector still references it,
    listing those connectors in the error.  Anything else: delegate to _clean.

    :return: DBRes() on success, or api_error naming blocking connectors
    """
    obj = session.query(base).get(pk)
    if isinstance(obj, Connector):
        # Snapshot the row before deleting so it can be recreated afterwards.
        # NOTE(review): assumes as_dict() yields kwargs Connector() accepts —
        # confirm it does not carry stale fields (e.g. the old primary key).
        back = obj.as_dict()
        source, target = obj.source, obj.target
        session.delete(obj)
        # Commit now so the connector row is gone before its connections are
        # cleaned (order matters for FK constraints).
        session.commit()
        cls._clean(source, session, Connection)
        cls._clean(target, session, Connection)
        session.add(Connector(**back))
    else:
        if isinstance(obj, Connection):
            # Connectors still using this connection as source/target block
            # the clean; report them instead of cascading.
            s_names = [
                x[0] for x in session.query(Connector.name).join(
                    Connector.source).filter(Connection.id == pk).all()
            ]
            t_names = [
                x[0] for x in session.query(Connector.name).join(
                    Connector.target).filter(Connection.id == pk).all()
            ]
            msg = "please clean connector source: {} target: {}".format(
                ','.join(s_names), ','.join(t_names))
            if s_names or t_names:
                return DBRes.api_error(msg)
        cls._clean(obj, session, base)
    return DBRes()
def post(self, mode: str, pk: str):
    """Run a transform job: 'debug' spawns a terminal, anything else is delegated."""
    if mode != 'debug':
        # Accept either a numeric pk or a transform name; normalize to a pk string.
        if not pk.isdigit():
            pk = str(DBDao.name2pk(model='transform', name=pk))
        return self.write_res(handle_job(mode, pk, self.json_body))
    terminal_name = run_debug_transform(self.json_body, self.terminal_manager)
    self.write_res(DBRes({"url": '/terminal/{}'.format(terminal_name)}))
def delete(cls, model: str, pk: int, *args, session: Session, base: Type[Base], **kwargs) -> DBRes:
    """Hard-delete the row of `base` with primary key `pk`.

    Only allowed when FSQLFLY_SAVE_MODE_DISABLE is set; otherwise deletion is
    refused with a server error.

    :return: DBRes carrying the deleted id, not_found for a missing pk,
             or sever_error when deletion is disabled
    """
    if not settings.FSQLFLY_SAVE_MODE_DISABLE:
        return DBRes.sever_error(
            'Not Support Delete when FSQLFLY_SAVE_MODE_DISABLE not set')
    obj = session.query(base).get(pk)
    # Guard against a missing row (mirrors cls.update) — session.delete(None)
    # would raise instead of reporting a clean 404.
    if obj is None:
        return DBRes.not_found()
    session.delete(obj)
    return DBRes(data=obj.id)
def get(cls, model: str, *args, session: Session, base: Type[Base], filter_: Optional[dict] = None, **kwargs) -> DBRes:
    """List all rows of `base`, optionally narrowed by an AND filter dict."""
    query = session.query(base)
    if filter_:
        query = cls.build_and(filter_, base, query)
    rows = query.all()
    return DBRes(data=[row.as_dict() for row in rows])
def post(self):
    """Store the first uploaded file under its category dir; return its URL path.

    Fix: `mkstemp` returns an OS-level file descriptor the original code
    discarded (leaking one fd per upload) before re-opening the path by name.
    Wrap the returned fd with os.fdopen so it is written through and closed.
    """
    files = self.request.files
    key = list(files.keys())[0]
    if key not in support_upload:
        return self.write_res(DBRes.api_error())
    upload_file = files[key][0]
    # mkstemp hands back an already-open fd; use it directly instead of
    # leaking it and re-opening the file by path.
    fd, tem_f = mkstemp(suffix=upload_file.filename, dir=upload_dirs[key])
    with os.fdopen(fd, 'wb') as out:
        out.write(upload_file.body)
    real_path = '/upload/' + key + '/' + os.path.basename(tem_f)
    return self.write_res(DBRes(data={"realPath": real_path}))
def create_hive_table(self, resource_names: List[ResourceName]) -> DBRes:
    """Create hive sink tables for each resource on the target connection.

    Fixes: the sink default version is now None-checked (mirrors
    _generate_transform, which returns api_error instead of crashing with
    AttributeError), and the SQLAlchemy engine is disposed when done so its
    connection pool is not leaked.

    :return: DBRes() on success, api_error when a sink table is missing
    """
    connector = self.target
    engine = create_engine(connector.target.url)
    try:
        for resource_name in resource_names:
            t_database, t_table = connector.get_transform_target_full_name(resource_name=resource_name, connector=connector)
            version = self.get_sink_default_version(t_database, t_table)
            if version is None:
                return DBRes.api_error(msg="Not found resource sink table {}".format(resource_name.full_name))
            schemas = version.generate_version_schema()
            for sql in self.build_hive_create_sql(t_database, t_table, schemas):
                print(sql)  # NOTE(review): debug trace kept from original; consider logging
                engine.execute(sql)
        return DBRes()
    finally:
        engine.dispose()
def update(cls, model: str, pk: int, obj: dict, *args, session: Session, base: Type[Base], **kwargs) -> DBRes:
    """Patch the row of `base` with pk from the payload dict.

    Skips id/timestamp fields and null foreign keys; refuses the update when
    the row is locked and the payload keeps is_locked set to True.
    """
    assert session is not None
    record = session.query(base).filter(base.id == pk).first()
    if record is None:
        return DBRes.not_found()
    # NOTE(review): rejects only when the payload explicitly sends
    # is_locked=True on an already-locked row — confirm this is intended.
    if record.is_locked and obj.get('is_locked') is True:
        return DBRes.resource_locked()
    protected = ('id', 'create_at', 'update_at')
    for field, value in obj.items():
        if field in protected or cls.is_null_foreign_key(field, value):
            continue
        setattr(record, field, value)
    return DBRes(data=record.as_dict())
def _handle_job(mode: str, pk: str, json_body: dict, session: Session) -> DBRes:
    """Dispatch a job-control action to JobControlHandle.handle_<mode>.

    A digit pk is resolved to a Transform row; anything else is passed
    through verbatim.  The handler's string result maps to HTTP 500 when it
    starts with FAIL_HEADER, 200 otherwise.
    """
    handle_name = 'handle_' + mode
    if mode not in JobControlHandle or handle_name not in JobControlHandle:
        return DBRes.api_error(msg=' {} not support!!!'.format(mode))
    if pk.isdigit():
        transform = DBDao.get_transform(pk, session=session)
        if transform is None:
            return DBRes.api_error(msg='job id {} not found!!!'.format(pk))
    else:
        transform = pk
    run_res = getattr(JobControlHandle, handle_name)(transform, **json_body)
    status = 500 if run_res.startswith(FAIL_HEADER) else 200
    return DBRes(code=status, msg=run_res)
def get(self):
    """List real Flink jobs, enriched with transform info or a TEMPORARY tag."""
    job_infos = get_latest_transform()
    all_jobs = []
    for job in JobControlHandle.job_status:
        if not JobControlHandle.is_real_job(job.name):
            continue
        entry = dict(**job._asdict())
        if job.name in job_infos:
            # Known transform: merge its row and expose the transform id.
            info = job_infos[job.name]
            entry.update(info)
            entry['t_id'] = info['id']
        elif job.name.startswith(TEMP_TERMINAL_HEAD):
            # Ad-hoc debug job: label it and link back to its terminal.
            entry['name'] = 'TEMPORARY'
            entry['url'] = '/terminal/{}'.format(job.name[len(TEMP_TERMINAL_HEAD):])
        entry['id'] = job.job_id
        entry['detail_url'] = FSQLFLY_FINK_HOST + '/#/job/{}/overview'.format(job.job_id)
        all_jobs.append(entry)
    return self.write_res(DBRes(data=all_jobs))
def get(self):
    """List active terminals as {name, id} entries (both fields are the name)."""
    payload = [{'name': term, 'id': term} for term in self.terminal_manager.terminals]
    self.write_res(DBRes(data=payload))
def _run(self, resource_names: List[ResourceName]):
    """Return the formatted transform name for each resource on the target."""
    names = [
        self.target.get_transform_name_format(resource_name=resource_name)
        for resource_name in resource_names
    ]
    return DBRes(data=names)
def bulk_insert(cls, data: list, *args, session: Session, **kwargs):
    """Queue every object in `data` on the session; report how many were queued."""
    count = len(data)
    session.add_all(data)
    return DBRes(data=count)
def create(cls, model: str, obj: dict, *args, session: Session, base: Type[DBT], **kwargs) -> DBRes:
    """Build a `base` row from the payload, commit it, and return its dict form."""
    record = base(**obj)
    session.add(record)
    session.commit()
    return DBRes(data=record.as_dict())