def _generate_transform(self, resource_names: List[ResourceName]) -> DBRes:
    updated = inserted = 0
    connector = self.target
    for resource_name in resource_names:
        t_database, t_table = connector.get_transform_target_full_name(
            resource_name=resource_name, connector=connector)
        name = connector.get_transform_name_format(resource_name=resource_name)
        source_version = self.get_source_default_version(resource_name)
        if source_version is None:
            return DBRes.api_error(
                msg="Not found resource source table {}".format(resource_name.full_name))
        sink_version = self.get_sink_default_version(t_database, t_table)
        if sink_version is None:
            return DBRes.api_error(
                msg="Not found resource sink table {}".format(resource_name.full_name))
        require = self.get_source_name(source_version) + ',' + self.get_source_name(sink_version)
        execution = dict(planner='blink',
                         type=self.get_flink_execution_type(),
                         parallelism=connector.system_execution_parallelism)
        execution['restart-strategy'] = connector.system_execution_restart_strategy
        transform = Transform(name=name,
                              sql=self.build_sql(sink_version, source_version, connector),
                              require=require,
                              connector_id=connector.id,
                              yaml=dump_yaml(dict(execution=execution)))
        transform, i = self.dao.upsert_transform(transform)
        inserted += i
        updated += not i
    msg = 'updated: {}\ninserted: {}'.format(updated, inserted)
    return DBRes(msg=msg)
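# For reference, the `yaml` field assembled above serializes to roughly the shape
# below; the concrete values are a hedged example taken from the connector's own
# settings (only the 'blink' planner is fixed in the code):
#
#     execution:
#       planner: blink
#       type: streaming
#       parallelism: 4
#       restart-strategy: fixed-delay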
def clean(cls, model: str, pk: int, *args, session: Session, base: Type[DBT], **kwargs) -> DBRes:
    obj = session.query(base).get(pk)
    if isinstance(obj, Connector):
        # Snapshot the connector, clean its source/target connections, then
        # recreate it so the connector row itself survives the clean.
        back = obj.as_dict()
        source, target = obj.source, obj.target
        session.delete(obj)
        session.commit()
        cls._clean(source, session, Connection)
        cls._clean(target, session, Connection)
        session.add(Connector(**back))
    else:
        if isinstance(obj, Connection):
            # Refuse to clean a connection that is still referenced by connectors.
            s_names = [x[0] for x in session.query(Connector.name).join(
                Connector.source).filter(Connection.id == pk).all()]
            t_names = [x[0] for x in session.query(Connector.name).join(
                Connector.target).filter(Connection.id == pk).all()]
            if s_names or t_names:
                msg = "please clean connector source: {} target: {}".format(
                    ','.join(s_names), ','.join(t_names))
                return DBRes.api_error(msg)
        cls._clean(obj, session, base)
    return DBRes()
def delete(cls, model: str, pk: int, *args, session: Session, base: Type[Base], **kwargs) -> DBRes:
    if settings.FSQLFLY_SAVE_MODE_DISABLE:
        obj = session.query(base).get(pk)
        session.delete(obj)
        return DBRes(data=obj.id)
    else:
        return DBRes.sever_error(
            'Not Support Delete when FSQLFLY_SAVE_MODE_DISABLE not set')
def post(self):
    files = self.request.files
    key = list(files.keys())[0]
    if key not in support_upload:
        return self.write_res(DBRes.api_error())
    upload_file = files[key][0]
    _, tem_f = mkstemp(suffix=upload_file.filename, dir=upload_dirs[key])
    with open(tem_f, 'wb+') as out:
        out.write(upload_file.body)
    real_path = '/upload/' + key + '/' + os.path.basename(tem_f)
    return self.write_res(DBRes(data={"realPath": real_path}))
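# Hedged usage example for the upload handler above; the route is illustrative
# (it depends on the application's URL spec), and the form-field name must be
# one of the keys in `support_upload`:
#
#     curl -F 'resource=@./data.csv' http://localhost:8080/api/upload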
def run(cls, model: str, mode: str, pk: Union[str, int]) -> DBRes:
    dao = Dao()
    obj = dao.get_by_name_or_id(model, pk)
    if obj:
        manager = ManagerFactory.get_manager(model, mode, obj, dao)
        if manager.is_support():
            return manager.run()
        else:
            msg = "Not support {}:{} in model {} by {} in ManagerHelper".format(
                obj.name, obj.id, model, mode)
            return DBRes.api_error(msg)
    else:
        return DBRes.api_error("Not found {} in model {}".format(pk, model))
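# A minimal usage sketch for the dispatcher above, assuming it lives on a
# `ManagerHelper` class (its error message suggests so) and that a transform
# with primary key 1 exists; the model/mode values are illustrative:
#
#     res = ManagerHelper.run(model='transform', mode='run', pk=1)
#     print(res.msg)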
def update(cls, model: str, pk: int, obj: dict, *args, session: Session, base: Type[Base], **kwargs) -> DBRes:
    assert session is not None
    first = session.query(base).filter(base.id == pk).first()
    if first is None:
        return DBRes.not_found()
    if first.is_locked and obj.get('is_locked') is True:
        return DBRes.resource_locked()
    for k, v in obj.items():
        if k not in ['id', 'create_at', 'update_at'] and not cls.is_null_foreign_key(k, v):
            setattr(first, k, v)
    return DBRes(data=first.as_dict())
def post(self):
    arg = self.json_body
    # NOTE: the original passed arg.get('type') as both the first and third
    # argument; the first is assumed to be the account name.
    if self.is_login(arg.get('name'), arg.get('password'), arg.get('type')):
        self.set_login_status()
        self.write_json(user)
    else:
        self.write_res(DBRes.login_error())
async def post(self, name: str):
    tm = self.terminal_manager
    if name in tm.terminals:
        await tm.terminate(name, force=True)
        self.write_res(DBRes())
    else:
        raise tornado.web.HTTPError(404, "Terminal not found: %r" % name)
def get_require_name(cls, *args, session: Session, **kwargs) -> DBRes:
    """Collect every name a transform may require: active hive connection
    names, active resource-version full names, the template full names of
    default versions, and the resource-name full names of default templates."""
    query = session.query(ResourceVersion.full_name).join(
        ResourceVersion.connection).join(ResourceVersion.resource_name)
    version_data = [x[0] for x in query.filter(
        and_(Connection.is_active == true(),
             ResourceName.is_active == true())).all()]
    v_query = session.query(ResourceVersion).join(
        ResourceVersion.connection).join(ResourceVersion.resource_name)
    default_version_data = [x.template.full_name for x in v_query.filter(
        and_(Connection.is_active == true(),
             ResourceName.is_active == true(),
             ResourceVersion.is_default == true())).all()]
    t_query = session.query(ResourceTemplate).join(
        ResourceTemplate.connection).join(ResourceTemplate.resource_name)
    resource_data = [x.resource_name.full_name for x in t_query.filter(
        and_(Connection.is_active == true(),
             ResourceName.is_active == true(),
             ResourceTemplate.is_default == true())).all()]
    hive_data = [x[0] for x in session.query(Connection.name).filter(
        Connection.is_active == true(), Connection.type == 'hive').all()]
    return DBRes(data=hive_data + version_data + default_version_data + resource_data)
def _handle_job(mode: str, pk: str, json_body: dict, session: Session) -> DBRes:
    handle_name = 'handle_' + mode
    if mode in JobControlHandle and handle_name in JobControlHandle:
        if pk.isdigit():
            transform = DBDao.get_transform(pk, session=session)
            if transform is None:
                return DBRes.api_error(msg='job id {} not found!!!'.format(pk))
        else:
            transform = pk
        run_res = getattr(JobControlHandle, handle_name)(transform, **json_body)
        return DBRes(code=500 if run_res.startswith(FAIL_HEADER) else 200, msg=run_res)
    else:
        return DBRes.api_error(msg='{} not support!!!'.format(mode))
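# Hedged example of driving the job handler above; `handle_job` is assumed to be
# the session-injecting wrapper around `_handle_job` (the web handler further
# down calls it by that name). Mode strings such as 'start' are illustrative and
# must have a matching `handle_<mode>` method on JobControlHandle:
#
#     res = handle_job('start', '42', {})
#     print(res.code, res.msg)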
def generate_transform(self) -> DBRes:
    connector = self.target
    need_tables = connector.need_tables
    source = connector.source
    if not source.resource_names:
        return DBRes.api_error("Not Found Any Resource Name in Connection {}".format(source.name))
    resource_names = [x for x in source.resource_names if x.db_name in need_tables]
    return self._run(resource_names)
def post(self, mode: str, pk: str):
    if mode == 'debug':
        term = run_debug_transform(self.json_body, self.terminal_manager)
        # The url dict is the payload, so pass it as data= (DBRes takes
        # code/msg/data keywords elsewhere in this codebase).
        self.write_res(DBRes(data={"url": '/terminal/{}'.format(term)}))
    else:
        if not pk.isdigit():
            pk = str(DBDao.name2pk(model='transform', name=pk))
        return self.write_res(handle_job(mode, pk, self.json_body))
def get(self, path: str):
    if not any(path.startswith(x) for x in support_upload):
        raise tornado.web.HTTPError(status_code=404)
    full_path = os.path.join(UPLOAD_ROOT_DIR, path)
    if not os.path.exists(full_path):
        return self.write_res(DBRes.api_error())
    mime = FileMagic.from_file(full_path)
    self.set_header('content-type', mime)
    with open(full_path, 'rb') as f:
        self.write(f.read())
    self.finish()
def get(cls, model: str, *args, session: Session, base: Type[Base],
        filter_: Optional[dict] = None, **kwargs) -> DBRes:
    query = session.query(base)
    if filter_:
        query = cls.build_and(filter_, base, query)
    return DBRes(data=[x.as_dict() for x in query.all()])
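# Hedged example of the `filter_` argument above, assuming this classmethod sits
# on DBDao like its siblings: a flat dict of column/value pairs that
# `cls.build_and` is assumed to combine with AND semantics (its definition lives
# elsewhere in the project):
#
#     DBDao.get(model='connection', filter_={'type': 'hive', 'is_active': True})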
def create_hive_table(self, resource_names: List[ResourceName]) -> DBRes:
    connector = self.target
    engine = create_engine(connector.target.url)
    for resource_name in resource_names:
        t_database, t_table = connector.get_transform_target_full_name(
            resource_name=resource_name, connector=connector)
        version = self.get_sink_default_version(t_database, t_table)
        schemas = version.generate_version_schema()
        for sql in self.build_hive_create_sql(t_database, t_table, schemas):
            print(sql)
            engine.execute(sql)
    return DBRes()
def _add_session(*args, **kwargs):
    session = kwargs['session'] if 'session' in kwargs else DBSession.get_session()
    new_kwargs = {k: v for k, v in kwargs.items() if k != 'session'}
    try:
        res = func(*args, session=session, **new_kwargs)
        session.commit()
        return res
    except Exception as error:
        session.rollback()
        if settings.FSQLFLY_DEBUG:
            raise error
        err = traceback.format_exc()
        return DBRes.sever_error(msg=f'meet {err}')
    finally:
        if 'session' not in kwargs:
            session.close()
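# For context, `_add_session` above reads as the inner wrapper of a decorator
# that injects a SQLAlchemy session and owns its lifecycle. A minimal
# self-contained sketch of that pattern; the names `with_session` and
# `session_factory` are hypothetical stand-ins, not the project's own:
import functools


def with_session(session_factory):
    def decorator(func):
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            session = kwargs.pop('session', None)
            owns = session is None              # did we open this session ourselves?
            if owns:
                session = session_factory()
            try:
                res = func(*args, session=session, **kwargs)
                session.commit()                # commit only if the call succeeded
                return res
            except Exception:
                session.rollback()              # never leave a dirty session behind
                raise
            finally:
                if owns:
                    session.close()             # close only sessions we opened
        return wrapper
    return decorator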
def get(self):
    job_infos = get_latest_transform()
    all_jobs = list()
    for job in JobControlHandle.job_status:
        if not JobControlHandle.is_real_job(job.name):
            continue
        base = dict(**job._asdict())
        if job.name in job_infos:
            base.update(job_infos[job.name])
            base['t_id'] = job_infos[job.name]['id']
        elif job.name.startswith(TEMP_TERMINAL_HEAD):
            base['name'] = 'TEMPORARY'
            base['url'] = '/terminal/{}'.format(job.name[len(TEMP_TERMINAL_HEAD):])
        base['id'] = job.job_id
        base['detail_url'] = FSQLFLY_FINK_HOST + '/#/job/{}/overview'.format(job.job_id)
        all_jobs.append(base)
    return self.write_res(DBRes(data=all_jobs))
def create(cls, model: str, obj: dict, *args, session: Session, base: Type[DBT], **kwargs) -> DBRes:
    db_obj = base(**obj)
    session.add(db_obj)
    session.commit()
    return DBRes(data=db_obj.as_dict())
def test_not_init_session(self):
    DBSession.init_engine(None)
    with self.assertRaises(AssertionError):
        self.assertEqual(DBDao.update(model='connection', pk=1, obj=dict()),
                         DBRes.not_found())
def bulk_insert(cls, data: list, *args, session: Session, **kwargs):
    session.add_all(data)
    return DBRes(data=len(data))
def _call_(*args, **kwargs):
    model = kwargs['model'] if 'model' in kwargs else args[1]
    if model not in SUPPORT_MODELS:
        return DBRes.api_error(msg=f'{model} not support')
    base = SUPPORT_MODELS[model]
    return func(*args, base=base, **kwargs)
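# Hedged sketch of the registry `_call_` dispatches on; the project's concrete
# mapping will differ, and these entries are illustrative only. `args[1]` is
# read when `model` is not passed by keyword because `args[0]` is expected to
# be the class (`cls`) of the decorated classmethod.
#
#     SUPPORT_MODELS = {
#         'connection': Connection,
#         'connector': Connector,
#         'transform': Transform,
#     }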
def get(self):
    tm = self.terminal_manager
    terms = [{'name': name, 'id': name} for name in tm.terminals]
    self.write_res(DBRes(data=terms))
def test_not_found(self):
    self.assertEqual(DBDao.update(model='connection', pk=1, obj=dict()),
                     DBRes.not_found())
def _run(self, resource_names: List[ResourceName]):
    res = []
    for resource_name in resource_names:
        name = self.target.get_transform_name_format(resource_name=resource_name)
        res.append(name)
    return DBRes(data=res)