def get(self, model: str):
    if model == 'require':
        return self.write_res(DBDao.get_require_name())

    def b2v(x):
        # Query arguments arrive as lists of bytes; take the first value
        # and coerce pure digit strings to int so numeric columns match.
        v = x[0].decode()
        if v.isdigit():
            return int(v)
        return v

    # Build the filter from query arguments, skipping private keys
    # (those starting with '_').
    filter_ = {k: b2v(v) for k, v in self.request.arguments.items()
               if not k.startswith('_')}
    self.write_res(DBDao.get(model, filter_=filter_))
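# A minimal, self-contained sketch of the coercion above, assuming
# Tornado-style arguments (name -> list of bytes). The argument names are
# illustrative, not real routes or columns.
def _demo_filter(arguments: dict) -> dict:
    def b2v(x):
        v = x[0].decode()
        return int(v) if v.isdigit() else v
    return {k: b2v(v) for k, v in arguments.items() if not k.startswith('_')}

assert _demo_filter({'id': [b'42'], '_page': [b'1']}) == {'id': 42}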
from typing import Tuple

def run_transform(transform: Transform, **kwargs) -> Tuple[bool, str]:
    # Render the environment YAML and the SQL into temp files, then feed
    # them to the Flink SQL client in embedded mode.
    yaml_fd, yaml_f = tempfile.mkstemp(suffix='.yaml')
    sql_fd, sql_f = tempfile.mkstemp(suffix='.sql')
    os.close(yaml_fd)
    os.close(sql_fd)
    yaml_conf = _create_config(require=transform.require, config=transform.yaml, args=kwargs)
    sql = handle_template(transform.sql, kwargs)
    with open(yaml_f, 'w') as f:
        print(yaml_conf, file=f)
    with open(sql_f, 'w') as f:
        print(sql, file=f)
        # Quit the SQL client once all statements have been submitted.
        print('q\nexit;', file=f)
    run_commands = [FSQLFLY_FLINK_BIN, 'embedded',
                    '-s', get_job_header(transform, **kwargs),
                    '--environment', yaml_f,
                    *DBDao.get_require_jar(),
                    '<', sql_f]
    print(' '.join(run_commands))
    try:
        # The '<' redirection requires shell=True, so join the argv into one string.
        out = subprocess.check_output(' '.join(run_commands), shell=True, stderr=subprocess.PIPE)
    except subprocess.CalledProcessError as error:
        return False, "sql:\n{}\n\noutput:\n{}\n\nerror:\n{}\n\nyaml:\n{}".format(
            transform.sql, error.stdout.decode(), error.stderr.decode(), yaml_conf)
    except Exception as e:
        print(e)
        return False, str(e)
    out_w = _clean(out.decode())
    os.remove(yaml_f)
    os.remove(sql_f)
    with open('out.shell.txt', 'w') as f:
        print(out_w, file=f)
    print(out_w)
    return True, out_w
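# Hedged usage sketch: how a caller might drive run_transform by hand. The
# primary key and the 'day' template argument are illustrative assumptions;
# nothing here runs at import time.
def _demo_run_transform():
    transform = DBDao.get_transform('1')
    ok, output = run_transform(transform, day='2020-01-01')
    if not ok:
        print(output)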
def post(self, mode: str, pk: str):
    if mode == 'debug':
        term = run_debug_transform(self.json_body, self.terminal_manager)
        self.write_res(DBRes({"url": '/terminal/{}'.format(term)}))
    else:
        if not pk.isdigit():
            pk = str(DBDao.name2pk(model='transform', name=pk))
        return self.write_res(handle_job(mode, pk, self.json_body))
def _create_config(require: str, config: Optional[str], args: dict) -> str:
    tables = []
    catalogs = []
    require = require.strip() if require else ''
    if require:
        tables.extend(DBDao.get_require_table(require))
        catalogs.extend(DBDao.get_require_catalog(require))
    # Render the user YAML through the template engine, then merge in the
    # tables, catalogs and functions pulled in by the requirements.
    base_config = yaml.load(handle_template(config, args), yaml.FullLoader) if config else dict()
    if base_config is None:
        base_config = dict()
    if base_config.get('tables'):
        base_config['tables'].extend(tables)
    else:
        base_config['tables'] = tables
    base_config['functions'] = DBDao.get_require_functions()
    if base_config.get('catalogs'):
        base_config['catalogs'].extend(catalogs)
    else:
        base_config['catalogs'] = catalogs
    return dump_yaml(base_config)
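# A pure sketch of the merge rules above (no DB access): existing 'tables'
# entries are kept and required ones appended; a missing or empty key is
# replaced outright. The table names are illustrative.
def _demo_merge(base_config: dict, tables: list) -> dict:
    if base_config.get('tables'):
        base_config['tables'].extend(tables)
    else:
        base_config['tables'] = tables
    return base_config

assert _demo_merge({'tables': [{'name': 'a'}]}, [{'name': 'b'}]) == {'tables': [{'name': 'a'}, {'name': 'b'}]}
assert _demo_merge({}, [{'name': 'b'}]) == {'tables': [{'name': 'b'}]}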
def _handle_job(mode: str, pk: str, json_body: dict, session: Session) -> DBRes:
    handle_name = 'handle_' + mode
    if mode in JobControlHandle and handle_name in JobControlHandle:
        if pk.isdigit():
            transform = DBDao.get_transform(pk, session=session)
            if transform is None:
                return DBRes.api_error(msg='job id {} not found!!!'.format(pk))
        else:
            transform = pk
        run_res = getattr(JobControlHandle, handle_name)(transform, **json_body)
        return DBRes(code=500 if run_res.startswith(FAIL_HEADER) else 200, msg=run_res)
    else:
        return DBRes.api_error(msg='{} not supported!!!'.format(mode))
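# Self-contained sketch of the 'handle_' + mode dispatch used above; the
# class below is a stand-in for JobControlHandle, purely illustrative.
class _DemoHandle:
    def handle_restart(self, transform, **data):
        return 'restart {}'.format(transform)

assert getattr(_DemoHandle(), 'handle_' + 'restart')('42') == 'restart 42'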
def run_debug_transform(data: dict, manager: NamedTermManager) -> str:
    yaml_fd, yaml_f = tempfile.mkstemp(suffix='.yaml')
    os.close(yaml_fd)
    yaml_conf = _create_config(data.get('require', ''), data.get('yaml', ''), dict())
    with open(yaml_f, 'w') as f:
        print(yaml_conf, file=f)
    real_sql = handle_template(data.get('sql', ''), dict())
    name = manager._next_available_name()
    run_commands = [FSQLFLY_FLINK_BIN, 'embedded',
                    '-s', '{}{}'.format(settings.TEMP_TERMINAL_HEAD, str(name)),
                    '--environment', yaml_f,
                    *DBDao.get_require_jar()]
    logger.debug('running commands is: {}'.format(' '.join(run_commands)))
    # Launch the SQL client inside a new web terminal and type the SQL into it.
    term = manager.new_terminal(shell_command=run_commands)
    logger.debug('sql: {}'.format(real_sql))
    term.ptyproc.write(real_sql)
    term.term_name = name
    setattr(term, settings.TERMINAL_OPEN_NAME, True)
    term.run_command = ' '.join(run_commands)
    manager.terminals[name] = term
    return name
def run(self):
    self.logger.debug('Start Running Flink Job Daemon {}'.format(str(datetime.now())[:19]))
    today = str(date.today())
    start_time = time.time()
    session = DBSession.get_session()
    job_names = DBDao.get_job_names(session=session)
    living_job = JobControlHandle.live_job_names
    for k, transform in job_names.items():
        if k not in living_job:
            if self.run_times[today][k] > self.max_try:
                self.logger.error('job run too many times in one day: {}'.format(k))
                self.send_email('job run too many times in one day: {}'.format(k))
            else:
                self.run_times[today][k] += 1
                self.logger.info('job {} begin run'.format(k))
                is_ok, r = run_transform(transform)
                if not is_ok:
                    self.send_email('job start fail {}'.format(k), r)
                    self.logger.error(r)
                elif k in self.started_jobs:
                    # Seen before, so this is a restart after an earlier failure.
                    self.send_email('try restart job {}, last fail'.format(k), r)
                else:
                    self.started_jobs.add(k)
    cost = time.time() - start_time
    self.logger.debug('{} daemon cost {:.2f} seconds, will sleep'.format(str(datetime.now())[:19], cost))
    session.close()
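# The run_times bookkeeping above assumes a two-level counter keyed by day
# and then by job name; a minimal stand-in (the real attribute may differ):
from collections import defaultdict
_demo_run_times = defaultdict(lambda: defaultdict(int))
_demo_run_times['2020-01-01']['1_daily_job'] += 1
assert _demo_run_times['2020-01-01']['1_daily_job'] == 1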
def _get():
    job_infos = defaultdict(dict)
    for x in DBDao.get_transform():
        name = "{}_{}".format(x.id, x.name)
        job_infos[name] = x.to_dict()
    return job_infos
def get(self):
    data = {k + 'Num': DBDao.count(v) for k, v in SUPPORT_MODELS.items()}
    data['code'] = 200
    data['success'] = True
    return self.write_json(data)
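# Pure sketch of the payload shape built above, with a stubbed counter; the
# model name is illustrative, not a real SUPPORT_MODELS key.
def _demo_counts(models: dict, count) -> dict:
    data = {k + 'Num': count(v) for k, v in models.items()}
    data['code'] = 200
    data['success'] = True
    return data

assert _demo_counts({'transform': object}, lambda _: 3) == {'transformNum': 3, 'code': 200, 'success': True}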
def delete(self, model: str, pk: int):
    self.write_res(DBDao.delete(model, pk))
def post(self, model: str, pk: int):
    self.write_res(DBDao.update(model, pk, self.json_body))
def post(self, model: str):
    self.write_res(DBDao.create(model, self.json_body))
def reset_db(commands: list):
    from fsqlfly.db_helper import DBDao
    conformed_parser = argparse.ArgumentParser("Conformed")
    # argparse's type=bool treats any non-empty string as True (even 'false'),
    # so expose force as a real on/off flag instead.
    conformed_parser.add_argument('-f', '--force', action='store_true',
                                  default=False, help='force running')
    args = conformed_parser.parse_args(commands)
    DBDao.delete_all_tables(force=args.force)
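# Parsing behaviour sketch (no DB touched), mirroring the parser above:
import argparse
_demo_parser = argparse.ArgumentParser("Conformed")
_demo_parser.add_argument('-f', '--force', action='store_true', default=False)
assert _demo_parser.parse_args(['-f']).force is True
assert _demo_parser.parse_args([]).force is False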
def init_db(commands: list):
    from fsqlfly.db_helper import DBDao
    DBDao.create_all_tables()