def get_sub_job_datax_instance_data_by_id(batch_job_instance_id):
    """Return the datax sub-job instance rows belonging to a batch-job instance.

    :param batch_job_instance_id: id of the parent batch-job instance
    :return: list of row dicts formatted for the user
    """
    dtconn = dataconn.DatabaseConnection(logger)
    dtsf = dataconn.DataTransform()
    sql = BatchJobSubjobInstanceData.get_select_sub_job_datax_instance_by_id_sql(
        batch_job_instance_id)
    source_data = dtconn.ygol.getall(sql)
    # Materialize as a list: on Python 3 ``map`` returns a lazy, single-use
    # iterator that json.dumps cannot serialize; a list is safe on both
    # Python 2 and 3 and matches the sibling views that already build lists.
    return [dtsf.get_row_by_dict_to_user(row) for row in source_data]
def __init__(self, request):
    """Parse the posted JSON payload and set up DB access and result state."""
    currency = Currency(request)
    payload = currency.rq_post_json('data')
    self.dtconn = dataconn.DatabaseConnection(logger)
    self.jd = BatchJobData(payload)
    self.error_msg = []
    self.result = self._SUCCESS
def get_batch_job_data(request):
    """Return every batch-job row as a JSON HTTP response."""
    query = config.query_batch_job_sql2
    conn = dataconn.DatabaseConnection(logger)
    transform = dataconn.DataTransform()
    rows = conn.ygol.getall(query)
    formatted = []
    for row in rows:
        formatted.append(transform.get_row_by_dict_to_user(row))
    response = HttpResponse()
    response.write(json.dumps(formatted))
    return response
def get_datax_job_instance_by_id(request):
    """Look up a single datax job instance by id and return it as JSON.

    :param request: carries the '_id' POST parameter
    :return: HttpResponse with the formatted row as JSON
    """
    currency = Currency(request)
    instance_id = currency.rq_post('_id')
    conn = dataconn.DatabaseConnection(logger)
    transform = dataconn.DataTransform()
    # NOTE(review): `_id` comes straight from the request and is interpolated
    # into the SQL string — potential SQL injection; switch to a parameterized
    # query if the DB layer supports it.
    query = config.select_datax_job_instance_by_id_sql % instance_id
    row = conn.ygol.getsingle(query)
    response = HttpResponse()
    response.write(json.dumps(transform.get_row_by_dict_to_user(row)))
    return response
def __init__(self, data):
    """Initialize sub-job instance state from the given payload dict."""
    # Instance ids are resolved later; start unset.
    self._subjob_instance_id = None
    self._batch_job_instance_id = None
    self.subjob_id = data.get('subjob_id')
    self.type = data.get('type')
    self.dtconn = dataconn.DatabaseConnection(logger)
    self.dtsf = dataconn.DataTransform()
    self.dh = DatetimeHelp()
    self.__timestamp1 = self.dh.timestamp1
def __init__(self, data):
    """Initialize batch-job state from the given payload dict.

    :param data: dict with optional '_id', 'name', 'description' and
        'trigger_mode' keys
    """
    self._batch_job_id = data.get('_id', None)
    self.name = data.get('name', '')
    self.description = data.get('description', '')
    self._trigger_mode = data.get('trigger_mode', None)
    self.dtconn = dataconn.DatabaseConnection(logger)
    self.dtsf = dataconn.DataTransform()
    self.dh = DatetimeHelp()
    # Snapshot of the creation time, taken once at construction.
    self.__timestamp1 = self.dh.timestamp1
    self._batch_job_instance_id = None
    # NOTE(review): assigning batch_job_instance_id from _batch_job_id looks
    # like it routes through a property setter defined elsewhere in the class
    # — confirm this is intentional and not a copy-paste of the wrong id.
    self.batch_job_instance_id = self._batch_job_id
def __init__(self, request):
    """Parse the posted JSON payload and prepare job execution state.

    RESPONSE_TYPE — how data is returned to the user:
        1: up to 200k rows, returned as an Excel file
        2: more than 200k rows, processed in batches
        3: small amounts of data, returned as an HTML table
    """
    currency = Currency(request)
    payload = currency.rq_post_json('data')
    self.dtconn = dataconn.DatabaseConnection(logger)
    self.jd = JobData(payload)
    self.error_msg = []
    self.result = self._SUCCESS
def get_database(request):
    """Return all database-connection records as JSON, passwords removed.

    :param request: the incoming HTTP request (not read beyond routing)
    :return: HttpResponse whose body is a JSON list of sanitized rows
    """

    def _sanitize(row):
        # Strip the password before the row ever reaches the client.
        # pop() with a default tolerates rows that lack the key, where the
        # original ``del`` would raise KeyError.
        row.pop('passwd', None)
        return dtsf.get_row_by_dict_to_user(row)

    response = HttpResponse()
    dtconn = dataconn.DatabaseConnection(logger)
    data = dtconn.ygol.getall(dataconn.dtbsif_sql)
    dtsf = dataconn.DataTransform()
    if dtconn.ygol.status:
        # Query failed — log it; whatever rows came back are still written
        # below, preserving the original best-effort behavior.
        logger.error(u'获取数据库信息失败 %s' % dtconn.ygol.msg)
    # List comprehension instead of map(): Python 3's map object is lazy and
    # not JSON-serializable.
    response.write(json.dumps([_sanitize(row) for row in data]))
    return response
def get_batch_job_data_by_id(request):
    """Fetch a single batch-job row by id and return it as JSON.

    :param request: carries the '_id' POST parameter
    :return: HttpResponse with the formatted row as JSON
    """
    currency = Currency(request)
    job_id = currency.rq_post('_id')
    # NOTE(review): the id is interpolated straight into the SQL string —
    # potential SQL injection; prefer a parameterized query if the DB layer
    # supports it.
    query = config.query_batch_job_sql3 % job_id
    conn = dataconn.DatabaseConnection(logger)
    transform = dataconn.DataTransform()
    row = conn.ygol.getsingle(query)
    response = HttpResponse()
    response.write(json.dumps(transform.get_row_by_dict_to_user(row)))
    return response
def __init__(self, data):
    """Initialize batch-job fields from the payload dict (id = batch_job_id)."""
    self._batch_job_id = data.get('_id', None)
    # String-valued fields all default to the empty string.
    for field in ('name', 'description', 'task_template', 'is_enable',
                  'crontab', 'trigger_mode', 'operation_type'):
        setattr(self, field, data.get(field, ''))
    self.batch_job_details = data.get('batch_job_details', [])
    self.dtconn = dataconn.DatabaseConnection(logger)
    self.dtsf = dataconn.DataTransform()
    self.dh = DatetimeHelp()
    self.__timestamp1 = self.dh.timestamp1
def get_batch_job_sub_job_by_id(request):
    """Return the sub-jobs of a batch job, looked up by batch-job id, as JSON.

    Extension note: sub-jobs currently only cover data synchronization.
    Once SQL scripts, SQL backups, etc. are added, the sub-job type must be
    inspected first and the matching detail table queried per type — e.g.
    sync sub-jobs resolve their details from the sync-job table, backup
    sub-jobs from the backup table.

    :param request: carries the '_id' POST parameter
    :return: HttpResponse with a JSON list of formatted sub-job rows
    """
    currency = Currency(request)
    batch_job_id = currency.rq_post('_id')
    # NOTE(review): the id is interpolated straight into the SQL string —
    # potential SQL injection; prefer a parameterized query if supported.
    query = config.query_batch_job_sub_job_by_id_sql % batch_job_id
    conn = dataconn.DatabaseConnection(logger)
    transform = dataconn.DataTransform()
    rows = conn.ygol.getall(query)
    payload = [transform.get_row_by_dict_to_user(row) for row in rows]
    response = HttpResponse()
    response.write(json.dumps(payload))
    return response
def __init__(self, data):
    """Initialize datax-job fields from the payload dict (id = datax_job_id).

    :param data: dict describing a datax sync job; reader/writer database
        descriptors are resolved only when the corresponding id is present.
    """
    self.id = data.get('_id', 0)
    self.name = data.get('name', '')
    self.description = data.get('description', '')
    self.querySql = data.get('querySql', '')
    self.reader_databaseinfo_id = data.get('reader_databaseinfo_id', '')
    self.writer_table = data.get('writer_table', '')
    # NOTE(review): attribute is `writer_column` but the payload key read is
    # 'writer_column_id' — confirm this key-name mismatch is intentional.
    self.writer_column = data.get('writer_column_id', [])
    self.writer_databaseinfo_id = data.get('writer_databaseinfo_id', '')
    self.writer_preSql = data.get('writer_preSql', '')
    self.writer_postSql = data.get('writer_postSql', '')
    self.operation_type = data.get('operation_type', '')
    self.trigger_mode = data.get('trigger_mode', '')
    self.dtconn = dataconn.DatabaseConnection(logger)
    self.dtsf = dataconn.DataTransform()
    self.dh = DatetimeHelp()
    # Snapshot of the creation time, taken once at construction.
    self.__timestamp1 = self.dh.timestamp1
    # Lazy lookups via private helpers; skipped when the id is empty/falsy.
    self.reader_dtbs = self._get_reader_dtbs() if self.reader_databaseinfo_id else None
    self.writer_dtbs = self._get_writer_dtbs() if self.writer_databaseinfo_id else None