def _read_aux(self, titulo_id, params):
    if isinstance(titulo_id, list):
        for titulo_id_elto in titulo_id:
            self._validate_titulo_id(titulo_id_elto)
    else:
        self._validate_titulo_id(titulo_id)

    start_date = pendulum.create(2002, 1, 1, 0, 0, 0)
    end_date = pendulum.now()
    group_by_year = False

    if 'data_inicio' in params:
        self._validade_date(params['data_inicio'])
        start_date = pendulum.strptime(
            '{}-01'.format(params['data_inicio']), '%Y-%m-%d')

    if 'data_fim' in params:
        self._validade_date(params['data_fim'])
        end_date = pendulum.strptime('{}-01'.format(params['data_fim']),
                                     '%Y-%m-%d')

    if 'group_by' in params:
        assert params['group_by'] in (
            'true', 'false'), '"group_by" must be "true" or "false".'
        group_by_year = True if params['group_by'] == 'true' else False

    start_date = start_date.strftime('%Y-%m-%d %H:%M:%S')
    end_date = end_date.strftime('%Y-%m-%d %H:%M:%S')

    return (start_date, end_date, group_by_year)
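# Hedged usage sketch (not from the original source): assumes a hypothetical
# `report` object that defines _read_aux together with the _validate_titulo_id
# and _validade_date helpers it calls, and the pendulum 1.x API (create /
# strptime) used above. 'data_inicio' and 'data_fim' are 'YYYY-MM' strings;
# the method returns 'YYYY-MM-DD HH:MM:SS' bounds plus a group-by-year flag.
#
# start, end, group_by_year = report._read_aux(
#     'ABC123',
#     {'data_inicio': '2015-01', 'data_fim': '2016-06', 'group_by': 'true'})
# # e.g. start == '2015-01-01 00:00:00', end == '2016-06-01 00:00:00',
# #      group_by_year is True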
def _parse_date_reqarg(args, route_args):
    try:
        for arg in args:
            route_args[arg] = pendulum.strptime(route_args[arg], "%Y-%m-%d")
    except Exception:
        raise HTTP(404)
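# Hedged usage sketch (not part of the original snippet): assumes the
# pendulum API used above (module-level strptime) and that HTTP is the web
# framework's response exception (e.g. web2py's HTTP). The dict values are
# replaced in place with parsed datetimes; any malformed value raises HTTP(404).
route_args = {'data_inicio': '2018-01-01', 'data_fim': '2018-12-31'}
_parse_date_reqarg(['data_inicio', 'data_fim'], route_args)
# route_args now maps both keys to datetime objects instead of strings.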
def _parse_date_reqarg_opt(args, route_args):
    try:
        for arg in args:
            if route_args[arg] is None:
                continue
            route_args[arg] = pendulum.strptime(route_args[arg], "%Y-%m-%d")
    except Exception:
        raise HTTP(404)
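# Hedged usage sketch (not part of the original snippet): the _opt variant
# skips keys whose value is None instead of failing on them.
route_args = {'data_inicio': '2018-01-01', 'data_fim': None}
_parse_date_reqarg_opt(['data_inicio', 'data_fim'], route_args)
# 'data_inicio' is parsed to a datetime; 'data_fim' is left as None.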
def averager(inputs):
    print(inputs)
    input_fname = inputs[0]
    output_fname = inputs[1]
    days = inputs[2]
    with open(output_fname, 'w') as out:
        with open(input_fname, 'r') as inp:
            reader = csv.reader(inp)
            # skip the first header line
            head = next(reader)
            out.write(head[0] + ',' + head[1] + '\n')
            head = next(reader)
            out.write(head[0] + ',' + head[1] + ',' + head[2] + ',' +
                      head[3] + ',' + head[4] + '\n')
            avlist = [0, 0, 0, 0]
            tavlist = []
            count = 0
            first = True
            for row in reader:
                dt = row[0]
                time = pendulum.strptime(dt, "%m/%d/%Y %H:%M")
                if first:
                    last_time = time
                    first = False
                if (time - last_time).in_days() >= int(days):
                    out.write(last_time.strftime("%m/%d/%Y %H:%M") + ',' +
                              str(avlist[0] / count) + ',' +
                              str(avlist[1] / count) + ',' +
                              str(avlist[2] / count) + ',' +
                              str(avlist[3] / count) + '\n')
                    last_time = time
                    count = 1
                    powers = [float(row[1]), float(row[2]),
                              float(row[3]), float(row[4])]
                    avlist[0] = powers[0]
                    avlist[1] = powers[1]
                    avlist[2] = powers[2]
                    avlist[3] = powers[3]
                else:
                    powers = [float(row[1]), float(row[2]),
                              float(row[3]), float(row[4])]
                    avlist[0] += powers[0]
                    avlist[1] += powers[1]
                    avlist[2] += powers[2]
                    avlist[3] += powers[3]
                    count += 1
            out.write(last_time.strftime("%m/%d/%Y %H:%M") + ',' +
                      str(avlist[0] / count) + ',' +
                      str(avlist[1] / count) + ',' +
                      str(avlist[2] / count) + ',' +
                      str(avlist[3] / count) + '\n')
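# Hedged usage sketch (not part of the original snippet): builds a tiny CSV
# matching the layout averager() expects (two header lines, then a timestamp
# plus four power readings per row) and averages over 1-day windows. The file
# names are illustrative only; csv and pendulum are assumed imported as above.
sample = (
    "site,meta\n"
    "timestamp,p1,p2,p3,p4\n"
    "01/01/2020 00:00,1.0,2.0,3.0,4.0\n"
    "01/01/2020 12:00,2.0,3.0,4.0,5.0\n"
    "01/02/2020 00:00,3.0,4.0,5.0,6.0\n"
)
with open('power_in.csv', 'w') as f:
    f.write(sample)
averager(['power_in.csv', 'power_avg.csv', '1'])
# power_avg.csv now holds the window averages, one row per elapsed window.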
def clean_up(dag_id, execution_date, session=None):
    # check for empty
    if is_empty(dag_id) or is_empty(execution_date) or is_empty(session):
        raise InvalidArguments(
            "dag_id, execution_date and session can't be empty")
    # check for none
    if dag_id is None or execution_date is None:
        raise InvalidArguments("dag_id and execution_date can't be None")
    try:
        search = pendulum.strptime(execution_date, "%Y-%m-%dT%H:%M:%S")
        execution_date = execution_date.replace('T', ' ')
        r_config = json.loads(Variable.get("r_config"))
        if dag_id in r_config:
            exec_dates = r_config[dag_id]
            if execution_date in exec_dates:
                exec_dates.remove(execution_date)
                r_config[dag_id] = exec_dates
                if len(r_config[dag_id]) == 0:
                    del r_config[dag_id]
        if len(r_config) != 0:
            Variable.set(key="r_config", value=json.dumps(r_config))
        else:
            Variable.delete('r_config')
        # update airflow meta-database
        session.query(FailedDagRun).filter(
            FailedDagRun.dag_id == dag_id,
            FailedDagRun.execution_date.like(search)) \
            .update({'state': 'recovery_executed'},
                    synchronize_session='fetch')
    except Exception as e:
        LoggingMixin().log.error(e)
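# Hedged usage note (not from the original source): this helper assumes an
# Airflow deployment where Variable, LoggingMixin and a database session are
# available, plus the project-specific FailedDagRun model and the
# is_empty/InvalidArguments helpers, and an "r_config" Variable shaped like
# {"<dag_id>": ["<execution_date>", ...]}. A call such as the following would
# drop one recovered run from that mapping and mark the matching FailedDagRun
# rows as 'recovery_executed':
#
# clean_up('example_dag', '2019-05-01T00:00:00', session=settings.Session())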