def get_time_elapsed_in_min(elapsed_string: Optional[str]) -> Optional[int]:
    """Parse a SLURM elapsed-time string into whole minutes.

    SLURM reports elapsed time as ``[days-]HH:MM:SS``; shorter forms such
    as ``MM:SS`` are zero-padded on the left. Seconds are truncated,
    matching the previous behaviour.

    :param elapsed_string: e.g. ``"1-02:03:04"``, ``"02:03:04"``, ``"03:04"``.
    :return: elapsed minutes, or 0 for empty/non-string input.
    """
    if not elapsed_string or not isinstance(elapsed_string, str):
        return 0

    days = 0
    if "-" in elapsed_string:
        day_part, _, elapsed_string = elapsed_string.partition("-")
        days = int(day_part)

    parts = elapsed_string.split(":")
    # Left-pad so that e.g. "MM:SS" becomes "0:MM:SS" before unpacking.
    parts = ["0"] * (3 - len(parts)) + parts
    hours, minutes, _seconds = (int(p) for p in parts)

    # BUG FIX: a day is 24 * 60 minutes; the old code credited only 60
    # minutes per day-component (`days * 60`).
    return days * 24 * 60 + hours * 60 + minutes
def scandal_predicate(headers):
    """See Sec 5.1 of
    http://www.kecl.ntt.co.jp/as/members/ishiguro/open/2012AISTATS.pdf
    """
    stamp = parse_datestr(headers['date'])
    # True only for August and October 2001.
    return (stamp.year, stamp.month) in {(2001, 8), (2001, 10)}
def validate_date(datestr):
    """Control and validate the date string.

    :param datestr: The date string representation.
    :return: The datetime parsed (fuzzily) from the string, with UTC tzinfo.
    """
    parsed = parse_datestr(datestr, fuzzy=True)
    return parsed.replace(tzinfo=tzutc())
def update_spreadsheet(datadir):
    """Push benchmark results from *datadir* into the "Nylas Mail Benchmarks"
    Google spreadsheet, one row per git SHA, ordered by commit date.

    NOTE: Python 2 code (print statements below).

    :param datadir: directory containing ``<gitsha>-results.txt`` files.
    """
    credentials = ServiceAccountCredentials.from_json_keyfile_name(
        'client_secret.json', scope)
    gc = gspread.authorize(credentials)
    worksheet = gc.open("Nylas Mail Benchmarks").sheet1
    filenames = []
    for filename in glob('{datadir}/*-results.txt'.format(datadir=datadir)):
        # Extract the git SHA from "<sha>-results.txt".
        gitsha = re.match('^(.*)-results.txt$',
                          os.path.basename(filename)).groups(0)[0]
        # Commit date of that SHA (git's "%ci" committer-date format).
        formatted_datetime = subprocess.check_output(
            ['git', 'show', '-s', '--format=%ci', gitsha])
        parsed_datetime = parse_datestr(formatted_datetime)
        filenames.append((filename, gitsha, parsed_datetime))
    new_data = []
    # Oldest commit first so the sheet stays in chronological order.
    for filename, gitsha, parsed_datetime in sorted(filenames,
                                                    key=lambda t: t[2]):
        synced_messages, confidence_interval = anymean(filename)
        row = (parsed_datetime.strftime("%Y-%m-%d %H:%M:%S"), gitsha,
               synced_messages, confidence_interval)
        new_data.append(row)
        print row
    # TODO: might want to use the batch upload api in order to not run into rate-limits
    for i, new_row in enumerate(new_data):
        row_num = i + 2  # rows start at 2 — presumably row 1 is a header; confirm against the sheet
        existing_row = worksheet.range(
            'A{row_num}:D{row_num}'.format(row_num=row_num))
        for j, cell in enumerate(existing_row):
            col_num = j + 1
            cell.value = new_row[j]
            print "updating cell {row_num}:{col_num} with {val}".format(
                row_num=row_num, col_num=col_num, val=cell.value)
        worksheet.update_cells(existing_row)
def update_jobs(self, analysis_obj: models.Analysis, jobs_dataframe: pd.DataFrame) -> None:
    """Replace an analysis' failed-job records from a job dataframe.

    Normalises each job's "step" name via the formatter registered for the
    analysis' data_analysis type, deletes the previously stored failed
    jobs, then stores one Job row per dataframe row.
    """
    if len(jobs_dataframe) == 0:
        return

    step_formatter = formatters.formatter_map.get(
        analysis_obj.data_analysis, formatters.transform_undefined)
    jobs_dataframe["step"] = jobs_dataframe["step"].apply(step_formatter)

    # Drop the stale job rows before attaching the fresh ones.
    for stale_job in analysis_obj.failed_jobs:
        stale_job.delete()
    self.commit()

    fresh_jobs = []
    for _, row in jobs_dataframe.iterrows():
        started = row.get("started")
        fresh_jobs.append(
            self.Job(
                analysis_id=analysis_obj.id,
                slurm_id=row.get("id"),
                name=row.get("step"),
                status=row.get("status").lower(),
                started_at=parse_datestr(started) if isinstance(started, str) else None,
                elapsed=row.get("time_elapsed"),
            )
        )
    analysis_obj.failed_jobs = fresh_jobs
    self.commit()
def validate_req_datetime(datestr, strict=True):
    """Parse *datestr* into a UTC-aware datetime.

    :param datestr: the Accept-Datetime header value to parse.
    :param strict: require the exact DATE_FORMAT when True; otherwise
        parse fuzzily.
    :raises DateTimeError: with status 400 when parsing fails.
    """
    try:
        if strict:
            parsed = datetime.strptime(datestr, DATE_FORMAT)
        else:
            parsed = parse_datestr(datestr, fuzzy=True)
        return parsed.replace(tzinfo=tzutc())
    except Exception as err:
        raise DateTimeError("Error parsing 'Accept-Datetime: %s' \n"
                            "Message: %s" % (datestr, err), 400)
def post_find_analysis():
    """Find analysis using case_id, date, and status"""
    content = request.json
    started_at = content.get("started_at")
    analysis_obj = store.get_analysis(
        case_id=content.get("case_id"),
        # Guard against a missing "started_at" instead of letting
        # parse_datestr blow up on None (mirrors post_query_analyses).
        started_at=parse_datestr(started_at) if started_at else None,
        status=content.get("status"),
    )
    if analysis_obj:
        data = stringify_timestamps(analysis_obj.to_dict())
        return jsonify(**data), 200
    return jsonify(None), 200
def parse_date(val, nullable=True, tz=TZ_LOCAL, **parse_kws):
    """Coerce *val* (datetime, date string, or unix timestamp) into a
    tz-aware datetime in *tz*.

    :param val: datetime, parseable date string, or numeric unix timestamp.
    :param nullable: when True, unsupported input yields None instead of
        raising.
    :param tz: target timezone for the returned datetime.
    :param parse_kws: extra keyword arguments forwarded to parse_datestr.
    :raises ValueError: for an unsupported type when nullable is False.
    """
    timestamp = None
    if isinstance(val, datetime):
        timestamp = val.timestamp()
    elif isinstance(val, str):
        timestamp = parse_datestr(val, **parse_kws).timestamp()
    elif isinstance(val, (int, float)):
        timestamp = val
    # BUG FIX: the old truthiness test (`if v:`) treated the Unix epoch
    # (timestamp == 0) as "no value", returning None or raising; compare
    # against None explicitly.
    if timestamp is not None:
        return datetime.fromtimestamp(timestamp, tz=tz)
    if nullable:
        return None
    # BUG FIX: the old message had no separator ("Unknown DateValue<val>").
    raise ValueError(f"Unknown DateValue: {val!r}")
def cast_date(*, value: str, params: str, shared: dict) -> datetime:
    """Cast date/datetime string into object.

    :param value: Current value.
    :param params: Additional value-related parameters.
    :param shared: Global shared parameters.
    """
    key, _, val = params.partition('=')
    parsed = parse_datestr(value)
    # A TZID parameter overrides the shared timezone; fall back to UTC.
    if key == 'TZID':
        zone = val
    else:
        zone = shared['tz'] or 'UTC'
    return default_tzinfo(parsed, gettz(zone))
def load_server_metadata(self, meta_json):
    """Register the participant codes described in server metadata JSON."""
    for props in meta_json.get('participant_codes', []):
        code = props['code']
        study = props['study']
        extra = {}
        if props.get('is_secret_url', False):
            self.get_experiment(study).set_secret_url(code)
            extra['is_secret_url'] = True
        else:
            extra['timeout'] = parse_datestr(props['timeout'])
            extra['unique_session'] = props['unique_session']
            extra['session_limit'] = props['session_limit']
            extra['session_count'] = props.get('session_count', 0)
        # NOTE(review): reconstructed from whitespace-mangled source; the
        # session_count assignment may have been common to both branches —
        # confirm against the original file.
        self.add_participant_code(study, code=code, **extra)
def validate_date(datestr, strict=False):
    """
    Controls and validates the date string.

    :param datestr: The date string representation
    :param strict: When True, the date must strictly follow the format
        defined in the config file (DATEFMT). When False, the date string
        can be fuzzy and the function will try to reconstruct it.
    :return: The datetime object from the parsed date string.
    :raises Exception: when the string cannot be parsed; the underlying
        parse error is chained as ``__cause__``.
    """
    try:
        if strict:
            return datetime.strptime(datestr, DATE_FORMAT)
        return parse_datestr(datestr, fuzzy=True).replace(tzinfo=tzutc())
    except Exception as e:
        # BUG FIX: the original captured `e` but discarded it; chain it so
        # the root cause survives in the traceback.
        raise Exception("Error: cannot parse date string %s" % datestr) from e
def validate_req_datetime(datestr, strict=True):
    """
    Parses the requested date string into a dateutil time object.
    Raises DateTimeError if the parse fails to produce a datetime

    :param datestr: A date string, in a common format.
    :param strict: If the datetime MUST follow the exact format DATEFMT
    :return: the dateutil time object (UTC tzinfo attached)
    """
    try:
        if strict:
            parsed = datetime.strptime(datestr, DATE_FORMAT)
        else:
            parsed = parse_datestr(datestr, fuzzy=True)
        logging.debug("Accept datetime parsed to: " + date_str(parsed))
        return parsed.replace(tzinfo=tzutc())
    except Exception as err:
        raise DateTimeError("Error parsing 'Accept-Datetime: %s' \n"
                            "Message: %s" % (datestr, err))
def post_query_analyses():
    """Return list of analyses matching the query terms"""
    content = request.json
    before_raw = content.get("before")
    matching = store.analyses(
        case_id=content.get("case_id"),
        query=content.get("query"),
        status=content.get("status"),
        deleted=content.get("deleted"),
        temp=content.get("temp"),
        before=parse_datestr(before_raw) if before_raw else None,
        is_visible=content.get("visible"),
        family=content.get("family"),
        data_analysis=content.get("data_analysis"),
    )
    payload = [stringify_timestamps(analysis.to_dict()) for analysis in matching]
    return jsonify(*payload), 200
def parse_doc_detail(self, response):
    """Parse the announcement detail page and its attachments for
    '財務報告更(補)正查詢作業' (financial report correction/amendment
    queries); yields a BulletinItem for non-report announcements.
    """
    # Attachment links sit in the <td> following the '附件' ("attachments")
    # table header.
    attaches = response.xpath(
        "//th[contains(text(), '附件')]/following-sibling::td/a/@href"
    ).extract()
    file_urls = [response.urljoin(it).replace('\n', '') for it in attaches]
    if response.meta['is_report']:
        # Tentatively treated as non-financial-report; intentionally
        # skipped for now (original note: 暂定位非财报).
        pass
    else:
        detail_type = '{}_Q{}'.format(response.meta['title'],
                                      response.meta['season'][1])
        self.total_new += 1
        yield BulletinItem(exchange_market_code=response.meta['exchange'],
                           company_code=response.meta['code'],
                           fiscal_year=response.meta['year'],
                           disclosure_date=parse_datestr(
                               response.meta['date']),
                           doc_type='directory',
                           file_original_title=response.meta['caption'],
                           announcement_detail_type=detail_type,
                           is_downloaded=response.meta['is_downloaded'],
                           data=response.body,
                           file_urls=file_urls)
def parse_str_to_datetime(cls, value: str) -> Optional[dt.datetime]:
    """Parse a non-empty string into a datetime; return None for falsy input."""
    if not value:
        return None
    return parse_datestr(value)
def parse_date(*args, **kwargs):
    """Pass every argument straight through to parse_datestr."""
    parsed = parse_datestr(*args, **kwargs)
    return parsed