def iso8601(argument_value):
    """argparse type-validator: accept *argument_value* only when it is a
    parseable ISO 8601 date (empty/None values pass through).

    Raises argparse.ArgumentTypeError when parsing fails.

    NOTE(review): like the original, this returns None rather than the
    validated value, so argparse stores None for the option — confirm
    that is intended by the callers.
    """
    # This function shadows the module-level ``iso8601`` library name, so
    # ``iso8601.parse_date`` would resolve to the function itself and fail
    # with AttributeError.  Re-import the library under a local alias.
    import iso8601 as iso8601_lib
    try:
        if argument_value:
            iso8601_lib.parse_date(argument_value)
    except iso8601_lib.ParseError:
        msg = "%s must be an iso8601 date" % argument_value
        raise argparse.ArgumentTypeError(msg)
def test_parse_invalid_date2():
    """A bare number is not a valid ISO 8601 date."""
    raised = False
    try:
        iso8601.parse_date("23")
    except iso8601.ParseError:
        raised = True
    assert raised
def _import_task_list(self, issue, task_list):
    """Replace *issue*'s Task rows with the entries in *task_list*.

    Skips the rebuild entirely when the row count matches and the set of
    modification timestamps is unchanged; otherwise deletes every extant
    Task for the issue and recreates them from the incoming payload.
    """
    from issues_hel.models import Task
    extant_tasks = Task.objects.filter(issue=issue)
    if extant_tasks.count() == len(task_list):
        # No change in number... maybe mtimes have changed?
        extant_mtimes = set(
            extant_tasks.values_list('task_modified', flat=True))
        new_mtimes = {
            parse_date(task_data.get('task_modified', ''))
            for task_data in task_list
        }
        if extant_mtimes == new_mtimes:
            # Nothing to do!
            return
    extant_tasks.delete()  # Have to wipe everything out first, sigh
    for task_data in task_list:
        Task.objects.create(
            issue_id=issue.id,
            task_state=task_data.get('task_state', ''),
            task_type=task_data.get('task_type', ''),
            owner_name=task_data.get('owner_name', ''),
            task_modified=parse_date(task_data.get('task_modified', '')),
            task_created=parse_date(task_data.get('task_created', '')),
        )
def score_frames(segs):
    """score the frame errors from the segments

    Accumulates each segment's duration (seconds) into per-category
    counters (D/I/F/M/Us/Ue/Os/Oe for FP/FN errors, plus TP/TN), then
    derives positive/negative frame totals and per-category rates.
    Returns a dict with 'p_rates', 'n_rates', 'frame_counts' and — when
    any frames exist — 'acc', 'p_rate' and 'n_rate'.
    """
    d = dict(D=0, I=0, F=0, M=0, Us=0, Ue=0, Os=0, Oe=0, TP=0, TN=0)
    for seg in segs:
        # Segment duration in seconds from its ISO 8601 endpoints.
        secs = (parse_date(seg["t2"]) - parse_date(seg["t1"])).total_seconds()
        if seg["score"] == "FP" or seg["score"] == "FN":
            if seg.get("err"):
                d[seg["err"]] += secs
        elif seg["score"] == "TP" or seg["score"] == "TN":
            d[seg["score"]] += secs
    d["P"] = d["D"] + d["F"] + d["Us"] + d["Ue"] + d["TP"]  # positive frames
    d["N"] = d["I"] + d["M"] + d["Os"] + d["Oe"] + d["TN"]  # negative frames
    # calculate frame rates
    ret = dict(p_rates={}, n_rates={}, frame_counts=d)
    POS = ["D", "F", "Us", "Ue", "TP"]
    NEG = ["I", "M", "Os", "Oe", "TN"]
    if d["P"]:
        for i in POS:
            ret["p_rates"][i+"r"] = d[i]*1.0 / d["P"]
    if d["N"]:
        for i in NEG:
            ret["n_rates"][i+"r"] = d[i]*1.0 / d["N"]
    if d["P"] or d["N"]:
        ret["acc"] = (d["TP"]*1.0 + d["TN"]) / (d["P"]*1.0 + d["N"])
        ret["p_rate"] = d["P"]*1.0/(d["P"] + d["N"])
        ret["n_rate"] = d["N"]*1.0/(d["P"] + d["N"])
    return ret
def test_parse_show_entity_dates():
    """show_company() should add a '<field>_parsed' datetime companion
    next to every ISO 8601 date field, at any nesting depth."""
    ISO8601_1 = "2014-12-12T15:19:21+01:00"
    ISO8601_2 = "2014-12-13T15:19:21+01:00"
    ISO8601_3 = "2014-12-14T15:19:21+01:00"
    PARSED_1 = iso8601.parse_date(ISO8601_1)
    PARSED_2 = iso8601.parse_date(ISO8601_2)
    PARSED_3 = iso8601.parse_date(ISO8601_3)
    # Fake server payload with dates at several nesting depths.
    response_body = {'status': 'success',
                     'count': 1,
                     'obj1': {'id': 42,
                              'created': ISO8601_1,
                              'item': {'updated': ISO8601_2,
                                       'fields': {'viewed': ISO8601_3},
                                       'activities': {'act1': {'viewed': ISO8601_3}}}}}
    httpretty.register_uri(httpretty.GET,
                           crm.url.format(url='companies/42/'),
                           body=json.dumps(response_body),
                           content_type='application/json')
    response = crm.show_company(42)
    assert response['status'] == 'success'
    assert response['obj1']['id'] == 42
    assert response['obj1']['created'] == ISO8601_1
    assert response['obj1']['created_parsed'] == PARSED_1
    assert response['obj1']['item']['updated'] == ISO8601_2
    assert response['obj1']['item']['updated_parsed'] == PARSED_2
    assert response['obj1']['item']['fields']['viewed'] == ISO8601_3
    assert response['obj1']['item']['fields']['viewed_parsed'] == PARSED_3
    assert response['obj1']['item']['activities']['act1']['viewed'] == ISO8601_3
    assert response['obj1']['item']['activities']['act1']['viewed_parsed'] == PARSED_3
def validate_schema_datetime(instance):
    """Validate that *instance*, when it is a string, parses as an
    ISO 8601 datetime; non-string values are accepted unchanged.

    Raises ValidationError on a malformed date string; returns True
    otherwise.
    """
    if not isinstance(instance, str):
        return True
    try:
        iso8601.parse_date(instance)
    except ParseError:
        raise ValidationError("datetime format is invalid. It must be a valid ISO8601 date time format, "
                              "https://en.wikipedia.org/wiki/ISO_8601")
    return True
def test_parse_invalid_date(invalid_date, error_string):
    """Each invalid input must raise ParseError with the right prefix."""
    # Guard against a bad parametrize entry: only str or None is expected.
    assert invalid_date is None or isinstance(invalid_date, str)
    with pytest.raises(iso8601.ParseError) as exc:
        iso8601.parse_date(invalid_date)
    assert exc.errisinstance(iso8601.ParseError)
    assert str(exc.value).startswith(error_string)
def Construct(class_, attributes, api, url):
    """Build an instance of *class_* from a raw *attributes* dict.

    Parses 'createdAt'/'updatedAt' into datetimes when present and
    records the object's endpoint as ``url + "/" + objectId``.
    """
    self = class_(attributes)
    self._api = api
    self.objectId = attributes.get("objectId", None)
    if "createdAt" in attributes:
        self.createdAt = iso8601.parse_date(attributes.get("createdAt"))
    if "updatedAt" in attributes:
        self.updatedAt = iso8601.parse_date(attributes.get("updatedAt"))
    # NOTE(review): if 'objectId' is missing this concatenates str + None
    # and raises TypeError — presumably the server always supplies it;
    # confirm against the API.
    self._url = url + "/" + self.objectId
    return self
def test_add_desc_sort(self):
    """With sort=desc, rows must come back in strictly decreasing
    date order."""
    self.query.add_series(self.single_series, self.field, 'value')
    self.cmd.run(self.query, {'sort': 'desc'})
    rows = self.query.run()['data']
    prev_date = iso8601.parse_date(rows[0][0])
    for row in rows[1:]:
        row_date = iso8601.parse_date(row[0])
        self.assertLess(row_date, prev_date)
        prev_date = row_date
def test_sort_asc_with_collapse(self):
    """sort=asc must survive a yearly collapse: dates still strictly
    increasing."""
    self.query.add_series(self.single_series, self.field, 'value')
    self.cmd.run(self.query, {'sort': 'asc'})
    # Collapse is applied after the sort command, as in production flow.
    self.query.update_collapse('year')
    rows = self.query.run()['data']
    prev_date = iso8601.parse_date(rows[0][0])
    for row in rows[1:]:
        row_date = iso8601.parse_date(row[0])
        self.assertGreater(row_date, prev_date)
        prev_date = row_date
def extract_segments(results):
    """extract the time segments from labels and detected

    Collects every t1/t2 boundary from both the label and detection
    events, pads with the overall window bounds (results['t1'],
    results['t2']) when they lie outside the collected range, and returns
    each consecutive boundary pair as a dict of ISO 8601 strings.
    """
    tt = [(parse_date(x["t1"]), parse_date(x["t2"]))
          for x in results["labels"]+results["detected"]]
    # Flatten the (t1, t2) pairs into one sorted list of boundaries.
    ts = sorted(itertools.chain.from_iterable(tt))
    t1 = parse_date(results["t1"])
    if t1 < ts[0]:
        ts.insert(0, t1)
    t2 = parse_date(results["t2"])
    if t2 > ts[-1]:
        ts.append(t2)
    # Pair up adjacent boundaries into segments.
    return [dict(t1=x[0].isoformat(), t2=x[1].isoformat())
            for x in list(sliding_window(ts, 2))]
def __setattr__(self, name, value):
    """Coerce attribute values to the proper type before assignment.

    Monetary fields become Decimal, date fields are parsed from ISO 8601
    strings into ``date`` objects, counters become int (0 on failure) and
    free-text fields are coerced to str.
    """
    if name in ('valor_unitario', 'valor_documento', 'valor_desconto',
                'valor_outras_deducoes', 'valor_multa',
                'valor_outros_acrescimos', 'valor_cobrado'):
        value = Decimal(value)
    elif name in ('vencimento', 'data_documento', 'data_processamento'):
        if isinstance(value, six.string_types):
            value = iso8601.parse_date(value).date()
    elif name in ('quantidade', 'num_sequencial'):
        try:
            value = int(value)
        except (TypeError, ValueError):
            # Non-numeric input falls back to 0.  (Was a bare ``except:``,
            # which also swallowed KeyboardInterrupt/SystemExit.)
            value = 0
    elif name in ('numero_documento', 'especie_documento', 'aceite',
                  'especie', 'sacado', 'cpf_cei_cnpj', 'sacador_avalista',
                  'local_pagamento', 'cedente', 'agencia', 'conta_corrente',
                  'carteira', 'contrato'):
        value = "{0}".format(value)
    return super(Boleto, self).__setattr__(name, value)
def test_parse_utc_different_default():
    """Z should mean 'UTC', not 'default'. """
    default_tz = iso8601.FixedOffset(2, 0, "test offset")
    parsed = iso8601.parse_date("2007-01-01T08:00:00Z",
                                default_timezone=default_tz)
    expected = datetime.datetime(2007, 1, 1, 8, 0, 0, 0, iso8601.UTC)
    assert parsed == expected
def test_to_iso(self):
    """to_iso() must emit the same UTC microsecond timestamp for both an
    aware datetime and a plain string input."""
    expected = '2015-04-15T15:00:00.000000Z'
    aware = iso8601.parse_date('2015-04-15 12:00:00+0300')
    assert utils.to_iso(aware) == expected
    assert utils.to_iso('2015-04-15 12:00:00') == expected
def _parse_date_wrapper(self, entry, field):
    """Returns a dict with a new field name for the parsed date. """
    try:
        parsed = iso8601.parse_date(entry[field])
    except ParseError:
        # Unparseable value: contribute nothing.
        return {}
    return {'{}_parsed'.format(field): parsed}
def convert(self, value, op):
    """Convert a raw filter *value* into this column's Python type.

    Resolves Flask-RESTful ``Raw`` field wrappers to their underlying
    Python type, special-cases None-for-strings and date/datetime parsing
    (RFC 822 first, ISO 8601 as fallback), then falls back to calling the
    type with progressively fewer arguments.
    """
    if isinstance(self.type, restful_fields.Raw):
        self.type = self._get_python_type_from_field(self.type)
    if inspect.isclass(self.type):
        # check if we're expecting a string and the value is `None`
        if value is None and issubclass(self.type, six.string_types):
            return None
        # handle date and datetime:
        if issubclass(self.type, datetime.date):
            try:
                # RFC822-formatted strings are now the default:
                dt = parsedate_to_datetime(value)
            except TypeError:
                # ISO8601 fallback:
                dt = iso8601.parse_date(value)
            if self.type is datetime.date:
                return dt.date()
            return dt
    # NOTE(review): the constructor-fallback chain below is assumed to sit
    # at function level (reached for non-class types too) — confirm.
    try:
        return self.type(value, self.name, op)
    except TypeError:
        try:
            return self.type(value, self.name)
        except TypeError:
            return self.type(value)
def _check_create_representation(self):
    """Validate the request payload for a resource-create operation.

    Normalizes and clamps ``expirationTime`` for expiring resources,
    drops unknown attributes when extras are ignored (unless the resource
    has flexible attributes), enforces mandatory attributes and flags
    read-only ones.  The sanitized dict is stored in ``self.values``.

    Raises CSEValueError on an unparseable expirationTime and
    CSEMissingValue when a mandatory attribute is absent.
    """
    rt = self.resource_type
    # TODO(rst): change controller to work on resource itself
    values = self.request.content.get_values(True)
    self.logger.debug("_check_create_representation: %s", values)
    # TODO: move this to expiration time handler plugin
    # but needs to be set to a value even if plugin is disabled
    if issubclass(self.resource_type, ExpiringResource):
        expiration_time = values.get("expirationTime")
        if not expiration_time:
            expiration_time = self.now + self.global_config[
                "default_lifetime"]
            self.fields.append("expirationTime")
        else:
            if not isinstance(expiration_time, datetime):
                try:
                    expiration_time = parse_date(expiration_time)
                except ParseError as e:
                    raise CSEValueError(
                        "Illegal value for expirationTime: %s" % (e,))
            # Clamp the requested lifetime into the configured window.
            if expiration_time < self.now + self.global_config[
                    "min_lifetime"]:
                self.logger.warn("expirationTime is too low. Adjusting")
                expiration_time = self.now + self.global_config[
                    "min_lifetime"]
                self.fields.append("expirationTime")
            elif expiration_time > self.now + self.global_config[
                    "max_lifetime"]:
                self.logger.warn("expirationTime is too high. Adjusting")
                expiration_time = self.now + self.global_config[
                    "max_lifetime"]
                self.fields.append("expirationTime")
        values["expirationTime"] = expiration_time
    rt_attributes = rt.attributes
    ignore_extra = self.global_config["ignore_extra_attributes"]
    is_flex = ignore_extra and issubclass(self.resource_type,
                                          FlexibleAttributesMixin)
    # TODO: optimize
    if ignore_extra and not is_flex:
        names = rt.attribute_names
        # Iterate over a snapshot: popping while iterating the live
        # keys() view raises RuntimeError on Python 3.
        for k in list(values.keys()):
            if k not in names:
                values.pop(k)
    for attribute in rt_attributes:
        have_attr = (attribute.name in values and
                     values[attribute.name] is not None)
        if not have_attr and attribute.mandatory:
            raise CSEMissingValue("Missing attribute: %s" %
                                  (attribute.name,))
        if have_attr and attribute.accesstype == attribute.RO:
            self._handle_ro_attribute(attribute)
    self.values = values
def convert_date(toks):
    """Best-effort date conversion of the first parsed token; the raw
    string is returned unchanged when it is not a valid date."""
    raw = toks[0][0]
    try:
        return parse_date(raw)
    except ParseError:
        return raw
def convert_date(toks):
    """Parse the first token as a date, falling back to the original
    string when parsing raises ValueError."""
    raw = toks[0][0]
    try:
        return parse_date(raw)
    except ValueError:
        return raw
def _create_subproject(self, **kw):
    """Create a Subproject entity, supplying a default label and creation
    date for any the caller omitted."""
    # setdefault replaces the awkward `if not 'x' in kw` checks.
    kw.setdefault('label', 'TestSubproject')
    kw.setdefault('creation_date',
                  iso8601.parse_date('2005-10-25T12:01:33+02:00'))
    return self._create_entity(Subproject, kw)
def test_dataset_issued_no_inference(self):
    """When the catalog supplies an explicit 'issued' date the loader
    must store it verbatim instead of inferring one."""
    catalog = DataJson(os.path.join(SAMPLES_DIR, 'full_ts_data.json'))
    self.loader.run(catalog, self.catalog_id)
    issued = Dataset.objects.first().issued
    self.assertEqual(
        issued.date(),
        iso8601.parse_date(catalog.get_datasets()[0]['issued']).date())
def test_update_first_run(self, mock_get_content):
    """A first utils.update() run must create one Meetup with every field
    mapped from the API payload and tick the last-checked timestamp."""
    mock_get_content.return_value = self._first_result()
    utils.update()
    meetups = Meetup.objects.all()
    self.assertEqual(len(meetups), 1)
    # Check all values are as expected
    meetup = meetups[0]
    expected_datetime = datetime(
        year=2015, month=6, day=10, hour=17, minute=30, tzinfo=UTC)
    self.assertEqual(meetup.time, expected_datetime)
    expected_datetime = datetime(
        year=2015, month=5, day=12, hour=21, minute=53, second=10,
        tzinfo=UTC)
    self.assertEqual(meetup.updated, expected_datetime)
    expected_datetime = datetime(
        year=2014, month=1, day=28, hour=20, minute=47, second=2,
        tzinfo=UTC)
    self.assertEqual(meetup.created, expected_datetime)
    self.assertEqual(meetup.rsvps, 24)
    self.assertEqual(meetup.maybe_rsvps, 7)
    self.assertEqual(meetup.waitlist_count, 11)
    self.assertEqual(meetup.name, 'Python Ireland meetup')
    self.assertEqual(meetup.description, description)
    self.assertEqual(meetup.status, 'upcoming')
    self.assertEqual(meetup.visibility, 'public')
    self.assertEqual(meetup.event_url,
                     'http://www.meetup.com/pythonireland/events/221078098/')
    # We should have ticked the MeetupUpdate
    r = settings.REDIS
    meetup_update = iso8601.parse_date(
        r.get(settings.MEETUPS_LAST_CHECKED).decode('utf-8'))
    minute_ago = datetime.now(tz=UTC) - timedelta(minutes=1)
    self.assertGreater(meetup_update, minute_ago)
def create_queries(self, query_results):
    """Build Query objects for API-management hits not seen before.

    Filters out already-loaded ids, non-series URIs and OPTIONS
    preflight requests; each new id is recorded in
    ``self.loaded_api_mgmt_ids`` so it is skipped next time.
    """
    # Skip queries that were already aggregated.
    ids = self.loaded_api_mgmt_ids
    results = filter(lambda x: x['id'] not in ids, query_results['results'])
    results = filter(lambda x: x['uri'].find('/series/api/') > -1, results)
    results = filter(lambda x: x['request_method'] != 'OPTIONS', results)
    queries = []
    for result in results:
        parsed_querystring = parse.parse_qs(result['querystring'],
                                            keep_blank_values=True)
        queries.append(Query(
            ip_address=result['ip_address'],
            args=result['querystring'],
            timestamp=iso8601.parse_date(result['start_time']),
            ids=parsed_querystring.get('ids', ''),
            params=parsed_querystring,
            api_mgmt_id=result['id'],
            uri=result.get('uri') or '',
            request_time=result.get('request_time') or 0,
            user_agent=result.get('user_agent') or '',
            status_code=result.get('status_code') or 0,
        ))
        self.loaded_api_mgmt_ids.update([result['id']])
    return queries
def _make_date_index_continuous(self, start_date, end_date):
    """Make the result's time index continuous (per the result
    periodicity) between start_date and end_date, filling self.data_dict
    with empty entries for any missing timestamps so there are no gaps.
    """
    # Nothing loaded means nothing to densify.
    if not self.data_dict:
        return
    step = get_relative_delta(self.args[constants.PARAM_PERIODICITY])
    cursor = iso8601.parse_date(start_date)
    stop = iso8601.parse_date(end_date)
    while cursor < stop:
        cursor += step
        self.data_dict.setdefault(str(cursor.date()), {})
def _parse_date_wrapper(entry, field):
    """Returns a dict with a new field name for the parsed date. """
    try:
        parsed = iso8601.parse_date(entry[field])
    except ParseError:
        return {}
    return {'{}_parsed'.format(field): parsed}
def test_parse_valid_date(valid_date, expected_datetime, isoformat):
    """Every valid input must parse field-for-field and round-trip."""
    parsed = iso8601.parse_date(valid_date)
    # Field-by-field comparison gives more precise failure messages than
    # a single equality check alone.
    for attr in ('year', 'month', 'day', 'hour', 'minute', 'second',
                 'microsecond', 'tzinfo'):
        assert getattr(parsed, attr) == getattr(expected_datetime, attr)
    assert parsed == expected_datetime
    assert parsed.isoformat() == expected_datetime.isoformat()
    copy.deepcopy(parsed)  # ensure it's deep copy-able
    pickle.dumps(parsed)  # ensure it pickles
    if isoformat:
        assert parsed.isoformat() == isoformat
    # Round trip: re-parsing our own isoformat gives the same instant.
    assert iso8601.parse_date(parsed.isoformat()) == parsed
def test_parse_date():
    """A basic UTC timestamp parses into the expected components."""
    parsed = iso8601.parse_date("2006-10-20T15:34:56Z")
    fields = (parsed.year, parsed.month, parsed.day,
              parsed.hour, parsed.minute, parsed.second)
    assert fields == (2006, 10, 20, 15, 34, 56)
    assert parsed.tzinfo == iso8601.UTC
def test_parse_show_entity_dates():
    """show_company() should add a '<field>_parsed' datetime companion
    next to every ISO 8601 date field, regardless of nesting depth."""
    ISO8601_1 = "2014-12-12T15:19:21+01:00"
    ISO8601_2 = "2014-12-13T15:19:21+01:00"
    ISO8601_3 = "2014-12-14T15:19:21+01:00"
    PARSED_1 = iso8601.parse_date(ISO8601_1)
    PARSED_2 = iso8601.parse_date(ISO8601_2)
    PARSED_3 = iso8601.parse_date(ISO8601_3)
    # Fake server payload with dates at several nesting depths.
    response_body = {
        'status': 'success',
        'count': 1,
        'obj1': {
            'id': 42,
            'created': ISO8601_1,
            'item': {
                'updated': ISO8601_2,
                'fields': {
                    'viewed': ISO8601_3
                },
                'activities': {
                    'act1': {
                        'viewed': ISO8601_3
                    }
                }
            }
        }
    }
    httpretty.register_uri(httpretty.GET,
                           crm.url.format(url='companies/42/'),
                           body=json.dumps(response_body),
                           content_type='application/json')
    response = crm.show_company(42)
    assert response['status'] == 'success'
    assert response['obj1']['id'] == 42
    assert response['obj1']['created'] == ISO8601_1
    assert response['obj1']['created_parsed'] == PARSED_1
    assert response['obj1']['item']['updated'] == ISO8601_2
    assert response['obj1']['item']['updated_parsed'] == PARSED_2
    assert response['obj1']['item']['fields']['viewed'] == ISO8601_3
    assert response['obj1']['item']['fields']['viewed_parsed'] == PARSED_3
    assert response['obj1']['item']['activities']['act1'][
        'viewed'] == ISO8601_3
    assert response['obj1']['item']['activities']['act1'][
        'viewed_parsed'] == PARSED_3
def from_server(name, value):
    """Decode one server-side attribute: 'tags' strings become lists,
    other values are parsed as dates when possible, otherwise passed
    through unchanged."""
    if name == 'tags':
        if isinstance(value, str):
            return value.split(',')
        return value
    try:
        return iso8601.parse_date(value)
    except iso8601.ParseError:
        return value
def test_validated_datetime():
    """A ValidatedDatetime keeps its raw form and still parses."""
    raw = '19941128T155300'
    validated = ValidatedDatetime(raw)
    assert str(validated) == raw
    parsed = iso8601.parse_date(validated)
    actual = (parsed.year, parsed.month, parsed.day,
              parsed.hour, parsed.minute, parsed.second)
    assert actual == (1994, 11, 28, 15, 53, 0)
def test_partial_end_date_is_inclusive(self):
    """An 'end_date' bound must include the whole period it denotes
    rather than cutting off at its first instant."""
    field = Field.objects.get(identifier=self.single_series)
    query = Query(index=settings.TEST_INDEX)
    query.add_series(self.single_series, self.field, 'value')
    query.sort('asc')
    first_date = query.run()['data'][0][0]
    # Build an end_date ten years past the series' first data point.
    end_date = iso8601.parse_date(first_date) + relativedelta(years=10)
    self.query.add_series(self.single_series, field, 'value')
    self.cmd.run(self.query, {'end_date': str(end_date)})
    # Make sure we fetch enough results.
    self.query.add_pagination(start=0, limit=1000)
    self.query.sort('asc')
    data = self.query.run()['data']
    last_date = iso8601.parse_date(data[-1][0])
    self.assertEqual(last_date.year, end_date.year)
    self.assertGreaterEqual(last_date.month, end_date.month)
def generate(self):
    """Aggregate per-source statistics over every field that declares a
    dataset source: series count, summed value count, and the earliest /
    latest index dates.  Writes the aggregate to a temporary file.
    """
    sources = {}
    for field in filter(lambda x: self.fields[x]['dataset_fuente'],
                        self.fields):
        source = self.fields[field]['dataset_fuente']
        if source not in sources:
            # First series for this source: start a zeroed record.
            sources[source] = {
                constants.SOURCES_DATASET_SOURCE: source,
                constants.SOURCE_SERIES_AMT: 0,
                constants.SOURCE_VALUES_AMT: 0,
                constants.SOURCE_FIRST_INDEX: None,
                constants.SOURCE_LAST_INDEX: None,
            }
        sources[source][constants.SOURCE_SERIES_AMT] += 1
        index_start = self.fields[field]['metadata'].get(
            meta_keys.INDEX_START)  # ☢☢☢
        if index_start:
            index_start = iso8601.parse_date(index_start).date()
            # Track the earliest start date seen for this source.
            first_index = sources[source][constants.SOURCE_FIRST_INDEX]
            if first_index is None or first_index > index_start:
                sources[source][constants.SOURCE_FIRST_INDEX] = index_start
        index_end = self.fields[field]['metadata'].get(meta_keys.INDEX_END)
        if index_end:
            index_end = iso8601.parse_date(index_end).date()
            # Track the latest end date seen for this source.
            last_index = sources[source][constants.SOURCE_LAST_INDEX]
            if last_index is None or last_index < index_end:
                sources[source][constants.SOURCE_LAST_INDEX] = index_end
        index_size = self.fields[field]['metadata'].get(
            meta_keys.INDEX_SIZE) or 0
        if index_size:
            index_size = int(index_size)
        sources[source][constants.SOURCE_VALUES_AMT] += index_size
    self.write_tmp_file(sources)
def test_required_only(self):
    """A document carrying only the schema's required fields must
    validate and parse, leaving optional fields (pilot) as None."""
    XMLSchema = etree.XMLSchema(generate_xsd(Schema))
    ops = Ops.parsexml(XML_REQUIRED_ONLY, XMLSchema)
    self.assertEqual("N608WB", ops.aircraft)
    self.assertEqual("123123", ops.flight_number)
    self.assertEqual("COMMERCIAL", ops.type)
    self.assertEqual("ICAO", ops.takeoff_airport.code_type)
    self.assertEqual("EGLL", ops.takeoff_airport.code)
    self.assertEqual(None, ops.takeoff_pilot)
    self.assertEqual(iso8601.parse_date("2009-12-30T23:35:59Z"),
                     ops.landing_gate_datetime)
def test_to_server(self):
    """to_server() joins tags, serializes datetimes, and passes any other
    value straight through."""
    assert utils.to_server('tags', ['foo', 'bar']) == 'foo,bar'
    aware = iso8601.parse_date('2015-04-15 12:00:00+0300')
    assert utils.to_server('timestamp', aware) == '2015-04-15T15:00:00.000000Z'
    assert utils.to_server('ignored', 'just a string') == 'just a string'
    assert utils.to_server('ignored', 123) == 123
def test_attrs_to_server(self):
    """attrs_to_server() must encode every known key and leave the rest
    untouched."""
    raw = {'tags': ['foo', 'bar'],
           'timestamp': iso8601.parse_date('2015-04-15 12:00:00+0300'),
           'ignored1': 'just a string',
           'ignored2': 123}
    encoded = utils.attrs_to_server(raw)
    assert encoded['tags'] == 'foo,bar'
    assert encoded['timestamp'] == '2015-04-15T15:00:00.000000Z'
    assert encoded['ignored1'] == 'just a string'
    assert encoded['ignored2'] == 123
def __init__(self, search_result_item):
    """Adapt one raw GitHub search-result dict to the parent repo model."""
    repo = search_result_item['name']
    owner = search_result_item['owner']['login']
    description = search_result_item['description'] or '?'
    last_commit = iso8601.parse_date(search_result_item['updated_at'])
    created = iso8601.parse_date(search_result_item['created_at'])
    # 'license' may be absent or null in the payload.
    license_info = search_result_item.get('license')
    license_name = license_info.get('name', None) if license_info else None
    html = search_result_item['html_url']
    super().__init__(repo_name=repo,
                     repo_description=description,
                     html_url=html,
                     owner_name=owner,
                     last_commit=last_commit,
                     created_at=created,
                     language='?',
                     license=license_name)
def update_needed():
    """Return True when the meetup cache is stale: never refreshed, or
    last refreshed more than an hour ago."""
    raw = settings.REDIS.get(settings.MEETUPS_LAST_CHECKED)
    if not raw:
        return True
    last_checked = iso8601.parse_date(raw.decode('utf-8'))
    an_hour_ago = Delorean().datetime - timedelta(hours=1)
    return last_checked < an_hour_ago
def fetch_order_status(order_status_by_parent_order_id: dict, order_id: str):
    """Replay the child-order event stream for *order_id* and reduce it
    to a single OrderStatus snapshot (status, average price and
    filled/outstanding quantities).  Returns None when the order has no
    events; raises OrderFailed on an ORDER_FAILED event.
    """
    # order_status_by_parent_order_id: <order_id:messages>
    # order_id: acceptance ID.
    if order_id not in order_status_by_parent_order_id:
        return None
    messages = order_status_by_parent_order_id[order_id]
    # Events must be replayed in chronological order.
    sorted_messages = sorted(
        messages, key=lambda tup: iso8601.parse_date(tup['event_date']))
    executed_quantity = 0
    executed_value = 0
    status = None
    order_quantity = None
    outstanding_size = None
    # https://bf-lightning-api.readme.io/docs/realtime-child-order-events
    for message in sorted_messages:
        et = message['event_type']
        ed = message['event_date']
        if et == 'ORDER':
            # new order.
            order_quantity = message['size']
            outstanding_size = order_quantity
            status = OrderStatus.OPEN
        elif et == 'ORDER_FAILED':
            raise OrderFailed(sorted_messages)
        elif et == 'CANCEL':
            status = OrderStatus.CANCEL
        elif et == 'CANCEL_FAILED':
            status = OrderStatus.CANCEL_FAILED
        elif et == 'EXECUTION':
            status = OrderStatus.OPEN
            executed_value += message['size'] * message['price']
            executed_quantity += message['size']
            outstanding_size = message['outstanding_size']
        elif et == 'EXPIRE':
            status = OrderStatus.EXPIRE
    if float(executed_quantity) > 0:
        status = OrderStatus.PARTIAL_FILL
        if order_quantity is not None:
            # Fully filled when the executed amount matches the order
            # size within float tolerance.
            if abs(float(executed_quantity) - float(order_quantity)) < 1e-6:
                status = OrderStatus.FULLY_FILL
        else:
            # NOTE(review): status was just set to PARTIAL_FILL above, so
            # the OPEN comparison here looks unreachable — confirm the
            # intended nesting against the original source.
            if outstanding_size == 0 and status == OrderStatus.OPEN:
                status = OrderStatus.FULLY_FILL
            else:
                logger.warning('Could not fetch the order quantity. That means we never received the '
                               'ORDER message but directly some executions. Bitflyer sometimes does this.'
                               'Bug ahead.')
    # ed holds the event_date of the last replayed message.
    avg_price = float(executed_value) / float(executed_quantity) if executed_quantity != 0 else 0
    return OrderStatus(
        order_id=order_id,
        event_date=ed,
        status=status,
        avg_price=avg_price,
        executed_quantity=executed_quantity,
        outstanding_size=outstanding_size
    )
def validate_phone_datetime(datetime_string, none_ok=False):
    """Parse a phone-submitted datetime string, raising
    PhoneDateValueError on bad input.  With none_ok=True, None (or a
    soft-asserted empty string) yields None instead."""
    if none_ok:
        if datetime_string is None:
            return None
        nonempty = _soft_assert(datetime_string != '',
                                'phone datetime should never be empty')
        if not nonempty:
            return None
    try:
        return iso8601.parse_date(datetime_string)
    except iso8601.ParseError:
        raise PhoneDateValueError('{!r}'.format(datetime_string))
def test_attrs_from_server(self):
    """attrs_from_server() splits tags, parses timestamps and leaves
    unknown keys untouched."""
    raw = {'tags': 'foo,bar',
           'timestamp': '2015-04-15 12:00:00+0300',
           'ignored1': 'just a string',
           'ignored2': 123}
    decoded = utils.attrs_from_server(raw)
    assert decoded['tags'] == ['foo', 'bar']
    expected_ts = iso8601.parse_date('2015-04-15 12:00:00+0300')
    assert decoded['timestamp'] == expected_ts
    assert decoded['ignored1'] == 'just a string'
    assert decoded['ignored2'] == 123
def _import_task_list(self, issue, task_list):
    """Synchronize *issue*'s Task rows with *task_list*.

    When the row count matches and the modification-timestamp sets are
    equal the import is skipped; otherwise every extant Task for the
    issue is deleted and recreated from the incoming payload.
    """
    from issues_hel.models import Task
    extant_tasks = Task.objects.filter(issue=issue)
    if extant_tasks.count() == len(task_list):
        # No change in number... maybe mtimes have changed?
        extant_mtimes = set(extant_tasks.values_list('task_modified',
                                                     flat=True))
        new_mtimes = set(parse_date(task_data.get('task_modified', ''))
                         for task_data in task_list)
        if extant_mtimes == new_mtimes:
            # Nothing to do!
            return
    extant_tasks.delete()  # Have to wipe everything out first, sigh
    for task_data in task_list:
        Task.objects.create(
            issue_id=issue.id,
            task_state=task_data.get('task_state', ''),
            task_type=task_data.get('task_type', ''),
            owner_name=task_data.get('owner_name', ''),
            task_modified=parse_date(task_data.get('task_modified', '')),
            task_created=parse_date(task_data.get('task_created', '')),
        )
def test_parse_dates():
    """list_companies() should attach an 'updated_parsed' datetime beside
    the raw ISO 8601 'updated' field."""
    iso_value = "2014-12-12T15:19:21+01:00"
    parsed_value = iso8601.parse_date(iso_value)
    body = json.dumps({'status': 'success',
                       'count': 1,
                       'obj1': {'updated': iso_value}})
    httpretty.register_uri(httpretty.GET,
                           crm.url.format(url='companies/'),
                           body=body,
                           content_type='application/json')
    response = crm.list_companies()
    assert response['status'] == 'success'
    assert response['obj1']['updated'] == iso_value
    assert response['obj1']['updated_parsed'] == parsed_value
def test_space_separator():
    """Handle a separator other than T """
    parsed = iso8601.parse_date("2007-06-23 06:40:34.00Z")
    fields = (parsed.year, parsed.month, parsed.day, parsed.hour,
              parsed.minute, parsed.second, parsed.microsecond)
    assert fields == (2007, 6, 23, 6, 40, 34, 0)
    assert parsed.tzinfo == iso8601.UTC
def test_parse_date_tz():
    """An explicit +02:30 offset must be preserved as a fixed timezone."""
    parsed = iso8601.parse_date("2006-10-20T15:34:56.123+02:30")
    assert (parsed.year, parsed.month, parsed.day) == (2006, 10, 20)
    assert (parsed.hour, parsed.minute, parsed.second) == (15, 34, 56)
    assert parsed.microsecond == 123000
    assert parsed.tzinfo.tzname(None) == "+02:30"
    offset = parsed.tzinfo.utcoffset(None)
    assert offset.days == 0
    assert offset.seconds == 60 * 60 * 2.5
def test_parse_date_fraction_2():
    """From bug 6 """
    # Note the stray trailing quote in the input string — it is part of
    # the original bug report; the parser is expected to cope with it.
    parsed = iso8601.parse_date("2007-5-7T11:43:55.328Z'")
    assert (parsed.year, parsed.month, parsed.day) == (2007, 5, 7)
    assert (parsed.hour, parsed.minute, parsed.second) == (11, 43, 55)
    assert parsed.microsecond == 328000
    assert parsed.tzinfo == iso8601.UTC
def validate_phone_datetime(datetime_string, none_ok=False):
    """Parse *datetime_string* as ISO 8601, raising PhoneDateValueError
    on failure.

    With none_ok=True, None returns None, and an empty string fires a
    soft assert (notifying droberts@dimagi.com) and returns None.
    """
    if none_ok:
        if datetime_string is None:
            return None
        # Was the confusing double negative ``not datetime_string != ''``.
        if datetime_string == '':
            soft_assert('@'.join(['droberts', 'dimagi.com']))(
                False, 'phone datetime should never be empty'
            )
            return None
    try:
        return iso8601.parse_date(datetime_string)
    except iso8601.ParseError:
        raise PhoneDateValueError('{!r}'.format(datetime_string))
def score_results(results):
    """Score list of detected events to list of ground truth items (from test_cases.py).
    See: J.A. Ward et al. (2011) http://dl.acm.org/citation.cfm?id=1889687

    Returns a dict containing:
        segments - ordered list of scored segments; each is a dict with
                   t1, t2, score, and optional err
        frame_score - frame-level rates from score_frames()
        events - event-level scores from score_events()
    """
    truths = results["labels"]
    detected = results["detected"]
    # NOTE: dead bookkeeping removed — the original accumulated a
    # ``truth_time`` total and initialized ``overlapped``/``overlaps``
    # but never used any of them.
    segs = extract_segments(results)
    segs = score_segments(segs, truths, detected)
    return dict(segments=segs,
                frame_score=score_frames(segs),
                events=score_events(truths, detected, segs))
def convertToISO8601UTC(dateTimeArg):
    """This method assumes that the datetime is local naive time.

    Accepts an ISO 8601 string or a datetime.  Timezone-aware values are
    shifted to UTC and returned as a naive datetime; anything else — or
    an unparseable string — is returned unchanged.  (Python 2 code:
    ``types.StringTypes``/``unicode``.)
    """
    arg = dateTimeArg
    # Removed the redundant ``== True`` comparisons on isinstance().
    if isinstance(arg, (types.StringTypes, unicode)):
        try:
            arg = iso8601.parse_date(arg)
        except Exception:
            # Narrowed from a bare ``except:`` (which also swallowed
            # KeyboardInterrupt/SystemExit).  Unparseable strings fall
            # through and are returned as-is.
            pass
    if isinstance(arg, datetime) and arg.tzinfo is not None:
        dateUTC = arg - arg.utcoffset()
        # Rebuild without tzinfo so the result is a naive UTC datetime.
        dateUTC_noTZ = datetime(dateUTC.year, dateUTC.month, dateUTC.day,
                                dateUTC.hour, dateUTC.minute,
                                dateUTC.second, dateUTC.microsecond)
        return dateUTC_noTZ
    return dateTimeArg