Example #1
def parse_datetime(dtstr, default_timezone=TIMEZONE):
    """
    Parse an ISO8601 datetime string.

    The datetime may be delimited by ' ' or 'T'.
    If no timezone is included, the ``default_timezone`` will be applied.

    :param dtstr: An ISO8601 formatted datetime string
    :param default_timezone: A default timezone to apply if missing.

    :rtype: datetime.datetime
    :return: A timezone-aware datetime object.
    """
    # Allow use of space as separator
    try:
        if 'T' not in dtstr:
            date = aniso8601.parse_datetime(dtstr, delimiter=' ')
        else:
            date = aniso8601.parse_datetime(dtstr)
    except ValueError as e:
        # The aniso8601 errors are not always great
        raise ValueError("invalid iso8601 date (%s)" % (e,))

    if not date.tzinfo:
        # No timezone given, assume default_timezone
        date = apply_timezone(date, tz=default_timezone)
    return date
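A minimal usage sketch of the same parsing behaviour, using aniso8601 directly and pytz as a stand-in for the module's apply_timezone/TIMEZONE (both stand-ins are assumptions here):

import aniso8601
import pytz

# 'T'-delimited and space-delimited forms parse to the same instant;
# the second relies on the delimiter=' ' branch above.
a = aniso8601.parse_datetime("2020-05-01T12:30:00+02:00")
b = aniso8601.parse_datetime("2020-05-01 12:30:00+02:00", delimiter=" ")
assert a == b

# A naive result is what the default_timezone branch would localize;
# pytz.utc.localize is shown as one plausible apply_timezone().
naive = aniso8601.parse_datetime("2020-05-01T12:30:00")
aware = pytz.utc.localize(naive)
assert aware.tzinfo is not None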
Example #2
    def get_latest_lease_maintenance_activity(self, cursor):
        """
        Get a description of the most recently completed lease maintenance
        activity.

        :return LeaseMaintenanceActivity|None: If any lease maintenance has
            completed, an object describing its results.  Otherwise, None.
        """
        cursor.execute(
            """
            SELECT [started], [count], [finished]
            FROM [lease-maintenance-spending]
            WHERE [finished] IS NOT NULL
            ORDER BY [finished] DESC
            LIMIT 1
            """, )
        activity = cursor.fetchall()
        if len(activity) == 0:
            return None
        [(started, count, finished)] = activity
        return LeaseMaintenanceActivity(
            parse_datetime(started, delimiter=u" "),
            count,
            parse_datetime(finished, delimiter=u" "),
        )
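A self-contained sketch of the same query pattern against an in-memory SQLite database; the schema below is an assumption inferred from the bracket-quoted column names:

import sqlite3

conn = sqlite3.connect(":memory:")
conn.execute(
    "CREATE TABLE [lease-maintenance-spending] "
    "([started] TEXT, [count] INTEGER, [finished] TEXT)")
conn.execute(
    "INSERT INTO [lease-maintenance-spending] VALUES (?, ?, ?)",
    ("2021-03-01 10:00:00", 5, "2021-03-01 10:05:00"))

cursor = conn.cursor()
cursor.execute(
    """
    SELECT [started], [count], [finished]
    FROM [lease-maintenance-spending]
    WHERE [finished] IS NOT NULL
    ORDER BY [finished] DESC
    LIMIT 1
    """)
# Single-row destructuring, as in the method above.
[(started, count, finished)] = cursor.fetchall()
print(started, count, finished)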
Example #3
def assertion_timestamp_checks(state, task_meta, **options):
    try:
        node_id = task_meta['node_id']
        assertion = get_node_by_id(state, node_id)
        issued_on = aniso8601.parse_datetime(assertion['issuedOn'])
    except (IndexError, KeyError, ValueError,):
        raise TaskPrerequisitesError(task_meta)

    now = datetime.now(utc)
    if issued_on > now:
        return task_result(
            False, "Assertion {} has issue date {} in the future.".format(node_id, issued_on))

    if assertion.get('expires'):
        expires = aniso8601.parse_datetime(assertion['expires'])
        if expires < issued_on:
            return task_result(
                False, "Assertion {} expiration is prior to issue date.".format(node_id))

        if expires < now:
            return task_result(
                False, "Assertion {} expired on {}".format(node_id, assertion['expires'])
            )

    return task_result(
        True, "Assertion {} was issued and has not expired.".format(node_id))
Example #4
def get_application_periods_by_periods(json_application_periods):
    result = []
    for app_periods in json_application_periods:
        period = (parse_datetime(app_periods['begin']).replace(tzinfo=None),
                  parse_datetime(app_periods['end']).replace(tzinfo=None))
        result.append(period)
    return result
Example #5
def build_api_model() -> Model:
    """
    Returns a Flask-RESTX Api Model based on the sample dict returned by the trained model wrapper.
    This will be used to validate input and automatically generate the Swagger prototype.
    """
    fields_classes_map: Dict = {
        "str": fields.String,
        "int": fields.Integer,
        "float": fields.Float,
        "bool": fields.Boolean,
        "datetime": fields.DateTime,
        "date": fields.Date,
    }
    model_dict: Dict = {}
    model_sample: Dict = trained_model_wrapper.sample()
    if model_sample:
        for key, value in model_sample.items():
            fields_class: fields.Raw = fields_classes_map.get(
                type(value).__name__, fields.String)
            if type(value).__name__ == "str":
                try:
                    parse_date(value)
                    fields_class = fields.Date
                except ValueError:
                    pass
                try:
                    parse_datetime(value)
                    fields_class = fields.DateTime
                except ValueError:
                    pass
            model_dict[key] = fields_class(example=value,
                                           readonly=True,
                                           required=True)
    return api.model("input_vector", model_dict)
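A rough sketch of the type-sniffing step on a hypothetical sample dict (trained_model_wrapper and Flask-RESTX are left out; only the aniso8601 probing is shown):

import aniso8601

# Hypothetical sample; real values come from trained_model_wrapper.sample().
model_sample = {"name": "Alice", "age": 42, "signup": "2021-06-01T08:00:00Z"}

for key, value in model_sample.items():
    kind = type(value).__name__
    if kind == "str":
        try:
            aniso8601.parse_datetime(value)
            kind = "datetime"
        except ValueError:
            pass
    print(key, "->", kind)
# name -> str, age -> int, signup -> datetime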
Example #6
def parse_iec_xml(file_path):
    """Parses iec xml to Pandas DataFrame, meta on the same row wit valu and start/end time
    columns = ["position", "timestamp_start_utc", "timestamp_end_utc", "value", "business_type", "from_domain", "to_domain", "line"]"""

    tree = etree.parse(file_path)
    periods = tree.findall('.//{*}Period')

    data_list = []

    for period in periods:

        business_type = get_text(period, '../{*}businessType')
        from_domain = get_text(period, '../{*}in_Domain.mRID')
        to_domain = get_text(period, '../{*}out_Domain.mRID')
        line = get_text(period, '../{*}connectingLine_RegisteredResource.mRID')

        curve_type = get_text(period, '../{*}curveType')
        resolution = aniso8601.parse_duration(
            period.find('.//{*}resolution').text, relative=True)
        start_time = aniso8601.parse_datetime(period.find('.//{*}start').text)
        end_time = aniso8601.parse_datetime(period.find('.//{*}end').text)

        points = period.findall('.//{*}Point')

        for n, point in enumerate(points):
            position = int(point.find("{*}position").text)
            value = float(point.find("{*}quantity").text)
            timestamp_start = (start_time + resolution *
                               (position - 1)).replace(tzinfo=None)

            if curve_type == "A03":
                # This curve type expects values to be valid until the next change or until the end of the period
                if n + 2 <= len(points):
                    next_position = int(
                        points[n + 1].find("{*}position").text)
                    timestamp_end = (start_time + resolution *
                                     (next_position - 1)).replace(tzinfo=None)
                else:

                    timestamp_end = end_time.replace(tzinfo=None)

            else:
                # Otherwise the value is only valid during the specified resolution
                timestamp_end = timestamp_start + resolution

            data_list.append((position, timestamp_start, timestamp_end, value,
                              business_type, from_domain, to_domain, line))

            #dataframe.ix[timestamp_start.replace(tzinfo=None), "DATA"] = value

    data_frame = pandas.DataFrame(data_list,
                                  columns=[
                                      "position", "timestamp_start_utc",
                                      "timestamp_end_utc", "value",
                                      "business_type", "from_domain",
                                      "to_domain", "line"
                                  ])

    return data_frame
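A short sketch of the position-to-timestamp arithmetic used in the loop above; the resolution and start values are made up:

import aniso8601

# relative=True returns a dateutil relativedelta, which supports
# multiplication by an integer, exactly as used above.
resolution = aniso8601.parse_duration("PT15M", relative=True)
start_time = aniso8601.parse_datetime("2021-01-01T00:00:00Z")

# Position 1 starts at the period start; position N starts (N - 1) steps later.
for position in (1, 2, 5):
    ts = (start_time + resolution * (position - 1)).replace(tzinfo=None)
    print(position, ts.isoformat())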
Example #7
    def post(self, id):
        update = request.get_json()
        if 'id' in update:
            return {'error': 'Updating id is not allowed'}, \
                HTTPStatus.BAD_REQUEST

        bookmark = db.Bookmark.query \
                              .filter_by(id=id, user=current_user.id) \
                              .first()
        if bookmark is None:
            return {'error': 'Not found'}, HTTPStatus.NOT_FOUND

        if 'url' in update:
            bookmark.url = urldefrag(update['url']).url
        if 'title' in update:
            bookmark.title = update['title']
        if 'timestamp' in update:
            bookmark.timestamp = aniso8601.parse_datetime(update['timestamp'])
        if 'read' in update:
            if update['read']:
                bookmark.read = aniso8601.parse_datetime(update['read'])
            else:
                bookmark.read = None
        if 'tags' in update:
            bookmark.tags = update['tags']

        db.db.session.add(bookmark)
        db.db.session.commit()

        return bookmark.to_dict(), HTTPStatus.OK
def test_pattern_with_one_period_in_result():
    start_date = parse_datetime("2015-02-01T06:52:00Z").replace(tzinfo=None)
    end_date = parse_datetime("2015-02-01T23:52:00Z").replace(tzinfo=None)
    weekly_pattern = "1111111"
    time_slots = [{"begin": "07:45", "end": "09:30"}]
    app_periods = get_application_periods_by_pattern(start_date, end_date, weekly_pattern, time_slots, 'Europe/Paris')
    eq_(len(app_periods), 1)
Example #9
File: utils.py Project: dvdn/Chaos
def get_application_periods_by_periods(json_application_periods):
    result = []
    for app_periods in json_application_periods:
        period = (
            parse_datetime(app_periods['begin']).replace(tzinfo=None),
            parse_datetime(app_periods['end']).replace(tzinfo=None)
        )
        result.append(period)
    return result
def test_pattern_with_one_period_in_result():
    start_date = parse_datetime("2015-02-01T06:52:00Z").replace(tzinfo=None)
    end_date = parse_datetime("2015-02-01T23:52:00Z").replace(tzinfo=None)
    weekly_pattern = "1111111"
    time_slots = [{"begin": "07:45", "end": "09:30"}]
    app_periods = get_application_periods_by_pattern(start_date, end_date,
                                                     weekly_pattern,
                                                     time_slots,
                                                     'Europe/Paris')
    eq_(len(app_periods), 1)
def test_pattern_midnight_change_one_day():
    date_format = "%Y-%m-%dT%H:%M:%SZ"
    start_date = parse_datetime("2015-09-21T00:00:00Z").replace(tzinfo=None)
    end_date = parse_datetime("2015-09-21T00:00:00Z").replace(tzinfo=None)
    weekly_pattern = "1111111"
    time_slots = [{"begin": "18:00", "end": "03:00"}]
    app_periods = get_application_periods_by_pattern(start_date, end_date, weekly_pattern, time_slots, 'Europe/Paris')
    eq_(len(app_periods), 1)
    eq_(app_periods[0][0].strftime(date_format), "2015-09-21T16:00:00Z")
    eq_(app_periods[0][1].strftime(date_format), "2015-09-22T01:00:00Z")
def test_pattern_midnight_change_to_winter_2_oclock():
    date_format = "%Y-%m-%dT%H:%M:%SZ"
    start_date = parse_datetime("2015-10-24T00:00:00Z").replace(tzinfo=None)
    end_date = parse_datetime("2015-10-24T00:00:00Z").replace(tzinfo=None)
    weekly_pattern = "1111111"
    time_slots = [{"begin": "22:00", "end": "02:00"}]
    app_periods = get_application_periods_by_pattern(start_date, end_date, weekly_pattern, time_slots, 'Europe/Paris')
    eq_(len(app_periods), 1)
    #Difference of one hour for begin and two hours for end between UTC and Europe/Paris
    eq_(app_periods[0][0].strftime(date_format), "2015-10-24T20:00:00Z")
    eq_(app_periods[0][1].strftime(date_format), "2015-10-25T01:00:00Z")
Example #13
def parse_date(dtstr):
    # Allow use of space as separator
    if 'T' not in dtstr:
        date = aniso8601.parse_datetime(dtstr, delimiter=' ')
    else:
        date = aniso8601.parse_datetime(dtstr)

    if not date.tzinfo:
        # No timezone, assume UTC?
        date = apply_timezone(date, tz=pytz.UTC)
    return date
Example #14
 def _validate_datetime(value):
     try:
         # aniso at least needs to think it can get a datetime from value
         aniso8601.parse_datetime(value)
     except Exception as e:
         return False
     # we also require tzinfo specification on our datetime strings
     # NOTE -- does not catch minus-sign (non-ascii char) tzinfo delimiter
     return (isinstance(value, six.string_types) and
             (value[-1:]=='Z' or
              bool(re.match(r'.*[+-](?:\d{4}|\d{2}|\d{2}:\d{2})$', value))))
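A sketch of which strings the criteria above accept, restating the check inline so it runs on its own:

import re
import aniso8601

samples = [
    "2021-01-01T00:00:00Z",       # accepted: trailing Z
    "2021-01-01T00:00:00+02:00",  # accepted: numeric offset
    "2021-01-01T00:00:00",        # rejected: no tzinfo suffix
]

tz_suffix = re.compile(r'.*[+-](?:\d{4}|\d{2}|\d{2}:\d{2})$')
for value in samples:
    try:
        aniso8601.parse_datetime(value)
        parseable = True
    except Exception:
        parseable = False
    ok = parseable and (value.endswith('Z') or bool(tz_suffix.match(value)))
    print(value, ok)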
def test_pattern_with_multi_periods_in_result():
    date_format = "%Y-%m-%dT%H:%M:%SZ"
    start_date = parse_datetime("2015-02-02T06:52:00Z").replace(tzinfo=None)
    end_date = parse_datetime("2015-02-15T23:52:00Z").replace(tzinfo=None)
    weekly_pattern = "1111111"
    time_slots = [{"begin": "07:45", "end": "09:30"}]
    app_periods = get_application_periods_by_pattern(start_date, end_date, weekly_pattern, time_slots, 'Europe/Paris')
    eq_(len(app_periods), 14)
    eq_(app_periods[0][0].strftime(date_format), "2015-02-02T06:45:00Z")
    eq_(app_periods[0][1].strftime(date_format), "2015-02-02T08:30:00Z")
    eq_(app_periods[13][0].strftime(date_format), "2015-02-15T06:45:00Z")
    eq_(app_periods[13][1].strftime(date_format), "2015-02-15T08:30:00Z")
Example #16
 def state_from_row(state, row):
     if state == u"pending":
         return Pending(counter=row[3])
     if state == u"double-spend":
         return DoubleSpend(parse_datetime(row[0], delimiter=u" "), )
     if state == u"redeemed":
         return Redeemed(
             parse_datetime(row[0], delimiter=u" "),
             row[1],
             row[2],
         )
     raise ValueError("Unknown voucher state {}".format(state))
Example #17
def push_deneigement_8hr():
    """
    Push messages to users when the snow removal period for their checkin location is exactly eight hours away
    """
    CONFIG = create_app().config
    db = PostgresWrapper(
        "host='{PG_HOST}' port={PG_PORT} dbname={PG_DATABASE} "
        "user={PG_USERNAME} password={PG_PASSWORD} ".format(**CONFIG))

    # grab the appropriate checkins to send pushes to by slot ID
    start = (datetime.datetime.utcnow().replace(tzinfo=pytz.utc).astimezone(
        pytz.timezone('US/Eastern')) + datetime.timedelta(hours=8))
    finish = start - datetime.timedelta(minutes=5)
    res = db.query("""
        SELECT DISTINCT x.start, u.lang, u.sns_id
        FROM temporary_restrictions x
        JOIN checkins c ON c.slot_id = ANY(x.slot_ids)
        JOIN users u ON c.user_id = u.id
        WHERE (x.meta = '2' OR x.meta = '3') AND x.active = true AND x.type = 'snow'
            AND x.start > '{}' AND x.start < '{}'
            AND c.active = true AND c.checkout_time IS NULL
            AND u.push_on_temp = true AND u.sns_id IS NOT NULL
            AND c.checkin_time > (NOW() - INTERVAL '14 DAYS')
    """.format(finish.strftime('%Y-%m-%d %H:%M:%S'),
               start.strftime('%Y-%m-%d %H:%M:%S')))

    # group device IDs by start time, then send messages
    # materialize the filters so they can be iterated more than once below
    lang_en = list(filter(lambda x: x[1] == 'en', res))
    lang_fr = list(filter(lambda x: x[1] == 'fr', res))
    data = {
        "en": {x: []
               for x in set([z[0].isoformat() for z in lang_en])},
        "fr": {x: []
               for x in set([z[0].isoformat() for z in lang_fr])}
    }
    for x in lang_en:
        data["en"][x[0].isoformat()].append(x[2])
    for x in lang_fr:
        data["fr"][x[0].isoformat()].append(x[2])
    for x in data["en"].keys():
        dt = format_datetime(aniso8601.parse_datetime(x), u"h:mm a")
        notifications.schedule_notifications(
            data["en"][x],
            u"❄️ Attention, snow removal starts in 8 hours, at {}!".format(dt))
    for x in data["fr"].keys():
        dt = format_datetime(aniso8601.parse_datetime(x),
                             u"H'h'mm",
                             locale='fr_FR')
        notifications.schedule_notifications(
            data["fr"][x],
            u"❄️ Attention, le déneigement commence dans 8h, à {} !".format(
                dt))
Example #18
def push_deneigement_scheduled():
    """
    Push messages to users when snow removal is initially scheduled for their checkin location.
    """
    CONFIG = create_app().config
    db = PostgresWrapper(
        "host='{PG_HOST}' port={PG_PORT} dbname={PG_DATABASE} "
        "user={PG_USERNAME} password={PG_PASSWORD} ".format(**CONFIG))

    # grab the appropriate checkins to send pushes to by slot ID
    start = datetime.datetime.now()
    finish = start - datetime.timedelta(minutes=5)
    res = db.query("""
        SELECT DISTINCT x.start, u.lang, u.sns_id
        FROM temporary_restrictions x
        JOIN checkins c ON c.slot_id = ANY(x.slot_ids)
        JOIN users u ON c.user_id = u.id
        WHERE (x.meta = '2' OR x.meta = '3') AND x.active = true AND x.type = 'snow'
            AND x.modified > '{}' AND x.modified < '{}'
            AND c.active = true AND c.checkout_time IS NULL
            AND u.push_on_temp = true AND u.sns_id IS NOT NULL
            AND c.checkin_time > (NOW() - INTERVAL '14 DAYS')
    """.format(finish.strftime('%Y-%m-%d %H:%M:%S'),
               start.strftime('%Y-%m-%d %H:%M:%S')))

    # group device IDs by start time, then send messages
    # materialize the filters so they can be iterated more than once below
    lang_en = list(filter(lambda x: x[1] == 'en', res))
    lang_fr = list(filter(lambda x: x[1] == 'fr', res))
    data = {
        "en": {x: []
               for x in set([z[0].isoformat() for z in lang_en])},
        "fr": {x: []
               for x in set([z[0].isoformat() for z in lang_fr])}
    }
    for x in lang_en:
        data["en"][x[0].isoformat()].append(x[2])
    for x in lang_fr:
        data["fr"][x[0].isoformat()].append(x[2])
    for x in data["en"].keys():
        dt = format_datetime(aniso8601.parse_datetime(x),
                             u"h:mm a 'on' EEEE d MMM")
        notifications.schedule_notifications(
            data["en"][x],
            "❄️ Snow removal scheduled! Move your car before {}".format(dt))
    for x in data["fr"].keys():
        dt = format_datetime(aniso8601.parse_datetime(x),
                             u"H'h'mm', 'EEEE 'le 'd MMM",
                             locale='fr_FR')
        notifications.schedule_notifications(
            data["fr"][x],
            "❄️ Déneigement annoncé ! Déplacez votre véhicule avant {}".format(
                dt))
Example #19
    def post(self):
        r = request.get_json()

        if not r:
            return {'error': 'payload is mandatory'}, HTTPStatus.BAD_REQUEST

        if isinstance(r, dict):
            r = [r]

        bookmarks = []
        now = datetime.datetime.utcnow().isoformat()

        for entity in r:
            if 'url' not in entity:
                return {'error': 'url field is mandatory'}, \
                    HTTPStatus.BAD_REQUEST

            url = urldefrag(entity['url']).url

            if entity.get('read'):
                read = aniso8601.parse_datetime(entity.get('read'))
            else:
                read = None

            if entity.get('title'):
                title = entity.get('title')
            else:
                title = entity.get('url')

            tags = entity.get('tags')

            notes = entity.get('notes')

            bookmark = db.Bookmark(
                user=current_user.id,
                url=url,
                title=title,
                timestamp=aniso8601.parse_datetime(
                    entity.get('timestamp', now)),
                read=read,
                tags=tags,
                notes=notes)

            db.db.session.add(bookmark)
            db.db.session.commit()

            bookmarks.append(bookmark)

            q.enqueue(article.fetch_article, bookmark.id, url)

        res = list(map(lambda x: x.to_dict(), bookmarks))
        return res[0] if len(res) == 1 else res, HTTPStatus.CREATED
Example #20
def parse_to_datetime(dtstr):
    """Converts a value from str to datetime if it matches a pattern

    The string must be formatted as a date or datetime. Does not
    localize the resulting datetime object.
    """
    try:
        if 'T' not in dtstr:
            return aniso8601.parse_datetime(dtstr, delimiter=' ')
        else:
            return aniso8601.parse_datetime(dtstr)
    except ValueError:
        return datetime.datetime.strptime(dtstr, '%Y-%m-%d')
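A usage sketch restating the function so the example is self-contained; the inputs are illustrative:

import datetime
import aniso8601

def parse_to_datetime(dtstr):
    try:
        if 'T' not in dtstr:
            return aniso8601.parse_datetime(dtstr, delimiter=' ')
        return aniso8601.parse_datetime(dtstr)
    except ValueError:
        return datetime.datetime.strptime(dtstr, '%Y-%m-%d')

print(parse_to_datetime("2021-06-01T08:30:00"))  # 'T'-delimited
print(parse_to_datetime("2021-06-01 08:30:00"))  # space-delimited
print(parse_to_datetime("2021-06-01"))           # date-only fallback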
def test_pattern_midnight_change_one_day_one_oclock():
    date_format = "%Y-%m-%dT%H:%M:%SZ"
    start_date = parse_datetime("2015-09-21T00:00:00Z").replace(tzinfo=None)
    end_date = parse_datetime("2015-09-21T00:00:00Z").replace(tzinfo=None)
    weekly_pattern = "1111111"
    time_slots = [{"begin": "18:00", "end": "01:00"}]
    app_periods = get_application_periods_by_pattern(start_date, end_date,
                                                     weekly_pattern,
                                                     time_slots,
                                                     'Europe/Paris')
    eq_(len(app_periods), 1)
    eq_(app_periods[0][0].strftime(date_format), "2015-09-21T16:00:00Z")
    eq_(app_periods[0][1].strftime(date_format), "2015-09-21T23:00:00Z")
Example #22
    def post(self, id):
        update = request.get_json()
        if 'id' in update:
            return {'error': 'Updating id is not allowed'}, \
                HTTPStatus.BAD_REQUEST

        bookmark = db.Bookmark.query \
                              .filter_by(id=id, user=current_user.id) \
                              .first()
        if bookmark is None:
            return {'error': 'Not found'}, HTTPStatus.NOT_FOUND

        if 'url' in update:
            bookmark.url = filter_url(update['url'])
        if 'title' in update:
            bookmark.title = update['title']
        if 'timestamp' in update:
            bookmark.timestamp = aniso8601.parse_datetime(update['timestamp'])
        if 'read' in update:
            if update['read']:
                bookmark.read = aniso8601.parse_datetime(update['read'])
            else:
                bookmark.read = None

        if 'meta' in update:
            bookmark.meta = update['meta'] if update['meta'] else None

        if 'tags' in update:
            bookmark.tags = update['tags']
        if 'parent' in update:
            parent_id = update['parent']
            if parent_id is not None:
                parent = db.Bookmark.query \
                                    .filter_by(id=parent_id,
                                               user=current_user.id) \
                                    .first()

                if parent is None:
                    return {'error': 'parent does not exist'}, \
                        HTTPStatus.BAD_REQUEST

                if is_child(bookmark, parent):
                    return {'error': 'bookmark loops are not allowed'}, \
                        HTTPStatus.BAD_REQUEST
            bookmark.parent_id = parent_id

        db.db.session.add(bookmark)
        db.db.session.commit()

        return bookmark.to_dict(), HTTPStatus.OK
def test_pattern_midnight_change_to_summer():
    date_format = "%Y-%m-%dT%H:%M:%SZ"
    start_date = parse_datetime("2016-03-26T00:00:00Z").replace(tzinfo=None)
    end_date = parse_datetime("2016-03-27T00:00:00Z").replace(tzinfo=None)
    weekly_pattern = "1111111"
    time_slots = [{"begin": "22:00", "end": "02:00"}]
    app_periods = get_application_periods_by_pattern(start_date, end_date, weekly_pattern, time_slots, 'Europe/Paris')
    eq_(len(app_periods), 2)
    #Difference of two hours between UTC and Europe/Paris
    eq_(app_periods[0][0].strftime(date_format), "2016-03-26T21:00:00Z")
    eq_(app_periods[0][1].strftime(date_format), "2016-03-27T01:00:00Z")
    #Difference of one hour between UTC and Europe/Paris
    eq_(app_periods[1][0].strftime(date_format), "2016-03-27T20:00:00Z")
    eq_(app_periods[1][1].strftime(date_format), "2016-03-28T00:00:00Z")
def test_pattern_midnight_change_to_winter_2_oclock():
    date_format = "%Y-%m-%dT%H:%M:%SZ"
    start_date = parse_datetime("2015-10-24T00:00:00Z").replace(tzinfo=None)
    end_date = parse_datetime("2015-10-24T00:00:00Z").replace(tzinfo=None)
    weekly_pattern = "1111111"
    time_slots = [{"begin": "22:00", "end": "02:00"}]
    app_periods = get_application_periods_by_pattern(start_date, end_date,
                                                     weekly_pattern,
                                                     time_slots,
                                                     'Europe/Paris')
    eq_(len(app_periods), 1)
    #Difference of one hour for begin and two hours for end between UTC and Europe/Paris
    eq_(app_periods[0][0].strftime(date_format), "2015-10-24T20:00:00Z")
    eq_(app_periods[0][1].strftime(date_format), "2015-10-25T01:00:00Z")
def test_solve_event():
    assert event_domain.solve_event(
        event_id='538745942',
        affectation=1,
        analyst_email='*****@*****.**',
        date=parse_datetime('2019-12-09T05:00:00.000Z'))
    event = event_domain.get_event('538745942')
    assert event['historic_state'][-1]['state'] == 'SOLVED'
    with pytest.raises(EventAlreadyClosed):
        assert event_domain.solve_event(
            event_id='538745942',
            affectation=1,
            analyst_email='*****@*****.**',
            date=parse_datetime('2019-12-09T05:00:00.000Z'))
Example #26
def test_get_utc_datetime_with_dst_transition_in_between_valid():
    with chaos.app.app_context():
        date_format = "%Y-%m-%dT%H:%M:%SZ"
        #Before DST of end March difference is one hour
        str_date_time = "2015-03-28T13:15:00Z"
        date_time = parse_datetime(str_date_time).replace(tzinfo=None)
        utc_date_time = chaos.utils.get_utc_datetime_by_zone(date_time, 'Europe/Paris')
        eq_(utc_date_time.strftime(date_format), "2015-03-28T12:15:00Z")
        #DST began on Sun 29-Mar-2015 at 02:00:00 A.M. when local clocks were set forward 1 hour
        #After DST of end March difference is two hours
        str_date_time = "2015-03-29T13:15:00Z"
        date_time = parse_datetime(str_date_time).replace(tzinfo=None)
        utc_date_time = chaos.utils.get_utc_datetime_by_zone(date_time, 'Europe/Paris')
        eq_(utc_date_time.strftime(date_format), "2015-03-29T11:15:00Z")
Example #27
 def get_shares(self, **kwargs):
     if kwargs.get("id"):
         data = self.conn._get("/shared_files/{0}".format(kwargs["id"]),
                               params=kwargs)
     else:
         data = self.conn._get("/shared_files", params=kwargs)
     if data.get("shared_file") and data["shared_file"]["expires_at"] \
        and data["shared_file"]["expires_at"] != 0:
         ex = data["shared_file"]["expires_at"]
         data["shared_file"]["expires_at"] = aniso8601.parse_datetime(ex)
     elif data.get("shared_files"):
         for x in data.get("shared_files"):
             if x["expires_at"] and x["expires_at"] != 0:
                 x["expires_at"] = aniso8601.parse_datetime(x["expires_at"])
     return data.get("shared_file") or data.get("shared_files")
def test_pattern_midnight_change_several_days_with_last_day_valid_():
    date_format = "%Y-%m-%dT%H:%M:%SZ"
    start_date = parse_datetime("2015-09-22T00:00:00Z").replace(tzinfo=None)
    end_date = parse_datetime("2015-09-24T00:00:00Z").replace(tzinfo=None)
    weekly_pattern = "1110111"
    time_slots = [{"begin": "18:00", "end": "03:00"}]
    app_periods = get_application_periods_by_pattern(start_date, end_date,
                                                     weekly_pattern,
                                                     time_slots,
                                                     'Europe/Paris')
    eq_(len(app_periods), 2)
    eq_(app_periods[0][0].strftime(date_format), "2015-09-22T16:00:00Z")
    eq_(app_periods[0][1].strftime(date_format), "2015-09-23T01:00:00Z")
    eq_(app_periods[1][0].strftime(date_format), "2015-09-23T16:00:00Z")
    eq_(app_periods[1][1].strftime(date_format), "2015-09-24T01:00:00Z")
def test_pattern_with_multi_periods_in_result():
    date_format = "%Y-%m-%dT%H:%M:%SZ"
    start_date = parse_datetime("2015-02-02T06:52:00Z").replace(tzinfo=None)
    end_date = parse_datetime("2015-02-15T23:52:00Z").replace(tzinfo=None)
    weekly_pattern = "1111111"
    time_slots = [{"begin": "07:45", "end": "09:30"}]
    app_periods = get_application_periods_by_pattern(start_date, end_date,
                                                     weekly_pattern,
                                                     time_slots,
                                                     'Europe/Paris')
    eq_(len(app_periods), 14)
    eq_(app_periods[0][0].strftime(date_format), "2015-02-02T06:45:00Z")
    eq_(app_periods[0][1].strftime(date_format), "2015-02-02T08:30:00Z")
    eq_(app_periods[13][0].strftime(date_format), "2015-02-15T06:45:00Z")
    eq_(app_periods[13][1].strftime(date_format), "2015-02-15T08:30:00Z")
Example #30
 def get_shares(self, **kwargs):
     if kwargs.get("id"):
         data = self.conn._get("/shared_files/{0}".format(kwargs["id"]),
                               params=kwargs)
     else:
         data = self.conn._get("/shared_files", params=kwargs)
     if data.get("shared_file") and data["shared_file"]["expires_at"] \
        and data["shared_file"]["expires_at"] != 0:
         ex = data["shared_file"]["expires_at"]
         data["shared_file"]["expires_at"] = aniso8601.parse_datetime(ex)
     elif data.get("shared_files"):
         for x in data.get("shared_files"):
             if x["expires_at"] and x["expires_at"] != 0:
                 x["expires_at"] = aniso8601.parse_datetime(x["expires_at"])
     return data.get("shared_file") or data.get("shared_files")
Example #31
    def deserialize(self, payload):
        value = payload.get(self.name, None)

        if value is None and self.required:
            raise ValueError('expected field %s to be present in payload' %
                             (self.name))
        elif value is not None and not isinstance(value, string_types):
            raise TypeError('expected field %s to be a string, but it is '
                            '%r: %r' % (self.name, type(value), value))

        if value is None:
            return None

        value = aniso8601.parse_datetime(value)

        if value.tzinfo is not None:
            utc_offset = value.tzinfo.utcoffset(value)
            dst = value.tzinfo.dst(value)

            if (utc_offset is not None and
                utc_offset != datetime.timedelta(seconds=0)) or \
                (dst is not None and
                 dst != datetime.timedelta(seconds=0)):
                raise ValueError('expected date/time to be UTC based')

            value = value.replace(tzinfo=None)

        return value
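A small sketch of the UTC-only rule applied to a few sample strings; the helper below is illustrative, not the field class itself:

import datetime
import aniso8601

def is_utc_based(value):
    parsed = aniso8601.parse_datetime(value)
    if parsed.tzinfo is None:
        return True
    zero = datetime.timedelta(0)
    return ((parsed.tzinfo.utcoffset(parsed) or zero) == zero and
            (parsed.tzinfo.dst(parsed) or zero) == zero)

print(is_utc_based("2021-01-01T12:00:00Z"))       # True
print(is_utc_based("2021-01-01T12:00:00+00:00"))  # True
print(is_utc_based("2021-01-01T12:00:00+02:00"))  # False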
Example #32
def _make_datetime(value):
    """Helper function for `make_datetime()`.

    Tries to convert the given value to a
    :class:`datetime.datetime`. But, unlike make_datetime(), if no
    timezone is given, makes a naive `datetime.datetime`.

    Strings will be parsed as ISO 8601 timestamps.

    If a number is provided, it will be interpreted as a UNIX
    timestamp, which by definition is UTC.

    If a `dict` is provided, does `datetime.datetime(**value)`.

    If a `tuple` or a `list` is provided, does
    `datetime.datetime(*value)`. Uses the timezone in the tuple or
    list if provided.

    :param value: something to convert
    :type value: str | unicode | float | int | :class:`datetime.datetime` | dict | list | tuple
    :return: the value after conversion
    :rtype: :class:`datetime.datetime`
    :raises: ValueError | TypeError
    """

    if isinstance(value, str):
        try:
            return aniso8601.parse_datetime(value)
        except Exception as e:
            raise ValueError(
                f"Conversion to datetime.datetime failed. Could not "
                f"parse the given string as an ISO 8601 timestamp: "
                f"{repr(value)}\n\n"
                f"{repr(e)}")

    try:
        if isinstance(value, datetime.datetime):
            return value
        elif isinstance(value, dict):
            tzinfo = value.pop('tzinfo', None)
            if tzinfo:
                return tzinfo.localize(datetime.datetime(**value))
            else:
                return datetime.datetime(**value)
        # struct_time does not preserve millisecond accuracy per
        # TinCan spec, so this is disabled to discourage its use.
        # elif isinstance(value, struct_time):
        #     posix = mktime(value)
        #     return datetime.datetime.utcfromtimestamp(posix).replace(tzinfo=utc)
        elif isinstance(value, (tuple, list)):
            return tuple_to_datetime(value)
        else:
            return datetime.datetime.utcfromtimestamp(value).replace(
                tzinfo=utc)
    except Exception as e:
        msg = (
            f"Could not convert the given value of type '{value.__class__.__name__}' to a "
            f"datetime.datetime: {repr(value)}\n\n"
            f"{repr(e)}")
        raise TypeError(msg) if isinstance(e, TypeError) else ValueError(msg)
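A hedged illustration of the main branches using only the standard library and aniso8601 (tuple_to_datetime and the module's utc constant are not reproduced here):

import datetime
import aniso8601

# str branch: ISO 8601 string, aware or naive depending on the input.
print(aniso8601.parse_datetime("2021-07-01T12:00:00Z"))

# numeric branch: UNIX timestamp interpreted as UTC.
print(datetime.datetime.utcfromtimestamp(1625140800).replace(
    tzinfo=datetime.timezone.utc))

# dict branch: keyword arguments passed straight to datetime.datetime().
print(datetime.datetime(**{"year": 2021, "month": 7, "day": 1}))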
Example #33
    def deserialize(self, payload):
        value = payload.get(self.name, None)

        if value is None and self.required:
            raise ValueError('expected field %s to be present in payload' %
                             (self.name))
        elif value is not None and not isinstance(value, string_types):
            raise TypeError('expected field %s to be a string, but it is '
                            '%r: %r' % (self.name, type(value), value))

        if value is None:
            return None

        value = aniso8601.parse_datetime(value)

        if value.tzinfo is not None:
            utc_offset = value.tzinfo.utcoffset(value)
            dst = value.tzinfo.dst(value)

            if (utc_offset is not None and
                utc_offset != datetime.timedelta(seconds=0)) or \
                (dst is not None and
                 dst != datetime.timedelta(seconds=0)):
                raise ValueError('expected date/time to be UTC based')

            value = value.replace(tzinfo=None)

        return value
Example #34
 def test_update_with_ts(self):
     id = clientid
     self.session.get_client_by_id.return_value = deepcopy(retrieved_gk_client)
     self.session.insert_client = mock.MagicMock()
     attrs = {'created': '2000-01-01T00:00:00+01:00'}
     res = self.controller.update(id, attrs, retrieved_user, [])
     assert res['created'] == parse_datetime(date_created)
Example #35
def test_get_utc_datetime_valid():
    with chaos.app.app_context():
        date_format = "%Y-%m-%dT%H:%M:%SZ"
        str_date_time = "2015-04-10T13:15:00Z"
        date_time = parse_datetime(str_date_time).replace(tzinfo=None)
        utc_date_time = chaos.utils.get_utc_datetime_by_zone(date_time, 'Europe/Paris')
        eq_(utc_date_time.strftime(date_format), "2015-04-10T11:15:00Z")
Example #36
    def from_row(cls, row):
        def state_from_row(state, row):
            if state == u"pending":
                return Pending(counter=row[3])
            if state == u"double-spend":
                return DoubleSpend(parse_datetime(row[0], delimiter=u" "), )
            if state == u"redeemed":
                return Redeemed(
                    parse_datetime(row[0], delimiter=u" "),
                    row[1],
                    row[2],
                )
            raise ValueError("Unknown voucher state {}".format(state))

        number, created, expected_tokens, state = row[:4]

        return cls(
            number=number,
            expected_tokens=expected_tokens,
            # All Python datetime-based date/time libraries fail to handle
            # leap seconds.  This parse call might raise an exception if the
            # value represents a leap second.  However, since we also use
            # Python to generate the data in the first place, it should never
            # represent a leap second... I hope.
            created=parse_datetime(created, delimiter=u" "),
            state=state_from_row(state, row[4:]),
        )
def test_pattern_midnight_change_several_days_with_last_day_valid():
    date_format = "%Y-%m-%dT%H:%M:%SZ"
    start_date = parse_datetime("2015-09-21T00:00:00Z").replace(tzinfo=None)
    end_date = parse_datetime("2015-09-26T00:00:00Z").replace(tzinfo=None)
    weekly_pattern = "1101010"
    time_slots = [{"begin": "18:00", "end": "03:00"}]
    app_periods = get_application_periods_by_pattern(start_date, end_date, weekly_pattern, time_slots, 'Europe/Paris')
    eq_(len(app_periods), 4)
    eq_(app_periods[0][0].strftime(date_format), "2015-09-21T16:00:00Z")
    eq_(app_periods[0][1].strftime(date_format), "2015-09-22T01:00:00Z")
    eq_(app_periods[1][0].strftime(date_format), "2015-09-22T16:00:00Z")
    eq_(app_periods[1][1].strftime(date_format), "2015-09-23T01:00:00Z")
    eq_(app_periods[2][0].strftime(date_format), "2015-09-24T16:00:00Z")
    eq_(app_periods[2][1].strftime(date_format), "2015-09-25T01:00:00Z")
    eq_(app_periods[3][0].strftime(date_format), "2015-09-26T16:00:00Z")
    eq_(app_periods[3][1].strftime(date_format), "2015-09-27T01:00:00Z")
Example #38
def _parse_datetime(value):
    try:
        return aniso8601.parse_datetime(value)
    except ValueError:
        return aniso8601.parse_date(value)

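Roughly how the fallback behaves (a sketch; the inputs are made up):

import aniso8601

def parse_datetime_or_date(value):
    try:
        return aniso8601.parse_datetime(value)
    except ValueError:
        return aniso8601.parse_date(value)

print(parse_datetime_or_date("2021-03-01T09:00:00Z"))  # datetime.datetime
print(parse_datetime_or_date("2021-03-01"))            # datetime.date fallback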
Example #39
    def _get_passages(self, xml, route_point):
        ns = {'siri': 'http://www.siri.org.uk/siri'}
        try:
            root = et.fromstring(xml)
        except et.ParseError as e:
            logging.getLogger(__name__).exception("invalid xml")
            raise RealtimeProxyError('invalid xml')

        stop = route_point.fetch_stop_id(self.object_id_tag)
        line = route_point.fetch_line_id(self.object_id_tag)
        route = route_point.fetch_route_id(self.object_id_tag)
        next_passages = []
        for visit in root.findall('.//siri:MonitoredStopVisit', ns):
            cur_stop = visit.find('.//siri:StopPointRef', ns).text
            if stop != cur_stop:
                continue
            cur_line = visit.find('.//siri:LineRef', ns).text
            if line != cur_line:
                continue
            cur_route = visit.find('.//siri:DirectionName', ns).text
            if route != cur_route:
                continue
            cur_destination = visit.find('.//siri:DestinationName', ns).text
            cur_dt = visit.find('.//siri:ExpectedDepartureTime', ns).text
            cur_dt = aniso8601.parse_datetime(cur_dt)
            next_passages.append(RealTimePassage(cur_dt, cur_destination))

        return next_passages
Example #40
def recache_repo(cacheing_session, cache, name):
    api_r = cacheing_session.get("https://api.github.com/repos/{0}/{1}/commits".format(*name.split("/", 1)))
    try:
        raw = api_r.json()
    except ValueError:
        print("warning: GitHub API returned invalid JSON for {0}.".format(name))
        return []
    pool = set(a["sha1"] for a in cache if a["name"] == name)
    for co in raw[0:30]:
        if co["sha"] in pool:
            continue
        repo_baseurl = "https://github.com/{0}".format(name)
        DEFAULT_AUTHOR = {
            "html_url": "",
            "avatar_url": "assets/images/unknown_author.png",
            "login": co["commit"]["author"]["name"]
        }
        if not co["author"]:
            co["author"] = DEFAULT_AUTHOR
        if not co["committer"]:
            co["committer"] = DEFAULT_AUTHOR
        dt = aniso8601.parse_datetime(co["commit"]["author"]["date"]).replace(tzinfo=None)
        cache.append({
            "sha1": co["sha"],
            "author-page": co["author"]["html_url"],
            "avatar-url": co["author"]["avatar_url"],
            "author-name": co["author"]["login"],
            "github": repo_baseurl,
            "name": name,
            "author-date": co["commit"]["author"]["date"],
            "commit-message": co["commit"]["message"],
            "committer-is-author": co["author"]["login"] == co["committer"]["login"],
            "committer-name": co["committer"]["login"],
            "_nsam-sortkey": time.mktime(dt.timetuple()) if sys.version_info.major == 3 else (dt - datetime.datetime(1970, 1, 1)).total_seconds()
        })
Example #41
    def _get_passages(self, tree, ns, route_point):

        stop = route_point.fetch_stop_id(self.object_id_tag)
        line = route_point.fetch_line_id(self.object_id_tag)
        route = route_point.fetch_route_id(self.object_id_tag)
        next_passages = []
        for visit in tree.findall('.//siri:MonitoredStopVisit', ns):
            cur_stop = visit.find('.//siri:StopPointRef', ns).text
            if stop != cur_stop:
                continue
            cur_line = visit.find('.//siri:LineRef', ns).text
            if line != cur_line:
                continue
            cur_route = visit.find('.//siri:DirectionName', ns).text
            if route != cur_route:
                continue
            # TODO? we should ignore MonitoredCall with a DepartureStatus set to "Cancelled"
            cur_destination = visit.find('.//siri:DestinationName', ns).text
            cur_dt = visit.find('.//siri:ExpectedDepartureTime', ns).text
            # TODO? fallback on siri:AimedDepartureTime if there is no ExpectedDepartureTime
            # In that case we may want to set realtime to False
            cur_dt = aniso8601.parse_datetime(cur_dt)
            next_passages.append(RealTimePassage(cur_dt, cur_destination))

        return next_passages
Example #42
def test_timestamp_parsing():
    parser = LogParser()
    parser.read(StringIO(INITIAL_GAME))
    parser.flush()

    assert parser.games[0].packets[0].ts == time(2, 59, 14, 608862)

    # Test with an initial datetime
    parser2 = LogParser()
    parser2._current_date = datetime(2015, 1, 1)
    parser2.read(StringIO(INITIAL_GAME))
    parser2.flush()

    assert parser2.games[0].packets[0].ts == datetime(2015, 1, 1, 2, 59, 14,
                                                      608862)

    # Same test, with timezone
    parser2 = LogParser()
    parser2._current_date = parse_datetime("2015-01-01T02:58:00+0200")
    parser2.read(StringIO(INITIAL_GAME))
    parser2.flush()

    ts = parser2.games[0].packets[0].ts
    assert ts.year == 2015
    assert ts.hour == 2
    assert ts.second == 14
    assert ts.tzinfo
    assert ts.utcoffset() == timedelta(hours=2)
Example #43
def apply_edition_date(title, submitted_at):
    dt = aniso8601.parse_datetime(submitted_at)

    title['edition_date'] =  dt.strftime('%Y-%m-%d')
    title['last_application'] = submitted_at

    return title
Example #44
 def episodes(self, show_id):
     r = requests.get(self._url_episodes.format(show_id=show_id))
     if r.status_code != 200:
         return
     data = r.json()
     episodes = []
     i = 0
     for episode in data:
         if episode['season'] == 0:
             continue
         if episode['number'] == 0:
             continue
         i += 1
         if episode['airstamp']:
             episode['airstamp'] = aniso8601.parse_datetime(episode['airstamp'])
             if episode['airstamp'].tzinfo:
                 episode['airstamp'] = episode['airstamp'].astimezone(tz.tzutc())
         episodes.append({
             'number': i,
             'title': episode['name'],
             'season': episode['season'],
             'episode': episode['number'],
             'air_date': episode['airstamp'].date().isoformat() if episode['airstamp'] else None,
             'air_time': episode['airstamp'].time().isoformat() if episode['airstamp'] else None,
             'description': None if not episode['summary'] else {
                 'text': re.sub('<[^>]*>', '', episode['summary']),
                 'title': 'TVmaze',
                 'url': episode['url'],
             },
         })
     return episodes
Example #45
    def _alexa_request(self, verify=True):
        raw_body = flask_request.data
        alexa_request_payload = json.loads(raw_body)

        if verify:
            cert_url = flask_request.headers['Signaturecertchainurl']
            signature = flask_request.headers['Signature']

            # load certificate - this verifies the certificate url and format under the hood
            cert = verifier.load_certificate(cert_url)
            # verify signature
            verifier.verify_signature(cert, signature, raw_body)
            # verify timestamp
            timestamp = aniso8601.parse_datetime(alexa_request_payload['request']['timestamp'])
            if not current_app.debug or self.ask_verify_timestamp_debug:
                verifier.verify_timestamp(timestamp)
            # verify application id
            try:
                application_id = alexa_request_payload['session']['application']['applicationId']
            except KeyError:
                application_id = alexa_request_payload['context'][
                    'System']['application']['applicationId']
            if self.ask_application_id is not None:
                verifier.verify_application_id(application_id, self.ask_application_id)

        return alexa_request_payload
Example #46
def get_filename_from_metadata(meta_data,
                               file_type="xml",
                               filename_mask=default_filename_mask):
    """Convert metadata to filename by using filename mask and file type"""
    # Separators
    file_type_separator = "."
    meta_separator = "_"
    entity_and_area_separator = "-"

    # Remove Model. from dictionary as python string format can't use . in variable name
    meta_data = {key.split(".")[1]: meta_data[key] for key in meta_data}

    # DateTime fields from text to DateTime
    DateTime_fields = ["scenarioTime", 'created']
    for field in DateTime_fields:
        meta_data[field] = aniso8601.parse_datetime(meta_data[field])

    # Integers to integers
    meta_data["version"] = int(meta_data["version"])

    # Add metadata to file name string
    file_name = filename_mask.format(**meta_data)

    # Add file type to file name string
    file_name = file_type_separator.join([file_name, file_type])

    return file_name
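A sketch of calling it with hypothetical metadata and a hypothetical mask (default_filename_mask is not shown above), reduced to the core steps:

import aniso8601

meta_data = {
    "Model.scenarioTime": "2021-06-01T10:30:00Z",
    "Model.created": "2021-06-01T11:00:00Z",
    "Model.version": "2",
    "Model.entity": "TSO-A",
}
mask = "{scenarioTime:%Y%m%dT%H%MZ}_{entity}_{version:03d}"

# Strip the "Model." prefix, parse the datetime fields, cast the version.
meta = {key.split(".")[1]: value for key, value in meta_data.items()}
for field in ("scenarioTime", "created"):
    meta[field] = aniso8601.parse_datetime(meta[field])
meta["version"] = int(meta["version"])

print(".".join([mask.format(**meta), "xml"]))
# 20210601T1030Z_TSO-A_002.xml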
Example #47
def test_get_utc_datetime_with_dst_transition_in_between_valid():
    with chaos.app.app_context():
        date_format = "%Y-%m-%dT%H:%M:%SZ"
        #Before DST of end March difference is one hour
        str_date_time = "2015-03-28T13:15:00Z"
        date_time = parse_datetime(str_date_time).replace(tzinfo=None)
        utc_date_time = chaos.utils.get_utc_datetime_by_zone(
            date_time, 'Europe/Paris')
        eq_(utc_date_time.strftime(date_format), "2015-03-28T12:15:00Z")
        #DST began on Sun 29-Mar-2015 at 02:00:00 A.M. when local clocks were set forward 1 hour
        #After DST of end March difference is two hours
        str_date_time = "2015-03-29T13:15:00Z"
        date_time = parse_datetime(str_date_time).replace(tzinfo=None)
        utc_date_time = chaos.utils.get_utc_datetime_by_zone(
            date_time, 'Europe/Paris')
        eq_(utc_date_time.strftime(date_format), "2015-03-29T11:15:00Z")
Example #48
def test_timestamp_parsing():
	parser = LogParser()
	parser.read(StringIO(INITIAL_GAME))
	parser.flush()

	assert parser.games[0].packets[0].ts == time(2, 59, 14, 608862)

	# Test with an initial datetime
	parser2 = LogParser()
	parser2._current_date = datetime(2015, 1, 1)
	parser2.read(StringIO(INITIAL_GAME))
	parser2.flush()

	assert parser2.games[0].packets[0].ts == datetime(2015, 1, 1, 2, 59, 14, 608862)

	# Same test, with timezone
	parser2 = LogParser()
	parser2._current_date = parse_datetime("2015-01-01T02:58:00+0200")
	parser2.read(StringIO(INITIAL_GAME))
	parser2.flush()

	ts = parser2.games[0].packets[0].ts
	assert ts.year == 2015
	assert ts.hour == 2
	assert ts.second == 14
	assert ts.tzinfo
	assert ts.utcoffset() == timedelta(hours=2)
Example #49
 def __call__(self, item, field, value):
     if value:
         setattr(
             item,
             self.attribute,
             parse_datetime(value).replace(tzinfo=None)
         )
     else:
         setattr(item, self.attribute, None)
Example #50
 def _to_python(self):
     '''A :class:`datetime.datetime` object is returned.'''
     # don't parse data that is already native
     if isinstance(self.data, datetime.datetime):
         return self.data
     elif self.format is None:
         # parse as iso8601
         return aniso8601.parse_datetime(self.data)
     else:
         return datetime.datetime.strptime(self.data, self.format)
Example #51
def datetime_from_ISO_string(ISO_string):
    """
    A utility function to convert an ISO 8601 formatted datetime string to a Python datetime object.
    Input: The ISO formatted string
    Output: The Python datetime object, or None if there is an error in conversion.
    """
    try:
        return aniso8601.parse_datetime(ISO_string)
    except:
        return None
Example #52
    def get_authorities(self, **kwargs):
        """
        Get certificate authority metadata.

        :param str id: (optional) if provided, filter by this cert ID
        :returns: list of CertificateAuthority dicts
        """
        if kwargs.get("id"):
            data = self.conn._get("/authorities/{0}".format(kwargs["id"]),
                                  params=kwargs)
        else:
            data = self.conn._get("/authorities", params=kwargs)
        if data.get("authority"):
            ex = data["authority"]["expiry"]
            data["authority"]["expiry"] = aniso8601.parse_datetime(ex)
        elif data.get("authorities"):
            for x in data.get("authorities"):
                x["expiry"] = aniso8601.parse_datetime(x["expiry"])
        return data.get("authority") or data.get("authorities")
def test_validate_upload_date():
	"""
	Verifies the upload date / match start validation algorithm.
	The match start is never supposed to be any later than the upload date, so
	if it is, the match start is set to the upload date -- but the timezone
	remains untouched.
	"""

	from aniso8601 import parse_datetime
	from hsreplaynet.games.processing import get_valid_match_start

	values = ((
		# MS greater than UD, same timezone, expecting UD
		"2016-01-01T10:00:00Z",  # Match start
		"2016-01-01T01:01:01Z",  # Upload date
		"2016-01-01T01:01:01Z",  # Expected result
	), (
		# MS lesser than UD, expecting MS
		"2016-01-01T10:00:00+0200",
		"2016-01-01T10:00:00+0100",
		"2016-01-01T10:00:00+0200"
	), (
		# MS greater than UD, different timezone, expecting modified UD
		"2016-01-01T10:00:00+0300",
		"2016-01-01T10:00:00+0400",
		"2016-01-01T09:00:00+0300"
	), (
		# MS greater than UD, different timezone, expecting modified UD
		"2018-01-01T10:00:00-0500",
		"2016-01-01T10:00:00+0500",
		"2016-01-01T00:00:00-0500"
	))

	for match_start, upload_date, expected in values:
		match_start = parse_datetime(match_start)
		upload_date = parse_datetime(upload_date)
		expected = parse_datetime(expected)

		ret = get_valid_match_start(match_start, upload_date)
		# assert expected.tzinfo == match_start.tzinfo
		assert ret.tzinfo == match_start.tzinfo
		assert ret == expected
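A sketch of what get_valid_match_start plausibly does, inferred only from the docstring and the expected values above (not the project's actual implementation):

from aniso8601 import parse_datetime

def get_valid_match_start_sketch(match_start, upload_date):
    # Clamp the match start to the upload date, but keep the original tzinfo.
    if match_start > upload_date:
        return upload_date.astimezone(match_start.tzinfo)
    return match_start

ms = parse_datetime("2016-01-01T10:00:00+0300")
ud = parse_datetime("2016-01-01T10:00:00+0400")
print(get_valid_match_start_sketch(ms, ud))  # 2016-01-01 09:00:00+03:00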
Example #54
def parse_datetime_iso8601(datetime):
    """Parse a string in ISO8601 format."""
    if not datetime:
        return None

    try:
        dt = parse_datetime(datetime)
    except ValueError:
        return None
    else:
        return dt
Example #55
def _parse_interval(value):
    """Do some nasty try/except voodoo to get some sort of datetime
    object(s) out of the string.
    """
    try:
        return sorted(aniso8601.parse_interval(value))
    except ValueError:
        try:
            return aniso8601.parse_datetime(value), None
        except ValueError:
            return aniso8601.parse_date(value), None
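A usage sketch showing the three possible outcomes (interval, datetime, date); the wrapper below simply restates the function so it runs stand-alone:

import aniso8601

def parse_interval_or_point(value):
    try:
        return sorted(aniso8601.parse_interval(value))
    except ValueError:
        try:
            return aniso8601.parse_datetime(value), None
        except ValueError:
            return aniso8601.parse_date(value), None

print(parse_interval_or_point("2021-01-01T00:00:00Z/2021-01-02T00:00:00Z"))
print(parse_interval_or_point("2021-01-01T08:00:00Z"))
print(parse_interval_or_point("2021-01-01"))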
Example #56
    def clean(self, data):
        date_string = super(DateTimeField, self).clean(data)

        try:
            value = aniso8601.parse_datetime(date_string)
        except (ValueError, NotImplementedError):
            raise self.error('invalid_format')

        if self.timezone_required and value.tzinfo is None:
            raise self.error('no_timezone')

        return value
Example #57
def datetime_from_iso8601(datetime_str):
    """Turns an ISO8601 formatted date into a datetime object.

    Example::

        inputs.datetime_from_iso8601("2012-01-01T23:30:00+02:00")

    :param datetime_str: The ISO8601-complying string to transform
    :type datetime_str: str
    :return: A datetime
    """
    return aniso8601.parse_datetime(datetime_str)
Example #58
 def orders():
     orders = CoinbaseWallet.get_json_data('orders')['orders']
     orders_list = []
     for x in orders:
         x = x['order']
         coins = int(x['total_btc']['cents'])/100000000.0
         dt = parse_datetime(x['created_at']).utctimetuple()
         ds = strftime("%a, %d %b %Y", dt)
         ts = strftime("%H:%M:%S", dt)
         if x['status'] == 'completed':
             orders_list.append([x['id'], coins, ds, ts])
     return orders_list
Example #59
def parse_as_iso(string):
    try:
        return aniso8601.parse_datetime(string)
    except Exception:
        try:
            return aniso8601.parse_date(string)
        except Exception as err:
            printf(
                'Can\'t interpret "{}" as an ISO-8601 time: {}',
                string, err,
                file=sys.stderr,
            )
            sys.exit(1)
Example #60
    def get(self, **kwargs):
        """
        Get backup metadata.

        :param str id: (optional) if provided, filter by this backup ID
        :param str time: (optional) if provided w/ID, filter by backup time
        :returns: list of Backup dicts
        """
        if kwargs.get("id") and kwargs.get("time"):
            full_id = kwargs["id"] + "/" + kwargs["time"]
            data = self.conn._get("/backups/{0}".format(full_id))
        elif kwargs.get("id"):
            data = self.conn._get("/backups/{0}".format(kwargs["id"]))
        else:
            data = self.conn._get("/backups")
        if data.get("backup"):
            t = data["backup"]["time"]
            data["backup"]["time"] = aniso8601.parse_datetime(t)
        elif data.get("backups"):
            for x in data.get("backups"):
                x["time"] = aniso8601.parse_datetime(x["time"])
        return data.get("backup") or data.get("backups")