Пример #1
0
def validate(new, old, userctx, secobj):
    """
    Check a flight document against the JSON schema, then enforce the
    approval rules: only managers may approve documents, and only
    managers may edit previously saved documents.
    """
    global schema
    # Lazily load and cache the schema on first use.
    if not schema:
        schema = read_json_schema("flight.json")
    validate_doc(new, schema)

    roles = userctx['roles']
    if '_admin' in roles:
        # Admins bypass all the role checks below.
        return

    is_manager = 'manager' in roles
    if new['approved'] and not is_manager:
        raise UnauthorizedError("Only managers may approve documents.")

    # ``old`` is truthy only when an existing document is being edited.
    if old and not is_manager:
        raise UnauthorizedError("Only managers may edit documents.")

    window_start = rfc3339_to_timestamp(new['start'])
    window_end = rfc3339_to_timestamp(new['end'])
    launch_time = rfc3339_to_timestamp(new['launch']['time'])
    if window_start > window_end:
        raise ForbiddenError("Launch window may not end before it starts.")
    if window_end - window_start > 7 * 24 * 3600:
        raise ForbiddenError("Launch window may not be greater than one week"
                " (speak to an admin if you have a special requirement).")
    if not window_start <= launch_time < window_end:
        raise ForbiddenError("Launch time must be within launch window.")

    if 'payloads' in new:
        payloads = new['payloads']
        if len(payloads) != len(set(payloads)):
            raise ForbiddenError("Duplicate entries in payloads list")
Пример #2
0
    def __init__(self, event):
        """Wrap a calendar API event dict in attribute form.

        Optional fields ('location', 'htmlLink', 'description') default to
        None when absent; 'id', 'end', 'start', 'summary' and 'status' are
        required and raise KeyError when missing.
        """
        # dict.get already returns None for a missing key, so the three
        # repeated try/except KeyError blocks were unnecessary.
        self.location = event.get('location')
        self.id = event['id']
        self.html_link = event.get('htmlLink')

        # 'dateTime' values are RFC 3339 strings; convert once to UNIX
        # timestamps and naive local datetimes, then derive the parts.
        self.end_timestamp = rfc.rfc3339_to_timestamp(event['end']['dateTime'])
        self.start_timestamp = rfc.rfc3339_to_timestamp(event['start']['dateTime'])
        self.start_datetime = datetime.datetime.fromtimestamp(self.start_timestamp)
        self.end_datetime = datetime.datetime.fromtimestamp(self.end_timestamp)
        self.start_year = self.start_datetime.year
        self.start_month = self.start_datetime.month
        self.start_day = self.start_datetime.day
        self.start_time = self._format_time(self.start_datetime)
        self.end_year = self.end_datetime.year
        self.end_month = self.end_datetime.month
        self.end_day = self.end_datetime.day
        self.end_time = self._format_time(self.end_datetime)
        self.summary = event['summary']

        self.description = event.get('description')

        self.status = event['status']
Пример #3
0
    def _generate_query(query: dict):
        """Translate a flat query dict into an Elasticsearch query body.

        'dateFrom'/'dateTo' keys become a range filter on 'date' (values
        converted from RFC 3339 to timestamps); every other key becomes a
        match clause.  An empty/falsy query yields match_all.
        """
        if not query:
            return {'match_all': {}}

        date_range = dict()
        matches = dict()

        for field, val in query.items():
            if field == 'dateFrom':
                date_range['gte'] = rfc3339_to_timestamp(val)
            elif field == 'dateTo':
                date_range['lte'] = rfc3339_to_timestamp(val)
            else:
                matches[field] = val

        es_query = {'bool': {}}

        if matches:
            es_query['bool']['must'] = [
                {'match': {field: val}} for field, val in matches.items()
            ]

        if date_range:
            es_query['bool']['filter'] = {'range': {'date': date_range}}

        return es_query
async def test_get_non_existant(jp_fetch, labserverapp):
    """Round-trip a previously unknown workspace and check timestamps grow."""
    id = "foo"

    # First GET returns a (default) document for the unknown id.
    resp = await jp_fetch("lab", "api", "workspaces", id)
    validate_request(resp)
    workspace = json.loads(resp.body.decode())

    # Save it straight back.
    resp = await jp_fetch("lab", "api", "workspaces", id, method="PUT", body=json.dumps(workspace))
    validate_request(resp)

    resp = await jp_fetch("lab", "api", "workspaces", id)
    validate_request(resp)
    workspace = json.loads(resp.body.decode())
    first_metadata = workspace["metadata"]
    first_created = rfc3339_to_timestamp(first_metadata["created"])
    first_modified = rfc3339_to_timestamp(first_metadata["last_modified"])

    # A second save must bump last_modified but not created.
    resp = await jp_fetch("lab", "api", "workspaces", id, method="PUT", body=json.dumps(workspace))
    validate_request(resp)

    resp = await jp_fetch("lab", "api", "workspaces", id)
    validate_request(resp)
    workspace = json.loads(resp.body.decode())
    second_metadata = workspace["metadata"]
    second_created = rfc3339_to_timestamp(second_metadata["created"])
    second_modified = rfc3339_to_timestamp(second_metadata["last_modified"])

    assert first_created <= second_created
    assert first_modified < second_modified
Пример #5
0
    def test_now(self):
        """UTC-offset timestamps are 'Z'-suffixed and round-trip to now."""
        stamp = strict_rfc3339.now_to_rfc3339_utcoffset()
        assert stamp[-1] == "Z"
        assert len(stamp) == 20
        # Second-resolution stamp: allow a 1s drift while the test runs.
        drift = int(time.time()) - strict_rfc3339.rfc3339_to_timestamp(stamp)
        assert drift in (0, 1)

        # Sub-second mode must land within 100ms of the wall clock.
        stamp = strict_rfc3339.now_to_rfc3339_utcoffset(False)
        assert abs(strict_rfc3339.rfc3339_to_timestamp(stamp) - time.time()) <= 0.1
    def test_now(self):
        """Second copy of the UTC-offset round-trip check."""
        rendered = strict_rfc3339.now_to_rfc3339_utcoffset()
        # Whole-second stamps are exactly 20 chars ending in 'Z'.
        assert rendered[-1] == "Z"
        assert len(rendered) == 20
        delta = int(time.time()) - strict_rfc3339.rfc3339_to_timestamp(rendered)
        assert delta == 0 or delta == 1

        rendered = strict_rfc3339.now_to_rfc3339_utcoffset(False)
        round_trip = strict_rfc3339.rfc3339_to_timestamp(rendered)
        assert abs(round_trip - time.time()) <= 0.1
async def test_patch(jp_fetch, labserverapp):
    """Saving settings twice must bump last_modified but not created."""
    id = "@jupyterlab/shortcuts-extension:plugin"

    # Same empty-raw payload for both saves.
    put_body = json.dumps(dict(raw=json5.dumps(dict())))

    resp = await jp_fetch("lab",
                          "api",
                          "settings",
                          id,
                          method="PUT",
                          body=put_body)
    validate_request(resp)

    resp = await jp_fetch(
        "lab",
        "api",
        "settings",
        id,
        method="GET",
    )
    validate_request(resp)
    payload = json.loads(resp.body.decode())
    first_created = rfc3339_to_timestamp(payload["created"])
    first_modified = rfc3339_to_timestamp(payload["last_modified"])

    resp = await jp_fetch("lab",
                          "api",
                          "settings",
                          id,
                          method="PUT",
                          body=put_body)
    validate_request(resp)

    resp = await jp_fetch(
        "lab",
        "api",
        "settings",
        id,
        method="GET",
    )
    validate_request(resp)
    payload = json.loads(resp.body.decode())
    second_created = rfc3339_to_timestamp(payload["created"])
    second_modified = rfc3339_to_timestamp(payload["last_modified"])

    assert first_created <= second_created
    assert first_modified < second_modified

    # The listing endpoint must still include this plugin.
    resp = await jp_fetch(
        "lab",
        "api",
        "settings/",
        method="GET",
    )
    validate_request(resp)
    payload = json.loads(resp.body.decode())
    listing = payload["settings"]
    list_data = [item for item in listing if item["id"] == id][0]
Пример #8
0
async def test_patch(jp_fetch, labserverapp):
    """PUT/GET the shortcuts settings twice; last_modified must advance."""
    id = '@jupyterlab/shortcuts-extension:plugin'

    empty_raw = json.dumps(dict(raw=json5.dumps(dict())))

    first_put = await jp_fetch('lab',
                               'api',
                               'settings',
                               id,
                               method='PUT',
                               body=empty_raw)
    validate_request(first_put)

    first_get = await jp_fetch(
        'lab',
        'api',
        'settings',
        id,
        method='GET',
    )
    validate_request(first_get)
    body = json.loads(first_get.body.decode())
    first_created = rfc3339_to_timestamp(body['created'])
    first_modified = rfc3339_to_timestamp(body['last_modified'])

    second_put = await jp_fetch('lab',
                                'api',
                                'settings',
                                id,
                                method='PUT',
                                body=empty_raw)
    validate_request(second_put)

    second_get = await jp_fetch(
        'lab',
        'api',
        'settings',
        id,
        method='GET',
    )
    validate_request(second_get)
    body = json.loads(second_get.body.decode())
    second_created = rfc3339_to_timestamp(body['created'])
    second_modified = rfc3339_to_timestamp(body['last_modified'])

    assert first_created <= second_created
    assert first_modified < second_modified

    # Finally check the plugin shows up in the settings listing.
    listing_resp = await jp_fetch(
        'lab',
        'api',
        'settings/',
        method='GET',
    )
    validate_request(listing_resp)
    body = json.loads(listing_resp.body.decode())
    listing = body['settings']
    list_data = [item for item in listing if item['id'] == id][0]
    def test_now(self):
        """Local-offset stamps carry the zone offset and round-trip to now."""
        stamp = strict_rfc3339.now_to_rfc3339_localoffset()
        when = strict_rfc3339.rfc3339_to_timestamp(stamp)
        # NOTE(review): assumes the host runs in US Eastern time — the
        # suffix is -05:00 (EST) or -04:00 (EDT) depending on DST.
        expected_offset = ["-05:00", "-04:00"][time.localtime(when).tm_isdst]
        assert stamp[-6:] == expected_offset

        drift = int(time.time()) - when
        assert drift in (0, 1)

        stamp = strict_rfc3339.now_to_rfc3339_localoffset(False)
        assert abs(strict_rfc3339.rfc3339_to_timestamp(stamp) - time.time()) <= 0.1
Пример #10
0
 def is_alive(self):
     """Return True while the last pet happened within ``self.timeout``.

     Both the current UTC time and the last pet time are RFC 3339 strings
     (from get_current_utc / get_last_pet) converted to UNIX timestamps.
     """
     current_epoch = strict_rfc3339.rfc3339_to_timestamp(
         self.get_current_utc())
     last_pet_epoch = strict_rfc3339.rfc3339_to_timestamp(
         self.get_last_pet())
     # An if/else that returns booleans collapses to one comparison.
     return (current_epoch - last_pet_epoch) <= self.timeout
Пример #11
0
    def test_now(self):
        """Duplicate local-offset round-trip check."""
        rendered = strict_rfc3339.now_to_rfc3339_localoffset()
        parsed = strict_rfc3339.rfc3339_to_timestamp(rendered)
        # NOTE(review): hard-codes US Eastern offsets; DST picks which one.
        assert rendered[-6:] == ["-05:00", "-04:00"][time.localtime(parsed).tm_isdst]

        gap = int(time.time()) - parsed
        assert gap == 0 or gap == 1

        rendered = strict_rfc3339.now_to_rfc3339_localoffset(False)
        parsed = strict_rfc3339.rfc3339_to_timestamp(rendered)
        assert abs(parsed - time.time()) <= 0.1
Пример #12
0
async def test_get(fetch, labserverapp):
    """GET a workspace twice; both responses must carry the id and valid
    RFC 3339 timestamps."""
    id = 'foo'
    r = await fetch('lab', 'api', 'workspaces', id)
    data = json.loads(r.body.decode())
    metadata = data['metadata']
    assert metadata['id'] == id
    assert rfc3339_to_timestamp(metadata['created'])
    assert rfc3339_to_timestamp(metadata['last_modified'])

    r2 = await fetch('lab', 'api', 'workspaces', id)
    assert r2.code == 200
    # BUG FIX: previously re-decoded r.body here, so the second response
    # was never actually inspected; decode r2.body instead.
    data = json.loads(r2.body.decode())
    assert data['metadata']['id'] == id
Пример #13
0
def end_start_including_payloads_map(doc):
    """
    View: ``flight/end_start_including_payloads``

    Emits::

        [end_time, start_time, flight_id, 0] -> [payload_configuration ids]
        [end_time, start_time, flight_id, 1] -> {linked payload_configuration doc 1}
        [end_time, start_time, flight_id, 1] -> {linked payload_configuration doc 2}
        ...

    Or, when a flight has no payloads::

        [end_time, start_time, flight_id, 0] -> null

    Times are all UNIX timestamps (and therefore in UTC). Sorts by flight
    window end time then start time.

    Only shows approved flights. If the flight has payloads, it is emitted
    with the list of payloads plus one link row per payload so include_docs
    can fetch them; otherwise the flight row is emitted alone.

    Used by the parser to find active flights and the configurations needed
    to decode their telemetry; also usable for upcoming flights, though
    ``launch_time_including_payloads`` (sorted by launch time) is usually
    more convenient for that.

    Query using ``startkey=[current_timestamp]`` to get all flights whose
    windows have not yet ended. Use ``include_docs=true`` to have the linked
    payload_configuration documents fetched and returned as the ``"doc"``
    key for that row; otherwise the row's value just holds the linked ID.
    See the
    `CouchDB documentation <http://wiki.apache.org/couchdb/Introduction_to_CouchDB_views#Linked_documents>`_
    for details on linked documents.
    """
    # Only approved flight documents are visible through this view.
    if doc['type'] != "flight" or not doc['approved']:
        return

    fid = doc['_id']
    end_ts = rfc3339_to_timestamp(doc['end'])
    start_ts = rfc3339_to_timestamp(doc['start'])

    if 'payloads' not in doc:
        yield (end_ts, start_ts, fid, 0), None
    else:
        payloads = doc['payloads']
        yield (end_ts, start_ts, fid, 0), payloads
        # One link row per payload so include_docs pulls the config docs.
        for payload_id in payloads:
            yield (end_ts, start_ts, fid, 1), {'_id': payload_id}
Пример #14
0
def end_start_including_payloads_map(doc):
    """
    View: ``flight/end_start_including_payloads``

    Emits::

        [end_time, start_time, flight_id, 0] -> [payload_configuration ids]
        [end_time, start_time, flight_id, 1] -> {linked payload_configuration doc 1}
        [end_time, start_time, flight_id, 1] -> {linked payload_configuration doc 2}
        ...

    Or, when a flight has no payloads::

        [end_time, start_time, flight_id, 0] -> null

    Times are all UNIX timestamps (and therefore in UTC). Rows sort by
    flight window end time, then start time.

    Only approved flights are emitted. A flight with payloads produces one
    row carrying the payload id list plus one linked-document row per
    payload (for ``include_docs``); a flight without payloads produces a
    single row with a null value.

    Used by the parser to find active flights and get the configurations
    used to decode telemetry from them. May also be used to find upcoming
    flights, though ``launch_time_including_payloads`` sorts by launch time
    and is typically more useful for that.

    Query using ``startkey=[current_timestamp]`` to get all flights whose
    windows have not yet ended. Use ``include_docs=true`` to have the linked
    payload_configuration documents fetched and returned as the ``"doc"``
    key for each link row; otherwise the row's value just contains the
    linked ID. See the
    `CouchDB documentation <http://wiki.apache.org/couchdb/Introduction_to_CouchDB_views#Linked_documents>`_
    for details on linked documents.
    """
    is_flight = doc['type'] == "flight"
    if not (is_flight and doc['approved']):
        return

    flight_id = doc['_id']
    window_end = rfc3339_to_timestamp(doc['end'])
    window_start = rfc3339_to_timestamp(doc['start'])
    key_head = (window_end, window_start, flight_id)

    if 'payloads' in doc:
        yield key_head + (0,), doc['payloads']
        for configuration_id in doc['payloads']:
            yield key_head + (1,), {'_id': configuration_id}
    else:
        yield key_head + (0,), None
async def test_get(jp_fetch, labserverapp):
    """GET a workspace twice; both responses must carry the id and valid
    RFC 3339 timestamps."""
    id = "foo"
    r = await jp_fetch("lab", "api", "workspaces", id)
    validate_request(r)
    data = json.loads(r.body.decode())
    metadata = data["metadata"]
    assert metadata["id"] == id
    assert rfc3339_to_timestamp(metadata["created"])
    assert rfc3339_to_timestamp(metadata["last_modified"])

    r2 = await jp_fetch("lab", "api", "workspaces", id)
    validate_request(r2)
    # BUG FIX: previously re-decoded r.body here, so the second response
    # body was never checked; decode r2.body instead.
    data = json.loads(r2.body.decode())
    assert data["metadata"]["id"] == id
Пример #16
0
def tx_ledger_time(txs, log_path):
    """Process load testing log and print time difference of transaction submission time to horizon and commit time to ledger, and horizon response time."""
    logging.debug('reading logs ...')

    # tx hash -> {submit ts, commit ts, [response ts, status]}
    diffs_by_hash = {}
    with open(log_path, 'r') as log_file:
        for line_no, raw in enumerate(log_file):
            if line_no % 100 == 0:
                logging.debug('processing line %d', line_no)

            entry = json.loads(raw)

            # Skip events that carry no transaction hash.
            tx_hash = entry.get('tx_hash')
            if not tx_hash:
                continue

            if entry.get('msg') == 'submitting transaction':
                # Only the first submit attempt per hash counts; ignore retries.
                if tx_hash not in diffs_by_hash:
                    diffs_by_hash[tx_hash] = {
                        'horizon_submit_timestamp':
                        strict_rfc3339.rfc3339_to_timestamp(
                            entry['timestamp']),
                        # commit to ledger timestamp
                        'commit_to_ledger_timestamp':
                        strict_rfc3339.rfc3339_to_timestamp(
                            txs[tx_hash]['created_at'])
                    }
            elif entry.get('transaction_status'):
                # Record only the first Horizon response per transaction.
                record = diffs_by_hash[tx_hash]
                if 'status' not in record:
                    record['horizon_response_timestamp'] = \
                        strict_rfc3339.rfc3339_to_timestamp(entry['timestamp'])
                    record['status'] = entry['transaction_status']

    # Column order follows the first record's key order.
    writer = csv.DictWriter(
        sys.stdout,
        fieldnames=['tx_hash'] + list(list(diffs_by_hash.values())[0].keys()))
    writer.writeheader()
    for tx_hash, record in diffs_by_hash.items():
        writer.writerow({'tx_hash': tx_hash, **record})
Пример #17
0
    def convert(
            self,
            value: ty.Any,
            path: Path,
            *args: ty.Any,
            entity: ty.Optional[ConvertibleEntity] = None,
            **context: ty.Any
    ) -> ty.Optional[ty.Union[str, datetime.datetime]]:
        """Convert between datetime objects and RFC 3339 strings.

        On the RESPONSE side, datetimes are serialized to RFC 3339 strings
        and the subtype's result is returned as-is.  Otherwise the
        sub-converted string is parsed back into a naive datetime.

        Raises:
            SchemaError: if the value is not a valid RFC 3339 string.
        """
        # Serialize datetimes on the way out.  The original also tested
        # ``value is not None`` here, but isinstance(value, datetime.datetime)
        # already guarantees non-None, so that check was dead code.
        if isinstance(
                value, datetime.datetime
        ) and entity == ConvertibleEntity.RESPONSE:
            value = rfc3339.rfc3339(value)

        result = self.subtype.convert(value,
                                      path,
                                      *args,
                                      entity=entity,
                                      **context)

        if entity == ConvertibleEntity.RESPONSE:
            return result

        if result is None:
            return None

        try:
            return datetime.datetime.fromtimestamp(rfc3339_to_timestamp(value))
        except InvalidRFC3339Error:
            raise SchemaError(Error(path, self.messages['format']))
Пример #18
0
    def _assert_init(self, kwargs):
        """Run __init__ assertions.

        Args:
            kwargs (dict): keyword arguments for the Token initializer.

        Raises:
            AssertionError: Token attributes is not set as expected.
        """
        token = Token(**kwargs)

        # Every provided kwarg must land on its attribute; an absent kwarg
        # must leave the attribute as None.
        if 'bucket' not in kwargs:
            assert token.bucket_id is None
        else:
            assert token.bucket_id == kwargs['bucket']

        if 'expires' not in kwargs:
            assert token.expires is None
        else:
            # 'expires' arrives as an RFC 3339 string; Token stores a datetime.
            expected_expiry = datetime.fromtimestamp(
                strict_rfc3339.rfc3339_to_timestamp(kwargs['expires']))
            assert token.expires == expected_expiry

        if 'operation' not in kwargs:
            assert token.operation is None
        else:
            assert token.operation == kwargs['operation']

        if 'token' not in kwargs:
            assert token.id is None
        else:
            assert token.id == kwargs['token']
def from_rfc3339(rfc3339_text, with_nanos=False):
    """Parse a RFC 3339 date string format to datetime.date.

    Example of accepted format: '1972-01-01T10:00:20.021-05:00'

    - By default, the result is a datetime.datetime
    - If with_nanos is true, the result is a 2-tuple, (datetime.datetime,
    nanos), where the second field represents the possible nanosecond
    resolution component of the second field.

    Args:
      rfc3339_text (string): An rfc3339 formatted date string
      with_nanos (bool): Determines if nanoseconds should be parsed from the
        string

    Raises:
      ValueError: if ``rfc3339_text`` is invalid

    Returns:
      :class:`datetime.datetime`: when with_nanos is False
      tuple(:class:`datetime.datetime`, int): when with_nanos is True

    """
    timestamp = strict_rfc3339.rfc3339_to_timestamp(rfc3339_text)
    parsed = datetime.datetime.utcfromtimestamp(timestamp)
    if not with_nanos:
        return parsed
    # Recover the sub-second remainder as integer nanoseconds.
    fractional = timestamp - int(timestamp)
    return (parsed, int(fractional * 1e9))
Пример #20
0
def _parse_time(input):
    """
    :param input: Either a number as milliseconds since Unix Epoch, or a string as a valid RFC3339 timestamp
    :return: milliseconds since Unix epoch, or None if input was invalid.
    """

    # bool is a subtype of int, and we don't want to try and compare it as a time.
    if isinstance(input, bool):
        log.warn("Got unexpected bool type when attempting to parse time")
        return None

    if isinstance(input, Number):
        return float(input)

    if isinstance(input, six.string_types):
        try:
            timestamp = strict_rfc3339.rfc3339_to_timestamp(input)
            return timestamp * 1000.0
        except Exception as e:
            log.warn("Couldn't parse timestamp:" + str(input) +
                     " with error: " + str(e))
            return None

    # BUG FIX: concatenating a str with type(input) raised TypeError and
    # masked the intended warning; stringify the type first.
    log.warn("Got unexpected type: " + str(type(input)) + " with value: " +
             str(input) + " when attempting to parse time")
    return None
Пример #21
0
def export(update, ctx):
    """DM the caller their exported secret key; refuse in group chats."""
    # Telegram gives a "YYYY-MM-DD HH:MM:SS" style date; rebuild RFC 3339.
    date_parts = str(update.message.date).split()
    chatid = update.message.chat_id
    timestamp = strict_rfc3339.rfc3339_to_timestamp(
        date_parts[0] + "T" + date_parts[1])

    # Ignore messages sent before the bot was started.
    if timestart >= int(timestamp):
        return

    user = update.message.from_user
    if update.message.chat.type == "private":
        if checkRus(chatid):
            ctx.bot.send_message(
                chat_id=update.message.chat_id,
                text=
                f"Вы экспортировали секретный ключ: <code>{db.getWIF(user['id'])}</code>. <b>Important:</b> Do not share this key. If you do share this key, all your YENTEN will be lost.",
                parse_mode="HTML")
        else:
            ctx.bot.send_message(
                chat_id=update.message.chat_id,
                text=
                f"You're exported secret key: <code>{db.getWIF(user['id'])}</code>. <b>Important:</b> Do not share this key. If you do share this key, all your YENTEN will be lost.",
                parse_mode="HTML")
    elif checkRus(chatid):
        ctx.bot.send_message(
            chat_id=update.message.chat_id,
            text="Эта команда работает только в личных сообщениях."
            " Вместо этого отправьте мне личное сообщение :D")
    else:
        ctx.bot.send_message(
            chat_id=update.message.chat_id,
            text="This command only works in private messages."
            " Send me a private message instead :D")
Пример #22
0
    def _assert_init(self, kwargs):
        """Run __init__ assertions.

        Args:
            kwargs (dict): keyword arguments for the Token initializer.

        Raises:
            AssertionError: Token attributes is not set as expected.
        """
        token = Token(**kwargs)

        # bucket -> bucket_id
        if 'bucket' in kwargs:
            assert token.bucket_id == kwargs['bucket']
        else:
            assert token.bucket_id is None

        # expires -> expires (RFC 3339 string parsed into a datetime)
        if 'expires' in kwargs:
            parsed = strict_rfc3339.rfc3339_to_timestamp(kwargs['expires'])
            assert token.expires == datetime.fromtimestamp(parsed)
        else:
            assert token.expires is None

        # operation -> operation
        if 'operation' in kwargs:
            assert token.operation == kwargs['operation']
        else:
            assert token.operation is None

        # token -> id
        if 'token' in kwargs:
            assert token.id == kwargs['token']
        else:
            assert token.id is None
Пример #23
0
def optimize_timestamp(value: str) -> int:
    """Convert an RFC 3339 time string to a whole-second UNIX timestamp.

    (The previous docstring claimed the value was encoded into bytes; the
    function actually returns an int, truncating any fractional seconds.)

    :param value: RFC3339 time string
    :raises AssertionError: if ``value`` is not a str
    """
    assert isinstance(value, str)
    return int(strict_rfc3339.rfc3339_to_timestamp(value))
Пример #24
0
    def metadataSearch(self, searchType, searchEmail, searchFile,
                       searchGivenName, filterType, mimeType, startDate,
                       endDate):
        """Search through metadata.

        Returns entries created within [startDate, endDate] that match the
        search criteria (via metadataSearchType) and survive the filters
        (via metadataSearchFilters).
        """
        searchItem = list()

        startDateTs = float(getTimestamp(startDate))
        endDateTs = float(getTimestamp(endDate))

        for i in self.metadata:
            creationTs = strict_rfc3339.rfc3339_to_timestamp(i['createdDate'])

            # Skip entries outside the requested temporal period
            # (chained comparison replaces the two-clause `and`).
            if not (startDateTs <= creationTs <= endDateTs):
                continue

            prunedRes = self.metadataSearchType(i, searchType, searchEmail,
                                                searchFile,
                                                searchGivenName)

            # `is not None` instead of `!= None`: identity, not equality.
            if prunedRes is not None:
                # now apply filters
                filteredRes = self.metadataSearchFilters(
                    filterType, prunedRes, mimeType)

                if filteredRes is not None:
                    searchItem.append(filteredRes)
        return searchItem
Пример #25
0
    def lastSeen(self, value):
        """Set the last-seen time from an RFC 3339 string, or clear with None."""
        if value is None:
            self._last_seen = None
        else:
            self._last_seen = datetime.fromtimestamp(
                strict_rfc3339.rfc3339_to_timestamp(value))
Пример #26
0
def setLang(update, ctx):
    """Change the caller's bot language ('en'/'zh'/'id'/'ru'); DM only."""
    date_parts = str(update.message.date).split()
    timestamp = strict_rfc3339.rfc3339_to_timestamp(
        date_parts[0] + "T" + date_parts[1])

    # Ignore messages sent before the bot was started.
    if timestart >= int(timestamp):
        return

    user = update.message.from_user
    args = update.message.text.split(" ")
    language = getLang(update.message.chat_id)

    if update.message.chat.type != "private":
        ctx.bot.send_message(
            chat_id=update.message.chat_id,
            text=lang[language]['error']['general']['dm-only'])
        return

    if args[1] not in ["en", "zh", "id", "ru"]:
        userlang = db.getLang(user["id"])
        ctx.bot.send_message(
            chat_id=update.message.chat_id,
            text=lang[userlang]['setlang']['invalid-lang'])
    elif args[1] == db.getLang(user["id"]):
        # Requested language is already active.
        userlang = db.getLang(user["id"])
        ctx.bot.send_message(
            chat_id=update.message.chat_id,
            text=lang[userlang]['setlang']['same-lang'])
    else:
        db.setLang(user["id"], args[1])
        userlang = db.getLang(user["id"])
        ctx.bot.send_message(
            chat_id=update.message.chat_id,
            text=lang[userlang]['setlang']['set-lang'])
Пример #27
0
    def __init__(self,
                 id=None,
                 name=None,
                 status=None,
                 user=None,
                 created=None,
                 storage=None,
                 transfer=None,
                 pubkeys=None):
        """Plain attribute container for a bucket record.

        ``created`` is an RFC 3339 string when supplied; it is stored as a
        naive datetime, or None when absent.
        """
        self.id = id
        self.name = name
        self.status = status
        self.user = user
        self.storage = storage
        self.transfer = transfer
        self.pubkeys = pubkeys

        # self.files = FileManager(bucket_id=self.id)
        # self.pubkeys = BucketKeyManager(
        #     bucket=self, authorized_public_keys=self.pubkeys)
        # self.tokens = TokenManager(bucket_id=self.id)

        if created is None:
            self.created = None
        else:
            self.created = datetime.fromtimestamp(
                strict_rfc3339.rfc3339_to_timestamp(created))
Пример #28
0
def ruter(message, name=None, transporttype=None):
    """Reply with the next (up to five) departures for each matching stop.

    Args:
        message: chat message object to reply to.
        name: stop name to look up; defaults to Forskningsparken.
        transporttype: optional transport-type filter for get_departures.
    """
    ret = ''
    if name is None:
        # 3010370 is for Forskningsparken T-Bane
        stops = get_stations('Forskningsparken')
    else:
        stops = get_stations(name)

    for stop in stops:
        # BUG FIX: `stop['PlaceType'] in 'Stop'` was a substring test, so
        # values like 'S' or 'top' also matched; compare for equality.
        if stop['PlaceType'] == 'Stop':
            departures = get_departures(stop['ID'],
                                        transporttypes=transporttype)[:5]
            # BUG FIX: `len(...) is not 0` compared int identity (a CPython
            # accident, SyntaxWarning on 3.8+); use truthiness instead.
            if departures:
                ret = ret + stop['Name'] + ':\n'
                for departure in departures:
                    mvc = departure['MonitoredVehicleJourney']
                    destination = mvc['DestinationName']
                    line = mvc['PublishedLineName']
                    timestamp = mvc['MonitoredCall']['ExpectedDepartureTime']
                    time = datetime.datetime.fromtimestamp(
                        strict_rfc3339.rfc3339_to_timestamp(timestamp),
                        tz=oslo)
                    ret += ('%s %s:  %s\n' %
                            (line, destination, pretty_time(time)))

    message.reply(ret)
Пример #29
0
def balance(update, ctx):
    """Report the caller's coin balance; only works in private chats."""
    date_parts = str(update.message.date).split()
    timestamp = strict_rfc3339.rfc3339_to_timestamp(
        date_parts[0] + "T" + date_parts[1])

    # Ignore messages sent before the bot was started.
    if timestart >= int(timestamp):
        return

    user = update.message.from_user
    userlang = db.getLang(user['id'])

    if update.message.chat.type != "private":
        ctx.bot.send_message(
            chat_id=update.message.chat_id,
            text=lang[userlang]['error']['general']['dm-only'])
        return

    if not db.checkUser(user["id"]):
        ctx.bot.send_message(
            chat_id=update.message.chat_id,
            text=lang[userlang]['error']['not-registered'])
        return

    balance = getBalance(user["id"])
    ctx.bot.send_message(
        chat_id=update.message.chat_id,
        text=
        f"{lang[userlang]['balance']['part-1']} {balance} {config.coin['ticker']}"
    )
Пример #30
0
def deposit(update, ctx):
    """DM the caller their deposit address; only works in private chats."""
    date_parts = str(update.message.date).split()
    timestamp = strict_rfc3339.rfc3339_to_timestamp(
        date_parts[0] + "T" + date_parts[1])

    # Ignore messages sent before the bot was started.
    if timestart >= int(timestamp):
        return

    user = update.message.from_user
    userlang = db.getLang(user['id'])

    if update.message.chat.type != "private":
        ctx.bot.send_message(
            chat_id=update.message.chat_id,
            text=lang[userlang]['error']['general']['dm-only'])
        return

    if not db.checkUser(user["id"]):
        ctx.bot.send_message(
            chat_id=update.message.chat_id,
            text=lang[userlang]['error']['not-registered'])
        return

    address = getAddress(user["id"])
    ctx.bot.send_message(
        chat_id=update.message.chat_id,
        text=
        f"{lang[userlang]['deposit']['part-1']} <code>{address}</code>",
        parse_mode="HTML")
Пример #31
0
def info(update, ctx):
    """Reply with current network stats and market price for the coin."""
    gettime = str(update.message.date).split()
    timetoconvert = gettime[0] + "T" + gettime[1]
    timestamp = strict_rfc3339.rfc3339_to_timestamp(timetoconvert)

    # Ignore messages sent before the bot was started.
    if timestart < int(timestamp):

        price = requests.get(
            f"https://api.coingecko.com/api/v3/simple/price?ids={config.coin['coin_name']}&vs_currencies=usd,btc"
        ).json()
        # Renamed from `info`, which shadowed this function itself.
        node_info = requests.get(f"{config.apiUrl}/info").json()

        btc = str(format(price["sugarchain"]["btc"], '.8f'))
        usd = str(price["sugarchain"]["usd"])

        blocks = str(node_info['result']['blocks'])
        # Renamed from `hash`, which shadowed the builtin.
        nethash = formathash(int(node_info['result']['nethash']))
        diff = str(node_info['result']['difficulty'])
        supply = str(format(convertToSugar(node_info['result']['supply']), '.8f'))

        ctx.bot.send_message(chat_id=update.message.chat_id,
                             text=f"""
Current block height: <code>{blocks}</code>
Current network hashrate: <code>{nethash}</code>
Current network difficulty: <code>{diff}</code>
Current circulating supply: <code>{supply}</code> SUGAR
Current {config.coin['ticker']}/BTC price: {btc} BTC
Current {config.coin['ticker']}/USD price: ${usd}
""",
                             parse_mode="HTML")
Пример #32
0
def tx_submit_rate():
    """Process load testing log from stdin and print transaction submit rate in CSV format."""
    # Per-second submission counts, keyed by the unix timestamp (as a
    # string); OrderedDict keeps first-seen order for debuggability.
    buckets = collections.OrderedDict()

    for c, raw_line in enumerate(fileinput.input()):
        if c % 100 == 0:
            logging.debug('processing line %d', c)

        json_line = json.loads(raw_line)
        if json_line.get('msg') == 'submitting transaction':
            unix_ts = str(
                int(strict_rfc3339.rfc3339_to_timestamp(
                    json_line['timestamp'])))
            # dict.get with a default replaces the original
            # membership-test-then-branch increment.
            buckets[unix_ts] = buckets.get(unix_ts, 0) + 1

    # Histogram of observed per-second rates; Counter replaces the
    # hand-rolled defaultdict(lambda: 0) accumulation loop.
    rate_count = collections.Counter(buckets.values())

    w = csv.writer(sys.stdout)
    w.writerow(['txs per second (rate, 1s)', 'count'])

    for b in rate_count.items():
        w.writerow(b)
Пример #33
0
    def post_handler(self, data: dict, schema_order: list):
        """Create an Elasticsearch document for an incoming POST.

        Orders *data* according to *schema_order*, converts its ``date``
        field from an RFC 3339 string to a UNIX timestamp, and indexes it
        under a freshly generated UUID. Retries only on an id collision,
        up to 3 times.

        :param data: raw event payload from the request.
        :param schema_order: field order expected by the index schema.
        :returns: response object built via ``make_response`` — 201 with
            the stored body and a Location header on success, 500 on any
            other failure.
        """
        # Pessimistic default: unless the create succeeds below, the
        # caller gets a 500 response.
        response = make_response(
            request.headers, HTTPStatus.INTERNAL_SERVER_ERROR,
            json.dumps(http_message(HTTPStatus.INTERNAL_SERVER_ERROR)))
        retries = 0

        while retries <= 3:
            try:
                body = EventHandler._order_data(data, schema_order)
                body['date'] = rfc3339_to_timestamp(body['date'])
                event_id = uuid1()
                self.es.create(index=self.index, id=event_id, body=body)
                body = self._generate_dict_body(body, event_id.int)
                response = make_response(request.headers, HTTPStatus.CREATED,
                                         json.dumps(body))
                response.headers.add("Location",
                                     body["_links"]["self"]["href"])
                break
            except ConflictError:  # a UUID collision is nearly impossible, but retry with a fresh id just in case
                retries += 1
                continue
            except Exception:  # anything else is unrecoverable here: fall through and return the 500 response
                break

        return response
Пример #34
0
    def object(cls, *args):
        """Convert an RFC 3339 timestamp string into a naive UTC datetime."""
        # Imported lazily so the dependency is only needed when called.
        import strict_rfc3339

        unix_time = strict_rfc3339.rfc3339_to_timestamp(*args)
        return datetime.datetime.utcfromtimestamp(unix_time)
Пример #35
0
def from_rfc3339(rfc3339_text, with_nanos=False):
    """Parse an RFC 3339 date string into a datetime.

    Example of accepted format: '1972-01-01T10:00:20.021-05:00'

    Args:
      rfc3339_text (string): an RFC 3339 formatted date string
      with_nanos (bool): if True, also return the sub-second component
        as a nanosecond count

    Raises:
      ValueError: if ``rfc3339_text`` is invalid

    Returns:
      :class:`datetime.datetime` when ``with_nanos`` is False; otherwise a
      ``(datetime.datetime, int)`` tuple whose second element is the
      nanosecond component of the parsed timestamp.
    """
    timestamp = strict_rfc3339.rfc3339_to_timestamp(rfc3339_text)
    parsed = datetime.datetime.utcfromtimestamp(timestamp)
    if not with_nanos:
        return parsed
    nanos = int((timestamp - int(timestamp)) * 1e9)
    return (parsed, nanos)
Пример #36
0
    def execute(cls, stack: MichelsonStack, stdout: List[str],
                context: AbstractContext):
        """Patch one field of the execution context from a Micheline literal.

        ``cls.args`` holds (primitive to patch, literal with the new value);
        supported primitives are AMOUNT, BALANCE, CHAIN_ID, SENDER, SOURCE
        and NOW. Neither *stack* nor *stdout* is touched here.

        :param stack: Michelson stack (unused by this instruction)
        :param stdout: log of executed instructions (unused here)
        :param context: execution context to patch in place
        :raises ValueError: if the primitive is not one of the patchable set
        """
        res_type: MichelsonType
        literal: Type[MichelineLiteral]
        res_type, literal = cls.args  # type: ignore

        if res_type.prim == 'AMOUNT':
            context.amount = literal.get_int()  # type: ignore
        elif res_type.prim == 'BALANCE':
            context.balance = literal.get_int()  # type: ignore
        elif res_type.prim == 'CHAIN_ID':
            context.chain_id = literal.get_string()  # type: ignore
        elif res_type.prim == 'SENDER':
            context.sender = literal.get_string()  # type: ignore
        elif res_type.prim == 'SOURCE':
            context.source = literal.get_string()  # type: ignore
        elif res_type.prim == 'NOW':
            # NOW accepts either an int literal or an RFC 3339 string;
            # get_int() on a string literal raises, and we fall back.
            try:
                context.now = literal.get_int()  # type: ignore
            # FIXME: Why does TypeError appear to be wrapped?
            except (TypeError, MichelsonRuntimeError):
                context.now = int(
                    strict_rfc3339.rfc3339_to_timestamp(
                        literal.get_string()))  # type: ignore
        else:
            raise ValueError(
                f'Expected one of {cls.allowed_primitives}, got {res_type.prim}'
            )
        return cls()
Пример #37
0
 def process(self, element):
     """Wrap *element* in a Beam TimestampedValue keyed by its 'timestamp' field."""
     logging.debug('AddTimestampToDict: %s %r' % (type(element), element))
     event_time = strict_rfc3339.rfc3339_to_timestamp(element['timestamp'])
     return [beam.window.TimestampedValue(element, event_time)]
Пример #38
0
    def lastSeen(self, value):
        """Store *value* (an RFC 3339 string, or None) as a datetime in ``_last_seen``."""
        if value is None:
            self._last_seen = None
            return

        unix_time = strict_rfc3339.rfc3339_to_timestamp(value)
        self._last_seen = datetime.fromtimestamp(unix_time)
Пример #39
0
def parse_date_time(date_string, date_time_string):
    """Build a datetime from a plain date string or an RFC 3339 string.

    A non-empty *date_string* wins and is parsed as ``YYYY-MM-DD``
    (midnight, naive); otherwise *date_time_string* is parsed as RFC 3339
    and converted to a naive UTC datetime.
    """
    if not date_string:
        unix_time = strict_rfc3339.rfc3339_to_timestamp(date_time_string)
        return datetime.datetime.utcfromtimestamp(unix_time)

    return datetime.datetime.strptime(date_string, "%Y-%m-%d")
Пример #40
0
 def make_clean_google_hours_dictionary(self, data):
     """Translate a Google Calendar API response into a list of hour objects.

     Any missing/malformed structure in *data* is deliberately swallowed
     and replaced by a single placeholder entry.
     """
     try:
         if data['items']:
             return [
                 self._make_hour_obj(
                     self._safe_grab(item, 'summary'),
                     self._safe_grab(item, 'description'),
                     True,
                     strict_rfc3339.rfc3339_to_timestamp(item['start']['dateTime']),
                     strict_rfc3339.rfc3339_to_timestamp(item['end']['dateTime']),
                     self._safe_grab(item, 'timeZone'),
                 )
                 for item in data['items']
             ]
     except Exception:
         # Best-effort: fall through to the placeholder below.
         pass
     return [self._make_hour_obj('', 'nothing to show', False, '', '', '')]
Пример #41
0
    def __init__(self, id=None, created=None, shards=None):
        """Initialize a frame.

        :param id: frame identifier.
        :param created: RFC 3339 creation time string, or None.
        :param shards: list of shards; defaults to a fresh empty list.
        """
        self.id = id

        if created is None:
            self.created = None
        else:
            unix_time = strict_rfc3339.rfc3339_to_timestamp(created)
            self.created = datetime.fromtimestamp(unix_time)

        # Build the default here rather than in the signature, so the
        # empty list is not shared between instances.
        self.shards = [] if shards is None else shards
Пример #42
0
def _estimate_time_received(receivers):
    """Estimate when a telemetry document was actually received.

    Returns the mean of the receivers' ``time_created`` timestamps after
    discarding outliers more than one standard deviation from the mean,
    falling back to the plain mean when every point is an outlier.

    :param receivers: non-empty mapping of callsign -> info dict with an
        RFC 3339 ``time_created`` string.
    :returns: estimated UNIX timestamp (float).
    """
    # Parse each timestamp once, instead of once per pass over the data.
    times = [rfc3339_to_timestamp(info['time_created'])
             for info in receivers.values()]
    n = len(times)

    mean = sum(times) / n
    std_dev = math.sqrt(sum(x * x for x in times) / n - mean * mean)

    inliers = [x for x in times if abs(x - mean) <= std_dev]
    return sum(inliers) / len(inliers) if inliers else mean
Пример #43
0
    def test_init(self):
        """Test Frame.__init__()."""
        created_str = '2016-10-13T04:23:48.183Z'

        frame = Frame(
            created=created_str,
            id='510b23e9f63a77d939a72a77',
            shards=[])

        expected_created = datetime.fromtimestamp(
            strict_rfc3339.rfc3339_to_timestamp(created_str))

        assert frame.created == expected_created
        assert frame.id == '510b23e9f63a77d939a72a77'
        assert frame.shards == []
Пример #44
0
def callsign_time_created_map(doc):
    """
    View: ``listener_information/callsign_time_created``

    Emits::

        [callsign, time_created] -> null

    Times are UNIX timestamps (and therefore in UTC).

    Sorted by callsign, so a given callsign's latest information is easy
    to find.
    """
    if doc['type'] != "listener_information":
        return
    timestamp = rfc3339_to_timestamp(doc['time_created'])
    yield (doc['data']['callsign'], timestamp), None
Пример #45
0
def time_created_callsign_map(doc):
    """
    View: ``listener_information/time_created_callsign``

    Emits::

        [time_created, callsign] -> null

    Times are UNIX timestamps (and therefore in UTC).

    Sorted by time created — useful to see the latest listener information.
    """
    if doc['type'] != "listener_information":
        return
    created = rfc3339_to_timestamp(doc['time_created'])
    yield (created, doc['data']['callsign']), None
Пример #46
0
    def __init__(
            self, token=None, bucket=None, operation=None, expires=None,
            encryptionKey=None
    ):
        """Initialize a token.

        :param token: token identifier (stored as ``self.id``).
        :param bucket: bucket id; wrapped in a Bucket instance.
        :param operation: operation this token authorizes.
        :param expires: RFC 3339 expiry time string, or None.
        :param encryptionKey: associated encryption key id.
        """
        self.id = token
        self.bucket = Bucket(id=bucket)
        self.operation = operation
        self.encryptionKey = encryptionKey

        if expires is None:
            self.expires = None
        else:
            self.expires = datetime.fromtimestamp(
                strict_rfc3339.rfc3339_to_timestamp(expires))
def launch_time_map(doc):
    """
    View: ``payload_telemetry_stats/launch_time``

    Emits::

        launch_time -> {flight}

    for every approved flight, where {flight} is a flight-like doc
    containing only flight['name'] and flight['launch']['time']
    """
    if doc['type'] != 'flight' or not doc['approved']:
        return

    launch_time = doc['launch']['time']
    yield rfc3339_to_timestamp(launch_time), {
        "name": doc['name'],
        "launch": {"time": launch_time},
    }
Пример #48
0
    def process_changes(self, fcb_list, last_update):
        """Sort changed entries into new vs. modified files.

        Entries created after *last_update* (an RFC 3339 string) are added
        to the file tree; everything else is treated as an update to an
        existing entry.
        """
        self.logger.info("id:%d Processing changes..." % self.ident)
        cutoff = strict_rfc3339.rfc3339_to_timestamp(last_update)

        for raw_entry in fcb_list:
            entry = file_object.FileObject(raw_entry)

            if entry.get_ctime() > cutoff:
                # Created after the last sync: brand new file.
                self.logger.info("New file found: %s" % entry.get_name())
                self.file_tree_navigator.add_file_entry_id(entry)
            else:
                # Existed before the last sync: treat as a change.
                self.logger.info("Changed file found: %s" % entry.get_name())
                self.file_tree_navigator.update_file_entry_id(entry)
Пример #49
0
def name_time_created_map(doc):
    """
    View: ``payload_configuration/name_time_created``

    Emits::

        [name, time_created] -> null

    In the key, ``time_created`` is emitted as a UNIX timestamp (seconds
    since epoch).

    Used to get a list of all current payload configurations, for display
    purposes or elsewhere where sorting by name is useful.
    """
    if doc['type'] != "payload_configuration":
        return
    yield (doc['name'], rfc3339_to_timestamp(doc['time_created'])), None
Пример #50
0
    def create(cls, info: dict) -> object:
        """Build a signer-token instance from a cloud asset *info* dict.

        :param info: mapping with required keys ``value`` and
            ``expired_at`` (an RFC 3339 string) and an optional ``extra``.
        :raises Exception: if a required key is missing/empty or the
            expiry time is not valid RFC 3339.
        """
        value = info.get('value')
        if not value:
            raise Exception('Missing signer token value for cloud asset')

        expired_at_str = info.get('expired_at')
        if not expired_at_str:
            raise Exception('Missing expiry time for cloud asset signer token')

        try:
            expire_timestamp = rfc3339_to_timestamp(expired_at_str)
        except InvalidRFC3339Error as ex:
            # Chain the parse error so the original cause isn't lost.
            raise Exception(
                'Invalid format of expiry time for cloud asset signer token'
            ) from ex

        return cls(value,
                   datetime.fromtimestamp(expire_timestamp),
                   info.get('extra'))
Пример #51
0
    def get_results_for_plot(self, query):
        """Run an InfluxDB *query* and return ``[[timestamp, value], ...]``.

        The first non-time field of each point is used as the value; each
        point's RFC 3339 ``time`` is converted to a UNIX timestamp.
        Returns an empty list when the query yields no points.
        """
        points = self.client.query(query)
        results = list(points.get_points())

        # Idiomatic emptiness test instead of len(results) > 0.
        if not results:
            return []

        # The value field is whichever key is left after removing 'time'.
        field_names = list(results[0].keys())
        field_names.remove('time')
        field = field_names[0]

        return [[rfc3339_to_timestamp(row['time']), row[field]]
                for row in results]
Пример #52
0
def launch_time_including_payloads_map(doc):
    """
    View: ``flight/launch_time_including_payloads``

    Emits::

        [launch_time, flight_id, 0] -> [payload_configuration ids]
        [launch_time, flight_id, 1] -> {linked payload_configuration doc 1}
        [launch_time, flight_id, 1] -> {linked payload_configuration doc 2}
        ...

    Or, when a flight has no payloads::

        [launch_time, flight_id, 0] -> null

    Times are all UNIX timestamps (and therefore in UTC).

    Sort by flight launch time. Only shows approved flights.

    Used by the calendar and other interface elements to show a list of
    upcoming flights.

    Query using ``startkey=[current_timestamp]`` to get all upcoming
    flights. Use ``include_docs=true`` to have the linked
    payload_configuration documents fetched and returned as the ``"doc"``
    key for that row; otherwise the row's value will just contain an
    object that holds the linked ID. See the
    `CouchDB documentation <http://wiki.apache.org/couchdb/Introduction_to_CouchDB_views#Linked_documents>`_
    for details on linked documents.
    """
    if doc['type'] != "flight" or not doc['approved']:
        return

    launch_ts = rfc3339_to_timestamp(doc['launch']['time'])
    flight = doc['_id']

    if 'payloads' not in doc:
        yield (launch_ts, flight, 0), None
        return

    payloads = doc['payloads']
    yield (launch_ts, flight, 0), payloads
    for payload_id in payloads:
        yield (launch_ts, flight, 1), {'_id': payload_id}
Пример #53
0
    def test_init(self):
        """Test Token.__init__()."""
        kwargs = dict(
            token='token',
            bucket='bucket_id',
            operation='operation',
            expires='2016-10-13T04:23:48.183Z',
            encryptionKey='key_id',
        )

        token = Token(**kwargs)

        expected_expiry = datetime.fromtimestamp(
            strict_rfc3339.rfc3339_to_timestamp(kwargs['expires']))

        assert token.id == kwargs['token']
        assert token.bucket == Bucket(id=kwargs['bucket'])
        assert token.operation == kwargs['operation']
        assert token.expires == expected_expiry
        assert token.encryptionKey == kwargs['encryptionKey']
def time_uploaded_day_map(doc):
    """
    View: ``payload_telemetry_stats/time_uploaded_day``

    Emits::

        (time_created // (3600 * 24)) -> 1

    for every receiver of every payload_telemetry doc that belongs to a
    flight (not a testing payload).

    N.B.: reduce is "_sum" (Erlang)
    """

    if not _is_flight_telemetry(doc):
        return

    # dict.values() works on both Python 2 and 3, unlike the Python-2-only
    # dict.itervalues() used previously.
    for info in doc['receivers'].values():
        t = rfc3339_to_timestamp(info["time_created"])
        yield int(t // (3600 * 24)), 1
Пример #55
0
	def constructTimeLineItem(self,item,isHistory = False):
		"""Construct an element to be added to the timeline.

		``item`` is a Google Drive metadata dict. When ``isHistory`` is
		True the entry describes a file revision (keyed by modifiedDate
		and the modifying user); otherwise it describes the file itself
		(keyed by createdDate and title).

		Returns a dict of display fields for the timeline UI.
		"""
		isDir = False
		altName = item['id']
		hasHistory = False

		if isHistory:
			displayDate = item['modifiedDate']

			# Prefer the modifying user's name as the title, fall back
			# to the raw id when it is absent.
			if "lastModifyingUserName" in item:
				title = item['lastModifyingUserName']
			else:
				title = item['id']

			trashed = str(False)
		else:
			displayDate = item['createdDate']
			title = item['title']
			trashed = str(item['labels']['trashed'])

			# Check whether the file has revision history on record
			# (folders never do).
			if item['mimeType'] != "application/vnd.google-apps.folder":
				fd = self.db.getFileDownload(self.t,altName)
				fh = self.db.getHistoryForFile(fd)

				if len(fh) > 0:
					hasHistory = True


		if item['mimeType'] == "application/vnd.google-apps.folder":
			isDir = True
			altName = ""

		# Convert the RFC 3339 date into a "y,m,d,H,M,S" component string.
		dateTuple = list(time.gmtime(strict_rfc3339.rfc3339_to_timestamp(displayDate)))[:6]
		# month - 1 because JavaScript Date months run 0-11
		dateTuple[1] = dateTuple[1] - 1
		date = ",".join(map(str,dateTuple))

		jStr = '{"timeStr":"'+displayDate+'"}'
		return {'title': title,'time':date,'isDir':str(isDir),'trashed':trashed,'altName': altName,'params':jStr,'hasHistory':hasHistory}
Пример #56
0
    def test_init(self):
        """Test Contact.__init__()."""
        kwargs = dict(
            address='api.storj.io',
            port=8443,
            nodeID='32033d2dc11b877df4b1caefbffba06495ae6b18',
            lastSeen='2016-05-24T15:16:01.139Z',
            protocol='0.7.0',
            userAgent='4.0.3'
        )

        contact = Contact(**kwargs)

        expected_last_seen = datetime.fromtimestamp(
            strict_rfc3339.rfc3339_to_timestamp(kwargs['lastSeen']))

        assert contact.address == kwargs['address']
        assert contact.port == kwargs['port']
        assert contact.nodeID == kwargs['nodeID']
        assert contact.lastSeen == expected_last_seen
        assert contact.protocol == kwargs['protocol']
        assert contact.userAgent == kwargs['userAgent']
Пример #57
0
	def metadataSearch(self,searchType,searchEmail,searchFile,searchGivenName,filterType,mimeType,startDate,endDate):
		"""Search through metadata.

		Returns the metadata entries created within [startDate, endDate]
		that match the requested search type and survive the selected
		filters.
		"""
		searchItem = list()

		startDateTs = float(getTimestamp(startDate))
		endDateTs = float(getTimestamp(endDate))

		for i in self.metadata:
			creationTs = strict_rfc3339.rfc3339_to_timestamp(i['createdDate'])

			# Skip entries created outside the requested temporal period.
			if not (startDateTs <= creationTs <= endDateTs):
				continue

			prunedRes = self.metadataSearchType(i,searchType,searchEmail,searchFile,searchGivenName)
			# "is not None" instead of "!= None"; guard clauses replace
			# the previous deeply nested (and unevenly indented) blocks.
			if prunedRes is None:
				continue

			filteredRes = self.metadataSearchFilters(filterType,prunedRes,mimeType)
			if filteredRes is not None:
				searchItem.append(filteredRes)

		return searchItem
Пример #58
0
    def test_init(self):
        """Test Bucket.__init__()."""
        kwargs = dict(
            created='2016-10-13T04:23:48.183Z',
            id='510b23e9f63a77d939a72a77',
            name='integration-20161013_042347',
            pubkeys=[],
            status='Active',
            storage=0,
            transfer=0,
            user='******')

        bucket = Bucket(**kwargs)

        assert bucket.created == datetime.fromtimestamp(
            strict_rfc3339.rfc3339_to_timestamp(
                '2016-10-13T04:23:48.183Z'))
        assert bucket.id == '510b23e9f63a77d939a72a77'
        assert bucket.name == 'integration-20161013_042347'
        assert bucket.pubkeys == []
        assert bucket.status == 'Active'
        assert bucket.storage == 0
        assert bucket.transfer == 0
        # Compare against the value actually passed in: the previous
        # assertion used a different literal than the fixture, so the
        # test could never pass.
        assert bucket.user == kwargs['user']
Пример #59
0
def callsign_time_created_index_map(doc):
    """
    View: ``payload_configuration/callsign_time_created_index``

    Emits::

        [callsign, time_created, 1] -> [metadata, sentence 1]
        [callsign, time_created, 2] -> [metadata, sentence 2]
        ...
        [callsign, time_created, n] -> [metadata, sentence n]

    Where ``metadata`` is::

        {
            "name": doc.name,
            "time_created": doc.time_created (original string),
            "metadata": doc.metadata (if present in doc)
        }

    (In other words, one row per sentence in this document.)

    In the key, ``time_created`` is emitted as a UNIX timestamp (seconds
    since epoch).

    Useful to obtain configuration documents for a given callsign if it
    can't be found via upcoming flights, for example parsing test
    telemetry or selecting a sentence to copy when making a new document.
    """
    if doc['type'] != "payload_configuration" or 'sentences' not in doc:
        return

    created = rfc3339_to_timestamp(doc['time_created'])
    for n, sentence in enumerate(doc['sentences']):
        meta = {"name": doc["name"], "time_created": doc["time_created"]}
        if "metadata" in doc:
            meta["metadata"] = doc["metadata"]
        yield (sentence['callsign'], created, n), (meta, sentence)
Пример #60
0
    def __init__(
            self, id=None, name=None, status=None, user=None,
            created=None, storage=None, transfer=None, pubkeys=None,
            publicPermissions=None, encryptionKey=None):
        """Initialize a bucket from API response fields.

        *created*, when given, is an RFC 3339 string and is parsed into a
        datetime; every other field is stored as-is.
        """
        self.id = id
        self.name = name
        self.status = status
        self.user = user
        self.storage = storage
        self.transfer = transfer
        self.pubkeys = pubkeys
        self.publicPermissions = publicPermissions
        self.encryptionKey = encryptionKey

        # NOTE: file/pubkey/token managers are intentionally not wired up
        # here (FileManager, BucketKeyManager, TokenManager).

        if created is None:
            self.created = None
        else:
            self.created = datetime.fromtimestamp(
                strict_rfc3339.rfc3339_to_timestamp(created))