    def test_now(self):
        # Default (integer) mode: a 20-character "YYYY-MM-DDTHH:MM:SSZ" string
        s = strict_rfc3339.now_to_rfc3339_utcoffset()
        assert s[-1] == "Z"
        assert len(s) == 20
        d = int(time.time()) - strict_rfc3339.rfc3339_to_timestamp(s)
        assert d == 0 or d == 1

        # integer=False keeps sub-second precision, so the round trip is tighter
        s = strict_rfc3339.now_to_rfc3339_utcoffset(False)
        assert abs(strict_rfc3339.rfc3339_to_timestamp(s) - time.time()) <= 0.1
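
A minimal round-trip sketch of the strict_rfc3339 calls the test above exercises (only the strict_rfc3339 import is assumed; the example value is illustrative):

import strict_rfc3339

# Current UTC time as a "Z"-suffixed RFC 3339 string, e.g. "2014-01-01T12:00:00Z"
now_str = strict_rfc3339.now_to_rfc3339_utcoffset()

# Convert back to a Unix timestamp, then forward again: the round trip is lossless
ts = strict_rfc3339.rfc3339_to_timestamp(now_str)
assert strict_rfc3339.timestamp_to_rfc3339_utcoffset(ts) == now_str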
def get_metric_data(project, role, metric):
    print("get_metrics", metric)
    # Authorize via OAuth 2.0
    http = get_app_http()
    last_run = get_state(project, metric)
    youngest = strict_rfc3339.now_to_rfc3339_utcoffset()
    timespan = str(last_run) + 's'
    oldest_endtime = 0
    message = 'get_metric_data call_api - Project: {} Metric: {}'.format(project, metric)
    metric_data = call_api(http, cloudmonitoring_client, 'timeseries', 'list',
                           project=project, youngest=youngest,
                           metric=metric, timespan=timespan)

    oldest_endtime = parse_metric_data(http, metric_data, project, youngest,
                                       timespan, metric, role)

    if oldest_endtime > 0:
        ts = oldest_endtime
        set_state(project, metric, ts)
        result = {'last_update': ts}
    else:
        result = {'last_update': 0}
    return json.dumps(result)
Example #3
    def _get_data(self, raw_data, callsign, config, module):
        """Attempt to parse data from what we know so far."""
        sentences = config["payload_configuration"]["sentences"]
        for sentence_index, sentence in enumerate(sentences):
            if sentence["callsign"] != callsign:
                continue
            if sentence["protocol"] != module["name"]:
                continue

            data = self.filtering.intermediate_filter(raw_data, sentence)

            try:
                data = module["module"].parse(data, sentence)
            except (ValueError, KeyError) as e:
                logger.debug("Exception in {module} main parse: {e}"
                    .format(module=module['name'],
                            e=quick_traceback.oneline(e)))
                statsd.increment("parser.parse_exception")
                continue

            data = self.filtering.post_filter(data, sentence)

            data["_protocol"] = module["name"]
            data["_parsed"] = {
                "time_parsed": strict_rfc3339.now_to_rfc3339_utcoffset(),
                "payload_configuration": config["id"],
                "configuration_sentence_index": sentence_index
            }
            if "flight_id" in config:
                data["_parsed"]["flight"] = config["flight_id"]
            return data
        raise CantGetData()
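
For reference, a minimal sketch of the configuration shape _get_data expects (the key names come from the code above; the values are illustrative placeholders):

config = {
    "id": "payload-configuration-doc-id",
    "flight_id": "optional-flight-doc-id",  # copied into data["_parsed"]["flight"] when present
    "payload_configuration": {
        "sentences": [
            {
                "callsign": "EXAMPLECALL",  # must match the callsign being parsed
                "protocol": "UKHAS",        # must match module["name"]
                # per-sentence filter and field definitions consumed by the parser module
            },
        ],
    },
}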
Example #4
    async def add_metric(self, data):
        data['timestamp'] = now_to_rfc3339_utcoffset()
        peername = self.request.transport.get_extra_info('peername')
        if peername is not None:
            host, port = peername
            data['remote_address'] = host
        data["username"] = self.get_current_user()["login"]
        if hasattr(self, "factory") and "kind" not in data:
            data['kind'] = self.factory.get_kind()
        obj = Metric()
        obj.update(data)
        await obj.save(self.db, w=0)
Example #5
def add_listener_update(doc, req):
    """
    Update function: ``payload_telemetry/_update/add_listener``

    Given a prototype payload_telemetry JSON document in the request body,
    containing just the _raw telemetry string and one entry in receivers,
    create the document or merge this listener into it as appropriate.

    Used by listeners when a new payload telemetry string has been received.

    Usage::

        PUT /habitat/_design/payload_telemetry/_update/add_listener/<doc ID>

        {
            "data": {
                "_raw": "<base64 raw telemetry data>"
            },
            "receivers": {
                "<receiver callsign>": {
                    "time_created": "<RFC3339 timestamp>",
                    "time_uploaded": "<RFC3339 timestamp>",
                    <other keys as desired, for instance
                     latest_listener_telemetry, latest_listener_info, etc>
                }
            }
        }

    The document ID should be sha256(doc["data"]["_raw"]) in hexadecimal.

    Returns "OK" if everything was fine, otherwise CouchDB will raise an error.
    Errors might occur in validation (in which case the validation error is
    returned) or because of a save conflict. In the event of a save conflict,
    uploaders should retry the same request until the conflict is resolved.
    """
    protodoc = json.loads(req["body"])
    if "data" not in protodoc or "_raw" not in protodoc["data"]:
        raise ForbiddenError("doc.data._raw is required")
    if "receivers" not in protodoc or len(protodoc["receivers"]) != 1:
        raise ForbiddenError("doc.receivers must exist and have one receiver")
    callsign = protodoc["receivers"].keys()[0]
    protodoc["receivers"][callsign]["time_server"] = now_to_rfc3339_utcoffset()
    if not doc:
        doc = {"_id": req["id"], "type": "payload_telemetry",
               "data": {"_raw": protodoc["data"]["_raw"]}, "receivers": {}}
    doc["receivers"][callsign] = protodoc["receivers"][callsign]
    return doc, "OK"
Example #6
    def get(self):

        credentials = AppAssertionCredentials("https://www.googleapis.com/auth/calendar.readonly")
        http_auth = credentials.authorize(Http())
        cal_service = discovery.build('calendar', 'v3', http=http_auth)

        service_settings = ServiceSettings.query().get()
        if not service_settings:
            service_settings = ServiceSettings()
        next_sync_token = service_settings.cal_sync_token

        cal_events = []
        if next_sync_token:
            now = None
        else:
            now = strict_rfc3339.now_to_rfc3339_utcoffset()

        try:
            events_result = cal_service.events().list(calendarId="*****@*****.**", timeMin=now,
                                                      syncToken=next_sync_token).execute()
        except Exception:
            # The list call can fail if the stored sync token has expired; clear it
            # so the next run performs a full sync, then re-raise.
            service_settings.cal_sync_token = None
            service_settings.put()
            raise

        cal_events += events_result.get('items', [])
        next_page_token = events_result.get('nextPageToken', None)

        while next_page_token:
            events_result = cal_service.events().list(calendarId="*****@*****.**", timeMin=now,
                                                      syncToken=next_sync_token, pageToken=next_page_token).execute()
            cal_events += events_result.get('items', [])
            next_page_token = events_result.get('nextPageToken', None)

        next_sync_token = events_result.get("nextSyncToken", None)
        service_settings.cal_sync_token = next_sync_token
        service_settings.put()

        for cal_event in cal_events:
            cal_id = cal_event.get("id")

            event = Event.query().filter(Event.cal_id == cal_id).get()

            if event:
                q = taskqueue.Queue('default')
                for task in event.tasks:
                    q.delete_tasks(taskqueue.Task(name=task))
                event.tasks = []

                if cal_event.get("status") == "cancelled":
                    event.key.delete()
                    logging.info("Event deleted: %s", event)
                    continue
            else:
                event = Event(cal_id=cal_id)
                event.put()

            summary = cal_event.get("summary")
            description = cal_event.get("description")

            start = cal_event.get("start")
            end = cal_event.get("end")

            start = parse_date_time(start.get("date"), start.get("dateTime"))
            end = parse_date_time(end.get("date"), end.get("dateTime"))

            event.summary = summary
            event.description = description
            event.start = start
            event.end = end

            set_event_reminders(event)

            event.put()

            logging.info("New event created: %s", event)
Example #7
def utc_timestamp():
    return strict_rfc3339.now_to_rfc3339_utcoffset()
Example #8
def http_post_update(doc, req):
    """
    Update function: ``payload_telemetry/_update/http_post``

    Creates a new payload_telemetry document with all keys present in the HTTP
    POST form data available in ``doc.data._fallbacks`` and the ``from`` HTTP
    querystring key as the receiver callsign if available. The ``data`` field
    will be base64 encoded and used as ``doc.data._raw``.

    This function has additional functionality specific to RockBLOCKs: if all
    of the keys ``imei``, ``momsn``, ``transmit_time``, ``iridium_latitude``,
    ``iridium_longitude``, ``iridium_cep`` and ``data`` are present in the form
    data, then:
    * ``imei`` will be copied to ``doc.data._fallbacks.payload`` so it can be
      used as a payload callsign.
    * ``iridium_latitude`` and ``iridium_longitude`` will be copied to
      ``doc.data._fallbacks.latitude`` and ``longitude`` respectively.
    * ``data`` will be hex decoded before base64 encoding so it can be directly
      used by the binary parser module.
    * ``transmit_time`` will be decoded into an RFC3339 timestamp and used for
      the ``time_created`` field in the receiver section.
    * ``transmit_time`` will be decoded into hours, minutes and seconds and
      copied to ``doc.data._fallbacks.time``.

    Usage::

        POST /habitat/_design/payload_telemetry/_update/http_post?from=callsign
        
        data=hello&imei=whatever&so=forth

    This update handler may not currently be used on existing documents or
    with a PUT request; such requests will fail.

    Returns "OK" if everything was fine, otherwise CouchDB will return a
    (hopefully instructive) error.
    """
    if doc is not None:
        resp = {"headers": {"code": 405,
                            "body": "This update function may only be used to "
                                    "create new documents via POST, not with  "
                                    "an existing document ID on a PUT request."
                           }
        }
        return doc, resp

    form = req["form"]
    tc = ts = now_to_rfc3339_utcoffset()
    rawdata = base64.b64encode(form["data"])
    if set(("imei", "momsn", "transmit_time", "iridium_latitude",
           "iridium_longitude", "iridium_cep", "data")) <= set(form.keys()):
        form["payload"] = form["imei"]
        form["latitude"] = float(form["iridium_latitude"])
        form["longitude"] = float(form["iridium_longitude"])
        rawdata = base64.b64encode(form["data"].decode("hex"))
        fmt = "%y-%m-%d %H:%M:%S"
        tc = datetime.datetime.strptime(form["transmit_time"], fmt)
        form["time"] = tc.strftime("%H:%M:%S")
        tc = timestamp_to_rfc3339_utcoffset(calendar.timegm(tc.timetuple()))
    receiver = req["query"]["from"] if "from" in req["query"] else "HTTP POST"
    doc_id = hashlib.sha256(rawdata).hexdigest()
    doc = {"_id": doc_id, "type": "payload_telemetry",
            "data": {"_raw": rawdata, "_fallbacks": form}, "receivers": {}}
    doc["receivers"][receiver] = {"time_created": tc, "time_uploaded": ts,
                                  "time_server": ts}
    return doc, "OK"
Example #10
    def set_update_time_to_now(self):
        if self.last_update is not None:
            self.dirty = True

        self.last_update = strict_rfc3339.now_to_rfc3339_utcoffset()