Example #1
 def __init__(self, warnCellId: str, regionName: str, end: Optional[Union[datetime, str]],
              start: Union[datetime, str], type: int,
              state: str, level: int,
              description: str, event: str, headline: str, instruction: str, stateShort: str,
              altitudeStart: Optional[int],
              altitudeEnd: Optional[int], urgency: str):
     self.warnCellId = warnCellId
     self.regionName = regionName
     self.__end = end
     if isinstance(end, datetime):
         self.__end = rfc3339.format(end, utc=True)
     elif end is None:
         self.__end = ''
     if isinstance(start, datetime):
         self.__start = rfc3339.format(start, utc=True)
     else:
         self.__start = start
     self.type = type
     self.state = state
     self.level = level
     self.description = description
     self.event = event
     self.headline = headline
     self.instruction = instruction
     self.stateShort = stateShort
     if altitudeStart is None:
         self.altitudeStart = 0
     else:
         self.altitudeStart = altitudeStart
     if altitudeEnd is None:
         self.altitudeEnd = 3000
     else:
         self.altitudeEnd = altitudeEnd
     self.urgency = urgency
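A hedged construction sketch: the enclosing class is not shown in the source, so the name WeatherWarning below is made up and the field values are purely illustrative. It shows how the datetime/None inputs are normalized by this __init__:

from datetime import datetime, timezone

# WeatherWarning is a hypothetical name for the class owning this __init__.
warning = WeatherWarning(
    warnCellId="805111000", regionName="Example Region",
    end=None,                                    # stored as ''
    start=datetime(2021, 6, 1, 12, 0, tzinfo=timezone.utc),
    type=1, state="Example State", level=2,
    description="...", event="THUNDERSTORM", headline="...",
    instruction="...", stateShort="EX",
    altitudeStart=None, altitudeEnd=None,        # defaults to 0 and 3000
    urgency="immediate",
)
# start was formatted via rfc3339.format(..., utc=True): '2021-06-01T12:00:00Z'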
Example #2
def generateMaaSNotification(itemTitle, itemDescription, itemLink,
                             itemTimestamp):
    global APPS
    global notificationsGenerated

    # Normalize the feed timestamp (e.g. 1996-12-19T16:39:57-08:00) to
    # RFC 3339 UTC form, YYYY-MM-DDThh:mm:ssZ.
    iso_date = dateutil.parser.parse(itemTimestamp)
    utc_date = iso_date.astimezone(pytz.UTC)
    maasTime = rfc3339.format(utc_date, utc=True)
    print(str(iso_date) + " => " + maasTime)

    # retrieve list of maas segments for each app
    for app in APPS:

        # find a MaaS segment that matches itemTitle
        destinationSegmentId = None
        for segment in app["segments"]:
            if segment["name"] == itemTitle:
                log("Found segment id (%s) that matches itemTitle (%s)" %
                    (segment["id"], itemTitle))
                destinationSegmentId = segment["id"]
                break

        if destinationSegmentId:
            data = {
                "pushType": "segment",
                "segments": [destinationSegmentId],
                "message": itemDescription,
                "createdBy": "*****@*****.**",
                "attributes": {
                    "title": itemTitle,
                    "description": itemDescription,
                    "url": itemLink,
                    "timestamp": "now"
                },
                "start": maasTime
            }
            xAuthString = buildXAuth("POST", app["access_key"],
                                     app["signature_key"], data)
            request = urllib.request.Request(MAAS_PUSH_ENDPOINT,
                                             json.dumps(data).encode("utf-8"))
            request.add_header("X-Auth", xAuthString)
            try:
                log("attempting request... " + json.dumps(data))

                if not TEST_MODE:
                    tempFile = urllib.request.urlopen(request)
                else:
                    print "[API call to MaaS suppressed for test mode.]"
                notificationsGenerated = notificationsGenerated + 1
            except urllib2.HTTPError, e:
                #stuff
                print e
        else:
            log("Could not find matching segment id for (%s)" % (itemTitle))
Example #3
def data2geojson(method, df, fileout):
    """Write the rows of df to fileout as a GeoJSON FeatureCollection."""
    features = []
    if method == 'velocity':
        # Each row becomes a Point feature carrying the U/V velocity components.
        insert_features = lambda X: features.append(
            geojson.Feature(geometry=geojson.Point((X["lon"], X["lat"])),
                            properties=dict(timestamp=rfc3339.format(X["time"],
                                                                     utc=True),
                                            U=X["u"],
                                            V=X["v"])))
    else:
        # Otherwise each row carries a single scalar field E.
        insert_features = lambda X: features.append(
            geojson.Feature(geometry=geojson.Point((X["lon"], X["lat"])),
                            properties=dict(timestamp=rfc3339.format(X["time"],
                                                                     utc=True),
                                            E=X["e"])))
    df.apply(insert_features, axis=1)
    with open(fileout, 'w', encoding='utf8') as fp:
        geojson.dump(geojson.FeatureCollection(features),
                     fp,
                     sort_keys=True,
                     ensure_ascii=False)
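A minimal usage sketch, assuming a pandas DataFrame whose columns match what the lambdas read (lon, lat, time, plus u/v for the velocity path); the data and output path are illustrative:

import datetime

import pandas as pd

df = pd.DataFrame({
    "lon": [-70.1, -70.2],
    "lat": [42.3, 42.4],
    "time": [datetime.datetime(2021, 1, 1, 12, 0,
                               tzinfo=datetime.timezone.utc)] * 2,
    "u": [0.5, 0.6],
    "v": [-0.1, 0.0],
})
data2geojson("velocity", df, "velocity.geojson")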
Example #4
async def new(ctx):
    """
        Creates a new event and uploads it to the calendar.        
    """
    message = await bot.say("Please enter a name for the event.")
    msg = await bot.wait_for_message(author=ctx.message.author,
                                     channel=ctx.message.channel)
    print(msg.content)
    eventName = msg.content
    await bot.edit_message(
        message,
        "Please enter a date and start time for the event.\n(Format: dd.mm.yy hh:mm)"
    )
    msg = await bot.wait_for_message(author=ctx.message.author,
                                     channel=ctx.message.channel)
    eventStart = datetime.datetime.strptime(msg.content, '%d.%m.%y %H:%M')
    # datetime.replace() returns a new object, so the result must be
    # reassigned for the UTC timezone to actually stick.
    eventStart = eventStart.replace(tzinfo=datetime.timezone.utc)
    print(eventStart.tzname())
    eventEnd = eventStart + datetime.timedelta(hours=1)
    event = calenderService.events().insert(
        calendarId='*****@*****.**',
        body={
            'summary': eventName,
            'status': 'confirmed',
            'start': {
                'dateTime': rfc3339.format(eventStart)
            },
            'end': {
                'dateTime': rfc3339.format(eventEnd)
            }
        }).execute()
    print(event)
    channelID = discord.utils.get(ctx.message.server.channels,
                                  name='announcements').id
    print(channelID)
    await reQueue(channelID)
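For reference, the '%d.%m.%y %H:%M' pattern used above parses input like this (a standalone check, separate from the bot):

import datetime

dt = datetime.datetime.strptime('24.12.21 18:30', '%d.%m.%y %H:%M')
print(dt)  # 2021-12-24 18:30:00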
Example #5
    def test_step003_ingest_events(self):
        """Tests the ingest_events method"""
        time = datetime.utcnow() - timedelta(hours=1)
        # utcnow() is naive, so tell rfc3339 the value is already UTC rather
        # than letting it assume the system timezone.
        time_formatted = rfc3339.format(time, utc=True,
                                        use_system_timezone=False)

        res = self.client.datasets.ingest_events(
            dataset=self.dataset_name,
            events=[
                {
                    "foo": "bar",
                    "_time": time_formatted
                },
                {
                    "bar": "baz",
                    "_time": time_formatted
                },
            ],
        )
        self.logger.debug(res)

        assert (res.ingested == 2
                ), f"expected ingested count to equal 2, found {res.ingested}"
Example #6
 def start(self, start: datetime):
     self.__start = rfc3339.format(start, utc=True)
Example #7
 def end(self, end: Optional[datetime]):
     self.__end = end
     if end is not None:
         self.__end = rfc3339.format(end, utc=True)
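Examples #6 and #7 read like @property setters excerpted from a class; here is a sketch of the assumed surrounding declaration (the class name and the getter are guesses, not shown in the source):

from datetime import datetime
from typing import Optional

import rfc3339


class Alert:  # hypothetical enclosing class
    @property
    def end(self) -> Optional[str]:
        return self.__end

    @end.setter
    def end(self, end: Optional[datetime]):
        self.__end = end
        if end is not None:
            self.__end = rfc3339.format(end, utc=True)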
Example #8
def timestamp_after(date, after):
    return rfc3339.format(date + timedelta(seconds=after), utc=True) \
            .replace('Z','.0Z')
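A quick illustration of the output shape, assuming the rfc3339 PyPI package used throughout these examples and an aware UTC input (a naive input would be interpreted via the system timezone first):

from datetime import datetime, timezone

base = datetime(2021, 1, 1, tzinfo=timezone.utc)
print(timestamp_after(base, 90))  # -> '2021-01-01T00:01:30.0Z'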
Example #9
    def get_fields_as_series(
            self,
            measurement: Type[T],
            field_aggregations: Dict[str, Optional[List[AggregationMode]]],
            name_components: Optional[Dict[str, str]] = None,
            tags: Optional[Dict[str, str]] = None,
            group_by_time_interval: Optional[str] = None,
            fill_mode: Optional[FillMode] = None,
            fill_number: Optional[int] = None,
            window_index_location: AggregationWindowIndex = AggregationWindowIndex.START,
            time_range: Optional[Union[datetime.date,
                                       Tuple[datetime.datetime,
                                             datetime.datetime]]] = None,
            limit: Optional[int] = None,
            tz: datetime.tzinfo = pytz.utc) -> Dict[str, Series]:
        if field_aggregations is None or len(field_aggregations.items()) == 0:
            raise Exception('Null or invalid field aggregations')

        if fill_mode is not None and fill_mode == FillMode.NUMBER:
            assert fill_number is not None, 'Null fill number passed with number fill mode'
        else:
            assert fill_number is None, 'Fill number passed with non-number fill mode'

        group_by_time_regex = re.compile('^[1-9][0-9]*[dhms]$')
        assert group_by_time_interval is None or bool(group_by_time_regex.match(group_by_time_interval)), \
            'Invalid group by time ' + str(group_by_time_interval) + \
            ', needs to be a positive integer followed by one of d, h, m, s'

        query_string = "SELECT "

        measurement_name = Measurement.get_name(
            measurement, name_components=name_components)
        fields = Measurement.get_fields(measurement)

        aggregated_field_names = []
        properties = []
        for field_name, aggregation_modes in field_aggregations.items():
            if field_name not in fields:
                raise Exception('Field name ' + str(field_name) +
                                ' not found in measurement ' +
                                measurement_name + ' fields')
            if aggregation_modes is None or len(aggregation_modes) == 0:
                properties.append(field_name)
                aggregated_field_names.append(field_name)
            else:
                for aggregation_mode in aggregation_modes:
                    properties.append(
                        aggregation_mode.aggregate_field(
                            field_name=field_name))
                    aggregated_field_names.append(
                        aggregation_mode.get_result_field_name(field_name))

        query_string += ', '.join(properties)
        query_string += """ FROM "{measurement_name}" """.format(
            measurement_name=measurement_name)

        and_conditions_list = []
        if tags is not None:
            for tag_name, tag_value in tags.items():
                and_conditions_list.append(
                    """"{tag_name}"='{tag_value}'""".format(
                        tag_name=tag_name, tag_value=tag_value))

        if time_range is not None:
            if isinstance(time_range, datetime.date):
                # A bare date selects the whole day: [day start, next day start).
                and_conditions_list.append(
                    """time >= '{day_start}' and time < '{next_day_start}'""".
                    format(day_start=rfc3339.format(time_range,
                                                    use_system_timezone=False),
                           next_day_start=rfc3339.format(
                               time_range + datetime.timedelta(days=1),
                               use_system_timezone=False)))
            else:
                if time_range[0] is not None:
                    and_conditions_list.append(
                        """time >= '{since_dt}'""".format(
                            since_dt=rfc3339.format(
                                time_range[0], use_system_timezone=False)))
                if time_range[1] is not None:
                    and_conditions_list.append(
                        """time <= '{until_dt}'""".format(
                            until_dt=rfc3339.format(
                                time_range[1], use_system_timezone=False)))

        if len(and_conditions_list) > 0:
            query_string += " WHERE " + (" AND ".join(and_conditions_list))

        if group_by_time_interval is not None:
            query_string += " GROUP BY time({time_interval})".format(
                time_interval=group_by_time_interval)

        if limit is not None:
            query_string += " LIMIT {limit}".format(limit=limit)

        if fill_mode is not None:
            if fill_mode == FillMode.NUMBER:
                query_string += " FILL(" + str(fill_number) + ")"
            else:
                query_string += " FILL(" + fill_mode.get_str() + ")"

        points = [p for p in self.db_client.query(query_string).get_points()]
        if group_by_time_interval is not None:
            times = [
                window_index_location.get_time_point_of_window(
                    parse_influx_str_time(p.get('time'), tz),
                    str(group_by_time_interval)) for p in points
            ]
        else:
            times = [parse_influx_str_time(p.get('time'), tz) for p in points]

        result_dict = {}
        for aggregated_field_name in aggregated_field_names:
            result_dict[aggregated_field_name] = Series(
                data=[p.get(aggregated_field_name) for p in points],
                index=times)

        return result_dict
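A hedged call sketch: Temperature, AggregationMode.MEAN, and the client instance are assumptions made for illustration, and the result key depends on what get_result_field_name returns for the field:

import datetime

series_by_field = client.get_fields_as_series(
    Temperature,                              # hypothetical Measurement subclass
    field_aggregations={"value": [AggregationMode.MEAN]},
    tags={"sensor": "s1"},
    group_by_time_interval="5m",
    time_range=datetime.date(2021, 1, 1),     # whole day: [start, next day)
)
# Key per AggregationMode.get_result_field_name("value"), e.g. "mean_value".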
Example #10
    def load_points(self,
                    measurement_type: Type[T],
                    name_components: Optional[Dict[str, str]] = None,
                    tags: Optional[Dict[str, str]] = None,
                    time_range: Optional[Union[datetime.date,
                                               Tuple[datetime.datetime,
                                                     datetime.datetime]]] = None,
                    limit: Optional[int] = None,
                    tz: datetime.tzinfo = pytz.utc) -> List[T]:
        # noinspection SqlNoDataSourceInspection
        query_string = """SELECT * FROM "{measurement_name}" """.format(
            measurement_name=Measurement.get_name(
                measurement_type, name_components=name_components))

        and_conditions_list = []
        if tags is not None:
            for tag_name, tag_value in tags.items():
                and_conditions_list.append(
                    """"{tag_name}"='{tag_value}'""".format(
                        tag_name=tag_name, tag_value=tag_value))

        if time_range is not None:
            if isinstance(time_range, datetime.date):
                # A bare date selects the whole day: [day start, next day start).
                and_conditions_list.append(
                    """time >= '{day_start}' and time < '{next_day_start}'""".
                    format(day_start=rfc3339.format(time_range,
                                                    use_system_timezone=False),
                           next_day_start=rfc3339.format(
                               time_range + datetime.timedelta(days=1),
                               use_system_timezone=False)))
            else:
                if time_range[0] is not None:
                    and_conditions_list.append(
                        """time >= '{since_dt}'""".format(
                            since_dt=rfc3339.format(
                                time_range[0], use_system_timezone=False)))
                if time_range[1] is not None:
                    and_conditions_list.append(
                        """time <= '{until_dt}'""".format(
                            until_dt=rfc3339.format(
                                time_range[1], use_system_timezone=False)))

        if len(and_conditions_list) > 0:
            query_string += " WHERE " + (" AND ".join(and_conditions_list))

        if limit is not None:
            query_string += " LIMIT {limit}".format(limit=limit)
        query_string += ';'

        measurement_tags = Measurement.get_tags(cls=measurement_type)
        measurement_fields = Measurement.get_fields(cls=measurement_type)

        result = [p for p in self.db_client.query(query_string).get_points()]

        field_names = list(measurement_fields.keys())
        tag_names = list(measurement_tags.keys())

        measurements_list = []
        for item in result:
            # Merge field values, tag values and the parsed timestamp into one
            # kwargs dict for the measurement constructor.
            data_points = {
                **{f: item[f]
                   for f in field_names},
                **{t: item[t]
                   for t in tag_names},
                'time_point': parse_influx_str_time(item.get('time'), tz)
            }
            # noinspection PyCallingNonCallable
            measurements_list.append(measurement_type(**data_points))

        return measurements_list
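And a matching sketch for load_points, under the same assumptions (the Temperature measurement and the client instance are made up):

import datetime

import pytz

points = client.load_points(
    Temperature,
    tags={"sensor": "s1"},
    time_range=(datetime.datetime(2021, 1, 1, tzinfo=pytz.utc),
                datetime.datetime(2021, 1, 2, tzinfo=pytz.utc)),
    limit=100,
)
for p in points:
    print(p.time_point)  # other attributes depend on the Temperature definition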