Example #1
 def available(self):
     now = timezone.now()
     return self.filter(
         available_in__contains=DateTimeTZRange(now, None)
     )
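The open upper bound above is the point of the example: passing None to DateTimeTZRange leaves that end unbounded, so the __contains filter matches rows whose range covers everything from now onward. A minimal stand-alone sketch of the same range semantics (plain psycopg2, no database needed):

from datetime import datetime, timedelta, timezone
from psycopg2.extras import DateTimeTZRange

now = datetime.now(timezone.utc)
open_ended = DateTimeTZRange(now, None)          # upper bound unbounded
print(open_ended.upper_inf)                      # True
print(now + timedelta(days=365) in open_ended)   # True: any later instant is contained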
Example #2
    def UpdateEvent(self, request, context):
        with session_scope() as session:
            res = session.execute(
                select(Event, EventOccurrence).where(
                    EventOccurrence.id == request.event_id).where(
                        EventOccurrence.event_id == Event.id)).one_or_none()

            if not res:
                context.abort(grpc.StatusCode.NOT_FOUND,
                              errors.EVENT_NOT_FOUND)

            event, occurrence = res

            if not _can_edit_event(session, event, context.user_id):
                context.abort(grpc.StatusCode.PERMISSION_DENIED,
                              errors.EVENT_EDIT_PERMISSION_DENIED)

            occurrence_update = {"last_edited": now()}

            if request.HasField("title"):
                event.title = request.title.value
                event.last_edited = now()

            if request.HasField("content"):
                occurrence_update["content"] = request.content.value

            if request.HasField("photo_key"):
                occurrence_update["photo_key"] = request.photo_key.value

            if request.HasField("online_information"):
                if not request.online_information.link:
                    context.abort(grpc.StatusCode.INVALID_ARGUMENT,
                                  errors.ONLINE_EVENT_REQUIRES_LINK)
                occurrence_update["link"] = request.online_information.link
                occurrence_update["geom"] = None
                occurrence_update["address"] = None
            elif request.HasField("offline_information"):
                occurrence_update["link"] = None
                if request.offline_information.lat == 0 and request.offline_information.lng == 0:
                    context.abort(grpc.StatusCode.INVALID_ARGUMENT,
                                  errors.INVALID_COORDINATE)
                occurrence_update["geom"] = create_coordinate(
                    request.offline_information.lat,
                    request.offline_information.lng)
                occurrence_update[
                    "address"] = request.offline_information.address

            if request.HasField("start_time") or request.HasField("end_time"):
                if request.update_all_future:
                    context.abort(grpc.StatusCode.INVALID_ARGUMENT,
                                  errors.EVENT_CANT_UPDATE_ALL_TIMES)
                if request.HasField("start_time"):
                    start_time = to_aware_datetime(request.start_time)
                else:
                    start_time = occurrence.start_time
                if request.HasField("end_time"):
                    end_time = to_aware_datetime(request.end_time)
                else:
                    end_time = occurrence.end_time

                _check_occurrence_time_validity(start_time, end_time, context)

                during = DateTimeTZRange(start_time, end_time)

                # && is the overlap operator for ranges
                if (session.execute(
                        select(EventOccurrence.id).where(
                            EventOccurrence.event_id == event.id).where(
                                EventOccurrence.id != occurrence.id).where(
                                    EventOccurrence.during.op("&&")
                                    (during))).scalars().first() is not None):
                    context.abort(grpc.StatusCode.FAILED_PRECONDITION,
                                  errors.EVENT_CANT_OVERLAP)

                occurrence_update["during"] = during

            # TODO
            # if request.HasField("timezone"):
            #     occurrence_update["timezone"] = request.timezone

            # allow editing any event which hasn't ended more than 24 hours before now
            # when editing all future events, we edit all which have not yet ended

            if request.update_all_future:
                session.execute(
                    update(EventOccurrence).where(
                        EventOccurrence.end_time >= now() -
                        timedelta(hours=24)).where(
                            EventOccurrence.start_time >= occurrence.start_time
                        ).values(occurrence_update).execution_options(
                            synchronize_session=False))
            else:
                if occurrence.end_time < now() - timedelta(hours=24):
                    context.abort(grpc.StatusCode.FAILED_PRECONDITION,
                                  errors.EVENT_CANT_UPDATE_OLD_EVENT)
                session.execute(
                    update(EventOccurrence).where(
                        EventOccurrence.end_time >= now() -
                        timedelta(hours=24)).where(
                            EventOccurrence.id == occurrence.id).values(
                                occurrence_update).execution_options(
                                    synchronize_session=False))

            # TODO notify

            session.flush()

            # since we have synchronize_session=False, we have to refresh the object
            session.refresh(occurrence)

            return event_to_pb(session, occurrence, context)
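The overlap guard above goes through SQLAlchemy's generic op() to spell the PostgreSQL range overlap operator && directly. A minimal sketch of the same pattern against a stand-alone table (table and column names here are illustrative, not the original schema):

from sqlalchemy import Column, Integer, MetaData, Table, select
from sqlalchemy.dialects.postgresql import TSTZRANGE
from psycopg2.extras import DateTimeTZRange

metadata = MetaData()
occurrence = Table(                  # hypothetical stand-in for EventOccurrence
    "occurrence", metadata,
    Column("id", Integer, primary_key=True),
    Column("during", TSTZRANGE),
)

def overlapping_ids(during):
    # && is the overlap operator for ranges in PostgreSQL
    return select(occurrence.c.id).where(occurrence.c.during.op("&&")(during))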
Example #3
 def test_datetime_open(self):
     field = pg_forms.DateTimeRangeField()
     value = field.clean(['', '2013-04-09 11:45'])
     self.assertEqual(
         value, DateTimeTZRange(None, datetime.datetime(2013, 4, 9, 11,
                                                        45)))
Example #4
 def extend_timespan_to(self, timestamp):
     if self.start_time is None:
         return
     start_time = datetime.fromtimestamp(self.start_time)
     end_time = max(start_time, datetime.fromtimestamp(timestamp))
     self.timespan = DateTimeTZRange(start_time, end_time)
Example #5
    def CreateEvent(self, request, context):
        if not request.title:
            context.abort(grpc.StatusCode.INVALID_ARGUMENT,
                          errors.MISSING_EVENT_TITLE)
        if not request.content:
            context.abort(grpc.StatusCode.INVALID_ARGUMENT,
                          errors.MISSING_EVENT_CONTENT)
        if request.HasField("online_information"):
            online = True
            geom = None
            address = None
            if not request.online_information.link:
                context.abort(grpc.StatusCode.INVALID_ARGUMENT,
                              errors.ONLINE_EVENT_REQUIRES_LINK)
            link = request.online_information.link
        elif request.HasField("offline_information"):
            online = False
            if not (request.offline_information.address
                    and request.offline_information.lat
                    and request.offline_information.lng):
                context.abort(grpc.StatusCode.INVALID_ARGUMENT,
                              errors.MISSING_EVENT_ADDRESS_OR_LOCATION)
            if request.offline_information.lat == 0 and request.offline_information.lng == 0:
                context.abort(grpc.StatusCode.INVALID_ARGUMENT,
                              errors.INVALID_COORDINATE)
            geom = create_coordinate(request.offline_information.lat,
                                     request.offline_information.lng)
            address = request.offline_information.address
            link = None
        else:
            context.abort(grpc.StatusCode.INVALID_ARGUMENT,
                          errors.MISSING_EVENT_ADDRESS_LOCATION_OR_LINK)

        start_time = to_aware_datetime(request.start_time)
        end_time = to_aware_datetime(request.end_time)

        _check_occurrence_time_validity(start_time, end_time, context)

        with session_scope() as session:
            if request.parent_community_id:
                parent_node = session.execute(
                    select(Node).where(Node.id == request.parent_community_id)
                ).scalar_one_or_none()
            else:
                if online:
                    context.abort(grpc.StatusCode.INVALID_ARGUMENT,
                                  errors.ONLINE_EVENT_MISSING_PARENT_COMMUNITY)
                # parent community computed from geom
                parent_node = get_parent_node_at_location(session, geom)

            if not parent_node:
                context.abort(grpc.StatusCode.INVALID_ARGUMENT,
                              errors.COMMUNITY_NOT_FOUND)

            if (request.photo_key and not session.execute(
                    select(Upload).where(Upload.key == request.photo_key)).
                    scalar_one_or_none()):
                context.abort(grpc.StatusCode.INVALID_ARGUMENT,
                              errors.PHOTO_NOT_FOUND)

            event = Event(
                title=request.title,
                parent_node_id=parent_node.id,
                owner_user_id=context.user_id,
                thread=Thread(),
                creator_user_id=context.user_id,
            )
            session.add(event)

            occurrence = EventOccurrence(
                event=event,
                content=request.content,
                geom=geom,
                address=address,
                link=link,
                photo_key=request.photo_key
                if request.photo_key != "" else None,
                # timezone=timezone,
                during=DateTimeTZRange(start_time, end_time),
                creator_user_id=context.user_id,
            )
            session.add(occurrence)

            organizer = EventOrganizer(
                user_id=context.user_id,
                event=event,
            )
            session.add(organizer)

            subscription = EventSubscription(
                user_id=context.user_id,
                event=event,
            )
            session.add(subscription)

            attendee = EventOccurrenceAttendee(
                user_id=context.user_id,
                occurrence=occurrence,
                attendee_status=AttendeeStatus.going,
            )
            session.add(attendee)

            session.commit()

            return event_to_pb(session, occurrence, context)
Example #6
 def pt_to_range(phenomenon_time):
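     # timedelta(1) counts days, so the range spans exactly one day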
     pt_range = DateTimeTZRange(phenomenon_time,
                                phenomenon_time + timedelta(1))
     return pt_range
Example #7
 def test_model_field_with_default_bounds(self):
     field = pg_forms.DateTimeRangeField(default_bounds="[]")
     value = field.clean(["2014-01-01 00:00:00", "2014-02-03 12:13:14"])
     lower = datetime.datetime(2014, 1, 1, 0, 0, 0)
     upper = datetime.datetime(2014, 2, 3, 12, 13, 14)
     self.assertEqual(value, DateTimeTZRange(lower, upper, "[]"))
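The bounds string follows PostgreSQL's range notation: square brackets are inclusive, parentheses exclusive, and the default is '[)'. (default_bounds on the form field is a newer Django addition, 4.1+.) A quick illustration with plain psycopg2 ranges:

import datetime
from psycopg2.extras import DateTimeTZRange

lower = datetime.datetime(2014, 1, 1)
upper = datetime.datetime(2014, 2, 3, 12, 13, 14)
closed = DateTimeTZRange(lower, upper, "[]")   # both endpoints included
default = DateTimeTZRange(lower, upper)        # default bounds are '[)'
print(closed.upper_inc, default.upper_inc)     # True False
print(upper in closed, upper in default)       # True False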
Example #8
					if m['ip_addr'] != '127.0.0.1' and m['request'] == 'GET' and m['url'] in streams.values_list('mountpoint', flat=True):

						stream = streams.get(mountpoint=m['url'])
						
						# If we didn't parse a duration from the original string, calculate it from the bytes sent.
						# Note that this is not backwards compatible with the previous bitrates (128kbps) - when did this change?
						if duration:
							duration = int(duration[0])
						else:
							duration = int(int(m['bytes']) / (stream.bitrate * 128))
						
						if duration >= params.minimum_duration:
							duration 		= timedelta(seconds=duration)
							disconnected_at = parse(m['date'].replace(':', ' ', 1)).astimezone()
							connected_at	= disconnected_at - duration
							session 		= DateTimeTZRange(connected_at, disconnected_at)
							user_agent 		= get_user_agent(unquote(user_agent))
							location		= get_location(m['ip_addr'])

							listener = Listener(
								ip_address 		= m['ip_addr'],
								stream 			= stream,
								referer			= unquote(referer.replace('-', ''))[:255],
								session 		= session,
								duration		= duration,
								user_agent 		= user_agent,
								country			= location['country_code'],
								city 			= location['city'],
								latitude 		= location['latitude'],
								longitude 		= location['longitude'],
								)
Example #9
    def get_autoevents(self, events):
        """Return a list of resources.Event objects, one for each Event"""
        autoevents = []
        autoeventindex = {}
        eventindex = {}
        for event in events:
            autoevents.append(
                resources.Event(
                    name=event.id,
                    duration=event.duration_minutes,
                    tags=event.tags.names(),
                    demand=event.demand,
                ),
            )
            # create a dict of events with the autoevent index as key and the Event as value
            autoeventindex[autoevents.index(autoevents[-1])] = event
            # create a dict of events with the Event as key and the autoevent index as value
            eventindex[event] = autoevents.index(autoevents[-1])

        # loop over all autoevents to add unavailability...
        # (we have to do this in a separate loop because we need all the autoevents to exist)
        for autoevent in autoevents:
            # get the Event
            event = autoeventindex[autoevents.index(autoevent)]
            # loop over all other event_types...
            for et in self.event_types.all().exclude(pk=event.event_type.pk):
                if et in self.event_type_slots:
                    # and add all slots for this EventType as unavailable for this event,
                    # this means we don't schedule a talk in a workshop slot and vice versa.
                    autoevent.add_unavailability(*self.event_type_slots[et])

            # loop over all speakers for this event and add event conflicts
            for speaker in event.speakers.all():
                # loop over other events featuring this speaker, register each conflict,
                # this means we don't schedule two events for the same speaker at the same time
                conflict_ids = speaker.events.exclude(id=event.id).values_list(
                    "id",
                    flat=True,
                )
                for conflictevent in autoevents:
                    if conflictevent.name in conflict_ids:
                        # only the event with the lowest index gets the unavailability,
                        if autoevents.index(conflictevent) > autoevents.index(
                            autoevent,
                        ):
                            autoevent.add_unavailability(conflictevent)

                # loop over event_conflicts for this speaker, register unavailability for each,
                # this means we don't schedule this event at the same time as something the
                # speaker wishes to attend.
                # Only process Events which the AutoScheduler is handling
                for conflictevent in speaker.event_conflicts.filter(
                    pk__in=events.values_list("pk", flat=True),
                ):
                    # only the event with the lowest index gets the unavailability
                    if eventindex[conflictevent] > autoevents.index(autoevent):
                        autoevent.add_unavailability(
                            autoevents[eventindex[conflictevent]],
                        )

                # loop over event_conflicts for this speaker, register unavailability for each,
                # only process Events which the AutoScheduler is not handling, and which have
                # been scheduled in one or more EventSlots
                for conflictevent in speaker.event_conflicts.filter(
                    event_slots__isnull=False,
                ).exclude(pk__in=events.values_list("pk", flat=True)):
                    # loop over the EventSlots this conflict is scheduled in
                    for conflictslot in conflictevent.event_slots.all():
                        # loop over all slots
                        for slot in self.autoslots:
                            # check if this slot overlaps with the conflictevents slot
                            if conflictslot.when & DateTimeTZRange(
                                slot.starts_at,
                                slot.starts_at + timedelta(minutes=slot.duration),
                            ):
                                # this slot overlaps with the conflicting event
                                autoevent.add_unavailability(slot)

                # Register all slots where we have no positive availability
                # for this speaker as unavailable
                available = []
                for availability in speaker.availabilities.filter(
                    available=True,
                ).values_list("when", flat=True):
                    availability = DateTimeTZRange(
                        availability.lower,
                        availability.upper,
                        "()",
                    )
                    for slot in self.autoslots:
                        slotrange = DateTimeTZRange(
                            slot.starts_at,
                            slot.starts_at + timedelta(minutes=slot.duration),
                            "()",
                        )
                        if slotrange in availability:
                            # the speaker is available for this slot
                            available.append(self.autoslots.index(slot))
                autoevent.add_unavailability(
                    *[
                        s
                        for s in self.autoslots
                        if self.autoslots.index(s) not in available
                    ]
                )

        return autoevents, autoeventindex
Example #10
 def save(self, commit=True):
     # self has .lower and .upper from .clean()
     self.instance.shift_range = DateTimeTZRange(self.lower, self.upper)
     return super().save(commit=commit)
Example #11
 def search__at(self, op, value):  # pylint: disable=unused-argument
     min_value, max_value = value_parsers.parse_date(value)
     date_range = DateTimeTZRange(min_value, max_value, '[]')
     return self.MODEL.timespan.overlaps(date_range)
Example #12
 def mark_started(self):
     self.start_time = get_current_time()
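     # start_time appears to be a Unix timestamp (see fromtimestamp below),
     # so the range runs from that instant with no upper bound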
     self.timespan = DateTimeTZRange(datetime.fromtimestamp(self.start_time), None)
Example #13
def get_period(element):
    start = element.find('StartTime').text
    end = element.findtext('EndTime')
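    # note: find()/findtext() return strings; DateTimeTZRange keeps the
    # bounds exactly as given and does not parse them into datetimes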
    return DateTimeTZRange(start, end, '[]')
Example #14
 def test_datetime_range(self):
     RangeFieldsModel.objects.create(
         datetime_range='[2014-01-01 09:00:00, 2014-01-01 12:30:00)')
     RangeFieldsModel.objects.create(datetime_range=DateTimeTZRange(
         datetime.datetime(2014, 1, 1, 9, 0),
         datetime.datetime(2014, 1, 1, 12, 30)))
Example #15
    def setUp(self):

        Topic.objects.create(name_id='drought', name='drought')

        am_process = Process.objects.create(
            name_id='apps.common.aggregate.arithmetic_mean',
            name='arithmetic mean')

        station_key = '11359201'
        station = create_station(station_key)

        station_key = 'brno2_id_by_provider'
        station_2 = create_station(station_key)

        at_prop = Property.objects.create(name_id='air_temperature',
                                          name='air temperature',
                                          unit='°C',
                                          default_mean=am_process)

        Property.objects.create(name_id='ground_air_temperature',
                                name='ground_air_temperature',
                                unit='°C',
                                default_mean=am_process)

        import_time_slots_from_config()
        t = TimeSlots.objects.get(name_id='1_hour_slot')
        t30 = TimeSlots.objects.get(name_id='30_days_daily')

        time_from = datetime(2018, 6, 10, 23, 00, 00)
        Observation.objects.create(observed_property=at_prop,
                                   feature_of_interest=station_2,
                                   procedure=am_process,
                                   result=2,
                                   phenomenon_time_range=DateTimeTZRange(
                                       time_from,
                                       time_from + timedelta(days=30),
                                       time_range_boundary),
                                   time_slots=t30)

        time_from = datetime(2018, 6, 11, 23, 00, 00)
        Observation.objects.create(observed_property=at_prop,
                                   feature_of_interest=station_2,
                                   procedure=am_process,
                                   result=1.5,
                                   phenomenon_time_range=DateTimeTZRange(
                                       time_from,
                                       time_from + timedelta(days=30),
                                       time_range_boundary),
                                   time_slots=t30)

        time_from = datetime(2018, 6, 12, 23, 00, 00)
        Observation.objects.create(observed_property=at_prop,
                                   feature_of_interest=station_2,
                                   procedure=am_process,
                                   result=3.5,
                                   phenomenon_time_range=DateTimeTZRange(
                                       time_from,
                                       time_from + timedelta(days=30),
                                       time_range_boundary),
                                   time_slots=t30)

        time_from = datetime(2018, 6, 13, 23, 00, 00)
        Observation.objects.create(observed_property=at_prop,
                                   feature_of_interest=station_2,
                                   procedure=am_process,
                                   result=1.5,
                                   phenomenon_time_range=DateTimeTZRange(
                                       time_from,
                                       time_from + timedelta(days=30),
                                       time_range_boundary),
                                   time_slots=t30)

        time_from = datetime(2018, 6, 14, 23, 00, 00)
        Observation.objects.create(observed_property=at_prop,
                                   feature_of_interest=station_2,
                                   procedure=am_process,
                                   result=1.5,
                                   phenomenon_time_range=DateTimeTZRange(
                                       time_from,
                                       time_from + timedelta(days=30),
                                       time_range_boundary),
                                   time_slots=t30)

        time_from = datetime(2018, 6, 15, 23, 00, 00)
        Observation.objects.create(observed_property=at_prop,
                                   feature_of_interest=station_2,
                                   procedure=am_process,
                                   result=1.5,
                                   phenomenon_time_range=DateTimeTZRange(
                                       time_from,
                                       time_from + timedelta(days=30),
                                       time_range_boundary),
                                   time_slots=t30)

        time_from = datetime(2018, 6, 16, 23, 00, 00)
        Observation.objects.create(observed_property=at_prop,
                                   feature_of_interest=station_2,
                                   procedure=am_process,
                                   result=1.5,
                                   phenomenon_time_range=DateTimeTZRange(
                                       time_from,
                                       time_from + timedelta(days=30),
                                       time_range_boundary),
                                   time_slots=t30)

        time_from = datetime(2018, 6, 17, 23, 00, 00)
        Observation.objects.create(observed_property=at_prop,
                                   feature_of_interest=station_2,
                                   procedure=am_process,
                                   result=1.5,
                                   phenomenon_time_range=DateTimeTZRange(
                                       time_from,
                                       time_from + timedelta(days=30),
                                       time_range_boundary),
                                   time_slots=t30)

        time_from = datetime(2018, 6, 15, 11, 00, 00)
        Observation.objects.create(observed_property=at_prop,
                                   feature_of_interest=station_2,
                                   procedure=am_process,
                                   result=15,
                                   phenomenon_time_range=DateTimeTZRange(
                                       time_from,
                                       time_from + timedelta(hours=1),
                                       time_range_boundary),
                                   time_slots=t)

        time_from = datetime(2018, 6, 15, 12, 00, 00)
        Observation.objects.create(observed_property=at_prop,
                                   feature_of_interest=station_2,
                                   procedure=am_process,
                                   result=16,
                                   phenomenon_time_range=DateTimeTZRange(
                                       time_from,
                                       time_from + timedelta(hours=1),
                                       time_range_boundary),
                                   time_slots=t)

        time_from = datetime(2018, 6, 14, 13, 00, 00)
        Observation.objects.create(observed_property=at_prop,
                                   feature_of_interest=station,
                                   procedure=am_process,
                                   result=17,
                                   phenomenon_time_range=DateTimeTZRange(
                                       time_from,
                                       time_from + timedelta(hours=1),
                                       time_range_boundary),
                                   time_slots=t)

        time_from = datetime(2018, 6, 15, 10, 00, 00)
        Observation.objects.create(observed_property=at_prop,
                                   feature_of_interest=station,
                                   procedure=am_process,
                                   result=18,
                                   phenomenon_time_range=DateTimeTZRange(
                                       time_from,
                                       time_from + timedelta(hours=1),
                                       time_range_boundary),
                                   time_slots=t)

        time_from = datetime(2018, 6, 15, 11, 00, 00)
        Observation.objects.create(observed_property=at_prop,
                                   feature_of_interest=station,
                                   procedure=am_process,
                                   result=19,
                                   phenomenon_time_range=DateTimeTZRange(
                                       time_from,
                                       time_from + timedelta(hours=1),
                                       time_range_boundary),
                                   time_slots=t)

        time_from = datetime(2018, 6, 15, 12, 00, 00)
        Observation.objects.create(observed_property=at_prop,
                                   feature_of_interest=station,
                                   procedure=am_process,
                                   result=20,
                                   phenomenon_time_range=DateTimeTZRange(
                                       time_from,
                                       time_from + timedelta(hours=1),
                                       time_range_boundary),
                                   time_slots=t)

        time_from = datetime(2018, 6, 16, 00, 00, 00)
        Observation.objects.create(observed_property=at_prop,
                                   feature_of_interest=station,
                                   procedure=am_process,
                                   result=21,
                                   phenomenon_time_range=DateTimeTZRange(
                                       time_from,
                                       time_from + timedelta(hours=1),
                                       time_range_boundary),
                                   time_slots=t)
Example #16
    def create(self, validated_data):
        user = self.context["user"]
        source = validated_data["source"]
        collection = self.target_node.get_collection()
        start, end = parse(validated_data["date"])
        dt_range = DateTimeTZRange(lower=start, upper=end)
        # actually create the media object
        media = Media.objects.create(
            creation_date=dt_range,
            license=validated_data.get("media_license"),
            title=validated_data.get("media_title", ""),
            description=validated_data.get("media_description", ""),
            set=self.target_node,
            collection=collection,
            created_by=user,
            original_media_type=validated_data["media_type"],
            original_media_identifier=validated_data.get(
                "media_identifier", ""),
            embargo_end_date=validated_data.get("embargo_end_date"),
            is_private=validated_data.get("is_private", False),
            coords_lat=validated_data.get("media_lat"),
            coords_lon=validated_data.get("media_lon"),
        )

        # save m2m
        for media2creator in validated_data["creators"]:
            MediaToCreator.objects.create(media=media, **media2creator)

        # set tags
        #
        tags_data = validated_data.get("media_tags", [])
        tags = []
        for td in tags_data:
            tag_serializer = SimpleTagSerializer(
                data=td, context={"collection": self.collection})
            assert tag_serializer.is_valid()
            tag = tag_serializer.save()
            tags.append(tag)

        media.tags.set(tags)

        archive_file = ArchiveFile.objects.create(source_id=source,
                                                  created_by=user,
                                                  media=media)

        media.files.set([archive_file])

        attachment_files = []
        for attachment in validated_data["attachments"]:
            attachment_file = AttachmentFile.objects.create(
                source_id=attachment["source"], created_by=user)
            for creator in attachment["creators"]:
                FileCreator.objects.create(file=attachment_file, **creator)

            attachment_files.append(attachment_file)

        media.attachments.set(attachment_files)

        # update the ingest queue (if available) by removing the source
        queue = self.context.get("queue")
        if queue:
            queue = IngestQueue.objects.select_for_update().get(pk=queue.pk)
            queue.link_to_media(media, source)
            queue.save()
        archive_ids = [a.pk for a in chain([archive_file], attachment_files)]

        logger.info(
            "Triggering archiving for %d file(s) (%s); media: %d; user: %d",
            len(archive_ids),
            ", ".join([str(pk) for pk in archive_ids]),
            media.pk,
            user.pk,
        )

        def ingestion_trigger():
            return archive_and_create_webassets(
                archive_ids,
                media.pk,
                # these args are for websockets via channels
                self.context["channel"],
            )

        # this instructs django to execute the function after any commit
        transaction.on_commit(ingestion_trigger)
        return media
Example #17
    def create(self, context, data_dict):
        u'''The datacitation extension will only be activated if the dataset
        has a unique field; otherwise it proceeds according to the CKAN
        standard.
        '''

        records = data_dict.get('records', None)

        current_entry_properties.primary_key = find_primary_key(records)

        if current_entry_properties.primary_key is None:
            raise InvalidDataError(
                toolkit._("The data has no unique field!"))
        else:
            if super(VersionedDatastorePostgresqlBackend, self).resource_exists(data_dict['resource_id']):
                # CKAN Datapusher pushes the entries in chunks of 250 entries
                # Because of that after pushing 250 entries, the table will exist.
                # Therefore if the table exists it does not automatically
                # indicate that it is an update. There is another manual check
                # to distinguish between UPDATE and CREATE.
                # It would be better if this were determined at the UI level

                if not records:
                    return

                detect_dict = detect_deleted_rows(data_dict, self.connection, records,
                                                  current_entry_properties.primary_key)

                if detect_dict.get('mode', None) == 'create':
                    return super(VersionedDatastorePostgresqlBackend, self).create(context, data_dict)

                old_record = detect_dict['old_record']
                record_after_delete = detect_dict['new_record']
                old_ids = detect_dict['old_ids']

                # there are also other checks to do
                # TODO check if all fields name are the same if updating dataset
                # TODO check if the number of columns is equal

                data_dict['method'] = 'update'
                data_dict['primary_key'] = current_entry_properties.primary_key

                updated_rows = detect_updated_rows(record_after_delete, old_record,
                                                   current_entry_properties.primary_key)

                insert_data = detect_inserted_rows(record_after_delete, old_ids, current_entry_properties.primary_key)

                data_dict['records'] = updated_rows


                super(VersionedDatastorePostgresqlBackend, self).upsert(context, data_dict)

                data_dict['method'] = 'insert'
                data_dict['records'] = insert_data

                return super(VersionedDatastorePostgresqlBackend, self).upsert(context, data_dict)
            else:
                fields = data_dict.get('fields', None)
                records = data_dict.get('records', None)
                fields.append(
                    {
                        "id": "sys_period",
                        "type": "tstzrange"
                    }
                )
                if records is not None:
                    for r in records:
                        r['sys_period'] = DateTimeTZRange(datetime.now(), None)

                data_dict['primary_key'] = current_entry_properties.primary_key
                data_dict['fields'] = fields
                data_dict['records'] = records
                datastore_fields = [
                    {'id': '_id', 'type': 'integer'},
                    {'id': '_full_text', 'type': 'tsvector'},
                ]
                extra_field = [
                    {
                        "id": "op_type",
                        "type": "text"
                    }
                ]
                fields_of_history_table = datastore_fields + list(fields) + extra_field
                history_data_dict = {
                    "fields": fields_of_history_table,
                    "resource_id": data_dict['resource_id'] + '_history'
                }
                create_history_table(history_data_dict, self.engine)
                result = super(VersionedDatastorePostgresqlBackend, self).create(context, data_dict)
                create_versioning_trigger(data_dict, self.connection)
                create_op_type_trigger(identifier(data_dict['resource_id']),
                                       identifier(data_dict['resource_id'] + '_history'), self.connection)

                return result
Example #18
def mv_search_datasets(index,
                       sel=MVSelectOpts.IDS,
                       times=None,
                       layer=None,
                       geom=None,
                       mask=False):
    """
    Perform a dataset query via the space_time_view

    :param layer: A ows_configuration.OWSNamedLayer object (single or multiproduct)
    :param index: A datacube index (required)

    :param sel: Selection mode - a MVSelectOpts enum. Defaults to IDS.
    :param times: A list of pairs of datetimes (with time zone)
    :param geom: A datacube.utils.geometry.Geometry object
    :param mask: Bool, if true use the flags product of layer

    :return: See MVSelectOpts doc
    """
    engine = get_sqlalc_engine(index)
    stv = st_view
    if layer is None:
        raise Exception("Must filter by product/layer")
    if mask:
        prod_ids = [p.id for p in layer.pq_products]
    else:
        prod_ids = [p.id for p in layer.products]

    s = select(sel.sel(stv)).where(stv.c.dataset_type_ref.in_(prod_ids))
    if times is not None:
        s = s.where(
            or_(*[
                stv.c.temporal_extent.op("&&")(DateTimeTZRange(*t))
                for t in times
            ]))
    orig_crs = None
    if geom is not None:
        orig_crs = geom.crs
        if str(geom.crs) != "EPSG:4326":
            geom = geom.to_crs("EPSG:4326")
        geom_js = json.dumps(geom.json)
        s = s.where(stv.c.spatial_extent.intersects(geom_js))
    # print(s) # Print SQL Statement
    conn = engine.connect()
    if sel == MVSelectOpts.ALL:
        return conn.execute(s)
    if sel == MVSelectOpts.IDS:
        return [r[0] for r in conn.execute(s)]
    if sel in (MVSelectOpts.COUNT, MVSelectOpts.EXTENT):
        for r in conn.execute(s):
            if sel == MVSelectOpts.COUNT:
                return r[0]
            if sel == MVSelectOpts.EXTENT:
                geojson = r[0]
                if geojson is None:
                    return None
                uniongeom = ODCGeom(json.loads(geojson), crs="EPSG:4326")
                if geom:
                    intersect = uniongeom.intersection(geom)
                    if orig_crs and orig_crs != "EPSG:4326":
                        intersect = intersect.to_crs(orig_crs)
                else:
                    intersect = uniongeom
                return intersect
    if sel == MVSelectOpts.DATASETS:
        return index.datasets.bulk_get([r[0] for r in conn.execute(s)])
    assert False
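times above is a list of (start, end) pairs of timezone-aware datetimes; each pair is unpacked into a DateTimeTZRange, and the per-window && tests are OR-ed together, so a dataset matches if it overlaps any requested window. A self-contained sketch of that filter (the table here is a stand-in, not the real space_time_view definition):

from datetime import datetime, timezone
from sqlalchemy import Column, Integer, MetaData, Table, or_, select
from sqlalchemy.dialects.postgresql import TSTZRANGE
from psycopg2.extras import DateTimeTZRange

metadata = MetaData()
stv = Table("space_time_view", metadata,       # illustrative stand-in
            Column("id", Integer, primary_key=True),
            Column("temporal_extent", TSTZRANGE))

times = [(datetime(2020, 1, 1, tzinfo=timezone.utc),
          datetime(2020, 1, 8, tzinfo=timezone.utc)),
         (datetime(2020, 6, 1, tzinfo=timezone.utc),
          datetime(2020, 6, 8, tzinfo=timezone.utc))]
# match rows whose extent overlaps ANY of the requested windows
s = select(stv.c.id).where(
    or_(*[stv.c.temporal_extent.op("&&")(DateTimeTZRange(*t)) for t in times]))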
Example #19
 def hour_to_pt_range(hour):
     time_from = time(int(hour))
     pt_from = datetime.combine(phenomenon_date.date(),
                                time_from).replace(tzinfo=UTC_P0100)
     pt_range = DateTimeTZRange(pt_from, pt_from + timedelta(hours=1))
     return pt_range
Example #20
 def filter_queryset(self, request, queryset, view):
     return queryset.filter(active_range__overlap=DateTimeTZRange(
         timezone.now() - datetime.timedelta(hours=1),
         timezone.now() + datetime.timedelta(hours=1),
     ))
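The __overlap lookup from django.contrib.postgres compiles to the same && operator used in the SQLAlchemy examples above. A minimal sketch with a hypothetical model (assumed to live in an installed Django app):

import datetime

from django.contrib.postgres.fields import DateTimeRangeField
from django.db import models
from django.utils import timezone
from psycopg2.extras import DateTimeTZRange

class Broadcast(models.Model):   # hypothetical model, for illustration only
    active_range = DateTimeRangeField()

def currently_active():
    # rows whose range overlaps the window [now - 1h, now + 1h)
    window = DateTimeTZRange(
        timezone.now() - datetime.timedelta(hours=1),
        timezone.now() + datetime.timedelta(hours=1),
    )
    return Broadcast.objects.filter(active_range__overlap=window)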
Example #21
 def test_json_datetimerange_dump_correctly(self):
     t = datetime(2016, 1, 1, 1, 2, 3, 313337, tzinfo=timezone.utc)
     d = DateTimeTZRange(t, t)
     self.assertEqual(
         '["2016-01-01T01:02:03.313337+0000", "2016-01-01T01:02:03.313337+0000"]',
         binder_json.jsondumps(d))
Example #22
def process_file(filename):
    """
    Process a single waveform file.

    This is a bit more complex as it needs to update existing database
    objects and cannot just always create new ones. Otherwise the
    identifiers quickly reach very high numbers.
    """
    # Resolve symlinks and make a canonical simple path.
    filename = os.path.realpath(os.path.normpath(os.path.abspath(filename)))

    # ------------------------------------------------------------------------
    # Step 1: Get the file if it exists.
    try:
        file = models.File.objects.get(path__name=os.path.dirname(filename),
                                       name=os.path.basename(filename))

        # This path is only reached if the file exists. Check size, mtime,
        # and ctime and if it all remains the same, return.
        stats = os.stat(filename)
        mtime = to_datetime(stats.st_mtime)
        ctime = to_datetime(stats.st_ctime)
        size = int(stats.st_size)

        # Nothing to do if nothing changed.
        if file.size == size and file.mtime == mtime and file.ctime == ctime:
            return

    # If it does not exist, create it in the next step.
    except models.File.DoesNotExist:
        file = None

    # ------------------------------------------------------------------------
    # Step 2: Read the file and perform a couple of sanity checks. Delete an
    #         eventually existing file.
    try:
        stream = read(filename, verify_chksum=False)
    except:
        # Delete if invalid file.
        if file is not None:
            file.delete()
        # Reraise the exception.
        raise

    if len(stream) == 0:
        # Delete if invalid file. (This must happen before the raise,
        # otherwise the cleanup is unreachable.)
        if file is not None:
            file.delete()
        msg = "'%s' is a valid waveform file but contains no actual data"
        raise JaneWaveformTaskException(msg % filename)

    # Log channels for example are special as they have no sampling rate.
    if any(tr.stats.sampling_rate == 0 for tr in stream):
        # Make sure there is only one set of network, station,
        # location, and channel.
        ids = set(tr.id for tr in stream)
        if len(ids) != 1:
            # Delete if invalid file.
            if file is not None:
                file.delete()
            raise ValueError("File has a trace with sampling rate zero "
                             "and more then one different id.")

    # ------------------------------------------------------------------------
    # Step 3: Parse the file. Figure out which traces changed.
    #         Make sure it either gets created for a file or not.
    with transaction.atomic():
        # Create the file object if it does not exist.
        if file is None:
            path_obj = models.Path.objects.get_or_create(
                name=os.path.dirname(os.path.abspath(filename)))[0]
            models.File.objects. \
                filter(path=path_obj, name=os.path.basename(filename)). \
                delete()
            file = models.File.objects. \
                create(path=path_obj, name=os.path.basename(filename))

        # set format
        file.format = stream[0].stats._format

        # Collect information about all traces in a dictionary.
        traces_in_file = {}

        # Log channels for example are special as they have no sampling rate.
        if any(tr.stats.sampling_rate == 0 for tr in stream):
            starttime = min(tr.stats.starttime for tr in stream)
            endtime = max(tr.stats.endtime for tr in stream)
            if starttime == endtime:
                starttime += 0.001

            file.gaps = 0
            file.overlaps = 0
            file.save()

            try:
                quality = stream[0].stats.mseed.dataquality
            except AttributeError:
                quality = None

            traces_in_file[0] = {
                "starttime": starttime,
                "endtime": endtime,
                "network": stream[0].stats.network.upper(),
                "station": stream[0].stats.station.upper(),
                "location": stream[0].stats.location.upper(),
                "channel": stream[0].stats.channel.upper(),
                "sampling_rate": stream[0].stats.sampling_rate,
                "npts": sum(tr.stats.npts for tr in stream),
                "duration": endtime - starttime,
                "quality": quality,
                "preview_trace": None,
                "pos": 0
            }
        else:
            # get number of gaps and overlaps per file
            gap_list = stream.get_gaps()
            file.gaps = len([g for g in gap_list if g[6] >= 0])
            file.overlaps = len([g for g in gap_list if g[6] < 0])
            file.save()
            for pos, trace in enumerate(stream):
                try:
                    quality = trace.stats.mseed.dataquality
                except AttributeError:
                    quality = None

                # Preview is optional. For some traces, e.g. LOG channels it
                # does not work.
                try:
                    preview_trace = create_preview(trace, 60)
                except:
                    preview_trace = None
                else:
                    preview_trace = list(map(float, preview_trace.data))

                traces_in_file[pos] = {
                    "starttime": trace.stats.starttime,
                    "endtime": trace.stats.endtime,
                    "network": trace.stats.network.upper(),
                    "station": trace.stats.station.upper(),
                    "location": trace.stats.location.upper(),
                    "channel": trace.stats.channel.upper(),
                    "sampling_rate": trace.stats.sampling_rate,
                    "npts": trace.stats.npts,
                    "duration": trace.stats.endtime - trace.stats.starttime,
                    "quality": quality,
                    "preview_trace": preview_trace,
                    "pos": pos
                }

        # Get all existing traces.
        for tr_db in models.ContinuousTrace.objects.filter(file=file):
            # Attempt to get the existing trace object.
            if tr_db.pos in traces_in_file:
                tr = traces_in_file[tr_db.pos]
                # Delete in the dictionary.
                del traces_in_file[tr_db.pos]

                tr_db.timerange = DateTimeTZRange(
                    lower=tr["starttime"].datetime,
                    upper=tr["endtime"].datetime)
                tr_db.network = tr["network"]
                tr_db.station = tr["station"]
                tr_db.location = tr["location"]
                tr_db.channel = tr["channel"]
                tr_db.sampling_rate = tr["sampling_rate"]
                tr_db.npts = tr["npts"]
                tr_db.duration = tr["duration"]
                tr_db.quality = tr["quality"]
                tr_db.preview_trace = tr["preview_trace"]
                tr_db.pos = tr["pos"]
                tr_db.save()

            # If it does not exist in the waveform file, delete it here as
            # it is (for whatever reason) no longer in the file.
            else:
                tr_db.delete()

        # Add remaining items.
        for tr in traces_in_file.values():
            tr_db = models.ContinuousTrace(file=file,
                                           timerange=DateTimeTZRange(
                                               lower=tr["starttime"].datetime,
                                               upper=tr["endtime"].datetime))
            tr_db.network = tr["network"]
            tr_db.station = tr["station"]
            tr_db.location = tr["location"]
            tr_db.channel = tr["channel"]
            tr_db.sampling_rate = tr["sampling_rate"]
            tr_db.npts = tr["npts"]
            tr_db.duration = tr["duration"]
            tr_db.quality = tr["quality"]
            tr_db.preview_trace = tr["preview_trace"]
            tr_db.pos = tr["pos"]
            tr_db.save()
Example #23
 def test_datetime_prepare_value(self):
     field = pg_forms.DateTimeRangeField()
     value = field.prepare_value(
         DateTimeTZRange(datetime.datetime(2015, 5, 22, 16, 6, 33, tzinfo=timezone.utc), None)
     )
     self.assertEqual(value, [datetime.datetime(2015, 5, 22, 18, 6, 33), None])
Example #24
def create_event(user, start_time=None, end_time=None) -> Event:
    return Event.objects.create(user=user,
                                time_range=DateTimeTZRange(lower=start_time
                                                           or now(),
                                                           upper=end_time))
Example #25
    def ScheduleEvent(self, request, context):
        if not request.content:
            context.abort(grpc.StatusCode.INVALID_ARGUMENT,
                          errors.MISSING_EVENT_CONTENT)
        if request.HasField("online_information"):
            online = True
            geom = None
            address = None
            link = request.online_information.link
        elif request.HasField("offline_information"):
            online = False
            if not (request.offline_information.address
                    and request.offline_information.lat
                    and request.offline_information.lng):
                context.abort(grpc.StatusCode.INVALID_ARGUMENT,
                              errors.MISSING_EVENT_ADDRESS_OR_LOCATION)
            if request.offline_information.lat == 0 and request.offline_information.lng == 0:
                context.abort(grpc.StatusCode.INVALID_ARGUMENT,
                              errors.INVALID_COORDINATE)
            geom = create_coordinate(request.offline_information.lat,
                                     request.offline_information.lng)
            address = request.offline_information.address
            link = None
        else:
            context.abort(grpc.StatusCode.INVALID_ARGUMENT,
                          errors.MISSING_EVENT_ADDRESS_LOCATION_OR_LINK)

        start_time = to_aware_datetime(request.start_time)
        end_time = to_aware_datetime(request.end_time)

        _check_occurrence_time_validity(start_time, end_time, context)

        with session_scope() as session:
            res = session.execute(
                select(Event, EventOccurrence).where(
                    EventOccurrence.id == request.event_id).where(
                        EventOccurrence.event_id == Event.id)).one_or_none()

            if not res:
                context.abort(grpc.StatusCode.NOT_FOUND,
                              errors.EVENT_NOT_FOUND)

            event, occurrence = res

            if not _can_edit_event(session, event, context.user_id):
                context.abort(grpc.StatusCode.PERMISSION_DENIED,
                              errors.EVENT_EDIT_PERMISSION_DENIED)

            if (request.photo_key and not session.execute(
                    select(Upload).where(Upload.key == request.photo_key)).
                    scalar_one_or_none()):
                context.abort(grpc.StatusCode.INVALID_ARGUMENT,
                              errors.PHOTO_NOT_FOUND)

            during = DateTimeTZRange(start_time, end_time)

            # && is the overlap operator for ranges
            if (session.execute(
                    select(EventOccurrence.id).where(
                        EventOccurrence.event_id == event.id).where(
                            EventOccurrence.during.op("&&")(
                                during))).scalars().first() is not None):
                context.abort(grpc.StatusCode.FAILED_PRECONDITION,
                              errors.EVENT_CANT_OVERLAP)

            occurrence = EventOccurrence(
                event=event,
                content=request.content,
                geom=geom,
                address=address,
                link=link,
                photo_key=request.photo_key
                if request.photo_key != "" else None,
                # timezone=timezone,
                during=during,
                creator_user_id=context.user_id,
            )
            session.add(occurrence)

            attendee = EventOccurrenceAttendee(
                user_id=context.user_id,
                occurrence=occurrence,
                attendee_status=AttendeeStatus.going,
            )
            session.add(attendee)

            session.flush()

            # TODO: notify

            return event_to_pb(session, occurrence, context)
Example #26
def update_event(pk, start_time, end_time) -> Event:
    # Model instances have no .update(); run a queryset update, then
    # return the refreshed instance.
    Event.objects.filter(id=pk).update(
        time_range=DateTimeTZRange(lower=start_time, upper=end_time))
    return Event.objects.get(id=pk)
Example #27
 def test_valid_timestamps(self):
     field = pg_forms.DateTimeRangeField()
     value = field.clean(['01/01/2014 00:00:00', '02/02/2014 12:12:12'])
     lower = datetime.datetime(2014, 1, 1, 0, 0, 0)
     upper = datetime.datetime(2014, 2, 2, 12, 12, 12)
     self.assertEqual(value, DateTimeTZRange(lower, upper))
Example #28
STATION_PROPS = {
    '11359201': {
        'id_by_provider': '11359201',
        'geom_type': 'Point',
        'name': 'Brno',
        'coordinates': [1847520.94, 6309563.27]
    },
    'brno2_id_by_provider': {
        'id_by_provider': 'brno2_id_by_provider',
        'geom_type': 'Point',
        'name': 'Brno2',
        'coordinates': [1847520.94, 6309563.27]
    }
}

time_range_boundary = '[)'
time_from = datetime(2018, 6, 15, 00, 00, 00)
date_time_range = DateTimeTZRange(time_from, time_from + timedelta(hours=24),
                                  time_range_boundary)


def create_station(key):
    station_key = key
    props = STATION_PROPS[station_key]
    coordinates = props['coordinates']
    return SamplingFeature.objects.create(
        id_by_provider=props['id_by_provider'],
        name=props['name'],
        geometry=GEOSGeometry(props['geom_type'] + ' (' + str(coordinates[0]) +
                              ' ' + str(coordinates[1]) + ')',
                              srid=3857))


class UtilTestCase(APITestCase):
Example #29
def get_timeseries(
        phenomenon_time_range,
        num_time_slots,
        get_observations,
        detector_method='bitmap_detector',  # LinkedIn bitmap
        detector_params={
            "precision": 8,
            "lag_window_size": 20,
            "future_window_size": 20,
            "chunk_size": 2
        },
        anomaly_breaks=DEFAULT_ANOMALY_BREAKS,
        value_breaks=DEFAULT_VALUE_BREAKS,
        extend_range=True,
        baseline_time_range=None,
        shift=True,
        use_baseline=True):
    #observations = get_observations(3, 5)
    #observations = get_observations(0, 0)

    # if baseline_time_range is not None:
    # use_baseline = True
    #     baseline_time_series = observation_provider_model.objects.filter(
    #         phenomenon_time_range__contained_by=baseline_time_range,
    #         phenomenon_time_range__duration=frequency,
    #         phenomenon_time_range__matches=frequency,
    #         observed_property=observed_property,
    #         procedure=process,
    #         feature_of_interest=feature_of_interest
    #     )
    #     baseline_reduced = {obs.phenomenon_time_range.lower.timestamp(): obs.result for obs in baseline_time_series}

    lower_ext = 0
    upper_ext = 0

    if extend_range:
        lower_ext = detector_params["lag_window_size"]
        upper_ext = detector_params["future_window_size"]

        if use_baseline and shift:
            upper_ext = 0

        if use_baseline and not shift:
            lower_ext = int(upper_ext / 2)
            upper_ext -= lower_ext + 1

    observations = get_observations(lower_ext, upper_ext)

    if not isinstance(observations, list):
        raise Exception('observations should be a list')

    if len(observations) == 0:
        return {
            'phenomenon_time_range': DateTimeTZRange(),
            'property_values': [],
            'property_value_percentiles': {},
            'property_anomaly_rates': [],
            'property_anomaly_percentiles': {},
        }

    property_values = observations_to_property_values(observations)

    VALID_VALUES_LENGTH = len(property_values) - property_values.count(None)

    if VALID_VALUES_LENGTH == 1:
        return {
            'phenomenon_time_range': phenomenon_time_range,
            'property_values': property_values,
            'property_value_percentiles': {
                50: property_values[0]
            },
            'property_anomaly_rates': [0],
            'property_anomaly_percentiles': {
                0: 0
            },
        }

    minimal_points_in_windows = DEFAULT_BITMAP_MOD_MINIMAL_POINTS_IN_WINDOWS

    if use_baseline:
        minimal_points_in_windows /= 2

    # TODO: warn the user if valid_values_length <= minimal_points_in_windows?

    window_length = (detector_params["future_window_size"]
                     if use_baseline else
                     detector_params["future_window_size"] +
                     detector_params["lag_window_size"])

    if minimal_points_in_windows < valid_values_length <= window_length:
        # not enough points for the configured windows: shrink both
        # windows in proportion to the number of valid values
        detector_params["future_window_size"] = int(
            max(DEFAULT_BITMAP_MOD_MINIMAL_POINTS_IN_WINDOWS / 2,
                valid_values_length *
                DEFAULT_BITMAP_MOD_LEADING_WINDOW_SIZE_PCT))
        detector_params["lag_window_size"] = int(
            max(DEFAULT_BITMAP_MOD_MINIMAL_POINTS_IN_WINDOWS / 2,
                valid_values_length *
                DEFAULT_BITMAP_MOD_LAGGING_WINDOW_SIZE_PCT))

    property_value_percentiles = percentiles(
        property_values[lower_ext:lower_ext + num_time_slots], value_breaks)

    if use_baseline and baseline_time_range is None:
        # no explicit baseline period: use the (extended) observations
        # themselves as the baseline series
        baseline_reduced = {
            obs.phenomenon_time_range.lower.timestamp(): obs.result
            for obs in observations
        }
    else:
        baseline_reduced = None

    obs_reduced = {
        obs.phenomenon_time_range.lower.timestamp(): obs.result
        for obs in observations
    }

    if valid_values_length <= 1:
        # every remaining value is missing (the single-value case already
        # returned above): keep None for missing slots, zero otherwise
        property_anomaly_rates = [
            0 if value is not None else value
            for value in property_values[lower_ext:lower_ext + num_time_slots]
        ]

        return {
            'phenomenon_time_range': phenomenon_time_range,
            'property_values':
                property_values[lower_ext:lower_ext + num_time_slots],
            'property_value_percentiles': property_value_percentiles,
            'property_anomaly_rates': property_anomaly_rates,
            'property_anomaly_percentiles': {0: 0},
        }

    if baseline_reduced is None:
        detector = AnomalyDetector(obs_reduced,
                                   algorithm_name=detector_method,
                                   algorithm_params=detector_params,
                                   score_only=True)
    else:
        detector = AnomalyDetector(obs_reduced,
                                   baseline_reduced,
                                   algorithm_name=detector_method,
                                   algorithm_params=detector_params,
                                   score_only=True)

    property_anomaly_rates = detector.get_all_scores().values

    property_anomaly_percentiles = percentiles(
        property_anomaly_rates[lower_ext:lower_ext + num_time_slots],
        anomaly_breaks)

    # re-insert None placeholders so the anomaly rates line up with the
    # positions of the original property_values
    for i in range(len(property_values)):
        if property_values[i] is None:
            property_anomaly_rates.insert(i, None)

    return {
        'phenomenon_time_range': phenomenon_time_range,
        'property_values':
            property_values[lower_ext:lower_ext + num_time_slots],
        'property_value_percentiles': property_value_percentiles,
        'property_anomaly_rates':
            property_anomaly_rates[lower_ext:lower_ext + num_time_slots],
        'property_anomaly_percentiles': property_anomaly_percentiles,
    }
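
The AnomalyDetector calls above follow LinkedIn's luminol library (the "LinkedIn bitmap" comment in the signature refers to its bitmap_detector algorithm). A standalone sketch of that call pattern, with made-up timestamps and values:

# Standalone luminol sketch (pip install luminol); the series is made up:
# mostly flat hourly values with a single spike.
from luminol.anomaly_detector import AnomalyDetector

values = [2.0] * 24
values[12] = 9.0
# luminol expects a {epoch_timestamp: value} mapping
series = {1577836800 + i * 3600: v for i, v in enumerate(values)}

detector = AnomalyDetector(series,
                           algorithm_name='bitmap_detector',
                           algorithm_params={
                               "precision": 8,
                               "lag_window_size": 5,
                               "future_window_size": 5,
                               "chunk_size": 2,
                           },
                           score_only=True)

scores = detector.get_all_scores()  # a luminol TimeSeries
print(scores.values)                # one anomaly score per input point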
Example #30
0
def import_events(provider_logs, day_from, day_to):
    observed_property = "occuring_events"
    procedure = "observation"
    new_observations = []

    # the whole extent (used as feature_of_interest) covers the D1,
    # Brno and Brno-venkov admin units
    whole_extent = get_special_extent()
    whole_extent_units = list(whole_extent.admin_units.all())
    # collect existing IDs to prevent duplicates
    ids = set(
        EventObservation.objects.values_list('id_by_provider', flat=True))

    i = 0
    for provider_log in provider_logs.filter(
            received_time__range=(day_from, day_to)).iterator():

        data = provider_log.body
        tree = ET.fromstring(data)

        for msg in tree.iter('MSG'):
            codes = []
            category = ""

            id_by_provider = msg.attrib['id']

            if id_by_provider in ids:
                print('Event already in database: {}'.format(id_by_provider))
                continue

            ids.add(id_by_provider)

            roads = []
            streets = []
            for tag in msg.iter('DEST'):
                road = tag.find('ROAD')
                is_d1 = False
                if road is not None and 'RoadNumber' in road.attrib:
                    is_d1 = road.attrib['RoadNumber'] == 'D1'
                town_ship = tag.attrib['TownShip']
                if town_ship in ('Brno-venkov', 'Brno-město') or is_d1:
                    if 'TownDistrictCode' in tag.attrib:
                        code = tag.attrib['TownDistrictCode']
                    else:
                        code = tag.attrib['TownCode']
                    codes.append(code)

                    for street_tag in tag.iter('STRE'):
                        if 'StreetCode' in street_tag.attrib and 'StreetName' in street_tag.attrib:
                            street = Street.objects.get_or_create(
                                id_by_provider=street_tag.attrib['StreetCode'],
                                name=street_tag.attrib['StreetName'],
                                geometry=None)[0]
                            streets.append(street)

                    for road_tag in tag.iter('ROAD'):
                        if 'RoadNumber' in road_tag.attrib and 'RoadClass' in road_tag.attrib:
                            road = Road.objects.get_or_create(
                                road_number=road_tag.attrib['RoadNumber'],
                                road_class=road_tag.attrib['RoadClass'],
                                geometry=None)[0]
                            roads.append(road)

            if not codes:
                continue

            # take the event code from the first EVI tag, if any
            evi = next(msg.iter('EVI'), None)
            if evi is not None:
                category = evi.attrib['eventcode']

            start_time = None
            end_time = None
            for tag in msg.iter('TSTA'):
                start_time = parse(tag.text)
            for tag in msg.iter('TSTO'):
                end_time = parse(tag.text)
            if start_time is None or end_time is None:
                # skip malformed messages that lack a TSTA or TSTO time
                continue

            start_time = start_time.astimezone(UTC_P0100)
            end_time = end_time.astimezone(UTC_P0100)
            if end_time < start_time:
                start_time, end_time = end_time, start_time

            dt_range = DateTimeTZRange(start_time, end_time)

            coord_x = None
            coord_y = None
            for tag in msg.iter('COORD'):
                coord_x = tag.attrib['x']
                coord_y = tag.attrib['y']

            geom = None
            if coord_x is not None and coord_y is not None:
                # NB: x and y are swapped when building the point
                geom = GEOSGeometry(Point(float(coord_y), float(coord_x)),
                                    srid=4326)
                geom = geom.transform(3857, clone=True)

            # codes is guaranteed non-empty at this point
            admin_units = AdminUnit.objects.filter(id_by_provider__in=codes)
            units_list = list(admin_units)
            for admin_unit in units_list:
                if admin_unit not in whole_extent_units:
                    whole_extent.admin_units.add(admin_unit)
                    whole_extent_units.append(admin_unit)

            # find an existing extent that matches exactly this set of
            # admin units
            event_extents = EventExtent.objects.filter(
                admin_units__in=admin_units).order_by('admin_units')
            event_extent = None

            for extent in event_extents:
                if list(extent.admin_units.all()) == units_list:
                    event_extent = extent
                    break

            event_category = EventCategory.objects.get(
                id_by_provider=category)
            observation = EventObservation(
                phenomenon_time_range=dt_range,
                observed_property=Property.objects.get(
                    name_id=observed_property),
                feature_of_interest=whole_extent,
                procedure=Process.objects.get(name_id=procedure),
                category=event_category,
                id_by_provider=id_by_provider,
                result=event_extent,
                point_geometry=geom,
                provider_log=provider_log,
            )
            observation.save()
            for road in roads:
                observation.road.add(road)
            for street in streets:
                observation.street.add(street)
            new_observations.append(observation)
            i += 1
            print('Number of new events: {}'.format(i))

    return new_observations
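
For reference, a minimal message in the shape import_events() parses, and a hypothetical invocation; the element and attribute names are taken from the parser above, while the values, the ProviderLog model name, and the dates are illustrative:

# Made-up message body in the structure the parser walks (MSG, DEST,
# ROAD, STRE, EVI, TSTA, TSTO, COORD); all values are illustrative.
sample_body = """
<DOC>
  <MSG id="evt-123">
    <DEST TownShip="Brno-město" TownCode="582786">
      <ROAD RoadNumber="D1" RoadClass="1"/>
      <STRE StreetCode="4543" StreetName="Koliste"/>
    </DEST>
    <EVI eventcode="101"/>
    <TSTA>2021-03-01T08:00:00+01:00</TSTA>
    <TSTO>2021-03-01T09:30:00+01:00</TSTO>
    <COORD x="49.19" y="16.61"/>
  </MSG>
</DOC>
"""

from datetime import datetime, timezone

day_from = datetime(2021, 3, 1, tzinfo=timezone.utc)
day_to = datetime(2021, 3, 2, tzinfo=timezone.utc)
# ProviderLog is an assumed model name; the function only requires a
# queryset whose rows expose `body` and `received_time`.
new = import_events(ProviderLog.objects.all(), day_from, day_to)
print('Imported {} new events'.format(len(new)))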