def test_node_create(self):
        """A freshly created Node is retrievable from the graph backend with matching fields."""
        created_node = Node.objects.create()
        backend_record = self._backend.get(created_node)

        # The backend stores the id as a string and datetimes as ISO strings.
        self.assertEqual(str(created_node.id), backend_record.get("id"))
        for field in ("created", "modified"):
            self.assertEqual(
                getattr(created_node, field),
                parse_datetime(backend_record.get(field)),
            )
Example #2
0
    def test_synchronize_remove(self):
        """After the backing files vanish, synchronize() must drop the dataset.

        Builds on test_synchronize_add for the initial collection state, then
        registers a dataset whose data/metadata locations do not exist on disk
        and verifies that synchronization removes its Coverage record.
        """
        self.test_synchronize_add()
        # add a dataset with a non-existing location and assert that it is
        # removed
        dataset = models.RectifiedDataset(
            identifier="rectified-1",
            footprint=GEOSGeometry("MULTIPOLYGON (((-111.6210939999999994 26.8588260000000005, -113.0273439999999994 -4.0786740000000004, -80.6835939999999994 -9.7036739999999995, -68.0273439999999994 15.6088260000000005, -111.6210939999999994 26.8588260000000005)))"),
            begin_time=parse_datetime("2013-06-11T14:55:23Z"),
            end_time=parse_datetime("2013-06-11T14:55:23Z"),
            min_x=10, min_y=10, max_x=20, max_y=20, srid=4326,
            size_x=100, size_y=100,
            range_type=models.RangeType.objects.get(name="RGB")
        )
        # validate first so model-level constraint errors surface here, not on save
        dataset.full_clean()
        dataset.save()

        # data items deliberately point at files that do not exist
        backends.DataItem.objects.create(
            dataset=dataset, semantic="bands", location="doesnotexist.tif"
        )

        backends.DataItem.objects.create(
            dataset=dataset, semantic="metadata", location="doesnotexist.xml"
        )
        self.collection.insert(dataset)

        synchronize(self.collection)
        # the dataset backed by missing files must be gone after the sync
        with self.assertRaises(models.Coverage.DoesNotExist):
            models.Coverage.objects.get(
                identifier="rectified-1"
            )
Example #3
0
 def test_get_mandatos_municipais_soh_um(self):
     """A municipal range from 2009-10-10 to 2012-10-10 yields exactly one mandate, starting 2009."""
     ini_date = parse_datetime('2009-10-10 0:0:0')
     fim_date = parse_datetime('2012-10-10 0:0:0')
     mandato_lists = utils.MandatoLists()
     mandatos = mandato_lists.get_mandatos(MUNICIPAL, ini_date, fim_date)
     # assertEquals is a deprecated alias (removed in Python 3.12); use assertEqual
     self.assertEqual(len(mandatos), 1)
     self.assertEqual(mandatos[0].year, 2009)
Example #4
0
 def setUp(self):
     """Create helper datetimes that are fixed offsets from the patched now()."""
     fixed_times = {
         'one_hour_ago': '2014-09-22T21:21:36.867936+00:00',
         'two_hours_ago': '2014-09-22T20:21:36.867936+00:00',
         'one_day_ago': '2014-09-21T22:21:36.867936+00:00',
         'two_days_ago': '2014-09-20T22:21:36.867936+00:00',
     }
     for attribute, iso_string in fixed_times.items():
         setattr(self, attribute, parse_datetime(iso_string))
Example #5
0
	def test_decline_match_reschedule(self):
		"""Declining a reschedule deletes the request and leaves the match at its original date."""
		self.create_bo_case()
		proposed_start = dateparse.parse_datetime("2018-04-19T18:00:00+01:00")
		reschedule_request = self.t.ask_for_match_reschedule(self.m1, proposed_start)
		self.t2.decline_match_reschedule(reschedule_request)
		# the request is gone, the match stays at 15:00 and nothing moved to 18:00
		self.assertEqual(MatchReschedule.objects.all().count(), 0)
		self.assertTrue(Match.objects.get(bo=self.bo, date=dateparse.parse_datetime("2018-04-19T15:00:00+01:00")))
		self.assertFalse(Match.objects.filter(bo=self.bo, date=proposed_start).exists())
Example #6
0
 def test_parse_datetime(self):
     """parse_datetime accepts ISO-ish inputs with optional offsets and rejects bad values."""
     valid_cases = [
         ('2012-04-23T09:15:00', datetime(2012, 4, 23, 9, 15)),
         ('2012-4-9 4:8:16', datetime(2012, 4, 9, 4, 8, 16)),
         ('2012-04-23T09:15:00Z',
          datetime(2012, 4, 23, 9, 15, 0, 0, get_fixed_timezone(0))),
         ('2012-4-9 4:8:16-0320',
          datetime(2012, 4, 9, 4, 8, 16, 0, get_fixed_timezone(-200))),
         ('2012-04-23T10:20:30.400+02:30',
          datetime(2012, 4, 23, 10, 20, 30, 400000, get_fixed_timezone(150))),
         ('2012-04-23T10:20:30.400+02',
          datetime(2012, 4, 23, 10, 20, 30, 400000, get_fixed_timezone(120))),
         ('2012-04-23T10:20:30.400-02',
          datetime(2012, 4, 23, 10, 20, 30, 400000, get_fixed_timezone(-120))),
     ]
     for raw, expected in valid_cases:
         self.assertEqual(parse_datetime(raw), expected)
     # Unrecognised format yields None; recognised format with out-of-range
     # values raises ValueError.
     self.assertIsNone(parse_datetime('20120423091500'))
     with self.assertRaises(ValueError):
         parse_datetime('2012-04-56T09:15:90')
Example #7
0
def assert_datetime_fuzzy_equal(dt1, dt2, fuzziness=1):
    """Assert two datetimes (or parseable datetime strings) differ by < *fuzziness* seconds."""
    parsed = [parse_datetime(value) if isinstance(value, str) else value
              for value in (dt1, dt2)]
    delta = abs(parsed[0] - parsed[1])
    assert delta.total_seconds() < fuzziness
    def test_report_hourly(self):
        """
        Case: The sum of all increment values get requested
        Expected: Two items get returned for each hour one with the sum of
                  value_increment
        """
        hourly_values = {21: [43, 78, 28], 22: [15, 23, 45]}

        for hour, values in hourly_values.items():
            for value in values:
                self.setup_reading(value, year=2016, month=5, day=19, hour=hour)

        reports = Reading.reports.hour()

        self.assertEqual(len(reports), 2)

        # one report per hour, carrying the summed increments for that hour
        expected = [
            ('2016-05-19 21:00:00', sum(hourly_values[21])),
            ('2016-05-19 22:00:00', sum(hourly_values[22])),
        ]
        for item, (timestamp, total) in zip(reports, expected):
            self.assertIsInstance(item, ReadingReport)
            self.assertEqual(item.datetime,
                             dateparse.parse_datetime(timestamp))
            self.assertEqual(item.power_meter, self.power_meter.pk)
            self.assertEqual(item.value_increment__sum, total)
    def test_report_yearly(self):
        """
        Case: The sum of all increment values get requested
        Expected: Two items get returned for each year one with the sum of
                  value_increment
        """
        yearly_values = {2015: [43, 78, 28], 2016: [15, 23, 45]}

        for year, values in yearly_values.items():
            for value in values:
                self.setup_reading(value, year=year)

        reports = Reading.reports.year()

        self.assertEqual(len(reports), 2)

        # one report per year, carrying the summed increments for that year
        expected = [
            ('2015-01-01 00:00:00', sum(yearly_values[2015])),
            ('2016-01-01 00:00:00', sum(yearly_values[2016])),
        ]
        for item, (timestamp, total) in zip(reports, expected):
            self.assertIsInstance(item, ReadingReport)
            self.assertEqual(item.datetime,
                             dateparse.parse_datetime(timestamp))
            self.assertEqual(item.power_meter, self.power_meter.pk)
            self.assertEqual(item.value_increment__sum, total)
    def _get_overrides(self, identifier=None, size=None, extent=None, 
                       begin_time=None, end_time=None, footprint=None, projection=None,
                       **kwargs):

        overrides = {}

        if identifier:
            overrides["identifier"] = identifier

        if extent:
            overrides["extent"] = map(float, extent.split(","))

        if size:
            overrides["size"] = map(int, size.split(","))            

        if begin_time:
            overrides["begin_time"] = parse_datetime(begin_time)

        if end_time:
            overrides["end_time"] = parse_datetime(end_time)

        if footprint:
            overrides["footprint"] = GEOSGeometry(footprint)

        if projection:
            try:
                overrides["projection"] = int(projection)
            except ValueError:
                overrides["projection"] = projection

        return overrides
Example #11
0
def update_adherence_confidence(request, domain):
    """Update a beneficiary's adherence confidence level over a date range.

    Expects a JSON body with beneficiary_id, start_date, end_date and
    confidence_level. Responds 400 with an error payload on malformed JSON
    or a failed validation/update, otherwise a success message.
    """
    try:
        request_json = json.loads(request.body)
    except ValueError:
        return json_response({"error": "Malformed JSON"}, status_code=400)
    beneficiary_id = request_json.get("beneficiary_id")
    start_date = request_json.get("start_date")
    end_date = request_json.get("end_date")
    confidence_level = request_json.get("confidence_level")

    try:
        # validators and the update itself signal failure via AdherenceException
        validate_beneficiary_id(beneficiary_id)
        validate_dates(start_date, end_date)
        validate_confidence_level(confidence_level)
        update_adherence_confidence_level(
            domain=domain,
            person_id=beneficiary_id,
            start_date=parse_datetime(start_date),
            end_date=parse_datetime(end_date),
            new_confidence=confidence_level,
        )
    except AdherenceException as e:
        # NOTE(review): e.message is Python 2 style; on Python 3 this would
        # need str(e) — confirm the runtime before changing
        return json_response({"error": e.message}, status_code=400)

    return json_response({"success": "Patient adherences updated."})
Example #12
0
def chargeRefunded(body):
    """Handle an Openpay charge-refunded webhook payload.

    Upserts the Charge described by body['transaction'] (resolving its
    customer, subscription and card records when ids are present), then
    upserts the Refund from the nested 'refund' transaction and links it
    to that charge.
    """
    card = None
    customer = None
    # NOTE(review): subscription is looked up below but never used afterwards
    subscription = None
    # safeCopy is an external helper; presumably it normalizes/backfills the
    # listed optional keys — confirm against its definition
    transaction = safeCopy(body['transaction'], [
        'authorization', 'error_message', 'order_id', 'description'])
    if 'customer_id' in transaction:
        customer = models.get_customer_model().objects.get(
            openpay_id=transaction['customer_id'])
    if 'subscription_id' in transaction:
        subscription = models.Subscription.objects.get(
            openpay_id=transaction['subscription_id'])
    if 'card' in transaction:
        if 'id' in transaction['card']:
            card = models.Card.objects.get(
                openpay_id=transaction['card']['id'])
        # fall back to the card's customer if the transaction itself had none
        if not customer and 'customer_id' in transaction['card']:
            customer = models.get_customer_model().objects.get(
                openpay_id=transaction['card']['customer_id'])
    # upsert the charge keyed on its Openpay id
    charge, created = models.Charge.objects.update_or_create(
        openpay_id=transaction['id'],
        defaults={
            'authorization': transaction['authorization'],
            'method': transaction['method'],
            'operation_type': transaction['operation_type'],
            'transaction_type': transaction['transaction_type'],
            'status': transaction['status'],
            'conciliated': transaction['conciliated'],
            'creation_date': parse_datetime(transaction['creation_date']),
            'operation_date': parse_datetime(transaction['operation_date']),
            'description': transaction['description'],
            'error_message': transaction['error_message'],
            'order_id': transaction['order_id'],
            'amount': transaction['amount'],
            'currency': transaction['currency'],
            'customer': customer,
            'card': card,
        })

    # the refund is a nested transaction of the same shape; rebind and upsert it
    transaction = safeCopy(transaction['refund'], [
        'authorization', 'error_message', 'order_id', 'description'])
    refund, created = models.Refund.objects.update_or_create(
        openpay_id=transaction['id'],
        defaults={
            'authorization': transaction['authorization'],
            'method': transaction['method'],
            'operation_type': transaction['operation_type'],
            'transaction_type': transaction['transaction_type'],
            'status': transaction['status'],
            'conciliated': transaction['conciliated'],
            'creation_date': parse_datetime(transaction['creation_date']),
            'operation_date': parse_datetime(transaction['operation_date']),
            'description': transaction['description'],
            'error_message': transaction['error_message'],
            'order_id': transaction['order_id'],
            'amount': transaction['amount'],
            'currency': transaction['currency'],
            'customer': customer,
            'charge': charge,
        })
Example #13
0
    def get(self, request, *args, **kwargs):
        """Render the paginated teams listing, converting API timestamps to datetimes."""
        requested_page = int(request.GET.get('page', 1))

        # Fetch the requested page of teams from the upstream API
        session_token = request.session['access_token']
        try:
            teams = self.api.list_teams(
                access_token=session_token['access_token'],
                user_id=session_token['user_id'],
                page=requested_page,
            )
        except self.api.APIError as api_error:
            # upstream unavailable or rejected the request
            logger.error(str(api_error))
            return HttpResponseServerError()

        # The API returns timestamp strings; templates expect datetime objects
        for team in teams['_embedded']['teams']:
            for field in ('created_at', 'updated_at'):
                team[field] = parse_datetime(team[field])

        return self._render(
            request=request,
            title='Teams - Pinglist',
            active_link='teams',
            teams=teams,
        )
Example #14
0
def updateTimestamp():
   """
   Must be called after every login or logout attempt and/or periodically.
   Ensures that the system clock continues to tick forward in time, using a saved timestamp.

   Returns:
      - None

   Exceptions:
      - IncorrectSystemClock(), if the current system time is less than the saved timestamp.
   """

   now = datetime.datetime.now()

   try:
      with open(timestampFile, "r+") as f:
         savedTimestamp = parse_datetime(f.read().strip())

         if savedTimestamp:
            if now >= savedTimestamp:
               # clock is sane: overwrite the file with "now";
               # [:-7] drops the ".microseconds" suffix of str(datetime)
               f.seek(0, 0)
               f.truncate()
               f.write(str(now)[:-7])
            else:
               raise IncorrectSystemClock("Please set your system clock to at least %s." % str(savedTimestamp)[:-7])
         else:
            # unparseable file contents — treat the same as a missing file
            raise IOError
   except IOError:
      # first run or corrupt file: seed the timestamp from the licence
      # creation date (never write a time earlier than CREATED)
      with open(timestampFile, "w") as f:
         licence = getLicense()

         if now >= parse_datetime(licence["CREATED"]):
            f.write(str(now)[:-7])
         else:
            f.write(str(parse_datetime(licence["CREATED"]))[:-7])
Example #15
0
 def events(self, future=False):
     """Return a JSON string of events from the configured data source.

     With future=False the raw (latest-first) event list is returned.
     With future=True, events are normalized to have parseable end times,
     filtered to those ending after "now", and returned soonest-first.
     """
     try:
         events = self._event_methods[self.data_source](self)
     except (TimeoutError, ConnectionError, LookupError):
         # if the event source is unreachable or down or data is invalid
         events = []
     if not future:
         return json.dumps(events)
     # the methods are assumed to return events latest first, reverse the order
     tz = pytz.timezone(settings.TIME_ZONE)
     for event in events:
         # for future filtering, make sure all events have end times not null
         try:
             end = event['end_time']
             if not end:
                 event['end_time'] = event['start_time']
         except LookupError:
             event['end_time'] = event['start_time']
         # check the datetimes first
         start = dateparse.parse_datetime(event['start_time'])
         end = dateparse.parse_datetime(event['end_time'])
         # linkedevents may not have exact times, parse_datetime may fail
         # we have to append time, assume server time zone and convert to utc for filtering
         # (start falls back to midnight, end to 23:59:59 of the given date)
         if not start:
             start = tz.localize(datetime.combine(dateparse.parse_date(event['start_time']), time()))
             event['start_time'] = start.astimezone(pytz.utc).strftime('%Y-%m-%dT%H:%M:%SZ')
         if not end:
             end = tz.localize(datetime.combine(dateparse.parse_date(event['end_time']), time(23,59,59)))
             event['end_time'] = end.astimezone(pytz.utc).strftime('%Y-%m-%dT%H:%M:%SZ')
     # we want the next event first
     return json.dumps(list(reversed([event for event in events
                                      if dateparse.parse_datetime(event['end_time']) > datetime.now(tz)])))
Example #16
0
    def test_start_entry_with_current_running(self):
        """Starting a new entry stops the running one and freezes its duration."""
        original_description = 'EXAMPLE'
        original_start = timezone.now()
        running_entry = TimeEntry(user=self.TestUser, description=original_description, start=original_start)
        running_entry.save()

        # start a new entry while the first one is still running
        new_description = "NEW DESCRIPTION"
        start_url = reverse("api:time-entry-start")
        payload = {'description': new_description}
        response = self.client.post(start_url, payload)

        # the new entry is running: no stop time, positive duration so far
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(response.data['description'], new_description)
        self.assertEqual(response.data['stop'], None)
        self.assertGreater(dateparse.parse_datetime(response.data['start']), original_start)
        self.assertGreater(response.data['duration'], 0)

        # the previously running entry must have been stopped
        detail_url = reverse("api:time-entry-get", args=(running_entry.id,))
        response = self.client.get(detail_url, payload)
        stopped_start = dateparse.parse_datetime(response.data['start'])
        stopped_stop = dateparse.parse_datetime(response.data['stop'])
        stopped_duration = response.data['duration']
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(response.data['description'], original_description)
        self.assertEqual(response.data['user'], self.TestUser.id)
        self.assertEqual(stopped_start, original_start)

        self.assertGreater(stopped_stop, stopped_start)
        self.assertEqual(stopped_duration, (stopped_stop - stopped_start).total_seconds())

        # fetching again must not change the frozen duration
        response = self.client.get(detail_url, payload)
        self.assertEqual(stopped_duration, response.data['duration'], "We checked the duration twice but it changed from %s to %s" % (stopped_duration, response.data['duration']))
Example #17
0
    def create(self, validated_data):
        """
        Creates a new location and returns the instance.

        Parameter
        ---------
        validated_data : dict
            Data after validation.

        Returns
        -------
        geokey_airquality.models.AirQualityLocation
            The instance created.
        """
        context_data = self.context.get('data')
        created = context_data.get('created')
        called = context_data.get('called')
        now = timezone.now()

        if created is None or called is None:
            created = now
        else:
            # Re-anchor the client-reported creation time on the server clock:
            # preserve the client's (called - created) offset relative to now.
            client_offset = parse_datetime(called) - parse_datetime(created)
            created = now - client_offset

        self.instance = AirQualityLocation.objects.create(
            name=validated_data.get('name'),
            geometry=validated_data.get('geometry'),
            creator=self.context.get('user'),
            created=created,
            properties=validated_data.get('properties')
        )

        return self.instance
Example #18
0
def test_reservation_user_filter(api_client, list_url, reservation, resource_in_unit, user, user2):
    """
    Tests that reservation user and is_own filtering work correctly.
    """
    other_reservation = Reservation.objects.create(
        resource=resource_in_unit,
        begin=dateparse.parse_datetime('2115-04-07T11:00:00+02:00'),
        end=dateparse.parse_datetime('2115-04-07T12:00:00+02:00'),
        user=user2,
    )

    # even unauthenticated user should see all the reservations
    response = api_client.get(list_url)
    assert response.data['count'] == 2

    # filtering by user
    response = api_client.get(list_url + '?user=%s' % user.uuid)
    assert response.data['count'] == 1
    assert response.data['results'][0]['id'] == reservation.id

    # filtering by is_own: true matches own reservation, false the other one
    api_client.force_authenticate(user=user)
    for flag, expected_id in (('true', reservation.id), ('false', other_reservation.id)):
        response = api_client.get(list_url + '?is_own=%s' % flag)
        assert response.data['count'] == 1
        assert response.data['results'][0]['id'] == expected_id
Example #19
0
    def get(self, request, subscriber_type, **kwargs):
        """Return a CSV attachment of subscribers (name, email, registration date).

        subscriber_type selects which view class supplies get_range_queryset;
        optional start_date/end_date GET params bound the range, passed
        through datetime_or_now for defaulting.
        """
        queryset_view = self.queryset_view_map[subscriber_type]

        # Proxy that subclasses the mapped view so get_range_queryset can be
        # borrowed and bound to it, without running the view's own __init__.
        class APIViewProxy(queryset_view):
            def __init__(self, provider):
                self.provider = provider
        view_proxy = APIViewProxy(self.get_organization())
        view_proxy.get_range_queryset = MethodType(
            queryset_view.get_range_queryset, view_proxy)

        # NOTE(review): when the params are absent, parse_datetime receives
        # None — confirm this parse_datetime tolerates None before relying on
        # the datetime_or_now fallback
        start_date = datetime_or_now(
            parse_datetime(request.GET.get('start_date', None)))
        end_date = datetime_or_now(
            parse_datetime(request.GET.get('end_date', None)))

        content = StringIO()
        csv_writer = csv.writer(content)
        csv_writer.writerow(['Name', 'Email', 'Registration Date'])
        for org in view_proxy.get_range_queryset(start_date, end_date):
            # NOTE(review): .encode('utf-8') produces bytes — fine on
            # Python 2, but Python 3's csv writer expects str; verify runtime
            csv_writer.writerow([
                org.full_name.encode('utf-8'),
                org.email.encode('utf-8'),
                org.created_at])
        content.seek(0)
        resp = HttpResponse(content, content_type='text/csv')
        resp['Content-Disposition'] = \
            'attachment; filename="subscribers-{}-{}.csv"'.format(
                subscriber_type, datetime.now().strftime('%Y%m%d'))
        return resp
Example #20
0
    def get(self, request, pzone_pk):
        """Get all the operations for a given pzone."""

        # attempt to get given pzone
        try:
            pzone = PZone.objects.get(pk=pzone_pk)
        except PZone.DoesNotExist:
            raise Http404("Cannot find given pzone.")

        # build queryset filters from the optional "from"/"to" datetime params,
        # silently ignoring values that do not parse
        filters = {"pzone": pzone}
        for param, lookup in (("from", "when__gte"), ("to", "when__lt")):
            if param in request.GET:
                parsed = dateparse.parse_datetime(request.GET[param])
                if parsed is not None:
                    filters[lookup] = parsed

        # get operations and serialize them into a json response
        operations = PZoneOperation.objects.filter(**filters)
        return Response(self.serialize_operations(operations), content_type="application/json")
Example #21
0
def test_reservation_time_filters(api_client, list_url, reservation, resource_in_unit, user):
    """The list endpoint hides past reservations by default; 'all', 'start' and
    'end' query params control which reservations are returned."""
    # a reservation in the past (the fixture 'reservation' is in the future)
    reservation2 = Reservation.objects.create(
        resource=resource_in_unit,
        begin=dateparse.parse_datetime('2015-04-07T11:00:00+02:00'),
        end=dateparse.parse_datetime('2015-04-07T12:00:00+02:00'),
        user=user,
    )

    # without the filter, only the reservation in the future should be returned
    response = api_client.get(list_url)
    assert response.data['count'] == 1
    assert response.data['results'][0]['id'] == reservation.id

    # with the 'all' filter, both reservations should be returned
    response = api_client.get(list_url + '?all=true')
    assert response.data['count'] == 2
    assert {reservation.id, reservation2.id}.issubset(set(res['id'] for res in response.data['results']))

    # NOTE(review): comment/asserts disagree here — each of the next two
    # requests expects exactly one result, not both reservations
    # filtering by start date only
    response = api_client.get(list_url + '?start=2065-04-06')
    assert response.data['count'] == 1
    assert response.data['results'][0]['id'] == reservation.id

    # filtering by end date only
    response = api_client.get(list_url + '?end=2065-04-06')
    assert response.data['count'] == 1
    assert response.data['results'][0]['id'] == reservation2.id

    # filtering by start and end times ('%2b' is a url-encoded '+')
    response = api_client.get(list_url + '?start=2065-04-06T11:00:00%2b02:00' + '&end=2065-04-06T12:00:00%2b02:00')
    assert response.data['count'] == 0
    response = api_client.get(list_url + '?start=2005-04-07T11:30:00%2b02:00' + '&end=2115-04-04T09:30:00%2b02:00')
    assert response.data['count'] == 2
    assert {reservation.id, reservation2.id}.issubset(set(res['id'] for res in response.data['results']))
    def post(self, request, format=None):
        """Reconcile transactions and payments, then create estate batches,
        for the datetime range given in the request body.

        Expects 'received_at__gte' and 'received_at__lt' datetime strings.
        Responds 400 on missing or invalid dates, 204 on success.
        """
        start_date = request.data.get('received_at__gte')
        end_date = request.data.get('received_at__lt')
        if not start_date or not end_date:
            return Response(
                data={'errors': _("'received_at__gte' and 'received_at__lt' fields are required")},
                status=400
            )

        try:
            parsed_start_date = parse_datetime(start_date)
            parsed_end_date = parse_datetime(end_date)
        except ValueError:
            # well-formed but impossible date values (e.g. month 13)
            return Response(data={'errors': _("Invalid date format")},
                            status=400)
        # parse_datetime returns None (rather than raising) for strings that
        # do not match a recognised format — treat that as invalid input too,
        # instead of passing None into the reconcile calls below.
        if parsed_start_date is None or parsed_end_date is None:
            return Response(data={'errors': _("Invalid date format")},
                            status=400)

        # both reconciliations succeed or fail together
        with atomic():
            Transaction.objects.reconcile(
                parsed_start_date, parsed_end_date, request.user)
            Payment.objects.reconcile(
                parsed_start_date, parsed_end_date, request.user)

        PrivateEstateBatch.objects.create_batches(parsed_start_date, parsed_end_date)

        return Response(status=204)
Example #23
0
def test_reservation_staff_members_can_make_reservations_for_others(
        api_client, list_url, reservation, reservation_data, user, user2):
    """
    Tests that a staff member can make reservations for other people without normal user restrictions.
    """
    user.is_staff = True
    user.save()
    api_client.force_authenticate(user=user)

    # hand the existing reservation to the other user
    reservation.user = user2
    reservation.save()
    reservation_data['user'] = {'id': user2.uuid}

    # a staff member may modify another user's reservation
    detail_url = reverse('reservation-detail', kwargs={'pk': reservation.pk})
    response = api_client.put(detail_url, data=reservation_data, format='json')
    assert response.status_code == 200
    assert Reservation.objects.get(id=response.data['id']).user == user2

    # create a new reservation, which is also too long, outside the opening hours and exceeds normal user
    # reservation limit. creating such a reservation for a normal user should be possible for a staff member
    reservation_data['begin'] = dateparse.parse_datetime('2115-04-04T13:00:00+02:00')
    reservation_data['end'] = dateparse.parse_datetime('2115-04-04T20:00:00+02:00')
    response = api_client.post(list_url, data=reservation_data, format='json')
    assert response.status_code == 201
    assert Reservation.objects.get(id=response.data['id']).user == user2
Example #24
0
def test_normal_users_cannot_make_reservations_for_others(
        api_client, list_url, reservation, reservation_data, user, user2):
    """
    Tests that a normal user cannot make a reservation for other people.
    """
    api_client.force_authenticate(user=user)
    detail_url = reverse('reservation-detail', kwargs={'pk': reservation.pk})

    # set bigger max reservations limit so that it won't be a limiting factor here
    reservation.resource.max_reservations_per_user = 2
    reservation.resource.save()

    # try to hand new reservations to the other user
    reservation_data['user'] = {'id': user2.uuid}

    # modify an existing reservation, and verify that user isn't changed
    response = api_client.put(detail_url, data=reservation_data, format='json')
    assert response.status_code == 200
    assert Reservation.objects.get(id=response.data['id']).user == user

    # make a new reservation and verify that user isn't the other one
    reservation_data['begin'] = dateparse.parse_datetime('2115-04-04T13:00:00+02:00')
    reservation_data['end'] = dateparse.parse_datetime('2115-04-04T14:00:00+02:00')
    response = api_client.post(list_url, data=reservation_data, format='json')
    assert response.status_code == 201
    assert Reservation.objects.get(id=response.data['id']).user == user
Example #25
0
def get_remote_jobs(request):
    """
        Query the Fermi remote service for the user's jobs.
        @param request: request object
        
        The response will be like this:

        { "3954": { "CompletionDate": "2013-10-29T17:13:08+00:00",
                    "StartDate": "2013-10-29T17:12:32+00:00",
                    "SubmitDate": "2013-10-29T17:12:31+00:00",
                    "JobName": "eqsans",
                    "ScriptName": "job_submission_0.py",
                    "JobStatus": "COMPLETED",
                    "TransID": 57 } }

        Returns a list of job dicts (empty on failure); each dict gains an
        'ID' key and has its *Date fields parsed into datetimes.
    """
    # session cookie obtained from a previous Fermi authentication
    sessionid = request.session.get('fermi', '')
    status_data = []
    try:
        conn = httplib.HTTPSConnection(settings.FERMI_HOST, timeout=30)
        conn.request('GET', '%squery' % settings.FERMI_BASE_URL, headers={'Cookie': sessionid})
        r = conn.getresponse()
        # Check to see whether we need authentication
        jobs = json.loads(r.read())
        for key in jobs:
            jobs[key]['ID'] = key
            jobs[key]['CompletionDate'] = parse_datetime(jobs[key]['CompletionDate'])
            jobs[key]['StartDate'] = parse_datetime(jobs[key]['StartDate'])
            jobs[key]['SubmitDate'] = parse_datetime(jobs[key]['SubmitDate'])
            status_data.append(jobs[key])
    # NOTE(review): bare except + sys.exc_value are Python 2 idioms; this
    # deliberately degrades to an empty list on any failure
    except:
        logging.error("Could not connect to status page: %s" % sys.exc_value)
    
    return status_data
Example #26
0
    def dateStringsToQ(self, field_name, date_from_str, date_to_str):
        """
        Convert the date strings from_date_str and to_date_str into a
        set of args in the form

          {'<field_name>__gte': <date from>, '<field_name>__lte': <date to>}

        where date_from and date_to are Django-timezone-aware dates; then
        convert that into a Django Q object

        Returns the Q object based on those criteria
        """

        # if only one bound was supplied, use it for both ends of the range
        if date_from_str == '':
            date_from_str = date_to_str
        elif date_to_str == '':
            date_to_str = date_from_str

        # widen the bounds to whole days and make both ends timezone-aware
        tz = timezone.get_default_timezone()
        range_start = timezone.make_aware(
            dateparse.parse_datetime(date_from_str + ' 00:00:00'), tz)
        range_end = timezone.make_aware(
            dateparse.parse_datetime(date_to_str + ' 23:59:59'), tz)

        return Q(**{
            field_name + '__gte': range_start,
            field_name + '__lte': range_end,
        })
Example #27
0
def check_extension_data(data, course):
    """Verify serialized course-extension fields match the course instance."""
    assert_fields_exist(data, COURSE_FIELDS)
    # datetimes arrive serialized as strings; parse before comparing
    for field in ('enrolment_start_time', 'enrolment_end_time'):
        assert dateparse.parse_datetime(data[field]) == getattr(course, field)
    # plain capacity fields compare directly
    for field in ('maximum_attendee_capacity', 'minimum_attendee_capacity',
                  'remaining_attendee_capacity'):
        assert data[field] == getattr(course, field)
Example #28
0
def query_job(request, job_id):
    """
        Query Fermi for a specific job
        @param request: request object
        @param job_id: remote job id string
        
        The call to Fermi will look like this:
            https://fermi.ornl.gov/MantidRemote/query?JobID=7665
        
        and will return a json payload like the following:
        { "7665": { "CompletionDate": "2014-02-14T21:25:58+00:00",
                    "StartDate": "2014-02-14T21:25:37+00:00",
                    "SubmitDate": "2014-02-14T21:25:36+00:00",
                    "JobName": "Unknown",
                    "ScriptName": "web_submission.py",
                    "JobStatus": "COMPLETED",
                    "TransID": 136 } }

        Returns the job dict with its *Date fields parsed into datetimes,
        or None on any failure.
    """
    try:
        conn = httplib.HTTPSConnection(settings.FERMI_HOST, timeout=1.5)
        conn.request('GET', '%squery?JobID=%s' % (settings.FERMI_BASE_URL, job_id),
                     headers={'Cookie':request.session.get('fermi', '')})
        r = conn.getresponse()
        if r.status == 200:
            # the payload is keyed by the job id; parse its date strings
            job_info = json.loads(r.read())[job_id]
            job_info['CompletionDate'] = parse_datetime(job_info['CompletionDate'])
            job_info['StartDate'] = parse_datetime(job_info['StartDate'])
            job_info['SubmitDate'] = parse_datetime(job_info['SubmitDate'])
            return job_info
        else:
            logging.error("Could not get job info: %s" % r.status)
    # NOTE(review): bare except + sys.exc_value are Python 2 idioms; any
    # failure is logged and swallowed, returning None
    except:
        logging.error("Could not get job info: %s" % sys.exc_value)
    return None
Example #29
0
def test_non_reservable_resource_restrictions(api_client, list_url, resource_in_unit, reservation_data, user):
    """
    Tests that a normal user cannot make a reservation to a non reservable resource but staff can.

    Creating a new reservation with POST and updating an existing one with PUT are both tested.
    """
    # Make the resource non-reservable, then verify a normal user's POST is rejected.
    resource_in_unit.reservable = False
    resource_in_unit.save()
    api_client.force_authenticate(user=user)
    response = api_client.post(list_url, data=reservation_data)
    assert response.status_code == 403

    # Create a reservation and try to change that with PUT
    # (created directly via the ORM so it exists despite the API restriction).
    reservation = Reservation.objects.create(
        resource=resource_in_unit,
        begin=dateparse.parse_datetime('2115-04-07T09:00:00+02:00'),
        end=dateparse.parse_datetime('2115-04-07T10:00:00+02:00'),
        user=user,
    )
    detail_url = reverse('reservation-detail', kwargs={'pk': reservation.pk})
    response = api_client.put(detail_url, reservation_data)
    assert response.status_code == 403

    # a staff member should be allowed to create and update
    user.is_staff = True
    user.save()
    response = api_client.post(list_url, data=reservation_data)
    assert response.status_code == 201
    # Move the reservation to a different slot so the PUT is a real change.
    reservation_data['begin'] = dateparse.parse_datetime('2115-04-08T09:00:00+02:00')
    reservation_data['end'] = dateparse.parse_datetime('2115-04-08T10:00:00+02:00')
    response = api_client.put(detail_url, data=reservation_data)
    assert response.status_code == 200
Example #30
0
 def cache_fields(self, request):
     """Populate ``self.start_at`` and ``self.ends_at`` from the query string.

     Both values are read from ``request.GET`` (surrounding double quotes
     stripped) and passed through ``datetime_or_now``.  When ``start_at`` is
     absent or empty it defaults to one month before ``ends_at``.
     """
     raw_end = request.GET.get("ends_at", "").strip('"')
     self.ends_at = datetime_or_now(parse_datetime(raw_end))
     raw_start = request.GET.get("start_at")
     if raw_start:
         self.start_at = datetime_or_now(parse_datetime(raw_start.strip('"')))
     else:
         self.start_at = self.ends_at + dateutil.relativedelta.relativedelta(months=-1)
Example #31
0
def profil_goruntule(request):
    """Render the member profile page; on POST, persist the submitted edits.

    Reads the profile fields from the form, optionally stores an uploaded
    picture under static/veriler, updates both the ``kullanici`` and
    ``kullanici_ayrinti`` rows for the session user, then renders the page
    with the (possibly updated) data.
    """
    if request.method == "POST":
        # --- collect the submitted form values ---
        isim = request.POST.get("isim")
        soyisim = request.POST.get("soyisim")  # currently unused (see commented line below)
        dogum_tarihi = request.POST.get("dogum_tarihi")
        telefon = request.POST.get("telefon")
        email = request.POST.get("email")
        cinsiyet = request.POST.get("cinsiyet")
        kilo = request.POST.get("kilo")
        boy = request.POST.get("boy")
        problem = request.POST.get("problem")
        resim = request.FILES.get('resimDosya')

        # Resolve the profile-picture path.  Falls back to the previously
        # stored picture (or the bundled default) when no new picture was
        # uploaded or the upload directory is missing.  The original code
        # raised NameError ('yol' unbound) when a picture was uploaded while
        # static/veriler did not exist.
        yol = None
        if resim:
            if os.path.exists(os.getcwd() + "/static/veriler"):
                yol = os.getcwd() + "/static/veriler/" + request.session[
                    'member'] + resim.name[-4:]
                with open(yol, "wb+") as dosya:
                    for i in resim:
                        dosya.write(i)
                # Store the web-relative path, not the filesystem path.
                yol = "/static/veriler/" + request.session[
                    'member'] + resim.name[-4:]
        if yol is None:
            r_kontrol = kullanici_ayrinti.objects.filter(
                id=request.session['id']).first()
            if r_kontrol:
                yol = r_kontrol.resim
            else:
                yol = os.getcwd() + "static/img/default.png"

        # --- apply the changes ---
        ayrinti_tablosu = kullanici_ayrinti.objects.get(
            id=request.session['id'])
        ayrinti_tablosu.isim = isim
        #ayrinti_tablosu.soyisim = soyisim
        # Form supplies a date only; store it as midnight of that day.
        ayrinti_tablosu.dogum_tarihi = parse_datetime(dogum_tarihi + " 00:00:00")
        ayrinti_tablosu.telefon = telefon
        ayrinti_tablosu.cinsiyet = cinsiyet
        ayrinti_tablosu.resim = yol
        # Empty/missing weight and height are stored as 0 (also avoids the
        # int(None) crash when the field is absent from the POST).
        ayrinti_tablosu.kilo = int(kilo) if kilo else 0
        ayrinti_tablosu.boy = int(boy) if boy else 0
        ayrinti_tablosu.problem = problem

        kullanici_tablosu = kullanici.objects.get(id=request.session['id'])
        kullanici_tablosu.email = email
        ayrinti_tablosu.save()
        kullanici_tablosu.save()

    # Re-read for display (GET, or right after the POST above).
    email = kullanici.objects.filter(id=request.session['id']).first()
    veri = kullanici_ayrinti.objects.filter(id=email.id).first()

    # Birth date shown as YYYY-MM-DD, empty string when unset.
    sonuc = veri.dogum_tarihi.strftime("%Y-%m-%d") if veri.dogum_tarihi else ""

    return render(
        request, "profil_goruntule.html", {
            "email": email.email,
            "isim": veri.isim,
            "resim": veri.resim,
            "dogum_tarihi": str(sonuc),
            "kilo": veri.kilo,
            "boy": veri.boy,
            "problem": veri.problem,
            "pt_kontrol": veri.pt_kontrol,
            "trainer": veri.trainer,
            "telefon": veri.telefon,
            "cinsiyet": veri.cinsiyet
        })
Example #32
0
    def incremental_dump(self, version):
        """Export WordUse/CoarseWord rows added since the previous dump.

        The export index file's first line stores the end timestamp of the
        last dump; this run exports rows created between that timestamp and
        now, writes the data files inside one transaction, then prepends the
        new timestamp to the index.

        Returns True on success, False when called again before
        ``ExportIndex.MINIMUM_PERIOD`` has elapsed since the previous dump.
        """
        time_now = now()

        # Rate-limit: refuse to dump again until MINIMUM_PERIOD has passed.
        if self.last_date and self.last_date + ExportIndex.MINIMUM_PERIOD > time_now:
            log.warn(
                "Not enough time since last incremental dump for '{}'".format(
                    self.name))
            log.debug(" - wait until {} (now is {})".format(
                self.last_date + ExportIndex.MINIMUM_PERIOD, time_now))
            return False

        # Gather data
        worduse_data = WordUse.objects.all()
        coarse_data = CoarseWord.objects.all()

        # When an index file already exists, its first line is the previous
        # dump's timestamp: restrict both querysets to rows created since.
        f = self.get_file()
        lines = []
        last_date = None
        if f:
            lines = f.read().decode().split('\n')
            last_date = parse_datetime(lines[0])
            worduse_data = worduse_data.filter(timestamp__range=(last_date,
                                                                 time_now))
            coarse_data = coarse_data.filter(timestamp__range=(last_date,
                                                               time_now))
            log.debug(" - dump between: '{start}' and '{end}'".format(
                start=last_date, end=time_now))
        else:
            log.debug(" - dump between: -- and '{end}'".format(end=time_now))

        # Store temp files:
        with tempdir() as dirpath:
            log.debug(
                " - exporting data to '{dirpath}'".format(dirpath=dirpath))
            filenames = export(worduse_data,
                               coarse_data,
                               dirpath,
                               export_aux=False)

            # All file records and the index update commit atomically, so a
            # failure mid-way leaves the previous dump state intact.
            with transaction.atomic():
                log.debug(" - save worduse_data")
                worduse = IncrementalFile(index=self,
                                          name='worduse',
                                          version=version,
                                          start=last_date,
                                          end=time_now)
                worduse.save(
                    file=os.path.join(dirpath, filenames['worduse_data']))

                log.debug(" - save coarse_data")
                wordcoarse = IncrementalFile(index=self,
                                             name='coarse',
                                             version=version,
                                             start=last_date,
                                             end=time_now)
                wordcoarse.save(
                    file=os.path.join(dirpath, filenames['wordcoarse_data']))

                # Informer and meaning files are full snapshots, so they are
                # overwritten (get_or_create) rather than added incrementally.
                log.debug(" - save informer_data")
                informers, _ = OverrideFile.objects.get_or_create(
                    index=self, name='informer', version=version)
                informers.end = time_now
                informers.save(
                    file=os.path.join(dirpath, filenames['informers_data']))

                log.debug(" - save meaning_data")
                meanings, _ = OverrideFile.objects.get_or_create(
                    index=self, name='meaning', version=version)
                meanings.end = time_now
                meanings.save(
                    file=os.path.join(dirpath, filenames['meanings_data']))

                # Newest timestamp goes first so the next run reads lines[0].
                log.debug(" - update export_index")
                lines.insert(0, time_now.strftime('%Y-%m-%d-%H-%M-%S'))
                self.last_date = time_now
                self.save(content='\n'.join(lines))
        return True
Example #33
0
def string_to_date(value):
    """Parse an ISO-8601 datetime string with Django's ``parse_datetime``.

    Returns a ``datetime`` (timezone-aware when the string carries an
    offset), or ``None`` when the string does not match the expected format.
    NOTE(review): despite the name, this returns a datetime, not a date.
    """
    return parse_datetime(value)
Example #34
0
def profilegitmen(request):

    if request.method == "POST":
        #---------------------------------formdan veriler alınıyor---------------------------------
        isim = request.POST.get("isim")
        soyisim = request.POST.get("soyisim")
        dogum_tarihi = request.POST.get("dogum_tarihi")
        telefon = request.POST.get("telefon")
        email = request.POST.get("email")
        cinsiyet = request.POST.get("cinsiyet")
        universite = request.POST.get("uni")
        uni_bolum = request.POST.get("bolum")
        resim = request.FILES.get('resimDosya')
        if (resim):
            if os.path.exists(os.getcwd() + "/static/veriler"):
                yol = os.getcwd() + "/static/veriler/" + request.session[
                    'member'] + resim.name[-4:]
                with open(yol, "wb+") as dosya:
                    for i in resim:
                        dosya.write(i)
                yol = "/static/veriler/" + request.session[
                    'member'] + resim.name[-4:]
                dosya.close()
        else:
            r_kontrol = hocalar.objects.filter(
                id=request.session['id']).first()
            if r_kontrol:
                yol = r_kontrol.resim
            else:
                yol = os.getcwd() + "static/img/default.png"

        #-------------------------işlemler yapılıyor -------------------------------------------------

        ayrinti_tablosu = hocalar.objects.get(id=request.session['id'])
        ayrinti_tablosu.isim = isim
        #ayrinti_tablosu.soyisim = soyisim
        sonuc_tarih = parse_datetime(dogum_tarihi + " 00:00:00")
        ayrinti_tablosu.dogum_tarihi = sonuc_tarih
        ayrinti_tablosu.telefon = telefon
        ayrinti_tablosu.cinsiyet = cinsiyet
        ayrinti_tablosu.resim = yol
        ayrinti_tablosu.universite = universite
        ayrinti_tablosu.bolum = uni_bolum
        kullanici_tablosu = kullanici.objects.get(id=request.session['id'])
        kullanici_tablosu.email = email
        ayrinti_tablosu.save()
        kullanici_tablosu.save()
    email = kullanici.objects.filter(id=request.session['id']).first()
    veri = hocalar.objects.filter(id=email.id).first()
    tarih_cevir = veri.dogum_tarihi
    if tarih_cevir != None:
        sonuc = tarih_cevir.strftime("%Y-%m-%d")
    else:
        sonuc = ""

    uni = okul.objects.all()
    neresi = bolum.objects.all()
    uni_hangisi = hocalar.objects.get(id=request.session['id'])

    return render(
        request, "profil_goruntule_egitmen.html", {
            "uni_secilen": uni_hangisi.universite,
            "bolum_hangisi": uni_hangisi.bolum,
            "uni": uni,
            "bolum": neresi,
            "email": email.email,
            "isim": veri.isim,
            "resim": veri.resim,
            "dogum_tarihi": str(sonuc),
            "telefon": veri.telefon,
            "cinsiyet": veri.cinsiyet
        })
import logging
from ast import literal_eval
from typing import Any, Dict, List, Tuple
from uuid import UUID

from django.utils.dateparse import parse_datetime


logger = logging.getLogger(__name__)

# Converters tried, in order, on values that `ast.literal_eval` failed to
# parse.  The callables are referenced directly — the previous lambda
# wrappers added nothing.  Note `parse_datetime` returns None (rather than
# raising) on input that doesn't look like a datetime.
CASTERS = [
    parse_datetime,
    UUID,
]


class InvalidKwarg(Exception):
    """Raised when a string cannot be parsed as a keyword argument."""


def parse_kwarg(kwarg: str, raise_if_unparseable=False) -> Dict[str, Any]:
    """
    Parses a potential kwarg as a string into a dictionary.

    For example: `parse_kwarg("test='1'")` == `{"test": "1"}`


    """

    parsed_kwarg = {}
Example #36
0
def create(request):
    """Create a Meeting (plus optional comma-separated Tasks) from a form.

    GET renders the creation form; POST validates and saves the meeting,
    re-rendering the form with the entered data when validation fails.
    """
    # Pre-filled value for the form's datetime inputs: current time shifted
    # by +5:30 (IST), formatted 'YYYY-MM-DDTHH:MM' as expected by
    # <input type="datetime-local">.  Replaces a hand-rolled zero-padding
    # formatter with the equivalent strftime call.
    tm = (timezone.now() +
          datetime.timedelta(hours=5, minutes=30)).strftime('%Y-%m-%dT%H:%M')

    if (request.method == 'POST'):
        form = request.POST
        m = Meeting()
        m.name = form['Name']
        m.info = form['Info']
        m.creatingProfessor = form['CreatingProfessor']
        m.creatingStaff = request.user
        m.participants = form['Participants']
        # Form times are naive; interpret them in the server's default zone.
        m.start = timezone.make_aware(dateparse.parse_datetime(form['Start']),
                                      timezone.get_default_timezone())
        m.end = timezone.make_aware(dateparse.parse_datetime(form['End']),
                                    timezone.get_default_timezone())
        print(form['Start'], form['End'])
        ven = Room.objects.get(id=form['Venue'])
        m.venue = ven
        try:
            m.full_clean()
            m.save()
            # Every non-empty entry in the comma-separated task list becomes
            # a Task row attached to the new meeting.
            if (len(form['tasks']) > 0):
                taskList = form['tasks'].split(",")
                for task in taskList:
                    if (len(task) > 0):
                        t = Task()
                        t.meeting = m
                        t.name = task
                        t.save()

            return render(request, 'meeting_success.html', {
                'user': request.user,
                'msg': 'Meeting Successfully Created'
            })
        except ValidationError as e:
            # Validation failed: show the form again with the entered data.
            print(e)
            r = Room.objects.all()
            return render(
                request, 'meeting_success1.html', {
                    'user': request.user,
                    'meeting': m,
                    'room': r,
                    'tasks': form['tasks'],
                    's': tm,
                    'e': tm
                })

    else:
        r = Room.objects.all()
        return render(request, 'create_meeting.html', {
            'user': request.user,
            'room': r,
            's': tm,
            'e': tm
        })
Example #37
0
    def analyse(self, request):
        """Summarise play/request activity in a user-supplied time window.

        Reads startdate/starttime/enddate/endtime from POST, validates them,
        and returns a JsonResponse with aggregate statistics: play count,
        most played / highest voted song, most active device (with its
        recent requests), hourly request activity, and the full playlist.
        """
        startdate = request.POST.get('startdate')
        starttime = request.POST.get('starttime')
        enddate = request.POST.get('enddate')
        endtime = request.POST.get('endtime')
        if startdate is None or startdate == '' \
                or starttime is None or starttime == '' \
                or enddate is None or enddate == '' \
                or endtime is None or endtime == '':
            return HttpResponseBadRequest('All fields are required')

        # Combine the separate date and time fields into ISO-like datetimes.
        start = dateparse.parse_datetime(startdate + 'T' + starttime)
        end = dateparse.parse_datetime(enddate + 'T' + endtime)

        if start is None or end is None:
            return HttpResponseBadRequest('invalid start-/endtime given')
        if start >= end:
            return HttpResponseBadRequest('start has to be before end')

        start = timezone.make_aware(start)
        end = timezone.make_aware(end)

        # Logs inside the half-open window [start, end).
        played = PlayLog.objects.all().filter(created__gte=start).filter(created__lt=end)
        requested = RequestLog.objects.all().filter(created__gte=start).filter(created__lt=end)
        played_count = played.values('song__url', 'song__artist', 'song__title').values('song__url', 'song__artist', 'song__title', count=models.Count('song__url')).order_by('-count')
        played_votes = PlayLog.objects.all().filter(created__gte=start).filter(created__lt=end).order_by('-votes')
        devices = requested.values('address').values('address', count=models.Count('address'))

        # NOTE(review): played_count[0] / played_votes[0] / devices[0] assume
        # at least one play and one request in the window — an empty window
        # would raise IndexError here; confirm callers guarantee this.
        response = {}
        response['songs_played'] = len(played);
        response['most_played_song'] = song_utils.displayname(
                played_count[0]['song__artist'],
                played_count[0]['song__title']) + ' (' + str(played_count[0]['count']) + ')'
        response['highest_voted_song'] = played_votes[0].song.displayname() + ' (' + str(played_votes[0].votes) + ')'
        response['most_active_device'] = devices[0]['address'] + ' (' + str(devices[0]['count']) + ')'
        # Append up to five of the top device's requested songs, then '...'.
        requested_by_ip = requested.filter(address=devices[0]['address'])
        for i in range(6):
            if i >= len(requested_by_ip):
                break
            response['most_active_device'] += '\n'
            if i == 5:
                response['most_active_device'] += '...'
            else:
                response['most_active_device'] += requested_by_ip[i].song.displayname()

        # Bucket the requests into one-hour bins across the window.
        binsize = 3600
        number_of_bins = math.ceil((end - start).total_seconds() / binsize)
        request_bins = [0 for _ in range(number_of_bins)]

        for r in requested:
            seconds = (r.created - start).total_seconds()
            index = int(seconds / binsize)
            request_bins[index] += 1

        # Render the bins as "HH:MM:\t<count>" lines.
        current_time = start
        current_index = 0
        response['request_activity'] = ''
        while current_time < end:
            response['request_activity'] += current_time.strftime('%H:%M')
            response['request_activity'] += ':\t' + str(request_bins[current_index])
            response['request_activity'] += '\n'
            current_time += timedelta(seconds=binsize)
            current_index += 1

        # Playlist timestamps rendered in the configured local timezone.
        localtz = tz.gettz(settings.TIME_ZONE)
        playlist = ''
        for log in played:
            localtime = log.created.astimezone(localtz)
            playlist += '[{:02d}:{:02d}] {}\n'.format(localtime.hour, localtime.minute, log.song.displayname())
        response['playlist'] = playlist

        return JsonResponse(response)
Example #38
0
def finish_scheduling(
    request: HttpRequest,
    schedule_item: ScheduledOperation = None,
    payload: Dict = None,
):
    """Finalize the creation of a scheduled action.

    All required data is passed through the payload.

    :param request: Request object received

    :param schedule_item: ScheduledOperation item being processed. If None,
    it has to be extracted from the information in the payload.

    :param payload: Dictionary with all the required data coming from
    previous requests.

    :return: Rendered confirmation page, or a redirect to the action index
    on error.
    """
    # Get the payload from the session if not given
    if payload is None:
        payload = request.session.get(action_session_dictionary)

        # If there is no payload, something went wrong.
        if payload is None:
            # Something is wrong with this execution. Return to action table.
            messages.error(
                request,
                _('Incorrect action scheduling invocation.'))
            return redirect('action:index')

    # Get the scheduled item if needed
    # NOTE: payload.pop() is used throughout so that only the remaining keys
    # end up stored as the item's payload below — the pop order matters.
    s_item_id = payload.pop('schedule_id', None)
    action = Action.objects.get(pk=payload.pop('action_id'))
    column_name = payload.pop('item_column', None)
    column = None
    if column_name:
        column = action.workflow.columns.get(name=column_name)

    # Clean up some parameters from the payload
    payload = {
        key: payload[key]
        for key in payload if key not in [
            'button_label',
            'valuerange',
            'step',
            'prev_url',
            'post_url',
            'confirm_items']}

    # Create the payload to record the event in the log
    log_payload = payload.copy()

    # NOTE(review): 'name', 'description_text', 'execute' and 'execute_until'
    # are assumed present in the payload (KeyError otherwise) — confirm the
    # previous wizard steps always set them.
    if s_item_id:
        # Get the item being processed
        if not schedule_item:
            schedule_item = ScheduledOperation.objects.filter(
                id=s_item_id).first()
        if not schedule_item:
            messages.error(
                None,
                _('Incorrect request in action scheduling'))
            return redirect('action:index')
        schedule_item.name = payload.pop('name')
        schedule_item.description_text = payload.pop('description_text')
        schedule_item.item_column = column
        schedule_item.execute = parse_datetime(payload.pop('execute'))
        schedule_item.execute_until = parse_datetime(
            payload.pop('execute_until'))
        schedule_item.exclude_values = payload.pop('exclude_values', [])
    else:
        # No existing item: create a new scheduled action run.
        schedule_item = ScheduledOperation(
            user=request.user,
            action=action,
            name=payload.pop('name'),
            description_text=payload.pop('description_text'),
            operation_type=ScheduledOperation.ACTION_RUN,
            item_column=column,
            execute=parse_datetime(payload.pop('execute')),
            execute_until=parse_datetime(payload.pop('execute_until')),
            exclude_values=payload.pop('exclude_values', []))

    # Check for exclude
    schedule_item.status = ScheduledOperation.STATUS_PENDING
    schedule_item.payload = payload
    schedule_item.save()

    # Create the payload to record the event in the log
    log_type = LOG_TYPE_DICT.get(schedule_item.action.action_type)
    if not log_type:
        messages.error(
            request,
            _('This type of actions cannot be scheduled'))
        return redirect('action:index')
    schedule_item.log(log_type)

    # Reset object to carry action info throughout dialogs
    set_action_payload(request.session)
    request.session.save()

    # Successful processing.
    is_executing, tdelta = create_timedelta_string(
        schedule_item.execute,
        schedule_item.execute_until)
    return render(
        request,
        'scheduler/schedule_done.html',
        {
            'is_executing': is_executing,
            'tdelta': tdelta,
            's_item': schedule_item})
 def test_strp_time_valid_iso_date(self):
     """strptime on a valid ISO-8601 string succeeds and matches parse_datetime."""
     fl = IsoDateTimeField()
     valid_iso_date = '2015-04-14T06:46:32.709388Z'
     self.assertTrue(fl.strptime(valid_iso_date, ISO_8601))
     # assertEquals is a deprecated alias; use assertEqual.
     self.assertEqual(fl.strptime(value=valid_iso_date, format=ISO_8601),
                      parse_datetime(valid_iso_date))
    def grab_event(self, event_ocd_id):
        """Fetch one event from the OCD API and create/update the local copy.

        Downloads ``base_url/<ocd id>/`` and either updates the existing
        ``Event`` row (when the API's ``updated_at`` differs) or creates a
        new one.  Whenever the event was created or updated, its participants,
        documents and agenda items are rebuilt from the payload.  A non-200
        response is reported and the event is skipped.
        """
        event_url = base_url + '/' + event_ocd_id + '/'
        r = requests.get(event_url)

        if r.status_code == 200:
            page_json = json.loads(r.text)

            # The Legistar id (preferred slug) is embedded in the source URL;
            # fall back to the OCD id when the event has no source.
            try:
                legistar_id = re.findall('ID=(.*)&GUID',
                                         page_json['sources'][0]['url'])[0]
            except IndexError:
                print("\n\n" + "-" * 60)
                print("WARNING: MISSING SOURCE %s" % event_ocd_id)
                print("event has no source")
                print("-" * 60 + "\n")
                legistar_id = event_ocd_id

            # Field values used when creating a brand-new Event row.
            event_fields = {
                'ocd_id':
                event_ocd_id,
                'ocd_created_at':
                page_json['created_at'],
                'ocd_updated_at':
                page_json['updated_at'],
                'name':
                page_json['name'],
                'description':
                page_json['description'],
                'classification':
                page_json['classification'],
                'start_time':
                parse_datetime(page_json['start_time']),
                'end_time':
                parse_datetime(page_json['end_time'])
                if page_json['end_time'] else None,
                'all_day':
                page_json['all_day'],
                'status':
                page_json['status'],
                'location_name':
                page_json['location']['name'],
                'location_url':
                page_json['location']['url'],
                'source_url':
                page_json['sources'][0]['url'],
                'source_note':
                page_json['sources'][0]['note'],
            }

            updated = False
            created = False

            # look for existing event
            try:
                event_obj = Event.objects.get(ocd_id=event_ocd_id)
                # check if it has been updated on api
                # TO-DO: fix date comparison to handle timezone naive times
                # from api
                if event_obj.ocd_updated_at.isoformat(
                ) != page_json['updated_at']:

                    event_obj.ocd_created_at = page_json['created_at']
                    event_obj.ocd_updated_at = page_json['updated_at']
                    event_obj.name = page_json['name']
                    event_obj.description = page_json['description']
                    event_obj.classification = page_json['classification']
                    event_obj.start_time = parse_datetime(
                        page_json['start_time'])
                    event_obj.end_time = parse_datetime(
                        page_json['end_time']
                    ) if page_json['end_time'] else None
                    event_obj.all_day = page_json['all_day']
                    event_obj.status = page_json['status']
                    event_obj.location_name = page_json['location']['name']
                    event_obj.location_url = page_json['location']['url']
                    event_obj.source_url = page_json['sources'][0]['url']
                    event_obj.source_note = page_json['sources'][0]['note']

                    event_obj.save()
                    updated = True

                    if DEBUG:
                        print('\u270E', end=' ', flush=True)

            # except if it doesn't exist, we need to make it
            except Event.DoesNotExist:
                # Slug preference: legistar id, falling back to the OCD id
                # when that slug is already taken.
                try:
                    event_fields['slug'] = legistar_id
                    event_obj, created = Event.objects.get_or_create(
                        **event_fields)

                except IntegrityError:
                    event_fields['slug'] = event_ocd_id
                    event_obj, created = Event.objects.get_or_create(
                        **event_fields)
                    print("\n\n" + "-" * 60)
                    print("WARNING: SLUG ALREADY EXISTS FOR %s" % event_ocd_id)
                    print("legistar id (what slug should be): %s" %
                          legistar_id)
                    print("using ocd id as slug instead")
                    print("-" * 60 + "\n")

                # if created and DEBUG:
                #     print('   adding event: %s' % event_ocd_id)
                if created and DEBUG:
                    print('\u263A', end=' ', flush=True)
                    print(event_obj.ocd_id)

            # Rebuild the event's related rows from the fresh payload.
            if created or updated:

                if updated:
                    # delete existing participants, documents, agenda items
                    event_obj.participants.all().delete()
                    event_obj.documents.all().delete()
                    event_obj.agenda_items.all().delete()

                for participant_json in page_json['participants']:
                    obj, created = EventParticipant.objects.get_or_create(
                        event=event_obj,
                        note=participant_json['note'],
                        entity_name=participant_json['entity_name'],
                        entity_type=participant_json['entity_type'])
                    # if created and DEBUG:
                    #     print('      adding participant: %s' %obj.entity_name)

                for document_json in page_json['documents']:
                    self.load_eventdocument(document_json, event_obj)

                for agenda_item_json in page_json['agenda']:
                    self.load_eventagendaitem(agenda_item_json, event_obj)

        else:
            print("\n\n" + "*" * 60)
            print("SKIPPING EVENT %s" % event_ocd_id)
            print("cannot retrieve event data")
            print("*" * 60 + "\n")
Example #41
0
 def to_python(cls, value, **kwargs):
     """Coerce *value* to a datetime; raise ``cls.exception`` when it cannot be parsed."""
     result = parse_datetime(force_text(value))
     if result is None:
         raise cls.exception("Value {0} cannot be converted to a datetime object".format(value))
     return result
Example #42
0
    def esp_to_anymail_events(self, ses_event, sns_message):
        # Amazon SES has two notification formats, which are almost exactly the same:
        # - https://docs.aws.amazon.com/ses/latest/DeveloperGuide/event-publishing-retrieving-sns-contents.html
        # - https://docs.aws.amazon.com/ses/latest/DeveloperGuide/notification-contents.html
        # This code should handle either.
        ses_event_type = getfirst(ses_event, ["eventType", "notificationType"],
                                  "<<type missing>>")
        if ses_event_type == "Received":
            # This is an inbound event
            raise AnymailConfigurationError(
                "You seem to have set an Amazon SES *inbound* receipt rule to publish "
                "to an SNS Topic that posts to Anymail's *tracking* webhook URL. "
                "(SNS TopicArn %s)" % sns_message.get("TopicArn"))

        event_id = sns_message.get(
            "MessageId")  # unique to the SNS notification
        try:
            timestamp = parse_datetime(sns_message["Timestamp"])
        except (KeyError, ValueError):
            timestamp = None

        mail_object = ses_event.get("mail", {})
        message_id = mail_object.get(
            "messageId")  # same as MessageId in SendRawEmail response
        all_recipients = mail_object.get("destination", [])

        # Recover tags and metadata from custom headers
        metadata = {}
        tags = []
        for header in mail_object.get("headers", []):
            name = header["name"].lower()
            if name == "x-tag":
                tags.append(header["value"])
            elif name == "x-metadata":
                try:
                    metadata = json.loads(header["value"])
                except (ValueError, TypeError, KeyError):
                    pass

        common_props = dict(  # AnymailTrackingEvent props for all recipients
            esp_event=ses_event,
            event_id=event_id,
            message_id=message_id,
            metadata=metadata,
            tags=tags,
            timestamp=timestamp,
        )
        per_recipient_props = [  # generate individual events for each of these
            dict(recipient=email_address) for email_address in all_recipients
        ]

        event_object = ses_event.get(ses_event_type.lower(),
                                     {})  # e.g., ses_event["bounce"]

        if ses_event_type == "Bounce":
            common_props.update(
                event_type=EventType.BOUNCED,
                description="{bounceType}: {bounceSubType}".format(
                    **event_object),
                reject_reason=RejectReason.BOUNCED,
            )
            per_recipient_props = [
                dict(
                    recipient=recipient["emailAddress"],
                    mta_response=recipient.get("diagnosticCode"),
                ) for recipient in event_object["bouncedRecipients"]
            ]
        elif ses_event_type == "Complaint":
            common_props.update(
                event_type=EventType.COMPLAINED,
                description=event_object.get("complaintFeedbackType"),
                reject_reason=RejectReason.SPAM,
                user_agent=event_object.get("userAgent"),
            )
            per_recipient_props = [
                dict(recipient=recipient["emailAddress"], )
                for recipient in event_object["complainedRecipients"]
            ]
        elif ses_event_type == "Delivery":
            common_props.update(
                event_type=EventType.DELIVERED,
                mta_response=event_object.get("smtpResponse"),
            )
            per_recipient_props = [
                dict(recipient=recipient, )
                for recipient in event_object["recipients"]
            ]
        elif ses_event_type == "Send":
            common_props.update(event_type=EventType.SENT, )
        elif ses_event_type == "Reject":
            common_props.update(
                event_type=EventType.REJECTED,
                description=event_object["reason"],
                reject_reason=RejectReason.BLOCKED,
            )
        elif ses_event_type == "Open":
            # SES doesn't report which recipient opened the message (it doesn't
            # track them separately), so just report it for all_recipients
            common_props.update(
                event_type=EventType.OPENED,
                user_agent=event_object.get("userAgent"),
            )
        elif ses_event_type == "Click":
            # SES doesn't report which recipient clicked the message (it doesn't
            # track them separately), so just report it for all_recipients
            common_props.update(
                event_type=EventType.CLICKED,
                user_agent=event_object.get("userAgent"),
                click_url=event_object.get("link"),
            )
        elif ses_event_type == "Rendering Failure":
            event_object = ses_event[
                "failure"]  # rather than ses_event["rendering failure"]
            common_props.update(
                event_type=EventType.FAILED,
                description=event_object["errorMessage"],
            )
        else:
            # Umm... new event type?
            common_props.update(
                event_type=EventType.UNKNOWN,
                description="Unknown SES eventType '%s'" % ses_event_type,
            )

        return [
            # AnymailTrackingEvent(**common_props, **recipient_props)  # Python 3.5+ (PEP-448 syntax)
            AnymailTrackingEvent(**combine(common_props, recipient_props))
            for recipient_props in per_recipient_props
        ]
Example #43
0
    def esp_to_anymail_events(self, ses_event, sns_message):
        """Convert an SES inbound ("Received") notification to Anymail inbound events.

        Loads the raw MIME message either directly from the SNS payload
        (receipt rule action "SNS") or by downloading it from S3 (action "S3"),
        parses it, and attaches envelope/spam metadata from the SES receipt.

        :param ses_event: decoded SES notification payload (dict)
        :param sns_message: enclosing SNS message (dict); used only for
            error reporting (TopicArn)
        :return: list containing a single AnymailInboundEvent
        :raises AnymailConfigurationError: if the notification is not an
            inbound ("Received") event, or uses an unsupported receipt action
        """
        ses_event_type = ses_event.get("notificationType")
        if ses_event_type != "Received":
            # This is not an inbound event
            raise AnymailConfigurationError(
                "You seem to have set an Amazon SES *sending* event or notification "
                "to publish to an SNS Topic that posts to Anymail's *inbound* webhook URL. "
                "(SNS TopicArn %s)" % sns_message.get("TopicArn"))

        receipt_object = ses_event.get("receipt", {})
        action_object = receipt_object.get("action", {})
        mail_object = ses_event.get("mail", {})

        action_type = action_object.get("type")
        if action_type == "SNS":
            # Message content is delivered inline in the SNS notification,
            # possibly base64-encoded.
            content = ses_event.get("content")
            if action_object.get("encoding") == "BASE64":
                content = b64decode(content.encode("ascii"))
                message = AnymailInboundMessage.parse_raw_mime_bytes(content)
            else:
                message = AnymailInboundMessage.parse_raw_mime(content)
        elif action_type == "S3":
            # download message from s3 into memory, then parse
            # (SNS has 15s limit for an http response; hope download doesn't take that long)
            bucket_name = action_object["bucketName"]
            object_key = action_object["objectKey"]
            s3 = boto3.session.Session(**self.session_params).client(
                "s3", **self.client_params)
            content = io.BytesIO()
            try:
                s3.download_fileobj(bucket_name, object_key, content)
                content.seek(0)
                message = AnymailInboundMessage.parse_raw_mime_file(content)
            except botocore.exceptions.ClientError as err:
                # improve the botocore error message
                raise AnymailBotoClientAPIError(
                    "Anymail AmazonSESInboundWebhookView couldn't download S3 object '{bucket_name}:{object_key}'"
                    "".format(bucket_name=bucket_name, object_key=object_key),
                    raised_from=err)
            finally:
                content.close()
        else:
            raise AnymailConfigurationError(
                "Anymail's Amazon SES inbound webhook works only with 'SNS' or 'S3' receipt rule actions, "
                "not SNS notifications for {action_type!s} actions. (SNS TopicArn {topic_arn!s})"
                "".format(action_type=action_type,
                          topic_arn=sns_message.get("TopicArn")))

        message.envelope_sender = mail_object.get(
            "source")  # "the envelope MAIL FROM address"
        try:
            # "recipients that were matched by the active receipt rule"
            message.envelope_recipient = receipt_object["recipients"][0]
        except (KeyError, TypeError, IndexError):
            pass
        # Map SES spam verdict to a tri-state: True/False when SES decided,
        # None when the verdict is missing or any other status.
        spam_status = receipt_object.get("spamVerdict", {}).get("status",
                                                                "").upper()
        message.spam_detected = {
            "PASS": False,
            "FAIL": True
        }.get(spam_status)  # else None if unsure

        event_id = mail_object.get(
            "messageId")  # "unique ID assigned to the email by Amazon SES"
        try:
            timestamp = parse_datetime(
                mail_object["timestamp"]
            )  # "time at which the email was received"
        except (KeyError, ValueError):
            timestamp = None

        return [
            AnymailInboundEvent(
                event_type=EventType.INBOUND,
                event_id=event_id,
                message=message,
                timestamp=timestamp,
                esp_event=ses_event,
            )
        ]
Example #44
0
    def ptUpdate(self, json_data):
        """Populate this job from an uploaded job JSON document and save it.

        Updates the job attributes, begin/end/duration bookkeeping, the
        environment nodes and the tests, then recomputes the per-job test
        counters (total/completed/failed/errors/warnings).

        :param json_data: job payload dict (must satisfy ptValidateJson)
        :raises SuspiciousOperation: when begin/end datetimes are naive or an
            env node payload fails serializer validation
        """
        from perftracker.models.test import TestModel

        self.ptValidateJson(json_data)

        self.title = json_data['job_title']
        self.cmdline = json_data.get('cmdline', None)
        self.uuid = json_data['uuid']
        self.project = ProjectModel.ptGetByName(json_data['project_name'])

        now = timezone.now()

        tests_json = json_data.get('tests', [])
        env_nodes_json = json_data.get('env_nodes', [])

        self.suite_name = json_data.get('suite_name', '')
        self.suite_ver = json_data.get('suite_ver', '')
        self.author = json_data.get('author', None)
        self.product_name = json_data.get('product_name', None)
        self.product_ver = json_data.get('product_ver', None)
        self.links = json.dumps(json_data.get('links', None))
        self.regression_tag = json_data.get('regression_tag', '')

        self.upload = now

        # Fall back to "now" when the payload carries no begin/end timestamp.
        begin = parse_datetime(json_data['begin']) if json_data.get(
            'begin', None) else now
        end = parse_datetime(json_data['end']) if json_data.get('end',
                                                                None) else now

        self.tests_total = 0
        self.tests_completed = 0
        self.tests_failed = 0
        self.tests_errors = 0
        self.tests_warnings = 0

        # Keep the first-seen begin/end; duration accumulates across uploads.
        if not self.begin:
            self.begin = begin
        if not self.end:
            self.end = end
        if self.duration:
            self.duration += end - begin
        else:
            self.duration = end - begin

        if self.begin and (self.begin.tzinfo is None
                           or self.begin.tzinfo.utcoffset(self.begin) is None):
            raise SuspiciousOperation(
                "'begin' datetime object must include timezone: %s" %
                str(self.begin))
        if self.end and (self.end.tzinfo is None
                         or self.end.tzinfo.utcoffset(self.end) is None):
            raise SuspiciousOperation(
                "'end' datetime object must include timezone: %s" %
                str(self.end))

        self.save()

        # process env_nodes, try not to delete and re-create all the nodes each time because normally this is static information
        env_nodes_to_update = EnvNodeModel.ptFindEnvNodesForUpdate(
            self, env_nodes_json)
        if env_nodes_to_update:
            EnvNodeModel.objects.filter(job=self).delete()
            for env_node_json in env_nodes_to_update:
                serializer = EnvNodeUploadSerializer(job=self,
                                                     data=env_node_json)
                if serializer.is_valid():
                    serializer.save()
                else:
                    raise SuspiciousOperation(
                        str(serializer.errors) + ", original json: " +
                        str(env_node_json))

        def _count_test(test):
            # Fold one test's status into the job-level counters.
            self.tests_total += 1
            if test.ptStatusIsCompleted():
                self.tests_completed += 1
            if test.ptStatusIsFailed():
                self.tests_failed += 1
            if test.errors:
                self.tests_errors += 1
            if test.warnings:
                self.tests_warnings += 1

        # process tests: index the existing tests by uuid and track the
        # highest sequence number so new tests are appended after them
        tests = TestModel.objects.filter(job=self)
        test_seq_num = 0
        uuid2test = {}
        for t in tests:
            uuid2test[str(t.uuid)] = t
            if test_seq_num <= t.seq_num:
                test_seq_num = t.seq_num

        for t in tests_json:
            TestModel.ptValidateJson(t)
            test_uuid = t['uuid']

            if test_uuid not in uuid2test:
                uuid2test[test_uuid] = TestModel(job=self, uuid=test_uuid)
                test_seq_num += 1
                uuid2test[test_uuid].seq_num = test_seq_num

            test = uuid2test[test_uuid]
            test.ptUpdate(self, t)
            _count_test(test)

            # Drop processed entries so uuid2test ends up holding only the
            # pre-existing tests that were absent from this upload.
            uuid2test.pop(test_uuid, None)

        if json_data.get('replace', False):
            TestModel.ptDeleteTests(uuid2test.keys())

        # Tests not present in this upload still contribute to the totals.
        for t in uuid2test.values():
            _count_test(t)

        self.save()
Example #45
0
def evaluate_node(node, given_variables):
    """
    Given a node representing a query, and a dictionary with (name, values),
    evaluates the expression represented by the node.
    :param node: Node representing the expression (jQuery QueryBuilder style);
        either a condition node ('condition' + 'rules') or a terminal rule
        ('field', 'operator', 'type', 'value')
    :param given_variables: Dictionary (name, value) of variables
    :return: True/False depending on the evaluation
    :raises OntaskException: when a variable referenced by the node has no
        value in given_variables
    """
    if 'condition' in node:
        # Node is a condition, get the values of the sub-clauses
        sub_clauses = [
            evaluate_node(x, given_variables) for x in node['rules']
        ]

        # Now combine
        if node['condition'] == 'AND':
            result = all(sub_clauses)
        else:
            result = any(sub_clauses)

        # BUG FIX: use get() instead of pop(). pop() mutated the caller's
        # node dict, so evaluating the same expression tree a second time
        # silently lost the negation and returned the wrong result.
        if node.get('not', False):
            result = not result

        return result

    # Get the variable name
    varname = node['field']
    # Get the variable value if running in boolean mode
    varvalue = None
    if given_variables is not None:
        varvalue = given_variables.get(varname, None)

    # Get the operator
    operator = node['operator']

    # If calculating a boolean result and no value in the dictionary, finish
    if varvalue is None:
        raise OntaskException('No value found for variable', varname)

    # If the operator is between or not_between, there is a special case,
    # the constant cannot be computed because the node['value'] is a pair
    constant = None
    if 'between' not in operator:
        # Calculate the constant value depending on the type
        if node['type'] == 'number':
            constant = float(node['value'])
        elif node['type'] == 'boolean':
            constant = node['value'] == '1'
        elif node['type'] == 'string':
            constant = str(node['value'])
        elif node['type'] == 'datetime':
            constant = parse_datetime(node['value'])
        else:
            raise Exception('No function to translate type', node['type'])

    # Terminal Node
    if operator == 'equal':
        result = varvalue == constant

    elif operator == 'not_equal':
        result = varvalue != constant

    elif operator == 'begins_with' and node['type'] == 'string':
        result = varvalue.startswith(constant)

    elif operator == 'not_begin_with' and node['type'] == 'string':
        result = not varvalue.startswith(constant)

    elif operator == 'contains' and node['type'] == 'string':
        result = varvalue.find(constant) != -1

    elif operator == 'not_contains' and node['type'] == 'string':
        result = varvalue.find(constant) == -1

    elif operator == 'ends_with' and node['type'] == 'string':
        result = varvalue.endswith(constant)

    # BUG FIX: the original only matched the misspelled operator
    # 'not_ends_width', so the standard QueryBuilder operator
    # 'not_ends_with' always fell through to the "not supported" error.
    # Accept both spellings for backward compatibility.
    elif operator in ('not_ends_with', 'not_ends_width') and \
            node['type'] == 'string':
        result = not varvalue.endswith(constant)

    elif operator == 'is_empty' and node['type'] == 'string':
        result = varvalue == ''

    elif operator == 'is_not_empty' and node['type'] == 'string':
        result = varvalue != ''

    elif operator == 'less' and \
            node['type'] in ('number', 'datetime'):
        result = varvalue < constant

    elif operator == 'less_or_equal' and \
            node['type'] in ('number', 'datetime'):
        result = varvalue <= constant

    elif operator == 'greater' and \
            node['type'] in ('number', 'datetime'):
        result = varvalue > constant

    elif operator == 'greater_or_equal' and \
            node['type'] in ('number', 'datetime'):
        result = varvalue >= constant

    elif operator == 'between' and \
            node['type'] in ('number', 'datetime'):
        result = node['value'][0] <= varvalue <= node['value'][1]

    elif operator == 'not_between' and \
            node['type'] in ('number', 'datetime'):
        result = not (node['value'][0] <= varvalue <= node['value'][1])

    else:
        raise Exception('Type, operator, field', node['type'], operator,
                        varname, 'not supported yet.')

    # Negation is only legal on condition nodes, never on terminal rules.
    if node.get('not', False):
        raise Exception('Negation found in unexpected location')

    return result
def iso_date(value):
    """
    Serialization returns an ISO date by default; this tag converts the
    serialized string back into a datetime for display in a template.
    """
    parsed = parse_datetime(value)
    return parsed
Example #47
0
    def post(self, request, *args, **kw):
        """Create or update a Scraper and its periodic schedule.

        Reads the scraper definition plus schedule parameters from the
        request body, persists the Scraper, and creates, updates or removes
        the celery-beat CrontabSchedule/PeriodicTask pair that drives it.

        :return: DRF Response — 200 on success, 400 when the scraper could
            not be resolved
        """
        username = request.user.username
        bucket_name = request.data.get('bucket')
        site_url = request.data.get('site_url')
        table_indexs = request.data.get('table_indexs')
        source_path = request.data.get("source_path") or '/'
        scraper_id = request.data.get("scraper_id") or None
        scraper_name = request.data.get("scraper_name") or None

        schedule_frequency = request.data.get('schedule_frequency')

        def _cron_field(values):
            # Collapse a JSON list into the comma-separated form crontab
            # fields expect; an empty list means "every" ("*").
            if len(values) == 0:
                return "*"
            return json.dumps(values).strip("[").strip("]").replace(
                '"', '').replace(" ", '')

        str_hours_of_day = _cron_field(request.data.get('hours_of_day'))
        str_days_of_week = _cron_field(request.data.get('days_of_week'))
        days_of_month = request.data.get('days_of_month')
        if days_of_month is None:
            str_days_of_month = "0"
        else:
            str_days_of_month = str(days_of_month)
        str_months_of_year = _cron_field(request.data.get('months_of_year'))

        frequency = request.data.get('frequency')
        cur_utc = request.data.get('cur_utc') or "0000-00-00 00:00"

        def _strip_zero(text):
            # Crontab fields are not zero-padded: "07" -> "7".
            return text[1:] if text[0] == "0" else text

        # cur_utc format: "YYYY-MM-DD HH:MM" — slice out the calendar parts.
        select_month = _strip_zero(cur_utc[5:7])
        select_date = _strip_zero(cur_utc[8:10])
        select_hour = _strip_zero(cur_utc[11:13])
        select_minute = _strip_zero(cur_utc[14:16])
        # NOTE(review): with the "0000-00-00 00:00" fallback parse_datetime
        # raises ValueError — presumably clients always send cur_utc; confirm.
        sel_datetime = parse_datetime(cur_utc)
        sel_datetime = pytz.timezone("UTC").localize(sel_datetime, is_dst=None)

        strdate = datetime.datetime.now().strftime("%Y-%m-%d_%H-%M")
        task_name = schedule_frequency + "__scraper_" + strdate

        # Non-admins are confined to their own prefix and the default bucket.
        if not request.user.is_superuser:
            source_path = username + source_path
            bucket_name = settings.BUCKET_NAME
        if bucket_name is None:
            bucket_name = settings.BUCKET_NAME

        try:
            scraper = Scraper.objects.get(id=scraper_id)
        except Scraper.DoesNotExist:
            scraper = Scraper.objects.create(
                name=scraper_name,
                site=site_url,
                upload_path=source_path,
                bucket_name=bucket_name,
                table_numbers=json.dumps(table_indexs),
                user=request.user)
        param_args = [
            scraper.id, site_url, source_path, bucket_name,
            json.dumps(table_indexs)
        ]

        pt = scraper.schedule

        def _apply_schedule(minute, hour, day_of_week, day_of_month,
                            month_of_year, last_run):
            # Update the scraper's existing crontab/periodic task in place,
            # or create a fresh pair when the scraper has none yet.
            if pt:
                cs = pt.crontab
                cs.minute = minute
                cs.hour = hour
                cs.day_of_week = day_of_week
                cs.day_of_month = day_of_month
                cs.month_of_year = month_of_year
                cs.save()
                pt.enabled = 1
                pt.last_run_at = last_run
                pt.name = task_name
                pt.save()
                return pt
            cs = CrontabSchedule.objects.create(minute=minute,
                                                hour=hour,
                                                day_of_week=day_of_week,
                                                day_of_month=day_of_month,
                                                month_of_year=month_of_year)
            return PeriodicTask.objects.create(
                name=task_name,
                task='uploader.tasks.parse_upload_task',
                args=json.dumps(param_args),
                enabled=1,
                crontab=cs,
                kwargs='{}',
                last_run_at=last_run)

        if schedule_frequency == 'None':
            # Unschedule: drop both the periodic task and its crontab.
            if pt:
                scraper.schedule = None
                scraper.save()
                cs = pt.crontab
                cs.delete()
                pt.delete()
                pt = None
        elif schedule_frequency == 'Once':
            pt = _apply_schedule(select_minute, select_hour, "*", select_date,
                                 select_month, timezone.now())
        elif schedule_frequency == 'Minutely':
            pt = _apply_schedule("*/" + str(frequency), str_hours_of_day, "*",
                                 "*", "*", sel_datetime)
        elif schedule_frequency == 'Hourly':
            pt = _apply_schedule(select_minute, "*/" + str(frequency),
                                 str_days_of_week, "*", "*", sel_datetime)
        elif schedule_frequency == 'Daily':
            pt = _apply_schedule(select_minute, select_hour, "*",
                                 "*/" + str(frequency), "*", sel_datetime)
        elif schedule_frequency == 'Weekly':
            pt = _apply_schedule(select_minute, select_hour, str_days_of_week,
                                 "*", "*", sel_datetime)
        elif schedule_frequency == 'Monthly':
            pt = _apply_schedule(select_minute, select_hour, str_days_of_week,
                                 str_days_of_month, "*/" + str(frequency),
                                 sel_datetime)
        elif schedule_frequency == 'Yearly':
            pt = _apply_schedule(select_minute, select_hour, str_days_of_week,
                                 str_days_of_month, str_months_of_year,
                                 sel_datetime)

        scraper.schedule = pt
        scraper.name = scraper_name
        scraper.site = site_url
        scraper.upload_path = source_path
        scraper.bucket_name = bucket_name
        scraper.table_numbers = json.dumps(table_indexs)
        scraper.user = request.user
        scraper.save()

        # Touch PeriodicTasks so celery-beat reloads the schedule definitions.
        PeriodicTasks.objects.update(last_update=timezone.now())
        if scraper:
            return Response({"status": "Success"}, status=status.HTTP_200_OK)
        else:
            return Response({"details": "failed"},
                            status=status.HTTP_400_BAD_REQUEST)
Example #48
0
# Paths to the CMSP (São Paulo city council) voting XML dumps, one per year.
XML2012 = os.path.join(MODULE_DIR, 'dados/cmsp/cmsp2012.xml')
XML2013 = os.path.join(MODULE_DIR, 'dados/cmsp/cmsp2013.xml')
XML2014 = os.path.join(MODULE_DIR, 'dados/cmsp/cmsp2014.xml')
XML2015 = os.path.join(MODULE_DIR, 'dados/cmsp/cmsp2015.xml')
XML2016 = os.path.join(MODULE_DIR, 'dados/cmsp/cmsp2016.xml')

# Types of propositions found in the CMSP XML files.
# This list helps identify which ballots refer to actual propositions.
# Examples of ballots that are NOT propositions: "Adiamento do Prolong.
# do Expediente"; "Adiamento dos Demais itens da Pauta".
TIPOS_PROPOSICOES = ['PL', 'PLO', 'PDL']

# Regex that captures a proposition name (e.g. PL 12/2010):
# type acronym, number, optional space, slash, four-digit year.
PROP_REGEX = '([a-zA-Z]{1,3}) ([0-9]{1,4}) ?/([0-9]{4})'

# Default analysis period covered by the importer.
INICIO_PERIODO = parse_datetime('2010-01-01 0:0:0')
FIM_PERIODO = parse_datetime('2012-12-31 0:0:0')


class GeradorCasaLegislativa(object):

    def gerar_cmsp(self):
        """Return the CMSP legislative-house record, creating it on first use."""
        try:
            return models.CasaLegislativa.objects.get(nome_curto=NOME_CURTO)
        except models.CasaLegislativa.DoesNotExist:
            # Not imported yet: build and persist the CMSP record.
            return self.salvar_cmsp()

    def salvar_cmsp(self):
        cmsp = models.CasaLegislativa()
        cmsp.nome = 'Câmara Municipal de São Paulo'
Example #49
0
    def get_context_data(self, **kwargs):
        """Build the Steam profile page context.

        Resolution order for profile data: a linked site user's social auth
        record, then a recent SteamCache entry (< 6 hours old), then a live
        Steam Web API call (cached afterwards), and finally a bare dict with
        just the Steam ID.
        """
        context = super(SteamView, self).get_context_data(**kwargs)

        # Is this Steam ID associated with a user?
        try:
            social_obj = UserSocialAuth.objects.get(uid=kwargs['steam_id'], )
            context['steam_info'] = social_obj.extra_data['player']

            context['uploaded'] = social_obj.user.replay_set.all()
            context['has_user'] = True
            context['social_obj'] = social_obj
        except UserSocialAuth.DoesNotExist:
            # Pull the profile data and pass it in.
            context['has_user'] = False
            context['steam_info'] = None

            # Do we have a cache object for this already?
            try:
                cache = SteamCache.objects.filter(uid=kwargs['steam_id'])

                if cache.count() > 0:
                    # Purge duplicate cache rows, keeping only the first.
                    for cache_item in cache[1:]:
                        cache_item.delete()

                    cache = cache[0]

                    # Have we updated this profile recently?
                    if 'last_updated' in cache.extra_data:
                        # Parse the last updated date.
                        last_date = parse_datetime(
                            cache.extra_data['last_updated'])

                        # BUG FIX: use total_seconds() instead of .seconds.
                        # timedelta.seconds wraps at one day, so a profile
                        # cached days ago looked "fresh" and never refreshed.
                        seconds_ago = (now() - last_date).total_seconds()

                        # 3600  seconds = 1 hour
                        # 21600 seconds = 6 hours
                        if seconds_ago < 21600:
                            context['steam_info'] = cache.extra_data['player']

            except SteamCache.DoesNotExist:
                pass

            try:
                if not context['steam_info']:
                    player = requests.get(
                        USER_INFO,
                        params={
                            'key': settings.SOCIAL_AUTH_STEAM_API_KEY,
                            'steamids': kwargs['steam_id'],
                        }).json()

                    if len(player['response']['players']) > 0:
                        context['steam_info'] = player['response']['players'][
                            0]

                        # Store this data in a SteamCache object.
                        cache_obj, _ = SteamCache.objects.get_or_create(
                            uid=kwargs['steam_id'])
                        cache_obj.extra_data = {
                            'player': context['steam_info'],
                            'last_updated': now().isoformat(),
                        }
                        cache_obj.save()
            except Exception:
                # Deliberate best-effort: the page still renders with minimal
                # info if the Steam API call or the cache write fails.
                pass

        context['appears_in'] = Replay.objects.filter(
            show_leaderboard=True,
            player__platform__in=['OnlinePlatform_Steam', '1'],
            player__online_id=kwargs['steam_id'],
        ).distinct()

        # Always provide at least the Steam ID so templates can render.
        if not context.get('steam_info', None):
            context['steam_info'] = {
                'steamid': kwargs['steam_id'],
            }

        return context
Example #50
0
def make_datetime(datestr):
    """Parse *datestr* and return it as a timezone-aware UTC datetime."""
    naive = parse_datetime(datestr)
    return make_aware(naive, timezone=utc)
# Build a schedule from the current tasks and blocked intervals and print
# it for inspection.
# NOTE(review): this reads like a captured interactive session -- the
# imports this code relies on only appear further down, after first use.
schedule = scheduleTasks(taskList, blockedList)

for s in schedule:
	print("Task name: " + s.task.name + ", start: " + str(s.start_time) + ", end: " + str(s.end_time))
	# print("Task name: " + s.task.name + ", duration: " + str((s.end_time - s.start_time).total_seconds()/60))

# Seed sample tasks: priority, total span in minutes, deadline, maximum
# minutes workable at a stretch, and minutes of work left.
Task(name="codechef", priority="2", span=60*4, deadline=timezone.now()+timezone.timedelta(days=10), at_a_stretch=60, left=60*4, done=False).save()
Task(name="codeforces", priority="2", span=60*3, deadline=timezone.now()+timezone.timedelta(days=10), at_a_stretch=60, left=60*3, done=False).save()
Task(name="badminton", priority="1", span=60*4, deadline=timezone.now()+timezone.timedelta(days=20), at_a_stretch=60, left=60*4, done=False).save()
Task(name="transport phenomena", priority="0", span=60*5, deadline=timezone.now()+timezone.timedelta(days=30), at_a_stretch=120, left=60*5, done=False).save()
Task(name="process engineering", priority="0", span=60*1, deadline=timezone.now()+timezone.timedelta(days=40), at_a_stretch=30, left=60*1, done=False).save()
# Seed blocked intervals: some relative to "now", some at fixed UTC times.
Blocked(name="class", start_time=timezone.now()+timezone.timedelta(minutes=10), end_time=timezone.now()+timezone.timedelta(minutes=70)).save()
Blocked(name="sleep", start_time=timezone.now()+timezone.timedelta(minutes=100), end_time=timezone.now()+timezone.timedelta(minutes=150)).save()
Blocked(name="class", start_time=timezone.now()+timezone.timedelta(minutes=220), end_time=timezone.now()+timezone.timedelta(minutes=280)).save()
Blocked(name="class", start_time=pytz.timezone('UTC').localize(parse_datetime("2018-09-05 13:30:00"), is_dst=None), end_time=pytz.timezone('UTC').localize(parse_datetime("2018-09-05 14:00:00"), is_dst=None)).save()
Blocked(name="sleep", start_time=pytz.timezone('UTC').localize(parse_datetime("2018-09-05 16:00:00"), is_dst=None), end_time=pytz.timezone('UTC').localize(parse_datetime("2018-09-05 17:30:00"), is_dst=None)).save()
Blocked(name="class", start_time=pytz.timezone('UTC').localize(parse_datetime("2018-09-05 20:30:00"), is_dst=None), end_time=pytz.timezone('UTC').localize(parse_datetime("2018-09-05 22:00:00"), is_dst=None)).save()


# Test
from TaskScheduler.models import Task, Schedule, Blocked
# from TaskScheduler.schedulerAlgorithms import *
from TaskScheduler.SlackRoundRobinScheduler import *
from django.utils import *
import pytz
from django.utils.dateparse import parse_datetime

# Clear out every existing task before re-seeding.
for t in Task.objects.all():
	t.delete()
Example #52
0
def _save(request, save_type):
    """Persist CMS page content posted by the editor.

    Validates the posted draft against the stored page, test-renders the
    new content to catch template errors, then either saves a draft or
    runs the full pre/post-publish signal cycle.

    Parameters:
        request: HTTP request whose POST data must carry 'auth_data'
            (a JSON string), 'cms_page_id' and 'draft_modified'.
        save_type: 'draft' saves without publishing; any other value
            triggers the publish flow.

    Returns:
        JsonResponse describing success, warnings, or the failure reason.
    """
    post_data = request.POST

    # 'draft_modified' is read unconditionally below, so require it here
    # too -- a missing key is a client error (400), not a server crash.
    if any(_ not in post_data
           for _ in ('auth_data', 'cms_page_id', 'draft_modified')):
        return JsonResponse({'success': False}, status=400)

    cms_page = get_object_or_404(Content, id=post_data['cms_page_id'])
    draft_date = parse_datetime(post_data['draft_modified'])

    # Reject stale drafts: the page was modified after this draft was taken.
    if draft_date and draft_date < cms_page.modified_on:
        return JsonResponse(
            {
                'success': False,
                'draft_error': True,
                'message': 'Draft data was out of date'
            },
            status=200)

    try:
        content = json.loads(post_data['auth_data'])
    except ValueError:  # json.JSONDecodeError subclasses ValueError
        return JsonResponse(
            {
                'success': False,
                'message': 'Invalid JSON object'
            }, status=400)
    else:
        cms_page.auth_data.update(content)
        try:
            # Re-render with the cached template context to surface any
            # template/content errors before anything is persisted.
            template_context = cache.get('template_context_{}'.format(
                cms_page.id))
            render_content(cms_page,
                           request=request,
                           template_context=template_context)
        except Exception as e:
            return JsonResponse({
                'success': False,
                'message': 'Unable to parse the new content.\n'
                           'Please check the console for issues.',
                # NOTE(review): `unicode` is a Python 2 builtin; this
                # branch will NameError on Python 3 -- use str(e) there.
                'exception': unicode(e),
                'traceback': traceback.format_exc()
            })
        else:
            if save_type == 'draft':
                cms_page.save()
            else:
                # Ask listeners whether publishing may proceed; each
                # receiver answers (should_publish, message).
                pre_signal_response = pre_publish_signal.send(
                    sender=cms_page._meta.model, cms_page=cms_page)
                errors, warns = [], []
                for _, response in pre_signal_response:
                    should_publish, msg = response
                    if should_publish is False:
                        errors.append(msg)
                    elif should_publish is None:
                        warns.append(msg)

                if errors:
                    # NOTE(review): key is 'message_in_detail' here but
                    # 'message_in_details' in the warning branch below;
                    # kept as-is since clients may depend on either.
                    return JsonResponse({
                        'success': False,
                        'message_in_detail': '\n'.join(errors),
                        'message': """
                                Unable to publish.
                                Please check console for details
                            """
                    })
                else:
                    cms_page.publish_cms_content()
                    post_publish_signal.send(sender=cms_page._meta.model,
                                             cms_page=cms_page)

                if warns:
                    return JsonResponse({
                        'success': None,
                        'message_in_details': '\n'.join(warns),
                        'message': """
                                Please check console for warnings
                            """
                    })

            return JsonResponse({'success': True})
Example #53
0
def client_acknowledgement(self, txid):
    """Fan out notifications for a recorded transaction.

    For every subscription attached to the transaction's address, builds a
    payload (format depends on the wallet version), POSTs it to the
    recipient's webhook URL, queues a Telegram message, and pushes the
    payload over the websocket channel layer.

    Returns a list of ('telegram', message, telegram_id) tuples for the
    caller to deliver to third parties.
    """
    this_transaction = Transaction.objects.filter(id=txid)
    third_parties = []
    if this_transaction.exists():
        transaction = this_transaction.first()
        # Block height is only available once the transaction is mined.
        block = None
        if transaction.blockheight:
            block = transaction.blockheight.number

        address = transaction.address

        subscriptions = Subscription.objects.filter(
            address=address
        )

        if subscriptions.exists():

            for subscription in subscriptions:

                recipient = subscription.recipient
                websocket = subscription.websocket

                # Payload format defaults to v1 unless the wallet (or the
                # address's creation date) indicates v2.
                wallet_version = 1
                if address.wallet:
                    wallet_version = address.wallet.version
                else:
                    # Hardcoded date-based check for addresses that are not associated with wallets
                    v2_rollout_date_str = dateparse.parse_datetime('2021-09-11 00:00:00')
                    v2_rollout_date = pytz.UTC.localize(v2_rollout_date_str)
                    if address.date_created >= v2_rollout_date:
                        wallet_version = 2

                # NOTE(review): `data` is only bound when wallet_version
                # is exactly 1 or 2; any other version would NameError in
                # the websocket branch below -- confirm versions are
                # limited to {1, 2}.
                if wallet_version == 2:
                    data = {
                        'token_name': transaction.token.name,
                        'token_id':  'slp/' + transaction.token.tokenid if  transaction.token.tokenid  else 'bch',
                        'token_symbol': transaction.token.token_ticker.lower(),
                        'amount': transaction.amount,
                        'address': transaction.address.address,
                        'source': 'WatchTower',
                        'txid': transaction.txid,
                        'block': block,
                        'index': transaction.index,
                        'address_path' : transaction.address.address_path
                    }
                elif wallet_version == 1:
                    data = {
                        'amount': transaction.amount,
                        'address': transaction.address.address,
                        'source': 'WatchTower',
                        'token': transaction.token.tokenid or transaction.token.token_ticker.lower(),
                        'txid': transaction.txid,
                        'block': block,
                        'index': transaction.index,
                        'address_path' : transaction.address.address_path
                    }

                if recipient:
                    if recipient.valid:
                        if recipient.web_url:
                            LOGGER.info(f"Webhook call to be sent to: {recipient.web_url}")
                            LOGGER.info(f"Data: {str(data)}")
                            resp = requests.post(recipient.web_url,data=data)
                            if resp.status_code == 200:
                                # Delivered: mark the transaction acknowledged.
                                this_transaction.update(acknowledged=True)
                                LOGGER.info(f'ACKNOWLEDGEMENT SENT TX INFO : {transaction.txid} TO: {recipient.web_url} DATA: {str(data)}')
                            elif resp.status_code == 404 or resp.status_code == 522 or resp.status_code == 502:
                                # Endpoint gone/unreachable: stop trying this URL.
                                Recipient.objects.filter(id=recipient.id).update(valid=False)
                                LOGGER.info(f"!!! ATTENTION !!! THIS IS AN INVALID DESTINATION URL: {recipient.web_url}")
                            else:
                                # Transient failure: retry the whole task shortly.
                                LOGGER.error(resp)
                                self.retry(countdown=3)

                        if recipient.telegram_id:

                            # Non-BCH (token) transfers include token details.
                            if transaction.token.name != 'bch':
                                message=f"""<b>WatchTower Notification</b> ℹ️
                                    \n Address: {transaction.address.address}
                                    \n Token: {transaction.token.name}
                                    \n Token ID: {transaction.token.tokenid}
                                    \n Amount: {transaction.amount}
                                    \nhttps://explorer.bitcoin.com/bch/tx/{transaction.txid}
                                """
                            else:
                                message=f"""<b>WatchTower Notification</b> ℹ️
                                    \n Address: {transaction.address.address}
                                    \n Amount: {transaction.amount} BCH
                                    \nhttps://explorer.bitcoin.com/bch/tx/{transaction.txid}
                                """

                            args = ('telegram' , message, recipient.telegram_id)
                            third_parties.append(args)
                            this_transaction.update(acknowledged=True)

                if websocket:

                    # Room names are the address with ':' replaced by '_';
                    # the first send goes to the address-level room (note
                    # the trailing '_' from the empty tokenid).
                    tokenid = ''
                    room_name = transaction.address.address.replace(':','_')
                    room_name += f'_{tokenid}'
                    channel_layer = get_channel_layer()
                    async_to_sync(channel_layer.group_send)(
                        f"{room_name}", 
                        {
                            "type": "send_update",
                            "data": data
                        }
                    )
                    # SLP addresses additionally get a token-specific room.
                    if transaction.address.address.startswith('simpleledger:'):
                        tokenid = transaction.token.tokenid
                        room_name += f'_{tokenid}'
                        channel_layer = get_channel_layer()
                        async_to_sync(channel_layer.group_send)(
                            f"{room_name}", 
                            {
                                "type": "send_update",
                                "data": data
                            }
                        )
    return third_parties
Example #54
0
 def to_python(self, value):
     """Coerce *value* to a datetime.

     None and datetime instances pass through unchanged; anything else
     is handed to parse_datetime.
     """
     if isinstance(value, datetime.datetime):
         return value
     if value is None:
         return None
     return parse_datetime(value)
Example #55
0
 def import_value_from_string(self, value, serializer):
     """Deserialize *value* into a datetime (the *serializer* argument
     is accepted for interface compatibility but not used here)."""
     parsed = parse_datetime(value)
     return parsed
Example #56
0
    def update_youtube(self, num_videos):
        """Mirror up to *num_videos* recent uploads from the configured
        YouTube channel as Multimedia records.

        Pages through the channel's 'uploads' playlist (up to 10 items
        per request), then fetches full video details and stores title,
        description, duration, thumbnail and publish date for any video
        not already present.

        NOTE(review): Python 2 code (print statements); the ProveedorMultimedia
        row named 'Youtube' supplies the channel id (`identificador`) and
        API key (`clave`).
        """
        YOUTUBE_API_SERVICE_NAME = 'youtube'
        YOUTUBE_API_VERSION = 'v3'
        # Paging state: items returned per page, and the next-page token.
        resultados_por_pagina = 0
        pagina_actual = None
        proveedor_youtube = ProveedorMultimedia.objects.get(nombre='Youtube')
        youtube = build(YOUTUBE_API_SERVICE_NAME, YOUTUBE_API_VERSION)
        # Resolve the channel's 'uploads' playlist id.
        channels_response = youtube.channels().list(
            id=proveedor_youtube.identificador,
            part='contentDetails',
            key=proveedor_youtube.clave).execute()
        for channel in channels_response['items']:
            uploads_list_id = channel['contentDetails']['relatedPlaylists'][
                'uploads']
            # Keep fetching pages while more videos were requested than a
            # single page returns; num_videos is decremented per page.
            while num_videos > resultados_por_pagina:
                videos_list_ids = []
                playlistitems_list_request = youtube.playlistItems().list(
                    playlistId=uploads_list_id,
                    part='snippet',
                    maxResults=(10 if num_videos > 10 else num_videos),
                    key=proveedor_youtube.clave,
                    pageToken=pagina_actual)
                playlistitems_list_response = playlistitems_list_request.execute(
                )
                resultados_por_pagina = playlistitems_list_response[
                    'pageInfo']['resultsPerPage']
                num_videos -= len(playlistitems_list_response['items'])
                if 'nextPageToken' in playlistitems_list_response:
                    pagina_actual = playlistitems_list_response[
                        'nextPageToken']
                else:
                    pagina_actual = None
                print '%s resultados, %s por página' % (
                    playlistitems_list_response['pageInfo']['totalResults'],
                    playlistitems_list_response['pageInfo']['resultsPerPage'])
                # Collect the video ids on this page for a batched lookup.
                for playlist_item in playlistitems_list_response['items']:
                    videos_list_ids.append(
                        playlist_item['snippet']['resourceId']['videoId'])

                print videos_list_ids
                # Batched detail fetch for every video id on the page.
                videos_list_response = youtube.videos().list(
                    id=', '.join(videos_list_ids),
                    part='contentDetails, snippet',
                    key=proveedor_youtube.clave).execute()
                for video in videos_list_response['items']:
                    youtube_video, creado = Multimedia.objects.get_or_create(
                        identificador=video['id'], proveedor=proveedor_youtube)
                    # Only newly created records are populated; see the
                    # commented-out condition for the abandoned update path.
                    if creado:  # or youtube_video.titulo != video['snippet']['title'] or youtube_video.descripcion != video['snippet']['description'] or youtube_video.duracion != self.get_youtube_duration(video['contentDetails']['duration']) or youtube_video.icono != video['snippet']['thumbnails']['high']['url']:
                        print(u' '.join((video['snippet']['title'],
                                         video['contentDetails']['duration']
                                         )).encode('utf-8'))
                        youtube_video.identificador = video['id']
                        youtube_video.titulo = video['snippet'][
                            'title'].encode('utf-8', 'replace')
                        youtube_video.descripcion = video['snippet'][
                            'description'].encode('utf-8', 'replace')
                        youtube_video.duracion = self.get_youtube_duration(
                            video['contentDetails']['duration'])
                        youtube_video.icono = video['snippet']['thumbnails'][
                            'medium']['url']
                        youtube_video.fecha = dateparse.parse_datetime(
                            video['snippet']['publishedAt'])
                        youtube_video.save()
Example #57
0
    def __init__(self, *args, **kwargs):
        """Build the column-selection form for a plugin run.

        Expects 'workflow' and 'plugin_instance' in kwargs. Fields are
        created in a fixed order (columns, merge_key, per-output names,
        suffix, per-parameter fields), which determines rendering order.
        """
        self.workflow = kwargs.pop('workflow', None)
        self.plugin_instance = kwargs.pop('plugin_instance', None)

        super(SelectColumnForm, self).__init__(*args, **kwargs)

        if self.plugin_instance.input_column_names != []:
            # The set of columns is fixed, remove the field.
            self.fields.pop('columns')
        else:
            # The queryset for the columns must be extracted from the
            # workflow and should only include the non-key columns
            self.fields['columns'].queryset = self.workflow.columns.filter(
                is_key=False)

        # Field to choose the Key column to merge the results
        self.fields['merge_key'] = forms.ChoiceField(
            initial=('', '---'),
            label='Key column for merging',
            required=True,
            help_text='One of the existing key columns to merge the results',
            choices=[('', '---')] +
            [(x, x) for x in self.workflow.columns.filter(is_key=True)])

        # Add the fields for the output column names
        for idx, cname in enumerate(self.plugin_instance.output_column_names):
            self.fields[field_prefix + 'output_%s' % idx] = forms.CharField(
                initial=cname,
                label='Name for result column "{0}"'.format(cname),
                strip=True,
                required=False,
            )

        self.fields['out_column_suffix'] = forms.CharField(
            initial='',
            label='Suffix to add to result columns (empty to ignore)',
            strip=True,
            required=False,
            help_text=
            'Added to all output column names. Useful to keep results from '
            'several executions in separated columns.')

        # One form field per plugin parameter; the field type follows the
        # declared parameter type, with an allowed-values list taking
        # precedence as a choice field.
        for idx, (k, p_type, p_allow, p_init, p_help) in \
                enumerate(self.plugin_instance.parameters):

            if p_allow:
                new_field = forms.ChoiceField(choices=[(x, x)
                                                       for x in p_allow],
                                              required=False,
                                              label=k,
                                              help_text=p_help)
            elif p_type == 'integer':
                new_field = forms.IntegerField(label=k,
                                               required=False,
                                               help_text=p_help)
            elif p_type == 'double':
                new_field = forms.FloatField(label=k,
                                             required=False,
                                             help_text=p_help)
            elif p_type == 'string':
                new_field = forms.CharField(max_length=1024,
                                            strip=True,
                                            required=False,
                                            label=k,
                                            help_text=p_help)
            elif p_type == 'boolean':
                new_field = forms.BooleanField(required=False,
                                               label=k,
                                               help_text=p_help)
            else:  # p_type == 'datetime':
                new_field = forms.DateTimeField(required=False,
                                                label=k,
                                                widget=DateTimeWidget(
                                                    options=dateTimeOptions,
                                                    usel10n=True,
                                                    bootstrap_version=3),
                                                help_text=p_help)

            # Set the initial value of each field
            if p_allow:
                new_field.initial = (p_init, p_init)
            else:
                if p_type == 'datetime':
                    # Initial arrives as a string; parse it for the widget.
                    new_field.initial = parse_datetime(p_init)
                else:
                    new_field.initial = p_init

            # Insert the new_field in the form
            self.fields[field_prefix + 'parameter_%s' % idx] = new_field
Example #58
0
  def parseJsonToStoryList(self, json_obj):
    """Convert an NPR-style API response into a list of Story models.

    *json_obj* is expected to hold stories under ['list']['story'].
    Stories missing any required field (title, teaser, storyDate, text)
    are skipped entirely; optional parts (byline, images, pull quotes)
    degrade gracefully. Returns the list of populated (unsaved) Story
    objects.
    """
    stories = json_obj['list']['story']
    story_objects = []
    for s in stories:
      story_obj = models.Story()
      try:
        title = s['title']['$text']
        story_obj.title = u"{}".format(title)
        story_obj.teaser = s['teaser']['$text']
        story_obj.creation_date = timezone.now()

        # storyDate arrives like 'Mon, 01 Jan 2018 12:00:00 +0000':
        # parse the naive part, then re-append the trailing tz offset in
        # ISO form so parse_datetime yields an aware datetime.
        date_list = s['storyDate']['$text'].split()
        naive_date_str = ' '.join(date_list[:-1])
        tz = date_list[-1]
        naive_date = datetime.datetime.strptime(
            naive_date_str,
            "%a, %d %b %Y %H:%M:%S")
        new_date_str = "{}-{}-{}T{}:{}:{}{}".format(
            naive_date.year, naive_date.month, naive_date.day,
            naive_date.hour, naive_date.minute, naive_date.second, tz)
        story_obj.story_date = parse_datetime(new_date_str)

        try:
          story_obj.outlet = s['outlet']
        except KeyError:
          # Default outlet when the feed does not name one.
          story_obj.outlet = "NPR"

        s_paragraphs = []
        for paragraph in s['textWithHtml']['paragraph']:
          s_paragraphs.append(u"<p>{}</p>".format(paragraph['$text']))
        story_obj.text = ' '.join(s_paragraphs)

      except KeyError as e:
        # A required field is missing: drop this story.
        continue

      try:
        story_obj.byline = json.dumps(s['byline'])
      except KeyError as e:
        story_obj.byline = None

      # Images: each needs at least a src; crops and caption metadata are
      # optional extras.
      try:
        for img in s['image']:
          new_img = models.Image()
          try:
            new_img.src = img['src']
            for crop in img['crop']:
              if crop['type'] == 'standard':
                new_img.crop_standard_src = crop['src']
              elif crop['type'] == 'square':
                new_img.crop_square_src = crop['src']
              elif crop['type'] == 'wide':
                new_img.crop_wide_src = crop['src']
              elif crop['type'] == 'enlargement':
                new_img.crop_enlargement_src = crop['src']
              elif crop['type'] == 'custom':
                new_img.crop_custom_src = crop['src']
            try:
              new_img.caption = img['caption']['$text']
              new_img.provider = img['provider']['$text']
              new_img.producer = img['producer']['$text']
              new_img.copyright = img['copyright']
            except KeyError as e:
              pass
          except KeyError as e:
            continue
          else:
            story_obj.images.append(new_img)
      except KeyError as e:
        story_obj.images = []

      # Pull quotes: need both text and person to be kept.
      try:
        for quote in s['pullQuote']:
          new_quote = models.PullQuote()
          try:
            new_quote.text = quote['text']['$text']
            new_quote.person = quote['person']['$text']
          except KeyError as e:
            continue
          else:
            story_obj.pull_quotes.append(new_quote)
      except KeyError as e:
        story_obj.pull_quotes = []

      story_objects.append(story_obj)

    return story_objects
Example #59
0
 def test_converte_data_valida(self):
     """A valid 'd/m/Y' string converts to the matching datetime."""
     from django.utils.dateparse import parse_datetime
     expected = parse_datetime("1000-1-1 0:0:0")
     converted = self.xmlCMSP.converte_data("1/1/1000")
     self.assertEquals(converted, expected)
Example #60
0
def bookScreen(request, room_id):
    """Render the booking page for a room and create a booking on POST.

    A request without the 'rno' and 'start_tm' form fields (e.g. a plain
    GET) renders the bare form.  Otherwise the room is booked for a
    fixed 59-minute slot starting at the requested time, rejecting
    unparseable datetimes and overlaps with active bookings.
    """
    room = get_object_or_404(RoomResource, id=room_id)

    try:
        req_rno = request.POST['rno']
        req_stt = request.POST['start_tm']
    except KeyError:
        # Django's MultiValueDictKeyError subclasses KeyError, so this
        # covers both a GET and an incomplete POST: show the plain form.
        return render(request, 'polls/book.html', {
            'room': room,
            'head_title': 'Room Book'
        })

    try:
        # parse_datetime returns None for unrecognised formats and raises
        # ValueError for well-formed strings with impossible values;
        # treat both as invalid input.
        begin = parse_datetime(req_stt)
    except ValueError:
        begin = None

    if begin is None:
        return render(request, 'polls/book.html', {
            'room': room,
            'head_title': 'Room Book',
            'errormsg': 'Enter Valid DateTime!'
        })

    # Bookings always occupy a fixed 59-minute slot.
    end = begin + timedelta(minutes=59)
    thisbook = RoomBooking(room=room,
                           start_time=begin,
                           end_time=end,
                           roll_no=req_rno)
    # Saved only after the overlap check below succeeds.

    # Overlap test against active bookings for this room:
    # existing.end >= new.start AND existing.start <= new.end.
    clashes = RoomBooking.objects.filter(room_id=room.id).filter(
        end_time__gte=thisbook.start_time,
        start_time__lte=thisbook.end_time,
        active__exact=True,
    )

    if clashes.count() == 0:
        thisbook.save()
        return render(
            request, 'polls/book.html', {
                'room': room,
                'head_title': 'Room Book',
                'booking_info': thisbook
            })
    return render(
        request, 'polls/book.html', {
            'room': room,
            'head_title': 'Room Book',
            'errormsg': 'This slot is already booked!'
        })