Example #1
def test_ticking_time_clock():
    with freeze_time('2012-01-14 03:21:34', tick=True):
        first = time.clock()
        time.sleep(0.001)  # Deal with potential clock resolution problems
        with freeze_time('2012-01-14 03:21:35', tick=True):
            second = time.clock()
            time.sleep(0.001)  # Deal with potential clock resolution problems

        with freeze_time('2012-01-14 03:21:36', tick=True):
            third = time.clock()
            time.sleep(0.001)

        # Rewind time backwards
        with freeze_time('2012-01-14 03:20:00', tick=True):
            fourth = time.clock()
            time.sleep(0.001)
            fifth = time.clock()

        assert first > 0
        assert second > first
        assert second > 1
        assert third > second
        assert third > 2

        assert third > fourth
        assert second > fourth
        assert first > fourth

        assert fifth > fourth
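Example #1 relies on tick=True, which starts the patched clock at the frozen instant and then lets it advance with real elapsed time. Note that time.clock(), used above, was removed in Python 3.8; on current interpreters the same behaviour can be checked through the ticking datetime instead. A minimal sketch of that check, assuming only freezegun is installed:

import datetime
import time

from freezegun import freeze_time


def test_tick_advances():
    # tick=True: the clock starts at the frozen instant but keeps moving forward
    with freeze_time('2012-01-14 03:21:34', tick=True):
        start = datetime.datetime.now()
        time.sleep(0.001)  # deal with potential clock resolution problems
        assert start >= datetime.datetime(2012, 1, 14, 3, 21, 34)
        assert datetime.datetime.now() > start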
Example #2
def test_workflow_execution_start_to_close_timeout():
    with freeze_time("2015-01-01 12:00:00"):
        conn = setup_workflow()

    with freeze_time("2015-01-01 13:59:30"):
        resp = conn.get_workflow_execution_history("test-domain", conn.run_id, "uid-abcd1234")

        event_types = [evt["eventType"] for evt in resp["events"]]
        event_types.should.equal(
            ["WorkflowExecutionStarted", "DecisionTaskScheduled"]
        )

    with freeze_time("2015-01-01 14:00:30"):
        # => Workflow Execution Start to Close timeout reached!!
        resp = conn.get_workflow_execution_history("test-domain", conn.run_id, "uid-abcd1234")

        event_types = [evt["eventType"] for evt in resp["events"]]
        event_types.should.equal(
            ["WorkflowExecutionStarted", "DecisionTaskScheduled", "WorkflowExecutionTimedOut"]
        )
        attrs = resp["events"][-1]["workflowExecutionTimedOutEventAttributes"]
        attrs.should.equal({
            "childPolicy": "ABANDON", "timeoutType": "START_TO_CLOSE"
        })
        # checks that event has been emitted at 14:00:00, not 14:00:30
        resp["events"][-1]["eventTimestamp"].should.equal(1420120800.0)
Example #3
    def test_blacklist(self):
        """Test that users can only sign up if they are not blacklisted"""

        t_open = datetime(2016, 1, 1)
        t_close = datetime(2016, 12, 31)

        ev = self.new_object("events", spots=100)
        user1 = self.new_object("users")
        user2 = self.new_object("users")

        user1_token = self.get_user_token(user1['_id'])
        user2_token = self.get_user_token(user2['_id'])

        self.new_object("blacklist", user=user1['_id'],
                        start_time=t_open, end_time=t_close)
        self.new_object("blacklist", user=user2['_id'],
                        start_time=t_open)

        with freeze_time(datetime(2016, 6, 1)):
            self.api.post('eventsignups', data={'user': str(user1['_id']),
                          'event': str(ev['_id'])}, token=user1_token,
                          status_code=422)

        with freeze_time(datetime(2017, 6, 1)):
            self.api.post('eventsignups', data={'user': str(user1['_id']),
                          'event': str(ev['_id'])}, token=user1_token,
                          status_code=201)

        with freeze_time(datetime(2017, 6, 1)):
            self.api.post('eventsignups', data={'user': str(user2['_id']),
                          'event': str(ev['_id'])}, token=user2_token,
                          status_code=422)
Example #4
def test_pypy_compat():
    try:
        freeze_time("Jan 14th, 2012, 23:59:59", tick=True)
    except SystemError:
        pass
    else:
        raise AssertionError("tick=True should error on non-CPython")
Example #5
def test_decision_task_start_to_close_timeout():
    with freeze_time("2015-01-01 12:00:00"):
        conn = setup_workflow()
        conn.poll_for_decision_task("test-domain", "queue")["taskToken"]

    with freeze_time("2015-01-01 12:04:30"):
        resp = conn.get_workflow_execution_history("test-domain", conn.run_id, "uid-abcd1234")

        event_types = [evt["eventType"] for evt in resp["events"]]
        event_types.should.equal(
            ["WorkflowExecutionStarted", "DecisionTaskScheduled", "DecisionTaskStarted"]
        )

    with freeze_time("2015-01-01 12:05:30"):
        # => Decision Task Start to Close timeout reached!!
        resp = conn.get_workflow_execution_history("test-domain", conn.run_id, "uid-abcd1234")

        event_types = [evt["eventType"] for evt in resp["events"]]
        event_types.should.equal(
            ["WorkflowExecutionStarted", "DecisionTaskScheduled", "DecisionTaskStarted",
             "DecisionTaskTimedOut", "DecisionTaskScheduled"]
        )
        attrs = resp["events"][-2]["decisionTaskTimedOutEventAttributes"]
        attrs.should.equal({
            "scheduledEventId": 2, "startedEventId": 3, "timeoutType": "START_TO_CLOSE"
        })
        # checks that event has been emitted at 12:05:00, not 12:05:30
        resp["events"][-2]["eventTimestamp"].should.equal(1420113900.0)
Example #6
  def test_one_time_wf_activate(self):
    def get_person(person_id):
      return db.session.query(Person).filter(Person.id == person_id).one()
    with freeze_time("2015-04-10"):
      _, wf = self.wf_generator.generate_workflow(self.one_time_workflow_1)

      _, cycle = self.wf_generator.generate_cycle(wf)
      self.wf_generator.activate_workflow(wf)

      person_2 = get_person(self.random_people[2].id)

    with freeze_time("2015-04-11"):
      _, notif_data = common.get_daily_notifications()
      self.assertIn(person_2.email, notif_data)
      self.assertIn("cycle_started", notif_data[person_2.email])
      self.assertIn(cycle.id, notif_data[person_2.email]["cycle_started"])
      self.assertIn("my_tasks",
                    notif_data[person_2.email]["cycle_data"][cycle.id])

      person_1 = get_person(self.random_people[0].id)

    with freeze_time("2015-05-03"):  # two days befor due date
      _, notif_data = common.get_daily_notifications()
      self.assertIn(person_1.email, notif_data)
      self.assertNotIn("due_in", notif_data[person_1.email])
      self.assertNotIn("due_today", notif_data[person_1.email])

    with freeze_time("2015-05-04"):  # one day befor due date
      _, notif_data = common.get_daily_notifications()
      self.assertEqual(len(notif_data[person_1.email]["due_in"]), 1)

    with freeze_time("2015-05-05"):  # due date
      _, notif_data = common.get_daily_notifications()
      self.assertEqual(len(notif_data[person_1.email]["due_today"]), 1)
Example #7
 def _soft_delete_audits(self):
     with freezegun.freeze_time(self.FAKE_TODAY):
         self.dbapi.soft_delete_audit(self.audit1.uuid)
     with freezegun.freeze_time(self.FAKE_OLD_DATE):
         self.dbapi.soft_delete_audit(self.audit2.uuid)
     with freezegun.freeze_time(self.FAKE_OLDER_DATE):
         self.dbapi.soft_delete_audit(self.audit3.uuid)
Example #8
def test_bad_time_argument():
    try:
        freeze_time("2012-13-14", tz_offset=-4)
    except ValueError:
        pass
    else:
        assert False, "Bad values should raise a ValueError"
Example #9
def test_invalid_type():
    try:
        freeze_time(int(4))
    except TypeError:
        pass
    else:
        assert False, "Bad types should raise a TypeError"
Example #10
File: test_edit.py  Project: ralf57/taxi
    def test_prefill_entries_add_to_bottom(self):
        config = self.default_config.copy()
        tmp_entries_dir = tempfile.mkdtemp()
        os.remove(self.entries_file)

        self.entries_file = os.path.join(tmp_entries_dir, '%m_%Y.txt')
        config['default']['file'] = self.entries_file

        with freeze_time('2014-01-21'):
            self.write_entries("""20/01/2014
alias_1 2 hello world

21/01/2014
alias_1 1 foo bar
""")

        with freeze_time('2014-02-21'):
            self.write_entries("""20/02/2014
alias_1 2 hello world
""")
            self.run_command('edit', config_options=config)

            with open(expand_date(self.entries_file), 'r') as f:
                lines = f.readlines()

            self.assertEqual('20/02/2014\n', lines[0])
            self.assertEqual('21/02/2014\n', lines[3])
Example #11
File: test_edit.py  Project: ralf57/taxi
    def test_previous_file_doesnt_autofill(self):
        config = self.default_config.copy()
        tmp_entries_dir = tempfile.mkdtemp()
        os.remove(self.entries_file)

        self.entries_file = os.path.join(tmp_entries_dir, '%m_%Y.txt')
        config['default']['file'] = self.entries_file

        with freeze_time('2014-01-21'):
            self.write_entries("""20/01/2014
alias_1 2 hello world

21/01/2014
alias_1 1 foo bar
""")

        with freeze_time('2014-02-21'):
            self.write_entries("""20/02/2014
alias_1 2 hello world
""")
            self.run_command('edit', args=['1'], config_options=config)

        with freeze_time('2014-01-21'):
            with open(expand_date(self.entries_file), 'r') as f:
                lines = f.readlines()

            self.assertNotIn('21/02/2014\n', lines)
Example #12
    def test_commit_previous_file_previous_month(self):
        tmp_entries_dir = tempfile.mkdtemp()
        config = self.default_config.copy()

        os.remove(self.entries_file)

        self.entries_file = os.path.join(tmp_entries_dir, '%m_%Y.txt')
        config['default']['file'] = self.entries_file

        with freeze_time('2014-01-01'):
            self.write_entries("""01/01/2014
alias_1 2 january
""")

        with freeze_time('2014-02-01'):
            self.write_entries("""01/02/2014
    alias_1 4 february
    """)

            options = self.default_options.copy()
            options['ignore_date_error'] = True

            stdout = self.run_command('commit', config_options=config,
                                      options=options)
        shutil.rmtree(tmp_entries_dir)

        self.assertIn('january', stdout)
        self.assertIn('february', stdout)
Example #13
    def test_soft_fail_with_reschedule(self):
        sensor = self._make_sensor(
            return_value=False,
            poke_interval=10,
            timeout=5,
            soft_fail=True,
            mode='reschedule')
        dr = self._make_dag_run()

        # first poke returns False and task is re-scheduled
        date1 = timezone.utcnow()
        with freeze_time(date1):
            self._run(sensor)
        tis = dr.get_task_instances()
        self.assertEquals(len(tis), 2)
        for ti in tis:
            if ti.task_id == SENSOR_OP:
                self.assertEquals(ti.state, State.NONE)
            if ti.task_id == DUMMY_OP:
                self.assertEquals(ti.state, State.NONE)

        # second poke returns False, timeout occurs
        date2 = date1 + timedelta(seconds=sensor.poke_interval)
        with freeze_time(date2):
            self._run(sensor)
        tis = dr.get_task_instances()
        self.assertEquals(len(tis), 2)
        for ti in tis:
            self.assertEquals(ti.state, State.SKIPPED)
Example #14
    def test_delete_already_deleted_obj(self):
        """
        Tests the case when an object that was registered for deletion has already been deleted.
        """
        account = G(Account)
        self.initial_data_updater.model_objs_registered_for_deletion = [account]

        self.assertEquals(RegisteredForDeletionReceipt.objects.count(), 0)
        with freeze_time('2013-04-12'):
            self.initial_data_updater.handle_deletions()
        receipt = RegisteredForDeletionReceipt.objects.get()
        self.assertEquals(receipt.model_obj_type, ContentType.objects.get_for_model(Account))
        self.assertEquals(receipt.model_obj_id, account.id)
        self.assertEquals(receipt.register_time, datetime(2013, 4, 12))

        # Delete the model object. The receipt should still exist
        account.delete()
        self.assertEquals(RegisteredForDeletionReceipt.objects.count(), 1)

        # Now, don't register the object for deletion and run it again at a different time
        self.initial_data_updater.model_objs_registered_for_deletion = []
        with freeze_time('2013-04-12 05:00:00'):
            self.initial_data_updater.handle_deletions()
        # The object should be deleted, along with its receipt
        self.assertEquals(Account.objects.count(), 0)
        self.assertEquals(RegisteredForDeletionReceipt.objects.count(), 0)
Example #15
 def test_api__reset_password_reset__err_400__expired_token(self):
     dbsession = get_tm_session(self.session_factory, transaction.manager)
     admin = dbsession.query(User) \
         .filter(User.email == '*****@*****.**') \
         .one()
     uapi = UserApi(
         current_user=admin,
         session=dbsession,
         config=self.app_config,
     )
     with freeze_time("1999-12-31 23:59:59"):
         reset_password_token = uapi.reset_password_notification(
             admin,
             do_save=True
         )
         params = {
             'email': '*****@*****.**',
             'reset_password_token': reset_password_token,
             'new_password': '******',
             'new_password2': 'mynewpassword',
         }
         transaction.commit()
     with freeze_time("2000-01-01 00:00:05"):
         res = self.testapp.post_json(
             '/api/v2/auth/password/reset/modify',
             status=400,
             params=params,
         )
         assert isinstance(res.json, dict)
         assert 'code' in res.json.keys()
         assert res.json_body['code'] == error.EXPIRED_RESET_PASSWORD_TOKEN  # nopep8
Example #16
 def setUpTestData(cls):
     Region.objects.create(id='NI')
     # Create 95c_ULB stops:
     StopPoint.objects.bulk_create(
         StopPoint(atco_code='7000000' + suffix, locality_centre=False, active=True) for suffix in (
             '12165', '12648', '12668', '12701', '12729', '12730', '12731', '12732', '12733', '12734', '12735',
             '12736', '12737', '12738', '12739', '12740', '12741', '12742', '12743', '12744', '12745', '12746',
             '12747', '12748', '12749', '12750', '12757', '12778', '12779', '12780', '12781', '12782', '12783',
             '15377'
         )
     )
     # Create 212_GLE stops:
     StopPoint.objects.bulk_create(
         StopPoint(atco_code='7000000' + suffix, locality_centre=False, active=True) for suffix in (
             '15363', '15678', '14232', '14230', '13311', '15229', '15679', '13331', '13214', '15739', '15746',
             '13305', '15747', '14231', '15677', '00792'
         )
     )
     Service.objects.bulk_create([
         Service(service_code='212_GLE', date='2016-01-01', region_id='NI', current=True),
         Service(service_code='95_ULB', date='2016-01-01', region_id='NI', current=True),
         Service(service_code='95c_ULB', date='2016-01-01', region_id='NI', current=True, show_timetable=True)
     ])
     with freeze_time('3 May 2017'):
         generate_departures.handle_region(Region(id='NI'))
     with freeze_time('4 May 2017'):
         generate_departures.handle_region(Region(id='NI'))
Example #17
    def test_update_metric_should_update_metric_metadata(self):
        metric_name = "test_update_metric_should_update_metric_metadata.a.b.c"
        initial_metadata = MetricMetadata(
            aggregator=Aggregator.total,
            retention=Retention.from_string("42*1s"),
            carbon_xfilesfactor=0.5,
        )
        create_date = datetime.datetime(2014, 1, 1)
        update_date = datetime.datetime(2018, 1, 1)
        new_metadata = MetricMetadata(
            aggregator=Aggregator.average,
            retention=Retention.from_string("43*100s"),
            carbon_xfilesfactor=0.25,
        )
        with freezegun.freeze_time(create_date):
            metric = bg_test_utils.make_metric(metric_name, initial_metadata)
            self.accessor.create_metric(metric)
            self.accessor.flush()
        with freezegun.freeze_time(update_date):
            self.accessor.update_metric(metric_name, new_metadata)
            self.accessor.flush()

        metric = self.accessor.get_metric(metric_name)
        self.assertEqual(update_date, metric.updated_on)
        self.assertEqual(new_metadata, metric.metadata)
Example #18
    def test_prefill_entries_add_to_bottom(self):
        tmp_entries_dir = tempfile.mkdtemp()
        os.remove(self.entries_file)

        self.entries_file = os.path.join(tmp_entries_dir, '%m_%Y.txt')

        with self.settings({'default': {'file': self.entries_file}}):
            with freeze_time('2014-01-21'):
                self.write_entries("""20/01/2014
    alias_1 2 hello world

    21/01/2014
    alias_1 1 foo bar
    """)

            with freeze_time('2014-02-21'):
                self.write_entries("""20/02/2014
    alias_1 2 hello world
    """)
                self.run_command('edit')

                with open(expand_date(self.entries_file), 'r') as f:
                    lines = f.readlines()

                self.assertEqual('20/02/2014\n', lines[0])
                self.assertEqual('21/02/2014\n', lines[3])
Example #19
    def test_previous_file_doesnt_autofill(self):
        tmp_entries_dir = tempfile.mkdtemp()
        os.remove(self.entries_file)

        self.entries_file = os.path.join(tmp_entries_dir, '%m_%Y.txt')

        with self.settings({'default': {'file': self.entries_file}}):
            with freeze_time('2014-01-21'):
                self.write_entries("""20/01/2014
    alias_1 2 hello world

    21/01/2014
    alias_1 1 foo bar
    """)

            with freeze_time('2014-02-21'):
                self.write_entries("""20/02/2014
    alias_1 2 hello world
    """)
                self.run_command('edit', args=['1'])

            with freeze_time('2014-01-21'):
                with open(expand_date(self.entries_file), 'r') as f:
                    lines = f.readlines()

                self.assertNotIn('21/02/2014\n', lines)
Example #20
def test_activity_task_heartbeat_timeout():
    with freeze_time("2015-01-01 12:00:00"):
        conn = setup_workflow()
        decision_token = conn.poll_for_decision_task(
            "test-domain", "queue")["taskToken"]
        conn.respond_decision_task_completed(decision_token, decisions=[
            SCHEDULE_ACTIVITY_TASK_DECISION
        ])
        conn.poll_for_activity_task(
            "test-domain", "activity-task-list", identity="surprise")

    with freeze_time("2015-01-01 12:04:30"):
        resp = conn.get_workflow_execution_history(
            "test-domain", conn.run_id, "uid-abcd1234")
        resp["events"][-1]["eventType"].should.equal("ActivityTaskStarted")

    with freeze_time("2015-01-01 12:05:30"):
        # => Activity Task Heartbeat timeout reached!!
        resp = conn.get_workflow_execution_history(
            "test-domain", conn.run_id, "uid-abcd1234")

        resp["events"][-2]["eventType"].should.equal("ActivityTaskTimedOut")
        attrs = resp["events"][-2]["activityTaskTimedOutEventAttributes"]
        attrs["timeoutType"].should.equal("HEARTBEAT")
        # checks that event has been emitted at 12:05:00, not 12:05:30
        resp["events"][-2]["eventTimestamp"].should.equal(1420113900.0)

        resp["events"][-1]["eventType"].should.equal("DecisionTaskScheduled")
Example #21
def test_should_accept_token_that_just_within_bounds_old():
    # make token 30 seconds ago
    with freeze_time('2001-01-01T12:00:00'):
        token = create_jwt_token("key", "client_id")

    with freeze_time('2001-01-01T12:00:30'):
        assert decode_jwt_token(token, "key")
Example #22
  def test_force_one_wf_notifications(self, mock_mail):

    with freeze_time("2015-02-01 13:39:20"):
      _, wf_forced = self.wf_generator.generate_workflow(
          self.quarterly_wf_forced)
      response, wf_forced = self.wf_generator.activate_workflow(wf_forced)
      _, wf = self.wf_generator.generate_workflow(self.quarterly_wf)
      response, wf = self.wf_generator.activate_workflow(wf)

      self.assert200(response)

      user = models.Person.query.get(self.user.id)

    with freeze_time("2015-01-29 13:39:20"):
      _, notif_data = common.get_daily_notifications()
      self.assertIn(user.email, notif_data)
      self.assertIn("cycle_starts_in", notif_data[user.email])
      self.assertIn(wf_forced.id, notif_data[user.email]["cycle_starts_in"])
      self.assertIn(wf.id, notif_data[user.email]["cycle_starts_in"])

      self.object_generator.generate_notification_setting(
          self.user.id, "Email_Digest", False)

      user = models.Person.query.get(self.user.id)
      _, notif_data = common.get_daily_notifications()
      self.assertIn(user.email, notif_data)
      self.assertIn("cycle_starts_in", notif_data[user.email])
      self.assertIn(wf_forced.id, notif_data[user.email]["cycle_starts_in"])
      self.assertNotIn(wf.id, notif_data[user.email]["cycle_starts_in"])
Example #23
 def test_mailbox_size_with_changes(self):
     service = self.create_mailbox_disk_service()
     self.create_disk_resource()
     account = self.create_account()
     mailbox = self.create_mailbox(account=account)
     now = timezone.now()
     bp = now.date() + relativedelta(years=1)
     options = dict(billing_point=bp, fixed_point=True, proforma=True, new_open=True)
     
     self.allocate_disk(mailbox, 10)
     bill = service.orders.bill(**options).pop()
     self.assertEqual(9*10, bill.get_total())
     
     with freeze_time(now+relativedelta(months=6)):
         self.allocate_disk(mailbox, 20)
         bill = service.orders.bill(**options).pop()
         total = 9*10*0.5 + 19*10*0.5
         self.assertEqual(total, bill.get_total())
     
     with freeze_time(now+relativedelta(months=9)):
         self.allocate_disk(mailbox, 30)
         bill = service.orders.bill(**options).pop()
         total = 9*10*0.5 + 19*10*0.25 + 29*10*0.25
         self.assertEqual(total, bill.get_total())
     
     with freeze_time(now+relativedelta(years=1)):
         self.allocate_disk(mailbox, 10)
         bill = service.orders.bill(**options).pop()
         total = 9*10*0.5 + 19*10*0.25 + 29*10*0.25
         self.assertEqual(total, bill.get_total())
Example #24
    def test_invoice_list(self):
        # make some invoice for other company, to make sure they won't be
        # included in the result
        with db_transaction.manager:
            for i in range(4):
                with freeze_time('2013-08-16 00:00:{:02}'.format(i + 1)):
                    self.invoice_model.create(
                        customer=self.customer2,
                        amount=9999,
                    )

        with db_transaction.manager:
            guids = []
            for i in range(4):
                with freeze_time('2013-08-16 00:00:{:02}'.format(i + 1)):
                    invoice = self.invoice_model.create(
                        customer=self.customer,
                        amount=(i + 1) * 1000,
                    )
                    guids.append(invoice.guid)
        guids = list(reversed(guids))

        res = self.testapp.get(
            '/v1/invoices',
            extra_environ=dict(REMOTE_USER=self.api_key),
            status=200,
        )
        items = res.json['items']
        result_guids = [item['guid'] for item in items]
        self.assertEqual(result_guids, guids)
Example #25
File: tests.py  Project: retr0h/whatsup
  def test_get_rolled_up_current(self):
    # populate 4 states across different times
    with freeze_time("2014-01-02 12:00:01"):
      self.post_stat()
    with freeze_time("2014-01-02 12:00:02"):
      self.post_stat(state = 0)
    with freeze_time("2014-01-02 12:00:03"):
      self.post_stat(state = -1)
    with freeze_time("2014-01-02 12:00:04"):
      self.post_stat()

    with freeze_time("2014-01-02 13:14:15"):
      rv = self.app.get('/api/v1.0/status/current', headers = self.headers)

    self.assertEqual(200, rv.status_code)

    data = json.loads(rv.data)

    status = data['status'][0]

    self.assertEqual(1, len(data['status']))
    self.assertEqual(4, len(status['states']))
    self.assertEqual(self.json_data()['message'], status['states'][0]['message'])
    self.assertEqual(1, status['current_state'])
    self.assertEqual(-1, status['worst_state'])
Example #26
def test_read_from_storage_cache(image, settings):

    settings.BETTY_CACHE_STORAGE_SEC = 3600

    cache_key = 'storage:' + image.source.name

    lenna_path = os.path.join(TEST_DATA_PATH, 'Lenna.png')
    with open(lenna_path, "rb") as lenna:
        expected_bytes = lenna.read()

    with patch.object(FieldFile, 'read') as mock_read:
        mock_read.side_effect = lambda: expected_bytes[:]

        # Check cache miss + fill, then cache hit
        with freeze_time('2016-07-06 00:00'):
            for _ in range(2):
                assert image.read_source_bytes().getvalue() == expected_bytes
                assert 1 == mock_read.call_count
                assert cache.get(cache_key) == expected_bytes

        # Check Expiration
        with freeze_time('2016-07-06 01:00'):
            assert not cache.get(cache_key)

            # Check cache re-fill
            assert image.read_source_bytes().getvalue() == expected_bytes
            assert 2 == mock_read.call_count
            assert cache.get(cache_key) == expected_bytes
Example #27
    def test_symlink_latest_log_directory(self):
        handler = FileProcessorHandler(base_log_folder=self.base_log_folder,
                                       filename_template=self.filename)
        handler.dag_dir = self.dag_dir

        date1 = (timezone.utcnow() + timedelta(days=1)).strftime("%Y-%m-%d")
        date2 = (timezone.utcnow() + timedelta(days=2)).strftime("%Y-%m-%d")

        p1 = os.path.join(self.base_log_folder, date1, "log1")
        p2 = os.path.join(self.base_log_folder, date1, "log2")

        if os.path.exists(p1):
            os.remove(p1)
        if os.path.exists(p2):
            os.remove(p2)

        link = os.path.join(self.base_log_folder, "latest")

        with freeze_time(date1):
            handler.set_context(filename=os.path.join(self.dag_dir, "log1"))
            self.assertTrue(os.path.islink(link))
            self.assertEqual(os.path.basename(os.readlink(link)), date1)
            self.assertTrue(os.path.exists(os.path.join(link, "log1")))

        with freeze_time(date2):
            handler.set_context(filename=os.path.join(self.dag_dir, "log2"))
            self.assertTrue(os.path.islink(link))
            self.assertEqual(os.path.basename(os.readlink(link)), date2)
            self.assertTrue(os.path.exists(os.path.join(link, "log2")))
Example #28
 def test_mailbox_with_recharge(self):
     service = self.create_mailbox_disk_service()
     self.create_disk_resource()
     account = self.create_account()
     mailbox = self.create_mailbox(account=account)
     now = timezone.now()
     bp = now.date() + relativedelta(years=1)
     options = dict(billing_point=bp, fixed_point=True)
     
     self.allocate_disk(mailbox, 100)
     bill = service.orders.bill(**options).pop()
     self.assertEqual(99*10, bill.get_total())
     
     with freeze_time(now+relativedelta(months=6)):
         self.allocate_disk(mailbox, 50)
         bills = service.orders.bill(**options)
         self.assertEqual([], bills)
     
     with freeze_time(now+relativedelta(months=6)):
         self.allocate_disk(mailbox, 200)
         bill = service.orders.bill(new_open=True, **options).pop()
         self.assertEqual((199-99)*10*0.5, bill.get_total())
     
     with freeze_time(now+relativedelta(months=6)):
         bills = service.orders.bill(new_open=True, **options)
         self.assertEqual([], bills)
Example #29
  def test_recurring_without_tgts_skip(self, has_tg):
    """Test that Active Workflow without TGTs is skipped on cron job"""
    with freeze_time(dtm.date(2017, 9, 25)):
      with factories.single_commit():
        workflow = wf_factories.WorkflowFactory(repeat_every=1,
                                                unit=Workflow.MONTH_UNIT)
        workflow_id = workflow.id
        group = wf_factories.TaskGroupFactory(workflow=workflow)
        wf_factories.TaskGroupTaskFactory(
            task_group=group,
            start_date=dtm.date(2017, 9, 26),
            end_date=dtm.date(2017, 9, 26) + dtm.timedelta(days=4))
      self.generator.activate_workflow(workflow)
      active_wf = db.session.query(Workflow).filter(
          Workflow.id == workflow_id).one()
      self.assertEqual(active_wf.next_cycle_start_date, dtm.date(2017, 9, 26))
      self.assertEqual(active_wf.recurrences, True)
      self.assertEqual(len(active_wf.cycles), 0)
      TaskGroupTask.query.delete()
      if not has_tg:
        TaskGroup.query.delete()
      db.session.commit()

    with freeze_time(dtm.date(2017, 10, 25)):
      start_recurring_cycles()
      active_wf = db.session.query(Workflow).filter(
          Workflow.id == workflow_id).one()
      self.assertEqual(active_wf.next_cycle_start_date, dtm.date(2017, 9, 26))
      self.assertEqual(active_wf.recurrences, True)
      self.assertEqual(len(active_wf.cycles), 0)
Example #30
def test_checkout_voucher_form_active_queryset_after_some_time(
    voucher, request_checkout_with_item
):
    assert Voucher.objects.count() == 1
    voucher.start_date = datetime.date(year=2016, month=6, day=1)
    voucher.end_date = datetime.date(year=2016, month=6, day=2)
    voucher.save()

    with freeze_time("2016-05-31"):
        form = CheckoutVoucherForm(
            {"voucher": voucher.code}, instance=request_checkout_with_item
        )
        assert form.fields["voucher"].queryset.count() == 0

    with freeze_time("2016-06-01"):
        form = CheckoutVoucherForm(
            {"voucher": voucher.code}, instance=request_checkout_with_item
        )
        assert form.fields["voucher"].queryset.count() == 1

    with freeze_time("2016-06-03"):
        form = CheckoutVoucherForm(
            {"voucher": voucher.code}, instance=request_checkout_with_item
        )
        assert form.fields["voucher"].queryset.count() == 0
Example #31
    def test_auth_token_flow_works_correctly(self):
        base_time = datetime.now()
        with app.test_client() as c:
            # Step 1 : login
            with freeze_time(base_time):
                login_response = c.post_graphql(
                    self.login_query,
                    variables=dict(email=self.user.email, password="******"),
                )
                self.assertEqual(login_response.status_code, 200)
                login_response_data = login_response.json["data"]["auth"][
                    "login"]
                self.assertIn("accessToken", login_response_data)
                self.assertIn("refreshToken", login_response_data)

            # Step 2 : access protected endpoint within token expiration time
            with freeze_time(base_time + timedelta(seconds=30)):
                access_response = c.post_graphql(
                    self.check_query,
                    headers=[(
                        "Authorization",
                        f"Bearer {login_response_data['accessToken']}",
                    )],
                )
                self.assertEqual(access_response.status_code, 200)
                access_response_data = access_response.json["data"]["auth"][
                    "check"]
                self.assertEqual(access_response_data["userId"], self.user.id)

            # Refresh access token after expiration
            with freeze_time(base_time + timedelta(minutes=10) +
                             app.config["ACCESS_TOKEN_EXPIRATION"]):
                expired_access_response = c.post_graphql(
                    self.check_query,
                    headers=[(
                        "Authorization",
                        f"Bearer {login_response_data['accessToken']}",
                    )],
                )
                self.assertIsNotNone(
                    expired_access_response.json.get("errors"))
                self.assertIsNone(
                    expired_access_response.json["data"]["auth"]["check"])

                refresh_response = c.post_graphql(
                    self.refresh_query,
                    headers=[(
                        "Authorization",
                        f"Bearer {login_response_data['refreshToken']}",
                    )],
                )
                self.assertEqual(refresh_response.status_code, 200)
                refresh_response_data = refresh_response.json["data"]["auth"][
                    "refresh"]
                self.assertIn("accessToken", refresh_response_data)
                self.assertIn("refreshToken", refresh_response_data)

                new_access_response = c.post_graphql(
                    self.check_query,
                    headers=[(
                        "Authorization",
                        f"Bearer {refresh_response_data['accessToken']}",
                    )],
                )
                self.assertEqual(new_access_response.status_code, 200)
                new_access_response_data = new_access_response.json["data"][
                    "auth"]["check"]
                self.assertEqual(new_access_response_data["userId"],
                                 self.user.id)

                reuse_refresh_token_response = c.post_graphql(
                    self.refresh_query,
                    headers=[(
                        "Authorization",
                        f"Bearer {login_response_data['refreshToken']}",
                    )],
                )
                self.assertIsNotNone(
                    reuse_refresh_token_response.json.get("errors"))
                self.assertIsNone(reuse_refresh_token_response.json["data"]
                                  ["auth"]["refresh"])
Example #32
def test_parse_filter_date(input_string, end_date, frozen_date, expected_date):
    with freeze_time(frozen_date):
        assert parse_maybe_relative_date_string(input_string,
                                                end_date) == expected_date
Example #33
def test_exporting_csv_table_writes_file_to_storage(data_fixture, api_client,
                                                    tmpdir, settings):
    user, token = data_fixture.create_user_and_token()
    table = data_fixture.create_database_table(user=user)
    text_field = data_fixture.create_text_field(table=table,
                                                name="text_field",
                                                order=0)
    option_field = data_fixture.create_single_select_field(table=table,
                                                           name="option_field",
                                                           order=1)
    option_a = data_fixture.create_select_option(field=option_field,
                                                 value="A",
                                                 color="blue")
    option_b = data_fixture.create_select_option(field=option_field,
                                                 value="B",
                                                 color="red")
    date_field = data_fixture.create_date_field(
        table=table,
        date_include_time=True,
        date_format="US",
        name="date_field",
        order=2,
    )

    grid_view = data_fixture.create_grid_view(table=table)
    data_fixture.create_view_filter(view=grid_view,
                                    field=text_field,
                                    type="contains",
                                    value="test")
    data_fixture.create_view_sort(view=grid_view,
                                  field=text_field,
                                  order="ASC")

    row_handler = RowHandler()
    row_handler.create_row(
        user=user,
        table=table,
        values={
            text_field.id: "test",
            date_field.id: "2020-02-01 01:23",
            option_field.id: option_b.id,
        },
    )
    row_handler.create_row(
        user=user,
        table=table,
        values={
            text_field.id: "atest",
            date_field.id: "2020-02-01 01:23",
            option_field.id: option_a.id,
        },
    )
    storage = FileSystemStorage(location=(str(tmpdir)),
                                base_url="http://localhost")

    with patch("baserow.contrib.database.export.handler.default_storage",
               new=storage):
        run_time = make_aware(parse_datetime("2020-02-01 01:00"), timezone=utc)
        # DRF uses some custom internal date time formatting, use the field itself
        # so the test doesn't break if we set a different default timezone format etc
        expected_created_at = DateTimeField().to_representation(run_time)
        with freeze_time(run_time):
            with capture_on_commit_callbacks(execute=True):
                response = api_client.post(
                    reverse(
                        "api:database:export:export_table",
                        kwargs={"table_id": table.id},
                    ),
                    data={
                        "view_id": None,
                        "exporter_type": "csv",
                        "export_charset": "utf-8",
                        "csv_include_header": "True",
                        "csv_column_separator": ",",
                    },
                    format="json",
                    HTTP_AUTHORIZATION=f"JWT {token}",
                )
            response_json = response.json()
            job_id = response_json["id"]
            assert response_json == {
                "id": job_id,
                "created_at": expected_created_at,
                "exported_file_name": None,
                "exporter_type": "csv",
                "progress_percentage": 0.0,
                "status": "pending",
                "table": table.id,
                "view": None,
                "url": None,
            }
            response = api_client.get(
                reverse("api:database:export:get", kwargs={"job_id": job_id}),
                format="json",
                HTTP_AUTHORIZATION=f"JWT {token}",
            )
            json = response.json()
            filename = json["exported_file_name"]
            assert json == {
                "id": job_id,
                "created_at": expected_created_at,
                "exported_file_name": filename,
                "exporter_type": "csv",
                "progress_percentage": 1.0,
                "status": "complete",
                "table": table.id,
                "view": None,
                "url": f"http://localhost:8000/media/export_files/{filename}",
            }

            file_path = tmpdir.join(settings.EXPORT_FILES_DIRECTORY, filename)
            assert file_path.isfile()
            expected = ("\ufeff"
                        "id,text_field,option_field,date_field\n"
                        "1,test,B,02/01/2020 01:23\n"
                        "2,atest,A,02/01/2020 01:23\n")
            with open(file_path, "r", encoding="utf-8") as written_file:
                assert written_file.read() == expected
Example #34
        def test_breakdown_by_cohort(self):
            person1, person2, person3, person4 = self._create_multiple_people()
            cohort = cohort_factory(name="cohort1",
                                    team=self.team,
                                    groups=[{
                                        "properties": {
                                            "name": "person1"
                                        }
                                    }])
            cohort2 = cohort_factory(name="cohort2",
                                     team=self.team,
                                     groups=[{
                                         "properties": {
                                             "name": "person2"
                                         }
                                     }])
            cohort3 = cohort_factory(
                name="cohort3",
                team=self.team,
                groups=[
                    {
                        "properties": {
                            "name": "person1"
                        }
                    },
                    {
                        "properties": {
                            "name": "person2"
                        }
                    },
                ],
            )
            action = action_factory(name="watched movie", team=self.team)
            action.calculate_events()

            with freeze_time("2020-01-04T13:01:01Z"):
                action_response = trends().run(
                    Filter(
                        data={
                            "date_from":
                            "-14d",
                            "breakdown":
                            json.dumps(
                                [cohort.pk, cohort2.pk, cohort3.pk, "all"]),
                            "breakdown_type":
                            "cohort",
                            "actions": [{
                                "id": action.pk,
                                "type": "actions",
                                "order": 0
                            }],
                        }),
                    self.team,
                )
                event_response = trends().run(
                    Filter(
                        data={
                            "date_from":
                            "-14d",
                            "breakdown":
                            json.dumps(
                                [cohort.pk, cohort2.pk, cohort3.pk, "all"]),
                            "breakdown_type":
                            "cohort",
                            "events": [{
                                "id": "watched movie",
                                "name": "watched movie",
                                "type": "events",
                                "order": 0,
                            }],
                        }),
                    self.team,
                )

            self.assertEqual(event_response[1]["label"],
                             "watched movie - cohort2")
            self.assertEqual(event_response[2]["label"],
                             "watched movie - cohort3")
            self.assertEqual(event_response[3]["label"],
                             "watched movie - all users")

            self.assertEqual(sum(event_response[0]["data"]), 1)
            self.assertEqual(event_response[0]["breakdown_value"], cohort.pk)

            self.assertEqual(sum(event_response[1]["data"]), 3)
            self.assertEqual(event_response[1]["breakdown_value"], cohort2.pk)

            self.assertEqual(sum(event_response[2]["data"]), 4)
            self.assertEqual(event_response[2]["breakdown_value"], cohort3.pk)

            self.assertEqual(sum(event_response[3]["data"]), 7)
            self.assertEqual(event_response[3]["breakdown_value"], "all")

            self.assertTrue(
                self._compare_entity_response(
                    event_response,
                    action_response,
                ))
Example #35
        def test_breakdown_filtering(self):
            self._create_events()
            # test breakdown filtering
            with freeze_time("2020-01-04T13:01:01Z"):
                response = trends().run(
                    Filter(
                        data={
                            "date_from":
                            "-14d",
                            "breakdown":
                            "$some_property",
                            "events": [
                                {
                                    "id": "sign up",
                                    "name": "sign up",
                                    "type": "events",
                                    "order": 0,
                                },
                                {
                                    "id": "no events"
                                },
                            ],
                        }),
                    self.team,
                )

            self.assertEqual(response[0]["label"], "sign up - Other")
            self.assertEqual(response[1]["label"], "sign up - other_value")
            self.assertEqual(response[2]["label"], "sign up - value")
            self.assertEqual(response[3]["label"], "no events - Other")

            self.assertEqual(sum(response[0]["data"]), 2)
            self.assertEqual(response[0]["data"][4 + 7], 2)
            self.assertEqual(response[0]["breakdown_value"], "None")

            self.assertEqual(sum(response[1]["data"]), 1)
            self.assertEqual(response[1]["data"][5 + 7], 1)
            self.assertEqual(response[1]["breakdown_value"], "other_value")

            # check numerical breakdown
            with freeze_time("2020-01-04T13:01:01Z"):
                response = trends().run(
                    Filter(
                        data={
                            "date_from":
                            "-14d",
                            "breakdown":
                            "$some_numerical_prop",
                            "events": [
                                {
                                    "id": "sign up",
                                    "name": "sign up",
                                    "type": "events",
                                    "order": 0,
                                },
                                {
                                    "id": "no events"
                                },
                            ],
                        }),
                    self.team,
                )

            self.assertEqual(response[0]["label"], "sign up - Other")
            self.assertEqual(response[0]["count"], 4.0)
            self.assertEqual(response[1]["label"], "sign up - 80.0")
            self.assertEqual(response[1]["count"], 1.0)
Example #36
        def test_interval_filtering(self):
            self._create_events(use_time=True)

            # test minute
            with freeze_time("2020-01-02"):
                response = trends().run(
                    Filter(
                        data={
                            "date_from": "2020-01-01",
                            "interval": "minute",
                            "events": [{
                                "id": "sign up"
                            }]
                        }),
                    self.team,
                )
            self.assertEqual(response[0]["labels"][6], "Wed. 1 January, 00:06")
            self.assertEqual(response[0]["data"][6], 3.0)

            # test hour
            with freeze_time("2020-01-02"):
                response = trends().run(
                    Filter(
                        data={
                            "date_from": "2019-12-24",
                            "interval": "hour",
                            "events": [{
                                "id": "sign up"
                            }]
                        }),
                    self.team,
                )
            self.assertEqual(response[0]["labels"][3],
                             "Tue. 24 December, 03:00")
            self.assertEqual(response[0]["data"][3], 1.0)
            # 217 - 24 - 1
            self.assertEqual(response[0]["data"][192], 3.0)

            # test week
            with freeze_time("2020-01-02"):
                response = trends().run(
                    Filter(
                        data={
                            "date_from": "2019-11-24",
                            "interval": "week",
                            "events": [{
                                "id": "sign up"
                            }]
                        }),
                    self.team,
                )
            self.assertEqual(response[0]["labels"][4], "Sun. 22 December")
            self.assertEqual(response[0]["data"][4], 1.0)
            self.assertEqual(response[0]["labels"][5], "Sun. 29 December")
            self.assertEqual(response[0]["data"][5], 4.0)

            # test month
            with freeze_time("2020-01-02"):
                response = trends().run(
                    Filter(
                        data={
                            "date_from": "2019-9-24",
                            "interval": "month",
                            "events": [{
                                "id": "sign up"
                            }]
                        }),
                    self.team,
                )
            self.assertEqual(response[0]["labels"][2], "Sat. 30 November")
            self.assertEqual(response[0]["data"][2], 1.0)
            self.assertEqual(response[0]["labels"][3], "Tue. 31 December")
            self.assertEqual(response[0]["data"][3], 4.0)

            with freeze_time("2020-01-02 23:30"):
                event_factory(team=self.team,
                              event="sign up",
                              distinct_id="blabla")

            # test today + hourly
            with freeze_time("2020-01-02T23:31:00Z"):
                response = trends().run(
                    Filter(
                        data={
                            "date_from": "dStart",
                            "interval": "hour",
                            "events": [{
                                "id": "sign up"
                            }]
                        }), self.team)
            self.assertEqual(response[0]["labels"][23],
                             "Thu. 2 January, 23:00")
            self.assertEqual(response[0]["data"][23], 1.0)
Example #37
        def _create_events(self, use_time=False):

            person = person_factory(team_id=self.team.pk,
                                    distinct_ids=["blabla", "anonymous_id"])
            secondTeam = Team.objects.create(api_token="token123")

            freeze_without_time = ["2019-12-24", "2020-01-01", "2020-01-02"]
            freeze_with_time = [
                "2019-12-24 03:45:34",
                "2020-01-01 00:06:34",
                "2020-01-02 16:34:34",
            ]

            freeze_args = freeze_without_time
            if use_time:
                freeze_args = freeze_with_time

            with freeze_time(freeze_args[0]):
                event_factory(
                    team=self.team,
                    event="sign up",
                    distinct_id="blabla",
                    properties={"$some_property": "value"},
                )

            with freeze_time(freeze_args[1]):
                event_factory(
                    team=self.team,
                    event="sign up",
                    distinct_id="blabla",
                    properties={"$some_property": "value"},
                )
                event_factory(team=self.team,
                              event="sign up",
                              distinct_id="anonymous_id")
                event_factory(team=self.team,
                              event="sign up",
                              distinct_id="blabla")
            with freeze_time(freeze_args[2]):
                event_factory(
                    team=self.team,
                    event="sign up",
                    distinct_id="blabla",
                    properties={
                        "$some_property": "other_value",
                        "$some_numerical_prop": 80,
                    },
                )
                event_factory(team=self.team,
                              event="no events",
                              distinct_id="blabla")

                # second team should have no effect
                event_factory(
                    team=secondTeam,
                    event="sign up",
                    distinct_id="blabla",
                    properties={"$some_property": "other_value"},
                )

            no_events = action_factory(team=self.team, name="no events")
            sign_up_action = action_factory(team=self.team, name="sign up")

            return sign_up_action, person
Example #38
    def test_post_validate_redirects(self, monkeypatch, pyramid_request,
                                     with_user):
        remember = pretend.call_recorder(
            lambda request, user_id: [("foo", "bar")])
        monkeypatch.setattr(views, "remember", remember)

        new_session = {}

        user_id = uuid.uuid4()
        user_service = pretend.stub(
            find_userid=pretend.call_recorder(lambda username: user_id),
            update_user=pretend.call_recorder(lambda *a, **kw: None),
        )
        pyramid_request.find_service = pretend.call_recorder(
            lambda iface, context: user_service)
        pyramid_request.method = "POST"
        pyramid_request.session = pretend.stub(
            items=lambda: [("a", "b"), ("foo", "bar")],
            update=new_session.update,
            invalidate=pretend.call_recorder(lambda: None),
            new_csrf_token=pretend.call_recorder(lambda: None),
        )

        pyramid_request.set_property(
            lambda r: str(uuid.uuid4()) if with_user else None,
            name="unauthenticated_userid",
        )

        form_obj = pretend.stub(
            validate=pretend.call_recorder(lambda: True),
            username=pretend.stub(data="theuser"),
        )
        form_class = pretend.call_recorder(lambda d, user_service: form_obj)

        pyramid_request.route_path = pretend.call_recorder(
            lambda a: '/the-redirect')

        now = datetime.datetime.utcnow()

        with freezegun.freeze_time(now):
            result = views.login(pyramid_request, _form_class=form_class)

        assert isinstance(result, HTTPSeeOther)
        assert pyramid_request.route_path.calls == [
            pretend.call('manage.projects')
        ]
        assert result.headers["Location"] == "/the-redirect"
        assert result.headers["foo"] == "bar"

        assert form_class.calls == [
            pretend.call(pyramid_request.POST, user_service=user_service),
        ]
        assert form_obj.validate.calls == [pretend.call()]

        assert user_service.find_userid.calls == [pretend.call("theuser")]
        assert user_service.update_user.calls == [
            pretend.call(user_id, last_login=now),
        ]

        if with_user:
            assert new_session == {}
        else:
            assert new_session == {"a": "b", "foo": "bar"}

        assert remember.calls == [pretend.call(pyramid_request, str(user_id))]
        assert pyramid_request.session.invalidate.calls == [pretend.call()]
        assert pyramid_request.find_service.calls == [
            pretend.call(IUserService, context=None),
            pretend.call(IUserService, context=None),
        ]
        assert pyramid_request.session.new_csrf_token.calls == [pretend.call()]
Example #39
    def test_interaction_export(
        self,
        setup_es,
        request_sortby,
        orm_ordering,
    ):
        """
        Test export of interaction search results with a policy feedback user.

        Checks that all interaction kinds except for policy feedback are included in the export.
        """
        # Faker generates job titles containing commas which complicates comparisons,
        # so all contact job titles are explicitly set
        company = CompanyFactory()
        CompanyInteractionFactory(
            company=company,
            contacts=[
                ContactFactory(company=company, job_title='Engineer'),
                ContactFactory(company=company, job_title=None),
                ContactFactory(company=company, job_title=''),
            ],
        )
        EventServiceDeliveryFactory(
            company=company,
            contacts=[
                ContactFactory(company=company, job_title='Managing director'),
            ],
        )
        InvestmentProjectInteractionFactory(
            company=company,
            contacts=[
                ContactFactory(company=company, job_title='Exports manager'),
            ],
        )
        ServiceDeliveryFactory(
            company=company,
            contacts=[
                ContactFactory(company=company, job_title='Sales director'),
            ],
        )
        CompanyInteractionFactoryWithPolicyFeedback(
            company=company,
            contacts=[
                ContactFactory(company=company,
                               job_title='Business development manager'),
            ],
            policy_areas=PolicyArea.objects.order_by('?')[:2],
            policy_issue_types=PolicyIssueType.objects.order_by('?')[:2],
        )

        setup_es.indices.refresh()

        data = {}
        if request_sortby:
            data['sortby'] = request_sortby

        url = reverse('api-v3:search:interaction-export')

        with freeze_time('2018-01-01 11:12:13'):
            response = self.api_client.post(url, data=data)

        assert response.status_code == status.HTTP_200_OK
        assert parse_header(response.get('Content-Type')) == ('text/csv', {
            'charset':
            'utf-8'
        })
        assert parse_header(response.get('Content-Disposition')) == (
            'attachment',
            {
                'filename': 'Data Hub - Interactions - 2018-01-01-11-12-13.csv'
            },
        )

        sorted_interactions = Interaction.objects.all().order_by(
            orm_ordering,
            'pk',
        )
        reader = DictReader(StringIO(response.getvalue().decode('utf-8-sig')))

        assert reader.fieldnames == list(
            SearchInteractionExportAPIView.field_titles.values())

        expected_row_data = [{
            'Date':
            interaction.date,
            'Type':
            interaction.get_kind_display(),
            'Service':
            get_attr_or_none(interaction, 'service.name'),
            'Subject':
            interaction.subject,
            'Link':
            f'{settings.DATAHUB_FRONTEND_URL_PREFIXES["interaction"]}'
            f'/{interaction.pk}',
            'Company':
            get_attr_or_none(interaction, 'company.name'),
            'Company link':
            f'{settings.DATAHUB_FRONTEND_URL_PREFIXES["company"]}'
            f'/{interaction.company.pk}',
            'Company country':
            get_attr_or_none(
                interaction,
                'company.address_country.name',
            ),
            'Company UK region':
            get_attr_or_none(interaction, 'company.uk_region.name'),
            'Company sector':
            get_attr_or_none(interaction, 'company.sector.name'),
            'Contacts':
            _format_expected_contacts(interaction),
            'Adviser':
            get_attr_or_none(interaction, 'dit_adviser.name'),
            'Service provider':
            get_attr_or_none(interaction, 'dit_team.name'),
            'Event':
            get_attr_or_none(interaction, 'event.name'),
            'Communication channel':
            get_attr_or_none(interaction, 'communication_channel.name'),
            'Service delivery status':
            get_attr_or_none(
                interaction,
                'service_delivery_status.name',
            ),
            'Net company receipt':
            interaction.net_company_receipt,
            'Policy issue types':
            join_attr_values(interaction.policy_issue_types.all()),
            'Policy areas':
            join_attr_values(interaction.policy_areas.all(), separator='; '),
            'Policy feedback notes':
            interaction.policy_feedback_notes,
        } for interaction in sorted_interactions]

        actual_row_data = [_format_actual_csv_row(row) for row in reader]
        assert actual_row_data == format_csv_data(expected_row_data)
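Example #40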
from datetime import datetime
from os.path import dirname, join

from city_scrapers_core.utils import file_response
from freezegun import freeze_time

from city_scrapers.spiders.pa_liquorboard import PaLiquorboardSpider

test_response = file_response(
    join(dirname(__file__), "files", "pa_liquorboard.html"),
    url="https://www.lcb.pa.gov/About-Us/Board/Pages/Public-Meetings.aspx",
)
spider = PaLiquorboardSpider()

freezer = freeze_time("2019-02-08")
freezer.start()

parsed_items = [item for item in spider.parse(test_response)]

freezer.stop()


def test_start():
    assert parsed_items[0]["start"] > datetime(2000, 1, 1, 0, 0)
Example #41
    def test_cross_user(self):
        with freeze_time():
            token = self.device.generate_challenge()
            ok = self.device2.verify_token(token)

        self.assertFalse(ok)
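
freeze_time() with no argument, as used above, pins the clock at the moment the block is entered, so the challenge and its verification happen at the same instant. A minimal standalone sketch of that behaviour (the sleep only shows that real waiting does not advance the frozen clock):

import time

from freezegun import freeze_time


def test_no_argument_freezes_current_time():
    with freeze_time():
        first = time.time()
        time.sleep(0.01)  # real sleeping does not advance the frozen clock
        second = time.time()

    assert first == second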
Example #42
from datetime import datetime
from os.path import dirname, join

import pytest
from city_scrapers_core.constants import COMMITTEE, PASSED
from city_scrapers_core.utils import file_response
from freezegun import freeze_time

from city_scrapers.spiders.wayne_audit import WayneAuditSpider

freezer = freeze_time('2018-03-27')
freezer.start()
test_response = file_response(
    join(dirname(__file__), "files", "wayne_audit.html"),
    url='https://www.waynecounty.com/elected/commission/audit.aspx')
spider = WayneAuditSpider()
parsed_items = [item for item in spider.parse(test_response)]
freezer.stop()


@pytest.mark.parametrize('item', parsed_items)
def test_description(item):
    assert item['description'] == ''


@pytest.mark.parametrize('item', parsed_items)
def test_location(item):
    assert item['location'] == spider.location


@pytest.mark.parametrize('item', parsed_items)
Example #43
    def test_frequency_biyearly(self):
        with freeze_time('2021-09-01'):
            accrual_plan = self.env['hr.leave.accrual.plan'].with_context(
                tracking_disable=True).create({
                    'name':
                    'Accrual Plan For Test',
                    'level_ids': [(0, 0, {
                        'start_count': 1,
                        'start_type': 'day',
                        'added_value': 1,
                        'added_value_type': 'days',
                        'frequency': 'biyearly',
                        'maximum_leave': 10000,
                    })],
                })
            # this sets up an accrual on the 1st of January and the 1st of July
            allocation = self.env['hr.leave.allocation'].with_user(
                self.user_hrmanager_id).with_context(
                    tracking_disable=True).create({
                        'name':
                        'Accrual allocation for employee',
                        'accrual_plan_id':
                        accrual_plan.id,
                        'employee_id':
                        self.employee_emp.id,
                        'holiday_status_id':
                        self.leave_type.id,
                        'number_of_days':
                        0,
                        'allocation_type':
                        'accrual',
                    })
            self.setAllocationCreateDate(allocation.id, '2021-09-01 00:00:00')
            allocation.action_confirm()
            allocation.action_validate()
            self.assertFalse(
                allocation.nextcall,
                'There should be no nextcall set on the allocation.')
            self.assertEqual(allocation.number_of_days, 0,
                             'There should be no days allocated yet.')
            allocation._update_accrual()
            next_date = datetime.date(2022, 1, 1)
            self.assertEqual(
                allocation.number_of_days, 0,
                'There should be no days allocated yet. The accrual starts tomorrow.'
            )

        with freeze_time(next_date):
            next_date = datetime.date(2022, 7, 1)
            allocation._update_accrual()
            # Prorated
            self.assertAlmostEqual(allocation.number_of_days, 0.6576, 4,
                                   'There should be 0.6576 day allocated.')
            self.assertEqual(
                allocation.nextcall, next_date,
                'The next call date of the cron should be July 1st')

        with freeze_time(next_date):
            allocation._update_accrual()
            # Not Prorated
            self.assertAlmostEqual(allocation.number_of_days, 1.6576, 4,
                                   'There should be 1.6576 day allocated.')
Example #44
def interactions(setup_es):
    """Sets up data for the tests."""
    data = []
    with freeze_time('2017-01-01 13:00:00'):
        company_1 = CompanyFactory(name='ABC Trading Ltd')
        company_2 = CompanyFactory(name='Little Puddle Ltd')
        data.extend([
            CompanyInteractionFactory(
                subject='Exports meeting',
                date=dateutil_parse('2017-10-30T00:00:00Z'),
                company=company_1,
                contacts=[
                    ContactFactory(company=company_1,
                                   first_name='Lee',
                                   last_name='Danger'),
                    ContactFactory(company=company_1,
                                   first_name='Francis',
                                   last_name='Brady'),
                    ContactFactory(company=company_1,
                                   first_name='Zanger Za',
                                   last_name='Qa'),
                ],
                dit_adviser__first_name='Angela',
                dit_adviser__last_name='Lawson',
            ),
            CompanyInteractionFactory(
                subject='a coffee',
                date=dateutil_parse('2017-04-05T00:00:00Z'),
                company=company_2,
                contacts=[
                    ContactFactory(company=company_1,
                                   first_name='Try',
                                   last_name='Slanger'),
                ],
                dit_adviser__first_name='Zed',
                dit_adviser__last_name='Zeddy',
            ),
            CompanyInteractionFactory(
                subject='Email about exhibition',
                date=dateutil_parse('2016-09-02T00:00:00Z'),
                company=company_2,
                contacts=[
                    ContactFactory(company=company_1,
                                   first_name='Caroline',
                                   last_name='Green'),
                ],
                dit_adviser__first_name='Prime',
                dit_adviser__last_name='Zeddy',
            ),
            CompanyInteractionFactory(
                subject='talking about cats',
                date=dateutil_parse('2018-02-01T00:00:00Z'),
                company=company_2,
                contacts=[
                    ContactFactory(company=company_1,
                                   first_name='Full',
                                   last_name='Bridge'),
                ],
                dit_adviser__first_name='Low',
                dit_adviser__last_name='Tremon',
            ),
            CompanyInteractionFactory(
                subject='Event at HQ',
                date=dateutil_parse('2018-01-01T00:00:00Z'),
                company=company_2,
                contacts=[
                    ContactFactory(company=company_1,
                                   first_name='Diane',
                                   last_name='Pree'),
                ],
                dit_adviser__first_name='Trevor',
                dit_adviser__last_name='Saleman',
            ),
        ])

    setup_es.indices.refresh()

    yield data
Example #45
        def test_sessions_count_buckets(self):

            # 0 seconds
            with freeze_time("2012-01-11T01:25:30.000Z"):
                event_factory(team=self.team,
                              event="1st action",
                              distinct_id="2")
                event_factory(team=self.team,
                              event="1st action",
                              distinct_id="2")
                event_factory(team=self.team,
                              event="1st action",
                              distinct_id="4")
            with freeze_time("2012-01-11T01:25:32.000Z"):
                event_factory(team=self.team,
                              event="2nd action",
                              distinct_id="4")  # within 0-3 seconds
                event_factory(team=self.team,
                              event="1st action",
                              distinct_id="6")
                event_factory(team=self.team,
                              event="2nd action",
                              distinct_id="7")
            with freeze_time("2012-01-11T01:25:40.000Z"):
                event_factory(team=self.team,
                              event="2nd action",
                              distinct_id="6")  # within 3-10 seconds
                event_factory(team=self.team,
                              event="2nd action",
                              distinct_id="7")  # within 3-10 seconds

            with freeze_time("2012-01-15T04:59:34.000Z"):
                event_factory(team=self.team,
                              event="3rd action",
                              distinct_id="2")
                event_factory(team=self.team,
                              event="3rd action",
                              distinct_id="4")
            with freeze_time("2012-01-15T05:00:00.000Z"):
                event_factory(team=self.team,
                              event="3rd action",
                              distinct_id="2")  # within 10-30 seconds
            with freeze_time("2012-01-15T05:00:20.000Z"):
                event_factory(team=self.team,
                              event="3rd action",
                              distinct_id="4")  # within 30-60 seconds

            # within 1-3 mins
            with freeze_time("2012-01-17T04:59:34.000Z"):
                event_factory(team=self.team,
                              event="3rd action",
                              distinct_id="1")
                event_factory(team=self.team,
                              event="3rd action",
                              distinct_id="2")
                event_factory(team=self.team,
                              event="3rd action",
                              distinct_id="5")
            with freeze_time("2012-01-17T05:01:30.000Z"):
                event_factory(team=self.team,
                              event="3rd action",
                              distinct_id="1")
            with freeze_time("2012-01-17T05:07:30.000Z"):
                event_factory(
                    team=self.team, event="3rd action",
                    distinct_id="2")  # test many events within a range
                event_factory(team=self.team,
                              event="3rd action",
                              distinct_id="2")
                event_factory(team=self.team,
                              event="3rd action",
                              distinct_id="2")
                event_factory(team=self.team,
                              event="3rd action",
                              distinct_id="2")  # within 3-10 mins
                event_factory(team=self.team,
                              event="3rd action",
                              distinct_id="10")

            with freeze_time("2012-01-17T05:20:30.000Z"):
                event_factory(team=self.team,
                              event="3rd action",
                              distinct_id="5")  # within 10-30 mins
                event_factory(team=self.team,
                              event="3rd action",
                              distinct_id="9")
                event_factory(team=self.team,
                              event="3rd action",
                              distinct_id="10")
            with freeze_time("2012-01-17T05:40:30.000Z"):
                event_factory(team=self.team,
                              event="3rd action",
                              distinct_id="9")
                event_factory(team=self.team,
                              event="3rd action",
                              distinct_id="10")  # within 30-60 mins
            with freeze_time("2012-01-17T05:58:30.000Z"):
                event_factory(team=self.team,
                              event="3rd action",
                              distinct_id="9")  # -> within 30-60 mins

            # within 1+ hours
            with freeze_time("2012-01-21T04:59:34.000Z"):
                event_factory(team=self.team,
                              event="3rd action",
                              distinct_id="2")
            with freeze_time("2012-01-21T05:20:30.000Z"):
                event_factory(team=self.team,
                              event="3rd action",
                              distinct_id="2")
            with freeze_time("2012-01-21T05:45:30.000Z"):
                event_factory(team=self.team,
                              event="3rd action",
                              distinct_id="2")
            with freeze_time("2012-01-21T06:00:30.000Z"):
                event_factory(team=self.team,
                              event="3rd action",
                              distinct_id="2")

            response = sessions().run(
                SessionsFilter(data={
                    "date_from": "all",
                    "session": "dist"
                }), self.team)
            compared_response = sessions().run(
                SessionsFilter(data={
                    "date_from": "all",
                    "compare": True,
                    "session": "dist"
                }), self.team)
            for index, item in enumerate(response):
                if item["label"] == "30-60 minutes" or item[
                        "label"] == "3-10 seconds":
                    self.assertEqual(item["count"], 2)
                    self.assertEqual(compared_response[index]["count"], 2)
                else:
                    self.assertEqual(item["count"], 1)
                    self.assertEqual(compared_response[index]["count"], 1)
Example #46
    def test_check_gain(self):
        # 2 accruals, one based on worked time, one not
        # check gain
        with freeze_time('2021-08-30'):
            calendar_emp = self.env['resource.calendar'].create({
                'name':
                '40 Hours',
                'tz':
                self.employee_emp.tz,
                # One morning and one afternoon attendance line per weekday,
                # each as a separate (0, 0, vals) create command.
                'attendance_ids': [(0, 0, {
                    'name': '%s_%d' % ('40 Hours', index),
                    'hour_from': 8,
                    'hour_to': 12,
                    'dayofweek': str(index),
                    'day_period': 'morning'
                }) for index in range(5)] + [(0, 0, {
                    'name': '%s_%d' % ('40 Hours', index),
                    'hour_from': 13,
                    'hour_to': 18,
                    'dayofweek': str(index),
                    'day_period': 'afternoon'
                }) for index in range(5)],
            })
            self.employee_emp.resource_calendar_id = calendar_emp.id

            accrual_plan_not_based_on_worked_time = self.env[
                'hr.leave.accrual.plan'].with_context(
                    tracking_disable=True).create({
                        'name':
                        'Accrual Plan For Test',
                        'level_ids': [(0, 0, {
                            'start_count': 1,
                            'start_type': 'day',
                            'added_value': 5,
                            'added_value_type': 'days',
                            'frequency': 'weekly',
                            'maximum_leave': 10000,
                        })],
                    })
            accrual_plan_based_on_worked_time = self.env[
                'hr.leave.accrual.plan'].with_context(
                    tracking_disable=True).create({
                        'name':
                        'Accrual Plan For Test',
                        'level_ids': [(0, 0, {
                            'start_count': 1,
                            'start_type': 'day',
                            'added_value': 5,
                            'added_value_type': 'days',
                            'frequency': 'weekly',
                            'maximum_leave': 10000,
                            'is_based_on_worked_time': True,
                        })],
                    })
            allocation_not_worked_time = self.env[
                'hr.leave.allocation'].with_user(
                    self.user_hrmanager_id).with_context(
                        tracking_disable=True).create({
                            'name':
                            'Accrual allocation for employee',
                            'accrual_plan_id':
                            accrual_plan_not_based_on_worked_time.id,
                            'employee_id':
                            self.employee_emp.id,
                            'holiday_status_id':
                            self.leave_type.id,
                            'number_of_days':
                            0,
                            'allocation_type':
                            'accrual',
                            'state':
                            'validate',
                        })
            allocation_worked_time = self.env['hr.leave.allocation'].with_user(
                self.user_hrmanager_id).with_context(
                    tracking_disable=True).create({
                        'name':
                        'Accrual allocation for employee',
                        'accrual_plan_id':
                        accrual_plan_based_on_worked_time.id,
                        'employee_id':
                        self.employee_emp.id,
                        'holiday_status_id':
                        self.leave_type.id,
                        'number_of_days':
                        0,
                        'allocation_type':
                        'accrual',
                        'state':
                        'validate',
                    })
            self.setAllocationCreateDate(allocation_not_worked_time.id,
                                         '2021-08-01 00:00:00')
            self.setAllocationCreateDate(allocation_worked_time.id,
                                         '2021-08-01 00:00:00')
            holiday_type = self.env['hr.leave.type'].create({
                'name':
                'Paid Time Off',
                'requires_allocation':
                'no',
                'responsible_id':
                self.user_hrmanager_id,
            })
            leave = self.env['hr.leave'].create({
                'name':
                'leave',
                'employee_id':
                self.employee_emp.id,
                'holiday_status_id':
                holiday_type.id,
                'date_from':
                '2021-09-02 00:00:00',
                'date_to':
                '2021-09-02 23:59:59',
            })
            leave.action_validate()
            self.assertFalse(
                allocation_not_worked_time.nextcall,
                'There should be no nextcall set on the allocation.')
            self.assertFalse(
                allocation_worked_time.nextcall,
                'There should be no nextcall set on the allocation.')
            self.assertEqual(allocation_not_worked_time.number_of_days, 0,
                             'There should be no days allocated yet.')
            self.assertEqual(allocation_worked_time.number_of_days, 0,
                             'There should be no days allocated yet.')

        next_date = datetime.date(2021, 9, 6)
        with freeze_time(next_date):
            # next_date = datetime.date(2021, 9, 13)
            self.env['hr.leave.allocation']._update_accrual()
            # Prorated
            self.assertAlmostEqual(allocation_not_worked_time.number_of_days,
                                   4.2857, 4,
                                   'There should be 4.2857 days allocated.')
            # 3.75 -> starts 1 day after the allocation date -> 31/08-3/09 => 4 days - 1 day of time off => (3 / 4) * 5 days
            # ^ result without proration
            # Prorated
            self.assertAlmostEqual(allocation_worked_time.number_of_days,
                                   3.42857, 4,
                                   'There should be 3.42857 days allocated.')
            self.assertEqual(
                allocation_not_worked_time.nextcall,
                datetime.date(2021, 9, 13),
                'The next call date of the cron should be the September 13th')
            self.assertEqual(
                allocation_worked_time.nextcall, datetime.date(2021, 9, 13),
                'The next call date of the cron should be the September 13th')

        with freeze_time(next_date + relativedelta(days=7)):
            next_date = datetime.date(2021, 9, 20)
            self.env['hr.leave.allocation']._update_accrual()
            self.assertAlmostEqual(allocation_not_worked_time.number_of_days,
                                   9.2857, 4,
                                   'There should be 9.2857 days allocated.')
            self.assertEqual(
                allocation_not_worked_time.nextcall, next_date,
                'The next call date of the cron should be September 20th')
            self.assertAlmostEqual(allocation_worked_time.number_of_days,
                                   8.42857, 4,
                                   'There should be 8.42857 days allocated.')
            self.assertEqual(
                allocation_worked_time.nextcall, next_date,
                'The next call date of the cron should be September 20th')
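
Note that the later blocks above freeze on plain datetime.date values and on a date shifted with relativedelta; freeze_time accepts date and datetime objects as well as ISO strings. A minimal sketch of that, assuming only freezegun and dateutil:

import datetime

from dateutil.relativedelta import relativedelta
from freezegun import freeze_time

start = datetime.date(2021, 9, 6)

with freeze_time(start):
    assert datetime.date.today() == start

with freeze_time(start + relativedelta(days=7)):
    assert datetime.date.today() == datetime.date(2021, 9, 13)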
Example #47
        def test_sessions_list(self):
            with freeze_time("2012-01-14T03:21:34.000Z"):
                event_factory(team=self.team,
                              event="1st action",
                              distinct_id="1")
                event_factory(team=self.team,
                              event="1st action",
                              distinct_id="2")
            with freeze_time("2012-01-14T03:25:34.000Z"):
                event_factory(team=self.team,
                              event="2nd action",
                              distinct_id="1")
                event_factory(team=self.team,
                              event="2nd action",
                              distinct_id="2")
            with freeze_time("2012-01-15T03:59:34.000Z"):
                event_factory(team=self.team,
                              event="3rd action",
                              distinct_id="2")
            with freeze_time("2012-01-15T03:59:35.000Z"):
                event_factory(team=self.team,
                              event="3rd action",
                              distinct_id="1")
            with freeze_time("2012-01-15T04:01:34.000Z"):
                event_factory(team=self.team,
                              event="4th action",
                              distinct_id="1",
                              properties={"$os": "Mac OS X"})
                event_factory(team=self.team,
                              event="4th action",
                              distinct_id="2",
                              properties={"$os": "Windows 95"})
            team_2 = Team.objects.create()
            Person.objects.create(team=self.team,
                                  distinct_ids=["1", "3", "4"],
                                  properties={"email": "bla"})
            # Test team leakage
            Person.objects.create(team=team_2,
                                  distinct_ids=["1", "3", "4"],
                                  properties={"email": "bla"})
            with freeze_time("2012-01-15T04:01:34.000Z"):
                response = sessions().run(
                    SessionsFilter(data={
                        "events": [],
                        "session": None
                    }), self.team)

            self.assertEqual(len(response), 2)
            self.assertEqual(response[0]["global_session_id"], 1)

            with freeze_time("2012-01-15T04:01:34.000Z"):
                response = sessions().run(
                    SessionsFilter(
                        data={
                            "events": [],
                            "properties": [{
                                "key": "$os",
                                "value": "Mac OS X"
                            }],
                            "session": None
                        }),
                    self.team,
                )
            self.assertEqual(len(response), 1)
Example #48
        def test_sessions_and_cohort(self):
            with freeze_time("2012-01-14T03:21:34.000Z"):
                event_factory(team=self.team,
                              event="1st action",
                              distinct_id="1")
                event_factory(team=self.team,
                              event="1st action",
                              distinct_id="2")
            with freeze_time("2012-01-14T03:25:34.000Z"):
                event_factory(team=self.team,
                              event="2nd action",
                              distinct_id="1")
                event_factory(team=self.team,
                              event="2nd action",
                              distinct_id="2")
            with freeze_time("2012-01-15T03:59:34.000Z"):
                event_factory(team=self.team,
                              event="3rd action",
                              distinct_id="2")
            with freeze_time("2012-01-15T03:59:35.000Z"):
                event_factory(team=self.team,
                              event="3rd action",
                              distinct_id="1")
            with freeze_time("2012-01-15T04:01:34.000Z"):
                event_factory(team=self.team,
                              event="4th action",
                              distinct_id="1",
                              properties={"$os": "Mac OS X"})
                event_factory(team=self.team,
                              event="4th action",
                              distinct_id="2",
                              properties={"$os": "Windows 95"})
            team_2 = Team.objects.create()
            Person.objects.create(team=self.team,
                                  distinct_ids=["1", "3", "4"],
                                  properties={"email": "bla"})
            # Test team leakage
            Person.objects.create(team=team_2,
                                  distinct_ids=["1", "3", "4"],
                                  properties={"email": "bla"})
            cohort = Cohort.objects.create(team=self.team,
                                           groups=[{
                                               "properties": {
                                                   "email": "bla"
                                               }
                                           }])
            cohort.calculate_people()
            with freeze_time("2012-01-15T04:01:34.000Z"):
                response = sessions().run(
                    SessionsFilter(
                        data={
                            "events": [],
                            "session": None,
                            "properties": [{
                                "key": "id",
                                "value": cohort.pk,
                                "type": "cohort"
                            }],
                        }),
                    self.team,
                )
            self.assertEqual(len(response), 1)
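Example #49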
from os.path import dirname, join

import pytest
from city_scrapers_core.constants import ADVISORY_COMMITTEE, PASSED
from city_scrapers_core.utils import file_response
from freezegun import freeze_time

from city_scrapers.spiders.chi_mayors_pedestrian_advisory_council import (
    ChiMayorsPedestrianAdvisoryCouncilSpider)

test_response = file_response(
    join(dirname(__file__), "files",
         "chi_mayors_pedestrian_advisory_council.html"),
    url=(
        "http://chicagocompletestreets.org/getinvolved/mayors-advisory-councils/mpac-meeting-archives/"  # noqa
    ))
spider = ChiMayorsPedestrianAdvisoryCouncilSpider()

freezer = freeze_time("2019-06-07")
freezer.start()

parsed_items = [item for item in spider.parse(test_response)]

freezer.stop()


def test_count():
    assert len(parsed_items) == 8


@pytest.mark.parametrize('item', parsed_items)
def test_title(item):
    assert item['title'] == "Mayor's Pedestrian Advisory Council"
Example #50
        def test_sessions_avg_length_interval(self):
            with freeze_time("2012-01-14T03:21:34.000Z"):
                event_factory(team=self.team,
                              event="1st action",
                              distinct_id="1")
                event_factory(team=self.team,
                              event="1st action",
                              distinct_id="2")
            with freeze_time("2012-01-14T03:25:34.000Z"):
                event_factory(team=self.team,
                              event="2nd action",
                              distinct_id="1")
                event_factory(team=self.team,
                              event="2nd action",
                              distinct_id="2")
            with freeze_time("2012-01-25T03:59:34.000Z"):
                event_factory(team=self.team,
                              event="3rd action",
                              distinct_id="1")
                event_factory(team=self.team,
                              event="3rd action",
                              distinct_id="2")
            with freeze_time("2012-01-25T04:01:34.000Z"):
                event_factory(team=self.team,
                              event="4th action",
                              distinct_id="1")
                event_factory(team=self.team,
                              event="4th action",
                              distinct_id="2")

            with freeze_time("2012-03-14T03:21:34.000Z"):
                event_factory(team=self.team,
                              event="1st action",
                              distinct_id="2")
            with freeze_time("2012-03-14T03:25:34.000Z"):
                event_factory(team=self.team,
                              event="2nd action",
                              distinct_id="2")
            with freeze_time("2012-03-15T03:59:34.000Z"):
                event_factory(team=self.team,
                              event="3rd action",
                              distinct_id="2")
            with freeze_time("2012-03-15T04:01:34.000Z"):
                event_factory(team=self.team,
                              event="4th action",
                              distinct_id="2")

            # month
            month_response = sessions().run(
                SessionsFilter(
                    data={
                        "date_from": "2012-01-01",
                        "date_to": "2012-04-01",
                        "interval": "month",
                        "session": "avg"
                    }),
                self.team,
            )

            self.assertEqual(month_response[0]["data"][0], 180)
            self.assertEqual(month_response[0]["data"][2], 180)
            self.assertEqual(month_response[0]["labels"][0], "Tue. 31 January")
            self.assertEqual(month_response[0]["labels"][1],
                             "Wed. 29 February")
            self.assertEqual(month_response[0]["days"][0], "2012-01-31")
            self.assertEqual(month_response[0]["days"][1], "2012-02-29")

            # week
            week_response = sessions().run(
                SessionsFilter(
                    data={
                        "date_from": "2012-01-01",
                        "date_to": "2012-02-01",
                        "interval": "week",
                        "session": "avg"
                    }),
                self.team,
            )
            self.assertEqual(week_response[0]["data"][1], 240.0)
            self.assertEqual(week_response[0]["data"][3], 120.0)
            self.assertEqual(week_response[0]["labels"][0], "Sun. 1 January")
            self.assertEqual(week_response[0]["labels"][1], "Sun. 8 January")
            self.assertEqual(week_response[0]["days"][0], "2012-01-01")
            self.assertEqual(week_response[0]["days"][1], "2012-01-08")

            # hour
            hour_response = sessions().run(
                SessionsFilter(
                    data={
                        "date_from": "2012-03-14",
                        "date_to": "2012-03-16",
                        "interval": "hour",
                        "session": "avg"
                    }),
                self.team,
            )
            self.assertEqual(hour_response[0]["data"][3], 240.0)
            self.assertEqual(hour_response[0]["data"][27], 120.0)
            self.assertEqual(hour_response[0]["labels"][0],
                             "Wed. 14 March, 00:00")
            self.assertEqual(hour_response[0]["labels"][1],
                             "Wed. 14 March, 01:00")
            self.assertEqual(hour_response[0]["days"][0],
                             "2012-03-14 00:00:00")
            self.assertEqual(hour_response[0]["days"][1],
                             "2012-03-14 01:00:00")
Example #51
    def test_logs_services_disabled(self):
        """ Test disabled logs for active / passive checks for services

        :return: None
        """
        self.setup_with_file('cfg/cfg_monitoring_logs.cfg',
                             'cfg/cfg_monitoring_logs_disabled.ini')
        assert self.conf_is_correct

        self._sched = self._scheduler

        host = self._scheduler.hosts.find_by_name("test_host_0")
        host.checks_in_progress = []
        host.act_depend_of = []  # ignore the router
        host.event_handler_enabled = False

        svc = self._scheduler.services.find_srv_by_name_and_hostname(
            "test_host_0", "test_ok_0")
        # Make notifications sent very quickly
        svc.notification_interval = 10.0
        svc.checks_in_progress = []
        svc.act_depend_of = []  # no hostchecks on critical checkresults
        svc.event_handler_enabled = False

        # Freeze the time !
        initial_datetime = datetime.datetime(year=2018,
                                             month=6,
                                             day=1,
                                             hour=18,
                                             minute=30,
                                             second=0)
        with freeze_time(initial_datetime) as frozen_datetime:
            assert frozen_datetime() == initial_datetime

            # Make sure that host is UP
            self.check(frozen_datetime, host, 0, 'Host is UP', [])

            # Service is ok
            self.check(frozen_datetime, svc, 0, 'Service is OK', [])
            self.check(frozen_datetime, svc, 0, 'Service is OK', [])

            #  Service goes warning / SOFT
            self.check(frozen_datetime, svc, 1, 'Service is WARNING', [(
                'warning',
                'SERVICE ALERT: test_host_0;test_ok_0;WARNING;SOFT;1;Service is WARNING'
            )])

            #  Service goes warning / HARD
            # Get a service check, an alert and a notification
            self.check(frozen_datetime, svc, 1, 'Service is WARNING', [(
                'warning',
                'SERVICE ALERT: test_host_0;test_ok_0;WARNING;HARD;2;Service is WARNING'
            )])

            # Service notification raised
            self.check(frozen_datetime, svc, 1, 'Service is WARNING', [])

            self.check(frozen_datetime, svc, 1, 'Service is WARNING', [])

            # Service goes OK
            self.check(frozen_datetime, svc, 0, 'Service is OK', [(
                'info',
                'SERVICE ALERT: test_host_0;test_ok_0;OK;HARD;2;Service is OK')
                                                                  ])

            self.check(frozen_datetime, svc, 0, 'Service is OK', [])

            # Service goes CRITICAL
            self.check(frozen_datetime, svc, 2, 'Service is CRITICAL', [(
                'error',
                'SERVICE ALERT: test_host_0;test_ok_0;CRITICAL;SOFT;1;Service is CRITICAL'
            )])

            self.check(frozen_datetime, svc, 2, 'Service is CRITICAL', [(
                'error',
                'SERVICE ALERT: test_host_0;test_ok_0;CRITICAL;HARD;2;Service is CRITICAL'
            )])

            # Service goes OK
            self.check(frozen_datetime, svc, 0, 'Service is OK', [(
                'info',
                'SERVICE ALERT: test_host_0;test_ok_0;OK;HARD;2;Service is OK')
                                                                  ])

            self.check(frozen_datetime, svc, 0, 'Service OK', [])
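
The frozen_datetime object yielded by the with-block above is freezegun's frozen-time factory: calling it returns the currently frozen datetime (hence the equality assertion at the top of the block), and tick() advances the clock, which is presumably how the check() helper moves time forward between polls. A minimal sketch of the factory API, independent of the scheduler fixtures:

import datetime

from freezegun import freeze_time

initial = datetime.datetime(2018, 6, 1, 18, 30, 0)

with freeze_time(initial) as frozen:
    # Calling the factory returns the currently frozen datetime.
    assert frozen() == initial

    # tick() advances the frozen clock by the given timedelta.
    frozen.tick(delta=datetime.timedelta(minutes=5))
    assert datetime.datetime.now() == initial + datetime.timedelta(minutes=5)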
Example #52
def test_liabilities_limit(member):
    config = Configuration.get_solo()
    config.liability_interval = 36
    config.save()

    test_date = timezone.now().date().replace(year=2010, month=12, day=31)

    with freeze_time(test_date) as frozen_time:
        Membership.objects.create(
            member=member,
            start=test_date.replace(year=2007, month=5, day=1),
            amount=20,
            interval=FeeIntervals.MONTHLY,
        )
        member.update_liabilites()

        assert member.balance == -880.0
        assert member.statute_barred_debt() == 0

        frozen_time.move_to(test_date.replace(year=2011, month=1, day=1))
        member.update_liabilites()

        assert member.balance == -900.0
        assert member.statute_barred_debt() == 160.0

        t = Transaction.objects.create(value_datetime=test_date,
                                       user_or_context="test")
        t.debit(account=SpecialAccounts.bank,
                amount=12,
                user_or_context="test")
        t.credit(
            account=SpecialAccounts.fees_receivable,
            amount=12,
            member=member,
            user_or_context="test",
        )
        t.save()

        assert member.balance == -888.0
        assert member.statute_barred_debt() == 148.0

        t = Transaction.objects.create(
            value_datetime=test_date.replace(year=2007, month=7, day=1),
            user_or_context="test",
        )
        t.debit(account=SpecialAccounts.bank,
                amount=13,
                user_or_context="test")
        t.credit(
            account=SpecialAccounts.fees_receivable,
            amount=13,
            member=member,
            user_or_context="test",
        )
        t.save()

        assert member.balance == -875.0
        assert member.statute_barred_debt() == 135.0

        t = Transaction.objects.create(
            value_datetime=test_date.replace(year=2007, month=12, day=31),
            user_or_context="test",
        )
        t.debit(account=SpecialAccounts.bank,
                amount=136,
                user_or_context="test")
        t.credit(
            account=SpecialAccounts.fees_receivable,
            amount=136,
            member=member,
            user_or_context="test",
        )
        t.save()

        assert member.balance == -739.0
        assert member.statute_barred_debt() == 0

        assert member.statute_barred_debt(relativedelta(years=1)) == 239.0
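
move_to(), used above to step from 2010 into 2011, repositions the frozen clock without leaving the context manager, which lets one test book transactions as of several different dates. A minimal sketch of move_to(), with no dependence on the membership models above:

from datetime import date, datetime

from freezegun import freeze_time

with freeze_time("2010-12-31") as frozen_time:
    assert date.today() == date(2010, 12, 31)

    # Jump the frozen clock forward to the next day.
    frozen_time.move_to("2011-01-01")
    assert datetime.now() == datetime(2011, 1, 1)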
Example #53
    def test_logs_services(self):
        """ Test logs for active / passive checks for hosts

        :return: None
        """
        self.setup_with_file('cfg/cfg_monitoring_logs.cfg')
        assert self.conf_is_correct

        self._scheduler.pushed_conf.log_initial_states = True
        self._scheduler.pushed_conf.log_active_checks = True
        self._scheduler.pushed_conf.log_passive_checks = True

        host = self._scheduler.hosts.find_by_name("test_host_0")
        host.checks_in_progress = []
        host.act_depend_of = []  # ignore the router
        host.event_handler_enabled = True

        svc = self._scheduler.services.find_srv_by_name_and_hostname(
            "test_host_0", "test_ok_0")
        # Set the notification interval to 5 minutes
        svc.notification_interval = 5
        svc.checks_in_progress = []
        svc.act_depend_of = []  # no hostchecks on critical checkresults
        svc.event_handler_enabled = True

        # Freeze the time !
        initial_datetime = datetime.datetime(year=2018,
                                             month=6,
                                             day=1,
                                             hour=18,
                                             minute=30,
                                             second=0)
        with freeze_time(initial_datetime) as frozen_datetime:
            assert frozen_datetime() == initial_datetime

            # Make sure that host is UP
            self.check(
                frozen_datetime, host, 0, 'Host is UP',
                [('info', 'ACTIVE HOST CHECK: test_host_0;UP;0;Host is UP')])

            # Service is ok
            self.check(frozen_datetime, svc, 0, 'Service is OK', [(
                'info',
                'ACTIVE SERVICE CHECK: test_host_0;test_ok_0;OK;0;Service is OK'
            )])
            self.check(frozen_datetime, svc, 0, 'Service is OK', [(
                'info',
                'ACTIVE SERVICE CHECK: test_host_0;test_ok_0;OK;1;Service is OK'
            )])

            # Service goes warning / SOFT
            self.check(frozen_datetime, svc, 1, 'Service is WARNING', [
                ('warning',
                 'ACTIVE SERVICE CHECK: test_host_0;test_ok_0;WARNING;1;Service is WARNING'
                 ),
                ('warning',
                 'SERVICE EVENT HANDLER: test_host_0;test_ok_0;WARNING;SOFT;1;eventhandler'
                 ),
                ('warning',
                 'SERVICE ALERT: test_host_0;test_ok_0;WARNING;SOFT;1;Service is WARNING'
                 ),
            ])

            # Service goes warning / HARD
            # Get a service check, an alert and a notification
            self.check(frozen_datetime, svc, 1, 'Service is WARNING', [
                ('warning',
                 'ACTIVE SERVICE CHECK: test_host_0;test_ok_0;WARNING;1;Service is WARNING'
                 ),
                ('warning',
                 'SERVICE ALERT: test_host_0;test_ok_0;WARNING;HARD;2;Service is WARNING'
                 ),
                ('warning',
                 'SERVICE EVENT HANDLER: test_host_0;test_ok_0;WARNING;HARD;2;eventhandler'
                 ),
                ('warning',
                 'SERVICE NOTIFICATION: test_contact;test_host_0;test_ok_0;'
                 'WARNING;1;notify-service;Service is WARNING'),
            ])

            # Notification not raised - too soon!
            self.check(frozen_datetime, svc, 1, 'Service is WARNING', [(
                'warning',
                'ACTIVE SERVICE CHECK: test_host_0;test_ok_0;WARNING;2;Service is WARNING'
            )])

            # Notification not raised - too soon!
            self.check(frozen_datetime, svc, 1, 'Service is WARNING', [(
                'warning',
                'ACTIVE SERVICE CHECK: test_host_0;test_ok_0;WARNING;2;Service is WARNING'
            )])

            # Service goes OK
            self.check(frozen_datetime, svc, 0, 'Service is OK', [
                ('info',
                 'ACTIVE SERVICE CHECK: test_host_0;test_ok_0;OK;2;Service is OK'
                 ),
                ('info',
                 'SERVICE ALERT: test_host_0;test_ok_0;OK;HARD;2;Service is OK'
                 ),
                ('info',
                 'SERVICE EVENT HANDLER: test_host_0;test_ok_0;OK;HARD;2;eventhandler'
                 ),
                ('info',
                 'SERVICE NOTIFICATION: test_contact;test_host_0;test_ok_0;OK;0;'
                 'notify-service;Service is OK')
            ])

            self.check(frozen_datetime, svc, 0, 'Service is OK', [(
                'info',
                'ACTIVE SERVICE CHECK: test_host_0;test_ok_0;OK;1;Service is OK'
            )])

            # Service goes CRITICAL
            self.check(frozen_datetime, svc, 2, 'Service is CRITICAL', [
                ('error',
                 'ACTIVE SERVICE CHECK: test_host_0;test_ok_0;CRITICAL;1;Service is CRITICAL'
                 ),
                ('error',
                 'SERVICE ALERT: test_host_0;test_ok_0;CRITICAL;SOFT;1;Service is CRITICAL'
                 ),
                ('error',
                 'SERVICE EVENT HANDLER: test_host_0;test_ok_0;CRITICAL;SOFT;1;eventhandler'
                 ),
            ])

            self.check(frozen_datetime, svc, 2, 'Service is CRITICAL', [
                ('error',
                 'ACTIVE SERVICE CHECK: test_host_0;test_ok_0;CRITICAL;1;Service is CRITICAL'
                 ),
                ('error',
                 'SERVICE ALERT: test_host_0;test_ok_0;CRITICAL;HARD;2;Service is CRITICAL'
                 ),
                ('error',
                 'SERVICE EVENT HANDLER: test_host_0;test_ok_0;CRITICAL;HARD;2;eventhandler'
                 ),
                ('error',
                 'SERVICE NOTIFICATION: test_contact;test_host_0;test_ok_0;'
                 'CRITICAL;1;notify-service;Service is CRITICAL')
            ])

            # Service goes OK
            self.check(frozen_datetime, svc, 0, 'Service is OK', [
                ('info',
                 'ACTIVE SERVICE CHECK: test_host_0;test_ok_0;OK;2;Service is OK'
                 ),
                ('info',
                 'SERVICE ALERT: test_host_0;test_ok_0;OK;HARD;2;Service is OK'
                 ),
                ('info',
                 'SERVICE EVENT HANDLER: test_host_0;test_ok_0;OK;HARD;2;eventhandler'
                 ),
                ('info',
                 'SERVICE NOTIFICATION: test_contact;test_host_0;test_ok_0;'
                 'OK;0;notify-service;Service is OK')
            ])

            self.check(frozen_datetime, svc, 0, 'Service OK', [
                ('info',
                 'ACTIVE SERVICE CHECK: test_host_0;test_ok_0;OK;1;Service OK')
            ])
Example #54
    def test_logs_hosts(self):
        """ Test logs for active / passive checks for hosts

        :return: None
        """
        self.setup_with_file('cfg/cfg_monitoring_logs.cfg')
        assert self.conf_is_correct

        self._scheduler.pushed_conf.log_initial_states = True
        self._scheduler.pushed_conf.log_active_checks = True
        self._scheduler.pushed_conf.log_passive_checks = True

        host = self._scheduler.hosts.find_by_name("test_host_0")
        # Set the notification interval to 5 minutes
        host.notification_interval = 5
        host.checks_in_progress = []
        host.act_depend_of = []  # ignore the router
        host.event_handler_enabled = True

        # Freeze the time !
        initial_datetime = datetime.datetime(year=2018,
                                             month=6,
                                             day=1,
                                             hour=18,
                                             minute=30,
                                             second=0)
        with freeze_time(initial_datetime) as frozen_datetime:
            assert frozen_datetime() == initial_datetime

            # Host active checks
            self.check(
                frozen_datetime, host, 0, 'Host is UP',
                [('info', u'ACTIVE HOST CHECK: test_host_0;UP;0;Host is UP')])

            self.check(
                frozen_datetime, host, 0, 'Host is UP',
                [('info', u'ACTIVE HOST CHECK: test_host_0;UP;1;Host is UP')])

            # Host goes DOWN / SOFT
            self.check(frozen_datetime, host, 2, 'Host is DOWN', [
                ('error',
                 'ACTIVE HOST CHECK: test_host_0;DOWN;1;Host is DOWN'),
                ('error', 'HOST ALERT: test_host_0;DOWN;SOFT;1;Host is DOWN'),
                ('error',
                 'HOST EVENT HANDLER: test_host_0;DOWN;SOFT;1;eventhandler'),
            ])

            self.check(
                frozen_datetime, host, 2, 'Host is DOWN',
                [('error',
                  'ACTIVE HOST CHECK: test_host_0;DOWN;1;Host is DOWN'),
                 ('error', 'HOST ALERT: test_host_0;DOWN;SOFT;2;Host is DOWN'),
                 ('error',
                  'HOST EVENT HANDLER: test_host_0;DOWN;SOFT;2;eventhandler')])

            # Host goes DOWN / HARD
            self.check(frozen_datetime, host, 2, 'Host is DOWN', [
                ('error',
                 'ACTIVE HOST CHECK: test_host_0;DOWN;2;Host is DOWN'),
                ('error', 'HOST ALERT: test_host_0;DOWN;HARD;3;Host is DOWN'),
                ('error',
                 'HOST EVENT HANDLER: test_host_0;DOWN;HARD;3;eventhandler'),
                ('error',
                 'HOST NOTIFICATION: test_contact;test_host_0;DOWN;1;notify-host;Host is DOWN'
                 )
            ])

            # Notification not raised - too soon!
            self.check(frozen_datetime, host, 2, 'Host is DOWN', [
                ('error', 'ACTIVE HOST CHECK: test_host_0;DOWN;3;Host is DOWN')
            ])

            # Notification not raised - too soon!
            self.check(frozen_datetime, host, 2, 'Host is DOWN', [
                ('error', 'ACTIVE HOST CHECK: test_host_0;DOWN;3;Host is DOWN')
            ])

            # Host goes UP / HARD
            # Get a host check, an alert and a notification
            self.check(frozen_datetime, host, 0, 'Host is UP', [
                ('info', 'ACTIVE HOST CHECK: test_host_0;UP;3;Host is UP'),
                ('info', 'HOST ALERT: test_host_0;UP;HARD;3;Host is UP'),
                ('info',
                 'HOST EVENT HANDLER: test_host_0;UP;HARD;3;eventhandler'),
                ('info',
                 'HOST NOTIFICATION: test_contact;test_host_0;UP;0;notify-host;Host is UP'
                 )
            ])

            self.check(
                frozen_datetime, host, 0, 'Host is UP',
                [('info', 'ACTIVE HOST CHECK: test_host_0;UP;1;Host is UP')])

            self.check(
                frozen_datetime, host, 0, 'Host is UP',
                [('info', 'ACTIVE HOST CHECK: test_host_0;UP;1;Host is UP')])
Example #55
from os.path import dirname, join

from city_scrapers_core.constants import COMMISSION, PASSED
from city_scrapers_core.utils import file_response
from freezegun import freeze_time
from scrapy.settings import Settings

from city_scrapers.spiders.chi_community_development import ChiCommunityDevelopmentSpider

test_response = file_response(
    join(dirname(__file__), "files", "chi_community_development.html"),
    url=
    'https://www.chicago.gov/city/en/depts/dcd/supp_info/community_developmentcommission.html'
)
spider = ChiCommunityDevelopmentSpider()
spider.settings = Settings(values={"CITY_SCRAPERS_ARCHIVE": False})

freezer = freeze_time("2018-05-01")
freezer.start()

parsed_items = [item for item in spider.parse(test_response)]

freezer.stop()


def test_meeting_count():
    assert len(parsed_items) == 20


def test_unique_id_count():
    assert len(set([item['id'] for item in parsed_items])) == 20

Example #56
    def test_logs_hosts_disabled(self):
        """ Test disabled logs for active / passive checks for hosts

        :return: None
        """
        self.setup_with_file('cfg/cfg_monitoring_logs.cfg',
                             'cfg/cfg_monitoring_logs_disabled.ini')
        assert self.conf_is_correct

        self._sched = self._scheduler

        host = self._scheduler.hosts.find_by_name("test_host_0")
        # Make notifications sent very quickly
        host.notification_interval = 10.0
        host.checks_in_progress = []
        host.act_depend_of = []  # ignore the router
        host.event_handler_enabled = True

        # Freeze the time !
        initial_datetime = datetime.datetime(year=2018,
                                             month=6,
                                             day=1,
                                             hour=18,
                                             minute=30,
                                             second=0)
        with freeze_time(initial_datetime) as frozen_datetime:
            assert frozen_datetime() == initial_datetime

            #  Host active checks
            self.check(frozen_datetime, host, 0, 'Host is UP', [])

            self.check(frozen_datetime, host, 0, 'Host is UP', [])

            # Host goes DOWN / SOFT
            self.check(frozen_datetime, host, 2, 'Host is DOWN', [
                ('error', 'HOST ALERT: test_host_0;DOWN;SOFT;1;Host is DOWN')
            ])

            self.check(frozen_datetime, host, 2, 'Host is DOWN', [
                ('error', 'HOST ALERT: test_host_0;DOWN;SOFT;2;Host is DOWN')
            ])

            # Host goes DOWN / HARD
            self.check(frozen_datetime, host, 2, 'Host is DOWN', [
                ('error', 'HOST ALERT: test_host_0;DOWN;HARD;3;Host is DOWN')
            ])

            # Host notification raised
            self.check(frozen_datetime, host, 2, 'Host is DOWN', [])

            self.check(frozen_datetime, host, 2, 'Host is DOWN', [])

            #  Host goes UP / HARD
            # Get a host check, an alert and a notification
            self.check(
                frozen_datetime, host, 0, 'Host is UP',
                [('info', 'HOST ALERT: test_host_0;UP;HARD;3;Host is UP')])

            self.check(frozen_datetime, host, 0, 'Host is UP', [])

            self.check(frozen_datetime, host, 0, 'Host is UP', [])
Example #57
        def test_calculate_usage(self) -> None:
            self.team.event_names = ["$pageview", "custom event"]
            self.team.event_properties = ["$current_url", "team_id", "value"]
            self.team.save()
            team2 = Team.objects.create()
            with freeze_time("2020-08-01"):
                # ignore stuff older than 30 days
                DashboardItem.objects.create(
                    team=self.team,
                    filters={
                        "events": [{
                            "id": "$pageview"
                        }],
                        "properties": [{
                            "key": "$current_url",
                            "value": "https://posthog.com"
                        }],
                    },
                )
                create_event(
                    distinct_id="test",
                    team=self.team,
                    event="$pageview",
                    properties={"$current_url": "https://posthog.com"},
                )
            with freeze_time("2020-10-01"):
                DashboardItem.objects.create(
                    team=self.team,
                    filters={
                        "events": [{
                            "id": "$pageview"
                        }],
                        "properties": [{
                            "key": "$current_url",
                            "value": "https://posthog.com"
                        }],
                    },
                )
                DashboardItem.objects.create(
                    team=self.team,
                    filters={
                        "events": [{
                            "id": "$pageview"
                        }],
                        "properties": [{
                            "key": "$current_url",
                            "value": "https://posthog2.com"
                        }],
                    },
                )
                DashboardItem.objects.create(
                    team=self.team,
                    filters={
                        "events": [{
                            "id": "custom event"
                        }],
                        "properties": [{
                            "key": "team_id",
                            "value": "3"
                        }]
                    },
                )
                DashboardItem.objects.create(
                    team=self.team,
                    filters={"events": [{
                        "id": "event that doesnt exist"
                    }]})
                # broken dashboard item
                DashboardItem.objects.create(team=self.team, filters={})
                create_event(
                    distinct_id="test",
                    team=self.team,
                    event="$pageview",
                    properties={"$current_url": "https://posthog.com"},
                )
                create_event(
                    distinct_id="test",
                    team=self.team,
                    event="$pageview",
                    properties={"$current_url": "https://posthog2.com"},
                )
                create_event(distinct_id="test",
                             team=self.team,
                             event="custom event",
                             properties={"team_id": "3"})

                # team leakage
                create_event(
                    distinct_id="test",
                    team=team2,
                    event="$pageview",
                    properties={"$current_url": "https://posthog.com"},
                )
                DashboardItem.objects.create(
                    team=team2,
                    filters={
                        "events": [{
                            "id": "$pageview"
                        }],
                        "properties": [{
                            "key": "$current_url",
                            "value": "https://posthog.com"
                        }],
                    },
                )

                calculate_event_property_usage_for_team(self.team.pk)
            team = Team.objects.get(pk=self.team.pk)
            self.assertEqual(
                team.event_names_with_usage,
                [
                    {
                        "event": "$pageview",
                        "usage_count": 2,
                        "volume": 2
                    },
                    {
                        "event": "custom event",
                        "usage_count": 1,
                        "volume": 1
                    },
                ],
            )
            self.assertEqual(
                team.event_properties_with_usage,
                [
                    {
                        "key": "$current_url",
                        "usage_count": 2,
                        "volume": 2
                    },
                    {
                        "key": "team_id",
                        "usage_count": 1,
                        "volume": 1
                    },
                    {
                        "key": "value",
                        "usage_count": 0,
                        "volume": 0
                    },
                ],
            )
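The two freeze_time blocks above exist so that records created in the first block are stamped more than 30 days before the usage calculation runs in the second, and are therefore ignored. A minimal sketch of that timestamp arithmetic, with plain datetimes standing in for the ORM's auto-set created_at fields:

import datetime

from freezegun import freeze_time

with freeze_time("2020-08-01"):
    # Stand-in for a created_at value set while the old block was active.
    old_created_at = datetime.datetime.now()

with freeze_time("2020-10-01"):
    cutoff = datetime.datetime.now() - datetime.timedelta(days=30)
    # The August record falls outside the 30-day window and is ignored.
    assert old_created_at < cutoff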
Example #58
0
def test_todays_buckets(date, expected, freezer):
    # assuming a ff.start_date of 2019-12-31
    with freeze_time(date, tick=True):
        ff = flippyflop.FlippyFlop(dummy_service, TEST_SPREADSHEET_ID)
        result = ff._todays_buckets()
        assert result == expected
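The freezer argument is presumably the fixture provided by the pytest-freezegun plugin, and tick=True means the clock starts at `date` but keeps advancing in real time, so "today" is deterministic while elapsed time inside the block stays real. A minimal sketch of tick=True on its own, with an illustrative date:

import datetime
import time

from freezegun import freeze_time

with freeze_time("2019-12-31 23:59:59", tick=True):
    first = datetime.datetime.now()
    time.sleep(0.01)  # real time still elapses under tick=True
    second = datetime.datetime.now()

    # The clock started at (or just after) the frozen instant...
    assert first >= datetime.datetime(2019, 12, 31, 23, 59, 59)
    # ...and it keeps moving forward.
    assert second > first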
Example #59
0
    def test_learner_data_multiple_courses(self, pacing, grade,
                                           mock_course_api, mock_grades_api,
                                           mock_certificate_api,
                                           mock_enrollment_api):
        enrollment1 = factories.EnterpriseCourseEnrollmentFactory(
            enterprise_customer_user=self.enterprise_customer_user,
            course_id=self.course_id,
        )

        course_id2 = 'course-v1:edX+DemoX+DemoCourse2'
        enrollment2 = factories.EnterpriseCourseEnrollmentFactory(
            enterprise_customer_user=self.enterprise_customer_user,
            course_id=course_id2,
        )
        factories.DataSharingConsentFactory(
            username=self.enterprise_customer_user.username,
            course_id=course_id2,
            enterprise_customer=self.enterprise_customer,
            granted=True)

        enrollment3 = factories.EnterpriseCourseEnrollmentFactory(
            enterprise_customer_user=factories.EnterpriseCustomerUserFactory(
                user_id=factories.UserFactory(username='******', id=2).id,
                enterprise_customer=self.enterprise_customer,
            ),
            course_id=self.course_id,
        )
        factories.DataSharingConsentFactory(
            username='******',
            course_id=self.course_id,
            enterprise_customer=self.enterprise_customer,
            granted=True)

        def get_course_details(course_id):
            """
            Mock course details - set course_id to match input
            """
            return dict(pacing=pacing, course_id=course_id)

        mock_course_api.return_value.get_course_details.side_effect = get_course_details

        def get_course_certificate(course_id, username):
            """
            Mock certificate data - return depending on course_id
            """
            if '2' in course_id:
                return dict(
                    username=username,
                    is_passing=True,
                    grade=grade,
                )
            else:
                raise HttpNotFoundError

        mock_certificate_api.return_value.get_course_certificate.side_effect = get_course_certificate

        def get_course_grade(course_id, username):
            """
            Mock grades data - set passed depending on course_id
            """
            return dict(
                passed='2' in course_id,
                course_key=course_id,
                username=username,
            )

        mock_grades_api.return_value.get_course_grade.side_effect = get_course_grade

        # Mock enrollment data
        mock_enrollment_api.return_value.get_course_enrollment.return_value = dict(
            mode="verified")

        # Collect the learner data, with time set to NOW
        with freeze_time(self.NOW):
            learner_data = list(self.exporter.export())

        assert len(learner_data) == 6

        assert learner_data[0].course_id == self.course_key
        assert learner_data[1].course_id == self.course_id
        for report1 in learner_data[0:1]:
            assert report1.enterprise_course_enrollment_id == enrollment1.id
            assert not report1.course_completed
            assert report1.completed_timestamp is None
            assert report1.grade == LearnerExporter.GRADE_INCOMPLETE

        assert learner_data[2].course_id == self.course_key
        assert learner_data[3].course_id == self.course_id
        for report2 in learner_data[2:3]:
            assert report2.enterprise_course_enrollment_id == enrollment3.id
            assert not report2.course_completed
            assert report2.completed_timestamp is None
            assert report2.grade == LearnerExporter.GRADE_INCOMPLETE

        assert learner_data[4].course_id == self.course_key
        assert learner_data[5].course_id == course_id2
        for report3 in learner_data[4:5]:
            assert report3.enterprise_course_enrollment_id == enrollment2.id
            # assert report3.course_id == course_id2
            assert report3.course_completed
            assert report3.completed_timestamp == self.NOW_TIMESTAMP
            assert report3.grade == grade
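Freezing to self.NOW pins the exporter's completion timestamps, so they can be compared against a precomputed self.NOW_TIMESTAMP. A minimal sketch of how such a NOW / NOW_TIMESTAMP pair fits together; the names and the value are assumptions for illustration, not taken from the real test class:

import datetime

from freezegun import freeze_time

NOW = datetime.datetime(2017, 1, 2, 3, 4, 5)
NOW_TIMESTAMP = NOW.replace(tzinfo=datetime.timezone.utc).timestamp()

with freeze_time(NOW):
    # Whatever gets recorded as "completed now" is pinned to NOW.
    completed = datetime.datetime.utcnow()

assert completed.replace(tzinfo=datetime.timezone.utc).timestamp() == NOW_TIMESTAMP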
Example #60
0
            # Get some data from the parent, if present
            parent = data[name].get('parent')
            if parent:
                kwargs['parent'] = CertificateAuthority.objects.get(
                    name=parent)
                kwargs['ca_crl_url'] = data[parent]['ca_crl_url']

                # also update data
                data[name]['crl'] = data[parent]['ca_crl_url']

            freeze_now = now
            if args.delay:
                freeze_now += data[name]['delta']

            with freeze_time(freeze_now):
                ca = CertificateAuthority.objects.init(
                    name=data[name]['name'],
                    password=data[name]['password'],
                    subject=data[name]['subject'],
                    expires=datetime.utcnow() + data[name]['expires'],
                    key_type=data[name]['key_type'],
                    key_size=data[name]['key_size'],
                    algorithm=data[name]['algorithm'],
                    pathlen=data[name]['pathlen'],
                    **kwargs)

            # Some values can only be added here because they require data from the already created CA
            crl_path = reverse('django_ca:crl', kwargs={'serial': ca.serial})
            ca.crl_url = '%s%s' % (testserver, crl_path)
            ca.save()
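Shifting the freeze target per CA (freeze_now = now + delta) gives each certificate authority in the fixture hierarchy a staggered, reproducible creation time. A minimal sketch of that freeze-then-create idea, with plain datetimes standing in for django_ca's CertificateAuthority objects:

import datetime

from freezegun import freeze_time

now = datetime.datetime(2019, 4, 5, 12, 0, 0)
deltas = [datetime.timedelta(days=0), datetime.timedelta(days=30)]

created_times = []
for delta in deltas:
    # Each object is "created" under its own shifted frozen clock.
    with freeze_time(now + delta):
        created_times.append(datetime.datetime.utcnow())

assert created_times == [now, now + datetime.timedelta(days=30)]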