def __init__(self):

        self.monotonic_time = 0.0

        # list of functions that should be patched
        self.patchers = [
            mock_patch('time.monotonic', side_effect=self.monotonic),
            mock_patch('time.sleep', side_effect=self.sleep),
        ]
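A minimal sketch of how such a patcher list could be driven, assuming mock_patch aliases unittest.mock.patch (the alias import is not shown in these snippets) and filling in hypothetical monotonic/sleep bodies:

from unittest.mock import patch as mock_patch

class FakeTime:
    # Hypothetical harness around the patcher list above; not taken
    # from the original source.
    def __init__(self):
        self.monotonic_time = 0.0
        self.patchers = [
            mock_patch('time.monotonic', side_effect=self.monotonic),
            mock_patch('time.sleep', side_effect=self.sleep),
        ]

    def monotonic(self):
        # Report the simulated clock instead of the real one.
        return self.monotonic_time

    def sleep(self, seconds):
        # Advance the simulated clock instead of blocking.
        self.monotonic_time += seconds

    def __enter__(self):
        for patcher in self.patchers:
            patcher.start()
        return self

    def __exit__(self, *exc_info):
        for patcher in self.patchers:
            patcher.stop()

Inside the with block, time.sleep(5) returns immediately and time.monotonic() reports the advanced fake clock.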
Example #2
    def __init__(self, systemcalls):

        self.systemcalls = systemcalls

        # list of functions that should be patched
        self.patchers = [
            mock_patch('socket.socket', side_effect=self.__get_socket),
            mock_patch('selectors.DefaultSelector',
                       side_effect=self.__get_selector),
            mock_patch('time.monotonic',
                       side_effect=self.systemcalls.monotonic),
            mock_patch('time.time', side_effect=self.systemcalls.time),
            # mock_patch('time.sleep', side_effect=self.systemcalls.sleep),
        ]
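Each entry relies on standard unittest.mock semantics: a callable side_effect is invoked in place of the real function and its result is returned. A throwaway demonstration:

import time
from unittest.mock import patch as mock_patch

def fake_time():
    # Deterministic timestamp for tests.
    return 1234.5

with mock_patch('time.time', side_effect=fake_time):
    assert time.time() == 1234.5  # delegated to fake_time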
Example #3
    def test_predict(self):
        #Arrange
        num_prediction_steps = 1
        model, input_data, input_params, image_generation_params, prediction_results = TestPrediction.get_inputs(
            num_prediction_steps)

        with mock_patch('operation.utils.imload', side_effect=patch_imload):
            #Act
            predictor = Prediction(model, input_params,
                                   image_generation_params)
            predictions = predictor.predict(input_data, num_prediction_steps)
            num_results = image_generation_params.batch_size * num_prediction_steps

            self.assertEqual(len(predictions), num_results)
            # .loc slicing is end-inclusive, so take num_results rows from each frame
            self.assertCountEqual(
                input_data.loc[:num_results - 1,
                               image_generation_params.image_cols[0]].values,
                predictions.loc[:num_results - 1,
                                image_generation_params.image_cols[0]].values)
            self.assertCountEqual(list(predictions), [
                constants.PANDAS_PREDICTION_COLUMN,
                constants.PANDAS_MATCH_COLUMN,
                image_generation_params.image_cols[0],
                image_generation_params.label_col
            ])
            self.assertCountEqual(
                [0], predictions[constants.PANDAS_MATCH_COLUMN].unique())
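patch_imload, used as a side_effect for operation.utils.imload throughout these tests, is defined elsewhere in the suite; a plausible stand-in (signature and shape are assumptions, not the original helper) could be:

import numpy as np

def patch_imload(*args, **kwargs):
    # Hypothetical stub: return a zeroed dummy image instead of reading from disk.
    return np.zeros((224, 224, 3))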
    def validate_base_model_creation(self, feature_model_name, module_path):
        #Arrange
        base_model = BaseModel(feature_model_name, input_shape)

        #Act & Assert
        with mock_patch(module_path) as base_mock:
            base_model.base_model()
            base_mock.assert_called_once()
    def cnn_validate_base_model_creation(self, feature_model_name,
                                         module_path):
        #Arrange
        base_model = BaseModel(feature_model_name, input_shape)

        #Act & Assert
        with mock_patch(module_path) as base_mock:
            base_model._prepare_model = MagicMock()
            base_model.cnn(dimensions)
            base_mock.assert_called_once()
    def test_batch_train_no_lr(self):
        #Arrange
        model, input_data, trainer = get_train_args()
        trainer._training_params.learning_rate = None
        trainer._training_params.number_of_epochs = 1
        trainer._image_generation_params.batch_size = 40
        trainer._dropbox_auth = None
        trainer._dropbox_dir = None

        #Mock the relevant calls
        model.train_on_batch = Mock(return_value=[random() for _ in model.metrics_names])
        K.set_value = MagicMock()

        with mock_patch('operation.utils.imload', side_effect=patch_imload):
            #Act
            _ = trainer.batch_train(model, input_data)

            #Assert
            K.set_value.assert_not_called()
Example #7
    def test_predict_prediction_with_reduced_num_prediction_steps(self):
        #Arrange
        num_prediction_steps = 2
        num_results = 5
        model, input_data, input_params, image_generation_params, _ = TestPrediction.get_inputs(
            num_prediction_steps, num_results)

        with mock_patch('operation.utils.imload', side_effect=patch_imload):
            #Act
            predictor = Prediction(model, input_params,
                                   image_generation_params)
            predictions = predictor.predict(input_data[:num_results],
                                            num_prediction_steps)

            #Assert
            model.predict_generator.assert_called_once()
            #call_args pairs are (args, kwargs); keyword arguments come second
            _, kwargs = model.predict_generator.call_args_list[0]
            self.assertEqual(
                1,  #Modified prediction steps
                kwargs['steps'])
    def batch_train(self, batch_id=0, epoch_id=0, number_of_epochs=1):
        #Arrange
        model, input_data, trainer = get_train_args()

        #Arrange batch size
        batch_size = 40
        trainer._image_generation_params.batch_size = batch_size
        number_of_batches = ceil(len(input_data) / batch_size)
        first_epoch_batches = number_of_batches - batch_id
        total_training_epochs = number_of_epochs - epoch_id
        total_training_batches = first_epoch_batches + number_of_batches * (total_training_epochs - 1)

        #Arrange batch and epoch parameters
        trainer._training_params.batch_id = batch_id
        trainer._training_params.epoch_id = epoch_id
        trainer._training_params.number_of_epochs = number_of_epochs

        #Mock the relevant calls
        model.fit_generator = Mock(return_value=[random() for _ in model.metrics_names])
        model.train_on_batch = Mock(return_value=[random() for _ in model.metrics_names])
        trainer._transformer.fit = MagicMock()
        K.set_value = MagicMock()

        with mock_patch('operation.utils.imload', side_effect=patch_imload):
            #Act
            response = trainer.batch_train(model, input_data)

            #Assert
            model.fit_generator.assert_not_called()

            #Assert train batch calls
            call_args_list = model.train_on_batch.call_args_list
            self.assertEqual(len(call_args_list), total_training_batches)

            #Assert model and learning rate
            self.assert_response_and_learning_rate(model, response)

            #Assert checkpoint calls
            self.assert_checkpoint(trainer._checkpoint_callback, total_training_batches, total_training_epochs)
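batch_train above is a parameterized helper rather than a test; hypothetical callers (the ids are illustrative, not from the original suite) might look like:

    def test_batch_train_from_start(self):
        self.batch_train(batch_id=0, epoch_id=0, number_of_epochs=1)

    def test_batch_train_resume_mid_epoch(self):
        self.batch_train(batch_id=2, epoch_id=0, number_of_epochs=2)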
    def test_train(self):
        #Arrange
        model, input_data, trainer = get_train_args()

        #Mock the relevant calls
        model.fit_generator = MagicMock()
        model.train_on_batch = MagicMock()
        K.set_value = MagicMock()

        with mock_patch('operation.utils.imload', side_effect=patch_imload):
            #Act
            response = trainer.train(model, input_data)

            #Assert
            model.fit_generator.assert_called_once()
            model.train_on_batch.assert_not_called()
            _, kwargs = model.fit_generator.call_args_list[0]
            self.assertIsInstance(kwargs['generator'], ImageDataIterator)
            self.assertIsInstance(kwargs['validation_data'], ImageDataIterator)
            self.assertEqual(kwargs['epochs'], trainer._training_params.number_of_epochs)

            #Assert model and learning rate
            self.assert_response_and_learning_rate(model, response)
    def batch_train_input_data_randomized(self, start_batch_id, is_randomized=True):
        #Arrange
        model, input_data, trainer = get_train_args()
        trainer._training_params.batch_id = start_batch_id
        trainer._training_params.epoch_id = 0
        trainer._training_params.number_of_epochs = 1
        trainer._image_generation_params.batch_size = 40
        trainer._dropbox_auth = None
        trainer._dropbox_dir = None

        #Mock the relevant calls
        model.train_on_batch = Mock(return_value=[random() for _ in model.metrics_names])
        trainer._transformer.fit = MagicMock()
        input_data.sample = MagicMock()

        with mock_patch('operation.utils.imload', side_effect=patch_imload):
            #Act
            _ = trainer.batch_train(model, input_data)

            #Assert
            if is_randomized:
                input_data.sample.assert_called_once()
            else:
                input_data.sample.assert_not_called()
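Similarly, hypothetical callers for the randomization helper; the pairing of start_batch_id with is_randomized here is illustrative, not from the original suite:

    def test_batch_train_randomizes_fresh_run(self):
        self.batch_train_input_data_randomized(0, is_randomized=True)

    def test_batch_train_resume_skips_randomization(self):
        self.batch_train_input_data_randomized(2, is_randomized=False)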
Example #11
def download_evts(calendar=None, in_loop=False):
    db = sqlite3.connect('evts.sqlite3')
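    # pickle.dumps is patched out so that _ev_entry can run on a MagicMock
    # purely to discover the column names for the table schema.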
    with mock_patch('pickle.dumps'):
        keys = list(_ev_entry(MagicMock()).keys())
    db.execute(
        f'create table if not exists events (id PRIMARY KEY,{",".join(keys[1:])})'
    )
    db.execute(
        'create index if not exists id_date on events (root_id, startdate_index)'
    )
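    # The view annotates each event with local_recurring: the count of other
    # events sharing its root_id that start within 14 days either side of it.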
    db.execute(
        '''create view if not exists events_recurring as
               select a.*, count(*) - 1 as local_recurring
               from events a
               left join events b on a.root_id = b.root_id
                   and b.startdate_index >= a.startdate_index-14
                   and b.startdate_index <= a.startdate_index+14
               group by a.id
               '''
    )
    now = datetime.now(tzlocal())
    tries_remaining = 2 if not in_loop else 5
    while tries_remaining:
        try:
            tries_remaining -= 1
            cals = s().calendarList().list().execute()['items']
            break
        except Exception as e:
            print(e)
            if not tries_remaining:
                raise
            if in_loop:
                print('retrying in 30 seconds...')
                sleep(30)
    obj = {
        'calendars': cals,
        'timestamp': now,
    }
    calmap = {cal['id']: cal for cal in cals}
    allCals = get_visible_cals(cals)
    try:
        old_obj = load_evts(print_warning=False, partial=True)
        for cal in old_obj['calendars']:
            calmap[cal['id']]['syncToken'] = cal.get('syncToken')
    except FileNotFoundError:
        pass

    if calendar is not None:
        calsToDownload = [calendar]
    else:
        calsToDownload = allCals.values()

    for calId in calsToDownload:
        if not isinstance(calId, str) or '@' not in calId:
            continue
        print(f'downloading {calId}...')
        kwargs = {
            'calendarId': calId,
            'singleEvents': True,
            'maxResults': 2500,
            'syncToken': calmap[calId].get('syncToken'),
        }
        pagenum = 0
        while True:
            pagenum += 1
            if pagenum > 1:
                print(f'  downloading page {pagenum}...')
            try:
                r = s().events().list(**kwargs).execute()
            except HttpError as e:
                if int(e.resp['status']) == 410:
                    print("  410'd, redownloading...")
                    db.execute('delete from events where calendar = ?', (calId,))
                    calmap[calId].pop('syncToken', None)
                    if 'syncToken' in kwargs:
                        del kwargs['syncToken']
                    if 'pageToken' in kwargs:
                        del kwargs['pageToken']
                    pagenum = 0
                    continue
                else:
                    raise
            except Exception as e:
                print(repr(e))
                # Re-raise: continuing would read an undefined response below.
                raise
            entries = []
            deleting = []
            for e in r['items']:
                if e['status'] == 'cancelled':
                    deleting.append((e['id'],))
                else:
                    ev = Event.unpkg(e)
                    ev.calendar = calId
                    entries.append(_ev_entry(ev))
            db.executemany(
                f'''insert into events values ({",".join(f":{key}" for key in keys)})
                    on conflict(id) do update set {",".join(f"{key}=:{key}" for key in keys[1:])}''',
                entries,
            )
            db.executemany('delete from events where id = ?', deleting)
            if 'nextPageToken' in r:
                kwargs['pageToken'] = r['nextPageToken']
                continue
            if 'nextSyncToken' in r:
                calmap[calId]['syncToken'] = r['nextSyncToken']
            break
        db.commit()
    with open('evts.yaml', 'w') as f:
        yaml.dump(obj, f, default_flow_style=False)
    with open('evts.pickle', 'wb') as f:
        pickle.dump(obj, f, protocol=-1)
    db.commit()
    return obj
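A hypothetical driver for the in_loop path (the interval is illustrative; sleep is already in scope, given the call on the retry path above):

def poll_forever():
    # Refresh all calendars indefinitely; in_loop=True enables the larger
    # retry budget and the 30-second retry wait in download_evts.
    while True:
        download_evts(in_loop=True)
        sleep(300)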