# Assumed imports for these factory helpers. make_random_str, ApiUser,
# DataSet, TrainEnsemble, LearnModel, DataFile, ParseLog and build_key are
# project-local; their module paths are not shown in this excerpt.
import datetime

from dateutil import tz
from django.conf import settings
from django.core.files.uploadedfile import SimpleUploadedFile
from django.core.urlresolvers import reverse  # django.urls on newer Django
from rest_framework.test import APIClient


def user():
    """
    Creates a new user with a random email and password.
    Returns the new user instance with the plain-text password
    set on its ``password`` attribute.
    """
    name = (make_random_str() + '@' + make_random_str(4) + '.' +
            make_random_str(3)).lower()
    password = make_random_str(20)
    client_ = APIClient()
    client_.post(reverse('register'), data={
        'username': name,
        'password': password,
        'password_repeat': password
    }, follow=True)
    user_ = ApiUser.objects.get(email=name)
    user_.password = password
    return user_
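# Minimal usage sketch for the factory above. It only restates what the
# factory does; the length check assumes make_random_str(n) returns an
# n-character string, which this excerpt does not confirm.
def test_user_factory():
    user_ = user()
    assert ApiUser.objects.filter(email=user_.email).exists()
    assert len(user_.password) == 20  # plain-text copy kept by the factory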
def trained_mrnn_ensemble():
    """
    Returns a TIMESERIES TrainEnsemble with two trained MRNN models.
    """
    ds = data_set_ts()
    ds2 = DataSet.objects.create(data=ds.data, filters=[], user=ds.user,
                                 name='test.ts.zip', key=make_random_str(8))
    ensemble = TrainEnsemble.objects.create(
        user=ds.user,
        out_nonlin="SOFTMAX",
        train_dataset=ds,
        test_dataset=ds2,
        data_type=TrainEnsemble.TIMESERIES,
        net_type=TrainEnsemble.NET_DEEPNET,
    )
    lm1 = LearnModel.objects.create(
        ensemble=ensemble,
        model_name='MRNN',
        model_params={'maxnum_iter': 20},
    )
    lm2 = LearnModel.objects.create(
        ensemble=ensemble,
        model_name='MRNN',
        model_params={'maxnum_iter': 20},
    )
    # Three training iterations for the first model...
    data = {
        'iteration': 1,
        'train_accuracy': 0.9,
        'test_accuracy': 0.8,
        'time': 60,
    }
    lm1.add_stat(data, '/s3.keya213')
    data.update({'iteration': 2, 'train_accuracy': 0.91})
    lm1.add_stat(data, '/s3.keyasdfadsf')
    data.update({'iteration': 3, 'train_accuracy': 0.95})
    lm1.add_stat(data, '/s3.keyasdfadsfsd')
    lm1.to_finish_state('1 2 3 4', '')
    # ...and a single, weaker iteration for the second.
    data = {
        'iteration': 1,
        'train_accuracy': 0.5,
        'test_accuracy': 0.4,
        'time': 20,
    }
    lm2.add_stat(data, '/s3.key2sdsd')
    lm2.to_finish_state('1 3 3 4', '')
    return ensemble
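# Illustrative test sketch for the ensemble factory. The reverse accessor
# learnmodel_set is Django's default name for the LearnModel foreign key
# and is assumed here, not confirmed by this excerpt.
def test_trained_mrnn_ensemble_has_two_models():
    ensemble = trained_mrnn_ensemble()
    assert ensemble.learnmodel_set.count() == 2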
def data_set():
    """
    Returns instance of DataSet, also creates DataFile and ApiUser
    """
    df = data_file()
    filters = [
        {"name": "shuffle"},
    ]
    key = "uploads/datasets/1/" + make_random_str(8) + "/manualxts.zip"
    ds = DataSet.objects.create(data=df, filters=filters, user=df.user,
                                name='test.csv.zip', key=key)
    return ds
def data_file_csv_columns():
    """
    Returns an instance of a CSV DataFile with per-column metadata;
    also creates a user.
    """
    user_ = user()
    df_key = "uploads/1/" + make_random_str(8) + "/iris.csv.zip"
    meta = {
        'version': 3,
        'data_type': 'GENERAL',
        'key': df_key,
        'size': 100,
        'archive_path': 'iris.csv',
        'data_rows': 6,
        'empty_rows': 0,
        'num_columns': 4,
        'delimeter': r'\s*,\s*',  # regex; raw string avoids invalid escapes
        'with_header': False,
        'last_column_info': {
            'max': 2.,
            'min': 0.,
            'unique': 3,
            'classes': {'0': 3, '1': 2, '2': 1},
        },
        "names": ["1", "2", "3", "4"],
        "dtypes": ["f", "i", "f", "S"],
    }
    timestamp = datetime.datetime.utcnow().replace(tzinfo=tz.tzutc())
    df = DataFile.objects.create(user=user_, key=df_key,
                                 file_format='GENERAL', name='Data',
                                 state=DataFile.STATE_READY, meta=meta)
    for i in range(3):
        timestamp += datetime.timedelta(1)
        ParseLog.objects.create(timestamp=timestamp,
                                message='Log entry #%s' % i,
                                data_file=df)
    return df
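# Illustrative sketch only: meta['names'] and meta['dtypes'] above are
# parallel lists, so column descriptors pair up with zip(). This helper is
# hypothetical and exists only to document the meta layout.
def column_dtypes(df):
    return dict(zip(df.meta['names'], df.meta['dtypes']))
    # -> {'1': 'f', '2': 'i', '3': 'f', '4': 'S'}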
def data_file():
    """
    Returns instance of DataFile, also creates user
    """
    user_ = user()
    df_key = "uploads/1/" + make_random_str(8) + "/manualxts.zip"
    meta = {
        "archive_path": "manualx.ts",
        "data_rows": 32,
        "output_size": 2,
        "data_type": "TIMESERIES",
        "binary_output": True,
        "binary_input": False,
        "min_timesteps": 95,
        "empty_rows": 0,
        "version": 3,
        "key": df_key,
        "max_timesteps": 97,
        "input_size": 2,
        "classes": {"1": 121, "0": 2951},
        "size": 6002,
    }
    timestamp = datetime.datetime.utcnow().replace(tzinfo=tz.tzutc())
    df = DataFile.objects.create(user=user_, key=df_key,
                                 file_format='TIMESERIES', name='test.ts',
                                 state=DataFile.STATE_READY, meta=meta)
    for i in range(3):
        timestamp += datetime.timedelta(1)
        ParseLog.objects.create(timestamp=timestamp,
                                message='Log entry #%s' % i,
                                data_file=df)
    return df
def restore_object(self, attrs, instance=None):
    user = self.context['view'].request.user
    if attrs.get('data'):
        # Inline data: wrap it in an uploaded file with a random name and
        # the extension matching the declared format.
        name = attrs['name']
        ext = {'TIMESERIES': '.ts', 'GENERAL': '.csv'}[attrs['file_format']]
        file_ = SimpleUploadedFile(make_random_str() + ext,
                                   attrs['data'].encode('utf-8'))
    else:
        file_ = attrs['file']
        name = attrs.get('name')
        if not name:
            name = file_.name
    key = build_key(user.pk, file_.name)
    if not file_.name.lower().endswith(settings.DATA_FILE_PLAIN_EXT):
        key += '.zip'
    return DataFile(bucket=settings.S3_BUCKET, key=key, user=user,
                    name=name, local_file=file_,
                    state=DataFile.STATE_UPLOADED)
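# Minimal sketch of the key-building rule in restore_object above, assuming
# settings.DATA_FILE_PLAIN_EXT is a tuple of plain-text extensions such as
# ('.csv', '.ts'); any other filename gets '.zip' appended to its S3 key.
def needs_zip_suffix(filename, plain_exts=('.csv', '.ts')):
    # plain_exts is a stand-in for settings.DATA_FILE_PLAIN_EXT.
    return not filename.lower().endswith(plain_exts)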
def data_file_images():
    """
    Returns instance of an IMAGES DataFile, also creates user
    """
    user_ = user()
    df_key = "uploads/1/" + make_random_str(8) + "/data.zip"
    meta = {
        'data_type': 'IMAGES',
        'size': 100,
        'classes': {'class1': 2, 'class2': 3},
    }
    timestamp = datetime.datetime.utcnow().replace(tzinfo=tz.tzutc())
    df = DataFile.objects.create(user=user_, key=df_key,
                                 file_format='IMAGES', name='images my',
                                 state=DataFile.STATE_READY, meta=meta)
    for i in range(3):
        timestamp += datetime.timedelta(1)
        ParseLog.objects.create(timestamp=timestamp,
                                message='Log entry #%s' % i,
                                data_file=df)
    return df
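# Hypothetical helper sketch: in the fixture above, meta['classes'] maps
# label -> image count, so the total image count is the sum of its values.
# Not an existing model method.
def total_images(df):
    return sum(df.meta['classes'].values())  # 5 for data_file_images()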