Example #1
def test_data_set_create_invalid_parameter(data_set_type,
                                           quicksight_application_stub):
    # stub
    data_source = DataSource(
        quicksight_application=quicksight_application_stub, props=None)
    data_source.arn = "STUBBED_DATA_SOURCE_ARN"
    quicksight_application_stub.data_source = data_source
    sub_type = data_set_type

    obj = DataSet(
        data_source=data_source,
        data_set_sub_type=sub_type,
        props=None,
        quicksight_application=quicksight_application_stub,
    )

    # Queue three stubbed invalid-parameter errors so every retry attempt fails
    for _ in range(3):
        DataSetStubber.stub_create_data_source_error_invalid_parameter_call(
            sub_type)

    response = None
    with pytest.raises(tenacity.RetryError):
        # Function under test
        response = obj.create()

    assert not response
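These tests rely on a `data_set_type` fixture and a `quicksight_application_stub` fixture from the project's conftest, neither of which is shown on this page. A minimal sketch of what the parametrized fixture could look like, using the supported_data_set_types list from Example #14 below (the fixture body itself is an assumption, not taken from the source):

import pytest

# Hypothetical conftest fixture; parameters mirror supported_data_set_types
# in Example #14, but the fixture itself is reconstructed, not original.
@pytest.fixture(params=['sentiment', 'image-text', 'text', 'topic'])
def data_set_type(request):
    return request.param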
Example #2
def test_data_set_create_exist(data_set_type, quicksight_application_stub):
    # stub
    data_source = DataSource(
        quicksight_application=quicksight_application_stub, props=None)
    data_source.arn = "STUBBED_DATA_SOURCE_ARN"
    quicksight_application_stub.data_source = data_source
    sub_type = data_set_type

    obj = DataSet(
        data_source=data_source,
        data_set_sub_type=sub_type,
        props=None,
        quicksight_application=quicksight_application_stub,
    )

    DataSetStubber.stub_create_data_set_error_call(sub_type)
    DataSetStubber.stub_describe_data_set_call(sub_type)

    # Function under test
    response = obj.create()

    # This is the response to describe_data_set, since the code remaps the response
    assert response
    # The describe_data_set response does not provide a status at the DataSet level, such
    # as "Status": "CREATION_SUCCESSFUL"
    # Therefore, we verify CreatedTime not being None
    assert response["CreatedTime"]
    assert obj.arn
Example #3
def test_data_set_missing_data_source(quicksight_application_stub, data_set_type):
    missing_data_source = None
    data_set = DataSet(
        data_source=missing_data_source,
        data_set_sub_type=data_set_type,
        props=None,
        quicksight_application=quicksight_application_stub,
    )
    with pytest.raises(ValueError):
        data_set.create()
Example #4
def test_data_set_invalid_sub_type(quicksight_application_stub):
    data_source = DataSource(
        quicksight_application=quicksight_application_stub, props=None)
    invalid_sub_type = "TEST_INVALID_SUB_TYPE"
    data_set = DataSet(
        data_source=data_source,
        data_set_sub_type=invalid_sub_type,
        props=None,
        quicksight_application=quicksight_application_stub,
    )
    with pytest.raises(Exception):
        data_set.create()
Example #5
def test_data_set_get_data(quicksight_application_stub, data_set_type):
    data_source = DataSource(
        quicksight_application=quicksight_application_stub, props=None)
    data_set = DataSet(
        data_source=data_source,
        data_set_sub_type=data_set_type,
        props=None,
        quicksight_application=quicksight_application_stub,
    )
    expected_name = f"DHT_Unit_Test-dataset-{data_set_type}"
    assert data_set.get_data() == {
        "id": expected_name,
        "name": expected_name,
        "arn": f"arn:aws:quicksight:us-east-1:MOCK_ACCOUNT:dataset/{expected_name}",
    }
Example #6
def test_data_set_create_all_data_set_types(data_set_type, quicksight_application_stub):
    data_source = DataSource(quicksight_application=quicksight_application_stub, props=None)

    # stub
    data_source.arn = "STUBBED_DATA_SOURCE_ARN"
    quicksight_application_stub.data_source = data_source

    data_set = DataSet(
        data_source=data_source,
        data_set_sub_type=data_set_type,
        data_set_name=f"_{data_set_type}_DataSet",
        props=None,
        quicksight_application=quicksight_application_stub
    )

    dump_state(data_set, 'Before create')
    DataSetStubber.stub_create_data_set(data_set_type)
    data_set.create()
    dump_state(data_set, 'After create')
Example #7
def test_data_set_init_all_data_set_types(data_set_type, quicksight_application_stub):
    data_source = DataSource(quicksight_application=quicksight_application_stub, props=None)
    data_set = DataSet(
        data_source=data_source,
        data_set_sub_type=data_set_type,
        data_set_name=f"_{data_set_type}_DataSet",
        props=None,
        quicksight_application=quicksight_application_stub
    )
    dump_state(data_set, 'After initialization')
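The `dump_state` helper used above is not shown in these listings; a minimal stand-in, assuming a module-level logger and that the helper only logs a marker message plus the object's state at debug level:

def dump_state(obj, message=None):
    # Hypothetical helper: log a marker, then the object's repr.
    if message:
        logger.debug(message)
    logger.debug(repr(obj))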
Example #8
def test_data_set_repr(quicksight_application_stub, data_set_type):
    data_source = DataSource(quicksight_application=quicksight_application_stub, props=None)
    data_set = DataSet(
        data_source=data_source,
        data_set_sub_type=data_set_type,
        data_set_name=f"_{data_set_type}_DataSet",
        props=None,
        quicksight_application=quicksight_application_stub
    )
    assert repr(data_set) == "{'id': 'DHT_Unit_Test_dataset', 'name': 'DHT_Unit_Test_dataset', 'arn': None}"
Example #9
File: test.py Project: qiaohan/tf-utils
def test_typenet():
    params = param_parse("cfgs/TypeNet.yml")
    #params = param_parse("cfgs/Test_TypeNet.yml")
    net = TypeNet(params, 'test')
    trainds = DataSet(params.datalist_file, params.path_base, params.batch_size)
    net.set_dataset(trainds, trainds)
    with tf.Session() as sess:
        tf.global_variables_initializer().run()
        #net.loadfromcaffe("pretrained/vgg16.npy",sess)
        net.load(sess)
        net.test(sess)
Example #10
File: test.py Project: qiaohan/tf-utils
def test_labelnet():
    params = param_parse("cfgs/LabelNet.yml")
    net = LabelNet(params, 'train')
    trainds = DataSet(params.datalist_file, params.path_base, params.batch_size)
    net.set_dataset(trainds, trainds)
    with tf.Session() as sess:
        tf.global_variables_initializer().run()
        #net.loadfromcaffe("pretrained/vgg16.npy",sess)
        net.load(sess)
        #net.savenpy("ckpt_npy/labelnet.npy",sess)
        net.test(sess)
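`param_parse` belongs to the same project and is not shown here; a plausible minimal version, assuming the .yml files map directly onto attribute-style params (datalist_file, path_base, batch_size):

import yaml
from types import SimpleNamespace

def param_parse(path):
    # Hypothetical stand-in: load a YAML config into an
    # attribute-accessible namespace.
    with open(path) as f:
        return SimpleNamespace(**yaml.safe_load(f))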
Example #11
def test_data_set_delete_all_data_set_types(data_set_type, quicksight_application_stub):
    data_source = DataSource(quicksight_application=quicksight_application_stub, props=None)

    # stub
    data_source.arn = "STUBBED_DATA_SOURCE_ARN"
    quicksight_application_stub.data_source = data_source

    logger.info(f'Initializing dataset object for type: {data_set_type}')
    data_set = DataSet(
        data_source=data_source,
        data_set_sub_type=data_set_type,
        data_set_name=f"_{data_set_type}_DataSet",
        props=None,
        quicksight_application=quicksight_application_stub
    )
    logger.debug(f'After initializing dataset object for type: {data_set_type}')

    dump_state(data_set, 'Before delete')
    DataSetStubber.stub_delete_data_set(data_set_type)
    data_set.delete()
    dump_state(data_set, 'After delete')
Example #12
    def train(self, sess):

        self.build_model()

        # Prepare training data
        train_data = DataSet(self.data_path, image_size=self.image_size)
        num_batches = int(math.ceil(len(train_data) / self.batch_size))

        # initialize training
        sess.run(tf.global_variables_initializer())
        sess.run(tf.local_variables_initializer())

        sample_results = np.random.randn(self.num_chain * num_batches,
                                         self.image_size, self.image_size, 3)

        saver = tf.train.Saver(max_to_keep=50)

        writer = tf.summary.FileWriter(self.log_dir, sess.graph)

        # make graph immutable
        tf.get_default_graph().finalize()

        # store graph in protobuf
        with open(self.model_dir + '/graph.proto', 'w') as f:
            f.write(str(tf.get_default_graph().as_graph_def()))

        # train
        for epoch in range(self.num_epochs):
            start_time = time.time()
            for i in range(num_batches):

                obs_data = train_data[i * self.batch_size:
                                      min(len(train_data),
                                          (i + 1) * self.batch_size)]

                # Step G0: generate X ~ N(0, 1)
                z_vec = np.random.randn(self.num_chain, self.z_size)
                g_res = sess.run(self.gen_res, feed_dict={self.z: z_vec})
                # Step D1: obtain synthesized images Y
                syn = g_res  # default so syn is defined even when t1 == 0
                if self.t1 > 0:
                    syn = sess.run(self.langevin_descriptor,
                                   feed_dict={self.syn: g_res})
                # Step G1: update X using Y as training image
                if self.t2 > 0:
                    z_vec = sess.run(self.langevin_generator,
                                     feed_dict={
                                         self.z: z_vec,
                                         self.obs: syn
                                     })
                # Step D2: update D net
                d_loss = sess.run(
                    [self.des_loss, self.des_loss_update, self.apply_d_grads],
                    feed_dict={
                        self.obs: obs_data,
                        self.syn: syn
                    })[0]
                # Step G2: update G net
                g_loss = sess.run(
                    [self.gen_loss, self.gen_loss_update, self.apply_g_grads],
                    feed_dict={
                        self.obs: syn,
                        self.z: z_vec
                    })[0]

                # Metrics
                mse = sess.run([self.recon_err, self.recon_err_update],
                               feed_dict={
                                   self.obs: obs_data,
                                   self.syn: syn
                               })[0]
                sample_results[i * self.num_chain:(i + 1) *
                               self.num_chain] = syn
                print(
                    'Epoch #{:d}, [{:2d}]/[{:2d}], des loss: {:.4f}, gen loss: {:.4f}, '
                    'L2 distance: {:4.4f}'.format(epoch, i + 1, num_batches,
                                                  d_loss.mean(), g_loss.mean(),
                                                  mse))
                if i == 0 and epoch % self.log_step == 0:
                    save_sample_results(syn,
                                        "%s/des%03d.png" %
                                        (self.sample_dir, epoch),
                                        col_num=self.n_tile_col)
                    save_sample_results(g_res,
                                        "%s/gen%03d.png" %
                                        (self.sample_dir, epoch),
                                        col_num=self.n_tile_col)

            [des_loss_avg, gen_loss_avg, mse_avg, summary] = sess.run([
                self.des_loss_mean, self.gen_loss_mean, self.recon_err_mean,
                self.summary_op
            ])

            end_time = time.time()
            print(
                'Epoch #{:d}, avg.des loss: {:.4f}, avg.gen loss: {:.4f}, avg.L2 distance: {:4.4f}, '
                'lr.des: {:f} lr.gen: {:f} time: {:.2f}s'.format(
                    epoch, des_loss_avg, gen_loss_avg, mse_avg,
                    self.lr_des.eval(), self.lr_gen.eval(),
                    end_time - start_time))
            writer.add_summary(summary, epoch)
            writer.flush()

            if epoch % self.log_step == 0:
                make_dir(self.model_dir)
                saver.save(sess,
                           "%s/%s" % (self.model_dir, 'model.ckpt'),
                           global_step=epoch)
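`make_dir` and `save_sample_results` are project helpers not shown on this page. `make_dir` is presumably just a guarded mkdir; a hypothetical minimal version:

import os

def make_dir(path):
    # Hypothetical stand-in: create the checkpoint directory if missing.
    os.makedirs(path, exist_ok=True)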
Example #13
def new_fc_layer(inp, num_inputs, num_outputs, use_relu=False, keep_prob=None):
    # NOTE: this listing starts mid-function; the signature and the weights
    # line are reconstructed from the calls below and are an assumption.
    weights = new_weights(shape=[num_inputs, num_outputs])
    biases = new_biases(length=num_outputs)

    layer = inp

    if keep_prob:
        layer = tf.nn.dropout(inp, keep_prob) * keep_prob

    # use the (possibly dropped-out) layer, not the raw input
    layer = tf.matmul(layer, weights) + biases

    if use_relu:
        layer = tf.nn.elu(layer)  # Ja! (note: applies ELU despite the flag name)

    return layer


data = DataSet(14, 0.9, '../data/letters/', validation=False)

x = tf.placeholder(tf.float32, [None, data.img_size_flat], name='x')
y_true = tf.placeholder(tf.float32, [None, data.num_classes], name='y_true')
y_true_cls = tf.argmax(y_true, axis=1)

layer_fc2 = new_fc_layer(inp=x,
                         num_inputs=data.img_size_flat,
                         num_outputs=fc2_size,
                         use_relu=True,
                         keep_prob=DROP)

layer_fc3 = new_fc_layer(inp=layer_fc2,
                         num_inputs=fc2_size,
                         num_outputs=data.num_classes)
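The script stops after the last layer. A hypothetical continuation under the usual TF1 pattern for this kind of classifier (`layer_fc3`, `y_true`, and `y_true_cls` come from the snippet above; the loss, optimizer, and learning rate are assumptions, not taken from the source):

# Hypothetical classification head; assumes TF1 graph mode as above.
y_pred_cls = tf.argmax(tf.nn.softmax(layer_fc3), axis=1)
cross_entropy = tf.nn.softmax_cross_entropy_with_logits_v2(labels=y_true,
                                                           logits=layer_fc3)
cost = tf.reduce_mean(cross_entropy)
optimizer = tf.train.AdamOptimizer(learning_rate=1e-4).minimize(cost)
accuracy = tf.reduce_mean(
    tf.cast(tf.equal(y_pred_cls, y_true_cls), tf.float32))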
Example #14
    def __init__(self, resource_properties):
        # TODO: use the config data / file to figure out the supported_data_set_types
        supported_data_set_types = ['sentiment', 'image-text', 'text', 'topic']

        self.resource_properties = resource_properties
        self.global_state = get_global_state()

        # use config data file if provided
        config_file = resource_properties.get('ConfigDataFile', None)
        if config_file:
            data = read_config(config_file)
            self.global_state.update(data)

        # TODO:RENAME: 'StackName' key name as this could be used out of stack. ApplicationName?
        # TODO: create/use a uuid if no stack name is provided
        self.prefix = resource_properties.get('StackName', 'Sample_Sol')

        # TODO:RENAME: quicksight_template_arn -> quicksight_source_template_arn
        self.quicksight_template_arn = resource_properties.get(
            'QuickSightSourceTemplateArn',
            'Uninitialized QuickSightSourceTemplateArn')
        logger.debug(
            f'Using QuickSightSourceTemplateArn: {self.quicksight_template_arn}'
        )

        self.quicksight_principal_arn = resource_properties.get(
            'QuickSightPrincipalArn', 'Uninitialized QuickSightPrincipalArn')
        logger.debug(
            f'Using QuickSightPrincipalArn: {self.quicksight_principal_arn}')

        self.data_source = DataSource(quicksight_application=self,
                                      props=self.global_state)
        self.data_source.athena_workgroup = resource_properties.get(
            'WorkGroupName', 'primary')

        self.data_set_sub_types = supported_data_set_types
        self.data_sets = dict()
        for data_set_sub_type in self.data_set_sub_types:
            data_set = DataSet(quicksight_application=self,
                               data_source=self.data_source,
                               data_set_sub_type=data_set_sub_type,
                               data_set_name=f"_{data_set_sub_type}_dataset",
                               props=self.global_state)
            self.data_sets[data_set_sub_type] = data_set

        self.analysis = Analysis(
            quicksight_application=self,
            data_sets=self.data_sets,
            quicksight_template_arn=self.quicksight_template_arn,
            data_source=self.data_source,
            props=self.global_state)

        self.dashboard = Dashboard(
            quicksight_application=self,
            data_sets=self.data_sets,
            quicksight_template_arn=self.quicksight_template_arn,
            props=self.global_state)

        self.template = Template(quicksight_application=self,
                                 data_sets=self.data_sets,
                                 props=self.global_state)

        global_state_json = json.dumps(self.global_state,
                                       indent=2,
                                       sort_keys=True)
        logger.debug(
            f'QuicksightApi: after init, global data json: {global_state_json}'
        )

    def __init__(self, resource_properties):
        # TODO: use the config data / file to figure out the supported_data_set_types
        supported_data_set_types = [
            "image-text", "topic", "image-moderation-label", "geo",
            "topic-mapping", "feed"
        ]

        self.resource_properties = resource_properties
        self.global_state = get_global_state()

        # use config data file if provided
        config_file = resource_properties.get("ConfigDataFile", None)
        if config_file:
            data = read_config(config_file)
            self.global_state.update(data)

        self.prefix = resource_properties.get("StackName", "Sample_Sol")

        # TODO:RENAME: quicksight_template_arn -> quicksight_source_template_arn
        self.quicksight_template_arn = resource_properties.get(
            "QuickSightSourceTemplateArn",
            "Uninitialized QuickSightSourceTemplateArn")
        logger.debug(
            f"Using QuickSightSourceTemplateArn: {self.quicksight_template_arn}"
        )

        self.quicksight_principal_arn = resource_properties.get(
            "QuickSightPrincipalArn", "Uninitialized QuickSightPrincipalArn")
        logger.debug(
            f"Using QuickSightPrincipalArn: {self.quicksight_principal_arn}")

        self.data_source = DataSource(quicksight_application=self,
                                      props=self.global_state)
        self.data_source.athena_workgroup = resource_properties.get(
            "WorkGroupName", "primary")

        self.data_set_sub_types = supported_data_set_types
        self.data_sets = dict()
        for data_set_sub_type in self.data_set_sub_types:
            data_set = DataSet(
                quicksight_application=self,
                data_source=self.data_source,
                data_set_sub_type=data_set_sub_type,
                props=self.global_state,
            )
            self.data_sets[data_set_sub_type] = data_set

        self.analysis = Analysis(
            quicksight_application=self,
            data_sets=self.data_sets,
            quicksight_template_arn=self.quicksight_template_arn,
            data_source=self.data_source,
            props=self.global_state,
        )

        self.dashboard = Dashboard(
            quicksight_application=self,
            data_source=self.data_source,
            data_sets=self.data_sets,
            quicksight_template_arn=self.quicksight_template_arn,
            props=self.global_state,
        )

        self.template = Template(quicksight_application=self,
                                 data_sets=self.data_sets,
                                 props=self.global_state)

        global_state_json = json.dumps(self.global_state,
                                       indent=2,
                                       sort_keys=True)
        logger.debug(
            f"QuicksightApi: after init, global data json: {global_state_json}"
        )
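`get_global_state` and `read_config` come from the surrounding module and are not shown in this listing; plausible minimal versions, assuming a JSON config file and a module-level state dict (both are assumptions, not the project's actual implementation):

import json

_GLOBAL_STATE = dict()

def get_global_state():
    # Hypothetical: shared mutable state for the application.
    return _GLOBAL_STATE

def read_config(config_file):
    # Hypothetical: the config file is assumed to be JSON.
    with open(config_file) as f:
        return json.load(f)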