Example 1
    def testFindMatchingProductions(self):
        # Providing no productions should result in no matches.
        gen = Generator()
        g = Graph()
        self.assertEqual(len(gen._findMatchingProductions(g, [])), 0)
        
        # We have a production, but the LHS can't be found in the graph.
        # No solutions.
        g = Graph()
        g.addEdge(Vertex('g0', 'A'), Vertex('g1', 'B'))
        lhs = Graph()
        lhs.addEdge(Vertex('g0', 'C'), Vertex('g1', 'D'))
        rhs = Graph()
        p1 = Production(lhs, rhs)
        gen = Generator()
        self.assertEqual(len(gen._findMatchingProductions(g, [p1])), 0)

        # One matching production, a simple vertex "A".
        g = Graph()
        g.addEdge(Vertex('g0', 'A'), Vertex('g1', 'B'))
        lhs = Graph()
        lhs.addVertex(Vertex('g0', 'A', '1'))
        rhs = Graph()
        p1 = Production(lhs, rhs)
        self.assertEqual(len(gen._findMatchingProductions(g, [p1])), 1)

        # Two matching productions.
        g = Graph()
        g.addEdge(Vertex('g0', 'A'), Vertex('g1', 'B'))
        lhs = Graph()
        lhs.addVertex(Vertex('g0', 'A', '2'))
        rhs = Graph()
        p1 = Production(lhs, rhs)
        p2 = Production(lhs, rhs)
        self.assertEqual(len(gen._findMatchingProductions(g, [p1, p2])), 2)
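These tests (like Examples 10, 16, 18, 23, 25, and 28 below) exercise a graph-grammar Generator. A minimal sketch of the Vertex/Graph/Production containers the assertions rely on; the attribute names (_vertices, _edges, numVertices) are inferred from the tests, not taken from the original project:

class Vertex:
    """A labelled vertex; `number` ties corresponding vertices across LHS and RHS."""
    def __init__(self, vid, label, number=None):
        self.id = vid
        self.label = label
        self.number = number

class Graph:
    """Adjacency-list graph exposing the attributes the tests poke at."""
    def __init__(self):
        self._vertices = {}  # id -> Vertex
        self._edges = {}     # id -> [Vertex]

    @property
    def numVertices(self):
        return len(self._vertices)

    def addVertex(self, v):
        self._vertices.setdefault(v.id, v)
        self._edges.setdefault(v.id, [])
        return self._vertices[v.id]

    def addEdge(self, u, v):
        # Either endpoint may be a Vertex or the id of one already added.
        u = self._vertices[u] if isinstance(u, str) else self.addVertex(u)
        v = self._vertices[v] if isinstance(v, str) else self.addVertex(v)
        self._edges[u.id].append(v)

class Production:
    """A rewrite rule: a matched copy of `lhs` is replaced by `rhs`."""
    def __init__(self, lhs, rhs):
        self.lhs = lhs
        self.rhs = rhs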
Example 2
 def __init__(self, template_dir, basepackage):
     Generator.__init__(self, None, None, None, None)
     self.basepackage = basepackage
     if not template_dir:
         self.template_path = './templates/pom/template.txt'
     else:
         self.template_path = join(template_dir, 'template.txt')
Example 3
def logistic_regression(n,
                        mx1,
                        vx1,
                        my1,
                        vy1,
                        mx2,
                        vx2,
                        my2,
                        vy2,
                        optimizer='SGD'):
    """
    weights : shape=(k=3,1)
    Input
    -----
    optimizer : 'SGD' or 'NTM'
        'SGD' == 'Steepest Gradient Descent'
        'NTM' == 'Newton's Method'
    """
    inputs = []
    labels = []
    D1_label = 0.0
    D2_label = 1.0
    bias_term = 1.0
    for _ in range(n):
        # Data 1
        D1x = Generator.univariate_gaussian(mx1, vx1)
        D1y = Generator.univariate_gaussian(my1, vy1)
        inputs.append([bias_term, D1x, D1y])
        labels.append([D1_label])
        # Data 2
        D2x = Generator.univariate_gaussian(mx2, vx2)
        D2y = Generator.univariate_gaussian(my2, vy2)
        inputs.append([bias_term, D2x, D2y])
        labels.append([D2_label])
    inputs = Mat(inputs)
    labels = Mat(labels)
    # init weights
    weights = Mat([[-6.0], [1.0], [-0.1]])
    print('inputs shape:\t', inputs.shape)
    print('labels shape:\t', labels.shape)
    print('weights shape:\t', weights.shape)
    # optimization
    if optimizer == 'SGD':
        weights = steepest_gradient_descent(weights, inputs, labels)
    elif optimizer == 'NTM':
        weights = newton_method(weights, inputs, labels)
    else:
        raise ValueError('{} is not a valid optimizer'.format(optimizer))
    # inference
    logits = inference(weights, inputs)
    # evaluate model
    CM = ConfusionMatrix(logits, labels)
    CM.show_matrix()
    CM.show_accuracy()
    CM.show_sensitivity()
    CM.show_specificity()
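This example and the sequential estimator in Example 21 both sample through Generator.univariate_gaussian(mean, variance). A minimal sketch of such a sampler using the Box-Muller transform; the transform is an assumption, and the original class may draw its samples differently:

import math
import random

class Generator:
    @staticmethod
    def univariate_gaussian(mean, variance):
        # Box-Muller transform: two independent uniforms -> one standard normal deviate.
        u1 = 1.0 - random.random()  # keep u1 > 0 so log(u1) is defined
        u2 = random.random()
        z = math.sqrt(-2.0 * math.log(u1)) * math.cos(2.0 * math.pi * u2)
        # Scale by the standard deviation and shift by the mean.
        return mean + math.sqrt(variance) * z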
Example 4
 def on_create_train(self):
     msg = ""
     if self.selectedPathes:
         for p in self.selectedPathes:
             if os.path.isdir(p):
                 self.generator = Generator(p, self.logger)
                 self.generator.createCSVLabelMap()
     else:
         self.generator = Generator(self.directory, self.logger)
         self.generator.createCSVLabelMap()
Example 5
 def build_generators(self):
     self.generator_A = Generator(
         n_filters=self.generator_filters,
         image_size=128,
         image_channels=3
     )

     self.generator_B = Generator(
         n_filters=self.generator_filters,
         image_size=128,
         image_channels=3
     )
Example 6
File: test.py Project: shrkwv/pwg
    def test_get_values_for_recursive_simple_functions(self):
        right_result = [
            '114BT00F', '114BT0OF', '114BTO0F', '114BTOOF', '11ABT00F',
            '11ABT0OF', '11ABTO0F', '11ABTOOF', '1L4BT00F', '1L4BT0OF',
            '1L4BTO0F', '1L4BTOOF', '1LABT00F', '1LABT0OF', '1LABTO0F',
            '1LABTOOF', 'L14BT00F', 'L14BT0OF', 'L14BTO0F', 'L14BTOOF',
            'L1ABT00F', 'L1ABT0OF', 'L1ABTO0F', 'L1ABTOOF', 'LL4BT00F',
            'LL4BT0OF', 'LL4BTO0F', 'LL4BTOOF', 'LLABT00F', 'LLABT0OF',
            'LLABTO0F', 'LLABTOOF', '114BT3KS4B', '114BT3KSAB', '114BTEKS4B',
            '114BTEKSAB', '11ABT3KS4B', '11ABT3KSAB', '11ABTEKS4B',
            '11ABTEKSAB', '1L4BT3KS4B', '1L4BT3KSAB', '1L4BTEKS4B',
            '1L4BTEKSAB', '1LABT3KS4B', '1LABT3KSAB', '1LABTEKS4B',
            '1LABTEKSAB', 'L14BT3KS4B', 'L14BT3KSAB', 'L14BTEKS4B',
            'L14BTEKSAB', 'L1ABT3KS4B', 'L1ABT3KSAB', 'L1ABTEKS4B',
            'L1ABTEKSAB', 'LL4BT3KS4B', 'LL4BT3KSAB', 'LL4BTEKS4B',
            'LL4BTEKSAB', 'LLABT3KS4B', 'LLABT3KSAB', 'LLABTEKS4B',
            'LLABTEKSAB'
        ]

        patterns = Pattern('|upper(leet(reverse(hobbies)))|')
        tokens = patterns.get_tokens()
        manipulated_result = []
        for token in tokens:
            # function = token.get_function()
            function_and_args = token.get_function_and_args()
            category = token.get_category()
            generator = Generator(patterns, self.profile)
            result = generator.get_values_for_simple_token(category)

            while function_and_args:
                cur_function, args = function_and_args.pop()
                result = Utils.manipulate_list(cur_function, result)
            manipulated_result += result

        self.assertEqual(right_result, manipulated_result)
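The expected strings are just the profile's hobby entries pushed through reverse, then a leet expansion, then uppercase. A self-contained illustration of that pipeline, with the hobby words ('football', 'basketball') and the substitution map inferred from the expected output; reverse/leet/upper here are stand-ins for the project's Pattern/Generator/Utils machinery:

from itertools import product

# Substitution map inferred from the expected strings (note: no '5' for 'S' appears there).
LEET = {'a': '4', 'e': '3', 'l': '1', 'o': '0'}

def reverse(words):
    return [w[::-1] for w in words]

def leet(words):
    # For each word, emit every combination of leet/original characters,
    # leet variant first to match the ordering of the expected list.
    out = []
    for w in words:
        choices = [(LEET[c], c) if c in LEET else (c,) for c in w.lower()]
        out.extend(''.join(p) for p in product(*choices))
    return out

def upper(words):
    return [w.upper() for w in words]

hobbies = ['football', 'basketball']  # inferred: 'llabtoof' is 'football' reversed
print(upper(leet(reverse(hobbies))))  # 32 + 32 = 64 candidate strings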
Example 7
def run(fread, fwrite, models, factory):
    gold, lexicalized, realized = [], {}, {}
    for amr in utils.parse_corpus(fread, True):
    print(amr['sentence'])
        try:
            gen = Generator(amr=amr['amr'],
                            erg_factory=factory,
                            models=models,
                            beam_n=10)

            candidates = gen.run()
            for i in range(10):
                if i not in lexicalized:
                    lexicalized[i] = []

                if i < len(candidates):
                    tree = candidates[i].tree
                    lexicalized[i].append(tree.realize(root=tree.root,
                                                       text=''))
                    # print tree.realize(root=tree.root, text=''), ' \t', tree.prettify(root=tree.root, isRule=False)
                else:
                    lexicalized[i].append('-')
        except Exception:
            print('Error')
            for i in range(10):
                if i not in lexicalized:
                    lexicalized[i] = []
                lexicalized[i].append('-')
        gold.append(amr['sentence'])

    write(gold, os.path.join(fwrite, 'gold'))
    write(lexicalized, os.path.join(fwrite, 'lexicalized'))
Example 8
    def __init__(self, monet_path, photo_path, image_shape, batch_size):

        # Load Datasets
        self.monet_path = monet_path
        self.photo_path = photo_path
        self.monets = glob.glob(self.monet_path)
        self.photos = glob.glob(self.photo_path)
        self.num_monets = len(self.monets)
        self.num_photos = len(self.photos)
        self.batch_size = batch_size

        # Data shape
        self.image_shape = image_shape

        # Instantiate models
        self.generator = Generator(self.monets, self.photos, self.image_shape,
                                   self.batch_size)
        self.generator.build()
        self.generator.model.compile(loss='binary_crossentropy')

        print('-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=' * 3)

        self.discriminator = Discriminator(self.monets, self.photos,
                                           self.image_shape, self.batch_size)
        self.discriminator.build()
        self.discriminator.model.compile(loss='binary_crossentropy')
Example 9
def main():
    clientGenerator = Generator(UniformDistribution(8, 12))

    firstQueue = []
    secondQueue = []

    operators = [
        Operator(firstQueue, UniformDistribution(15, 25)),
        Operator(firstQueue, UniformDistribution(30, 50)),
        Operator(secondQueue, UniformDistribution(20, 60))
    ]

    processors = [
        Processor(firstQueue, UniformDistribution(15, 15)),
        Processor(secondQueue, UniformDistribution(30, 30))
    ]

    totalRequests = 3000

    tStart = time()
    res = modeling(clientGenerator, operators, processors, totalRequests)

    print('time (secs)', time() - tStart)
    for key in res.keys():
        print(key, res[key])

    print('lost', res['lost'] / totalRequests)
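Examples 9, 14, and 22 drive the same event-based queueing model. A minimal sketch of the UniformDistribution dependency they share (EvenDistribution in Example 14 looks like the same idea under another name); the generate() method name is a guess from the usage pattern, not confirmed by the source:

import random

class UniformDistribution:
    """Uniform arrival/service time on [a, b]; a == b yields a constant time."""
    def __init__(self, a, b):
        self.a, self.b = a, b

    def generate(self):
        # random.uniform handles a == b by returning a.
        return random.uniform(self.a, self.b)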
Example 10
    def testApplyProduction_Blackbox4(self):
        # More complex black-box test. This time we have several productions
        # in various configurations, and we build a big graph (50 vertices).

        input = """
        configuration {
            min_vertices = 50;
        }

        productions {
            # Start graph
            A->B, A->C;

            # Productions
            A->C, A->B ==> A->D->C, A->B;
            A->D ==> A->D->E;
            D->E ==> D->F->E, D->G;
            G ==> G->A->D;
        }
"""

        gen = Generator()
        f = gen._parseGrammarFile(input)
        logging.debug('start graph is...')
        logging.debug(f.startGraph)

        gen.applyProductions(f.startGraph, f.productions, f.config)

        logging.info(f.startGraph)
        self.assertEqual(f.startGraph.numVertices, 50)
Example 11
    def addPass():
        # Ask the user which category the password belongs to and where it will be used.
        if not PasswordManager.mstrPWStor:
            category = input("What do you want to call your first category? ")
        else:
            category = input(f"What category is this password; {useful.Strings.lstToStr(PasswordManager.mstrPWStor, ', ', False)}, or enter a new name to create a new category? ")
        passFor = input("Where is this password going to be used? ")

        # Ask the user for the password, or generate one of the requested strength.
        ui = getpass("Type a password, or put a number 1 to 3 for a password with that strength to be generated, 1 being super weak, 3 being super strong: ")
        while True:
            useful.Terminal.clear()
            if ui.isdigit():
                if 1 <= int(ui) <= 3:
                    if category not in PasswordManager.mstrPWStor:
                        PasswordManager.mstrPWStor[category] = {}
                    randPass = Password(Generator.genPass(int(ui) + 2))
                    PasswordManager.mstrPWStor[category][passFor] = randPass.getPass()
                    break
            else:
                passTemp = Password(ui)
                if passTemp.check():
                    if category not in PasswordManager.mstrPWStor:
                        PasswordManager.mstrPWStor[category] = {}
                    PasswordManager.mstrPWStor[category][passFor] = passTemp.getPass()
                    break
            ui = getpass("That password was insecure. Type a password, or put a number 1 to 3 for a password with that strength to be generated, 1 being super weak, 3 being super strong: ")
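Generator.genPass receives int(ui) + 2, i.e. a strength in 3..5. A hypothetical implementation consistent with that caller; the length policy and alphabet are assumptions, not the project's actual rules:

import secrets
import string

class Generator:
    @staticmethod
    def genPass(strength):
        # Hypothetical policy: strength 3..5 maps to 8/12/16 characters.
        length = {3: 8, 4: 12, 5: 16}[strength]
        alphabet = string.ascii_letters + string.digits + string.punctuation
        # secrets gives cryptographically strong random choices.
        return ''.join(secrets.choice(alphabet) for _ in range(length))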
Example 12
def test(FLAGS):

    sample_size = FLAGS.eval_size
    z_size = FLAGS.zsize
    cuda = FLAGS.cuda
    g_path = FLAGS.gpath
    d_path = FLAGS.dpath
    map_location = 'cuda' if cuda else 'cpu'

    # Load the models
    dckpt = torch.load(d_path, map_location=map_location)
    gckpt = torch.load(g_path, map_location=map_location)

    D = Discriminator(784, 128, 1)
    G = Generator(100, 32, 784)

    D.load_state_dict(dckpt['state_dict'])
    G.load_state_dict(gckpt['state_dict'])

    # Define some latent vectors
    z = np.random.uniform(-1, 1, size=(sample_size, z_size))
    z = torch.from_numpy(z).float()

    if cuda:
        z = z.cuda()

    # Eval mode
    G.eval()

    rand_images = G(z)

    view_samples(0, [rand_images])
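view_samples is not shown. A plausible matplotlib stand-in that renders the 784-dimensional generator outputs as a grid of 28x28 grayscale images; the signature is inferred from the call above:

import matplotlib.pyplot as plt

def view_samples(epoch, samples, n=16):
    # Show the first n vectors of the latest batch as 28x28 grayscale images.
    images = samples[-1].detach().cpu().view(-1, 28, 28)
    fig, axes = plt.subplots(2, n // 2, figsize=(n, 4))
    for img, ax in zip(images, axes.flatten()):
        ax.imshow(img, cmap='gray')
        ax.axis('off')
    plt.show()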
Example 13
    def __init__(self, experiment_name, vizualize, num_epochs, n_observations):
        # Create Experiment name dir for records
        self.experiment_name = experiment_name
        self.n_observations = n_observations

        self.viz = create_viz('{}_{}'.format(
            name_env, self.experiment_name)) if vizualize else None

        self.dataset, self.dataloader, self.device = load_dataset(
            self.viz, folder_name=self.experiment_name)

        self.netG = Generator(ngpu).to(self.device)
        self.netD = Discriminator(ngpu).to(self.device)

        self.start_epoch = self.filehandling_experiment()
        self.num_epochs = num_epochs

        # A fixed batch of latent noise, so progress can be evaluated on the same inputs.
        self.fixed_noise = torch.randn(64, nz, 1, 1, device=self.device)

        # Set up RMSprop optimizers for both G and D
        self.optimizerD = optim.RMSprop(self.netD.parameters(), lr=lr)
        self.optimizerG = optim.RMSprop(self.netG.parameters(), lr=lr)
Example 14
def main():
    client_generator = Generator(EvenDistribution(8, 12))

    first_queue = []
    second_queue = []

    operators = [
        Operator(first_queue, EvenDistribution(15, 25)),   # the most productive operator
        Operator(first_queue, EvenDistribution(30, 50)),
        Operator(second_queue, EvenDistribution(20, 60))   # the least productive operator
    ]

    processors = [
        Processor(first_queue, EvenDistribution(15, 15)),  # exactly 15 minutes
        Processor(second_queue, EvenDistribution(30, 30))  # exactly 30 minutes
    ]

    total_requests = 300

    t_start = time()
    res = modeling(client_generator, operators, processors, total_requests)

    print('time seconds', time() - t_start)
    for key in res.keys():
        print(key, res[key])

    print('lost', res['lost'] / total_requests)
Example 15
def get_row_key(datetime_str, use_old_generator=True):
    # Build the key with the legacy Generator or the newer DatetimeKeyGenerator.
    if use_old_generator:
        rk_generator = Generator()
    else:
        rk_generator = DatetimeKeyGenerator()
    return rk_generator.generateTimeRowKeyStart(datetime_str)
Example 16
    def testApplyProduction_Blackbox3(self):
        # Another black-box test. This time with a split LHS: A->B,A->C

        input = """
        # Grammar file for testing.

        configuration {
            min_vertices = 4;
        }

        productions {
            # Start graph
            A->B, A->C;

            # Productions
            A->C, A->B ==> A->D->C, A->B;
        }
"""

        gen = Generator()
        f = gen._parseGrammarFile(input)
        logging.debug('start graph is...')
        logging.debug(f.startGraph)

        gen.applyProductions(f.startGraph, f.productions, f.config)

        self.assertEqual(f.startGraph.numVertices, 4)
Example 17
 def setUp(self):
     self.data = open('namesBoys.txt', 'r')
     self.min_name_length = 2
     self.max_name_length = 5
     self.number_of_names = 5
     self.model_order = 2
     self.model = Generator(self.data, self.min_name_length, self.max_name_length, self.model_order, self.number_of_names)
Example 18
    def testApplyProductions(self):
        # Start graph already has the minimum number of vertices. Nothing done.
        g = Graph()
        c = {'min_vertices':0}
        gen = Generator()
        gen.applyProductions(g, None, c)
        self.assertEqual(len(g._vertices), 0)

        # No matching productions raises an error.
        c = {'min_vertices':1}
        self.assertRaises(RuntimeError, gen.applyProductions, g, [], c)

        # When we're done, g has at least min_vertices vertices.
        g.addEdge(Vertex('g0', 'A'), Vertex('g1', 'A'))
        c = {'min_vertices':10}
        # Production is A1->A2 ==> A1->A->A2
        lhs = Graph()
        lhs.addEdge(Vertex('l0', 'A', 1), Vertex('l1', 'A', 2))
        rhs = Graph()
        rhs.addEdge(Vertex('r0', 'A', 1), Vertex('r1', 'A'))
        rhs.addEdge('r1', Vertex('r2', 'A', 2))
        p = Production(lhs, rhs)
        gen.applyProductions(g, [p], c)
        logging.debug(g)
        self.assertEqual(len(g._vertices), 10)
Example 19
    def process_torrent(self, info):
        if self.log.isEnabledFor(logging.DEBUG):
            self.log.debug('metainfo: %s', pprint.pformat(info))

        self.generator = Generator(info, self.fileFinder, self.media_dirs,
                                   self.dest_dir)
        pieces = StringIO.StringIO(info['pieces'])

        # Iterate through pieces
        last_file_pos = 0
        for piece in self.generator.pieces_generator():
            if self.generator.torrent_corrupted:
                self.log.warning('torrent corrupted: %s', info['name'])
                break
            # Compare piece hash with expected hash
            piece_hash = hashlib.sha1(piece).digest()
            # seek the offset (skip unwanted files)
            if self.generator.new_candidate:
                # Save the position in `pieces` that corresponds to the 0th
                # byte of any relevant file.
                last_file_pos = pieces.tell()
            pieces.seek(
                self.generator.get_last_number_of_skipped_pieces() * 20,
                os.SEEK_CUR)

            if piece_hash != pieces.read(20):
                self.generator.corruption()
                pieces.seek(last_file_pos)
        # ensure we've read all pieces
        if pieces.read():
            self.generator.corruption()
Example 20
    def __init__(self):
        self.dataset, self.dataloader, self.device = load_dataset()

        self.netG = Generator(ngpu).to(self.device)
        self.netD = Discriminator(ngpu).to(self.device)

        # Initialise Weights
        self.netG.apply(weights_init)
        self.netD.apply(weights_init)

        # define loss function
        self.criterion = nn.BCELoss()

        # A fixed batch of latent noise, so progress can be evaluated on the same inputs.
        self.fixed_noise = torch.randn(64, nz, 1, 1, device=self.device)

        # Establish convention for real and fake labels during training
        self.real_label = 1
        self.fake_label = 0

        # Setup Adam optimizers for both G and D
        self.optimizerD = optim.Adam(self.netD.parameters(),
                                     lr=lr,
                                     betas=(beta1, 0.999))
        self.optimizerG = optim.Adam(self.netG.parameters(),
                                     lr=lr,
                                     betas=(beta1, 0.999))
Example 21
def sequential_estimate(mean, variance):
    """
    Sample mean:
        Xbar(n) = Xbar(n-1) + (X(n) - Xbar(n-1)) / n
    Sample variance:
        S(n) = (n-2)/(n-1) * S(n-1) + (X(n) - Xbar(n-1))**2 / n

    Welford's update avoids the numerical instability:
        M(2,n) = sum((X - Xbar)**2)
        M(2,n) = M(2,n-1) + (X(n) - Xbar(n-1)) * (X(n) - Xbar(n))
        S(n)   = M(2,n) / (n-1)   # sample variance
        var(n) = M(2,n) / n       # population variance
    """
    estimated_mean = math.inf
    estimated_variance = math.inf
    last_estimated_mean = -math.inf
    M = math.inf
    iteration_idx = 0
    while abs(estimated_mean - last_estimated_mean)>1e-4:
        iteration_idx += 1
        data_point = Generator.univariate_gaussian(mean, variance)
        if iteration_idx==1:
            estimated_mean = data_point
            estimated_variance = 0.0
            M = 0.0
        else:
            last_estimated_mean = estimated_mean
            estimated_mean = estimated_mean + (data_point-estimated_mean)/iteration_idx
            M = M + (data_point-last_estimated_mean)*(data_point-estimated_mean)
            estimated_variance = M/(iteration_idx-1)
        print('Iteration: {}'.format(iteration_idx))
        print('New Data Point: {}'.format(data_point))
        print('Estimated Mean: {}'.format(estimated_mean))
        print('Estimated Variance: {}'.format(estimated_variance))
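A quick sanity check of the Welford recurrence used above; a hypothetical comparison of the running update against numpy's batch mean and variance on the same data:

import numpy as np

rng = np.random.default_rng(0)
data = rng.normal(3.0, 2.0, size=10_000)

mean, m2 = 0.0, 0.0
for n, x in enumerate(data, start=1):
    delta = x - mean              # X(n) - Xbar(n-1)
    mean += delta / n             # Xbar(n)
    m2 += delta * (x - mean)      # M(2,n) = M(2,n-1) + (X(n)-Xbar(n-1))*(X(n)-Xbar(n))

print(mean, np.mean(data))           # should agree closely
print(m2 / len(data), np.var(data))  # population variance, M(2,n)/n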
Example 22
def main():
    firstQueueGroup = [[], [], []]
    secondQueueGroup = [[], []]

    clientGenerator = Generator(UniformDistribution(1, 5), firstQueueGroup)

    operators = [
        Operator(firstQueueGroup[0], secondQueueGroup, UniformDistribution(2, 5)),    
        Operator(firstQueueGroup[1], secondQueueGroup, UniformDistribution(4, 8)),
        Operator(firstQueueGroup[2], secondQueueGroup, UniformDistribution(10, 15))    
    ]

    Attractions = [
        Attraction(secondQueueGroup[0], UniformDistribution(5, 9)),   
        Attraction(secondQueueGroup[1], UniformDistribution(10, 25))   
    ]

    totalVisitors = 300

    tStart = time()
    res = modeling(clientGenerator, operators, Attractions, totalVisitors)

    print('time (secs)', time() - tStart)
    for key in res.keys():
        print(key, res[key])

    print('lost', res['lost'] / totalVisitors)
Example 23
    def testDeleteMissingVertices(self):
        # lhs has no vertices(!). Nothing done.
        g = Graph()
        lhs = Graph()
        rhs = Graph()
        p = Production(lhs, rhs)
        gen = Generator()
        gen._deleteMissingVertices(g, p, {})
        self.assertEqual(len(g._vertices), 0)

        # lhs has vertices, but they all appear in the rhs. Nothing done.
        g.addVertex(Vertex('g0', 'A', 1))
        lhs.addVertex(Vertex('l0', 'A', 1))
        rhs.addVertex(Vertex('r0', 'A', 1))
        p = Production(lhs, rhs)
        gen._deleteMissingVertices(g, p, {'l0':'g0'})
        self.assertEqual(len(g._vertices), 1)

        # lhs has a vertex (A2) that doesn't appear in the rhs. It should be
        # deleted from g.
        g.addVertex(Vertex('g1', 'A', 2))
        lhs.addVertex(Vertex('l1', 'A', 2))
        p = Production(lhs, rhs)
        self.assertEqual(len(g._vertices), 2)
        gen._deleteMissingVertices(g, p, {'l0':'g0', 'l1':'g1'})
        self.assertEqual(len(g._vertices), 1)
Example 24
def generate(ir_path, vis_path, model_path, index, output_path = None):
	ir_img = imread(ir_path) / 255.0
	vis_img = imread(vis_path) / 255.0
	ir_dimension = list(ir_img.shape)
	vis_dimension = list(vis_img.shape)
	ir_dimension.insert(0, 1)
	ir_dimension.append(1)
	vis_dimension.insert(0, 1)
	vis_dimension.append(1)
	ir_img = ir_img.reshape(ir_dimension)
	vis_img = vis_img.reshape(vis_dimension)


	with tf.Graph().as_default(), tf.Session() as sess:
		SOURCE_VIS = tf.placeholder(tf.float32, shape = vis_dimension, name = 'SOURCE_VIS')
		SOURCE_ir = tf.placeholder(tf.float32, shape = ir_dimension, name = 'SOURCE_ir')
		# source_field = tf.placeholder(tf.float32, shape = source_shape, name = 'source_imgs')

		G = Generator('Generator')
		output_image = G.transform(vis = SOURCE_VIS, ir = SOURCE_ir)
		# D1 = Discriminator1('Discriminator1')
		# D2 = Discriminator2('Discriminator2')

		# restore the trained model and run the style transferring
		saver = tf.train.Saver()
		saver.restore(sess, model_path)
		output = sess.run(output_image, feed_dict = {SOURCE_VIS: vis_img, SOURCE_ir: ir_img})
		output = output[0, :, :, 0]
		imsave(output_path + str(index) + '.bmp', output)
Example 25
    def testApplyProduction(self):
        # A basic test that tests all four cases: add and remove vertex,
        # and add and remove edge.

        # Graph starts with A->B
        g = Graph()
        g.addEdge(Vertex('g0', 'A'), Vertex('g1', 'B'))
        g1 = g._vertices['g1']

        # Production lhs matches A->B
        lhs = Graph()
        lhs.addEdge(Vertex('l0', 'A', 1), Vertex('l1', 'B', 1))

        # Production rhs transforms that to A->C
        rhs = Graph()
        rhs.addEdge(Vertex('r0', 'A', 1), Vertex('r1', 'C'))
        p = Production(lhs,rhs)

        gen = Generator()
        gen._applyProduction(g, p, {'l0':'g0','l1':'g1'})

        # g has a new vertex, <v1,C>.
        self.assertEqual(len(g._vertices), 2)
        self.assertEqual(g._vertices['v1'].label, 'C')

        # <g0,A> points to <v1,C>
        self.assertEqual(len(g._edges['g0']), 1)
        self.assertEqual(g._edges['g0'][0].id, 'v1')
        self.assertEqual(g._vertices['v1'].label, 'C')

        # <g0,A> no longer points to <g1,B>
        self.assertNotIn(g1, g._edges['g0'])

        # Vertex <g1,B> has been deleted.
        self.assertNotIn('g1', g._vertices)
Example 26
    def test_evolution(self):
        generator = Generator()
        card = UnitCard(Unit, 1, 60, 1, 0.01, 0.01)

        generator.evolution(card, 100)

        self.assertEqual(2, card.max_count)
Example 27
def make_model(src_vocab,
               tgt_vocab,
               N=6,
               d_model=512,
               d_ff=2048,
               h=8,
               dropout=0.1):
    "Helper: Construct a model from hyperparameters."
    c = copy.deepcopy
    attn = MultiHeadedAttention(h, d_model)
    ff = PositionwiseFeedForward(d_model, d_ff, dropout)
    position = PositionalEncoding(d_model, dropout)
    model = EncoderDecoder(
        Encoder(EncoderLayer(d_model, c(attn), c(ff), dropout), N),
        Decoder(DecoderLayer(d_model, c(attn), c(attn), c(ff), dropout), N),
        nn.Sequential(Embeddings(d_model, src_vocab), c(position)),
        nn.Sequential(Embeddings(d_model, tgt_vocab), c(position)),
        Generator(d_model, tgt_vocab))

    # This was important from their code:
    # initialize parameters with Glorot / fan_avg (Xavier uniform).
    # See https://zhuanlan.zhihu.com/p/74274453
    for p in model.parameters():
        if p.dim() > 1:
            nn.init.xavier_uniform_(p)
    return model
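Here Generator is the output head of the encoder-decoder. In the Annotated Transformer, which this helper follows, that head is a linear projection to the vocabulary followed by log-softmax; a sketch consistent with that usage:

import torch.nn as nn
import torch.nn.functional as F

class Generator(nn.Module):
    """Project decoder states to vocabulary size, then log-softmax."""
    def __init__(self, d_model, vocab):
        super().__init__()
        self.proj = nn.Linear(d_model, vocab)

    def forward(self, x):
        return F.log_softmax(self.proj(x), dim=-1)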
Example 28
    def testAddNewVertices(self):
        # Production rhs has no vertices, so nothing done.
        g = Graph()
        lhs = Graph()
        rhs = Graph()
        p = Production(lhs, rhs)
        gen = Generator()
        self.assertEqual(len(g._vertices), 0)
        gen._addNewVertices(g, p, {})
        self.assertEqual(len(g._vertices), 0)
        
        # Production rhs has vertices, but they all appear in the LHS. Hence
        # they aren't new and nothing is done.
        lhs.addVertex(Vertex('l1', 'A', 1))
        rhs.addVertex(Vertex('r1', 'A', 1))
        self.assertEqual(len(g._vertices), 0)
        gen._addNewVertices(g, p, {})
        self.assertEqual(len(g._vertices), 0)

        # rhs has one new vertex not in the lhs.
        rhsMapping = {}
        rhs.addVertex(Vertex('r2', 'B', 2))
        self.assertEqual(len(g._vertices), 0)
        gen._addNewVertices(g, p, rhsMapping)
        self.assertEqual(len(g._vertices), 1)
        self.assertIn('v0', g._vertices)               # new vertex is v0
        self.assertEqual(g._vertices['v0'].label, 'B') # with label B
        self.assertEqual(g._vertices['v0'].number, 2)  # with number 2
        self.assertIn('r2', rhsMapping)                # now appears in rhsMapping
        self.assertEqual(rhsMapping['r2'], 'v0')       # r2 mapped to v0 (the newly added vertex) in graph
Example 29
    def __init__(self):
        torch.random.manual_seed(42)
        self.dim_latent_g = 128
        self.dim_output_g = 28 * 28
        self.dim_hidden_g = 128
        self.dim_output_d = 1
        self.dim_input_d = 28 * 28
        self.batch_size = 64
        self.learning_rate = 1e-4
        self.num_epochs = 50  # default for time saving on colab
        self.display_freq = 50

        self.d_train_iter = 2  # possibly related to TTUR (two time-scale update rule)
        self.g_train_iter = 1

        self.dist_latent = torch.distributions.normal.Normal(loc=0, scale=1)  # Gaussian(0, 1)
        self.g = Generator(self.dist_latent, self.dim_latent_g, self.dim_hidden_g, self.dim_output_g).cuda()
        self.d = Discriminator(self.dim_input_d, self.dim_output_d).cuda()

        self.transforms = transforms.Compose(
            [transforms.ToTensor(), transforms.Normalize((0.5,), (0.5,)), ]
        )

        self.dataset = datasets.MNIST(root="dataset/", transform=self.transforms, download=True)
        self.loader = DataLoader(self.dataset, batch_size=self.batch_size, shuffle=True)

        # TTUR => different learning rates for G and D
        self.optimizer_G = optim.Adam(self.g.parameters(), lr=self.learning_rate / 2)
        self.optimizer_D = optim.Adam(self.d.parameters(), lr=self.learning_rate * 2)

        self.criterion = nn.BCEWithLogitsLoss()  # the logits-based loss changed some loss values slightly
Example 30
def test_disc_loss(max_tests=10):
    z_dim = 64
    gen = Generator(z_dim).to(device)
    gen_opt = torch.optim.Adam(gen.parameters(), lr=lr)
    disc = Discriminator().to(device)
    disc_opt = torch.optim.Adam(disc.parameters(), lr=lr)
    num_steps = 0

    for real, _ in dataloader:
        cur_batch_size = len(real)
        real = real.view(cur_batch_size, -1).to(device)
        # Zero out the gradient before backpropagation
        disc_opt.zero_grad()

        disc_loss = get_disc_loss(gen, disc, criterion, real, cur_batch_size,
                                  z_dim, device)
        assert (disc_loss - 0.68).abs() < 0.05
        # Update the gradients
        disc_loss.backward(retain_graph=True)

        assert gen.gen[0][0].weight.grad is None

        old_weight = disc.disc[0][0].weight.data.clone()
        # Take an optimizer step
        disc_opt.step()
        new_weight = disc.disc[0][0].weight.data

        assert not torch.all(torch.eq(old_weight, new_weight))
        num_steps += 1
        if num_steps >= max_tests:
            break
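get_disc_loss is not shown. A standard BCE discriminator loss fits the assertion: an untrained discriminator scores near chance, and BCE at chance is ln 2 ≈ 0.693, hence the 0.68 ± 0.05 check. A sketch of that usual formulation, not necessarily the original:

import torch

def get_disc_loss(gen, disc, criterion, real, num_images, z_dim, device):
    # Score fakes against 0s; detach so no generator gradients are produced,
    # which is what the `gen.gen[0][0].weight.grad is None` assert checks.
    noise = torch.randn(num_images, z_dim, device=device)
    fake_pred = disc(gen(noise).detach())
    fake_loss = criterion(fake_pred, torch.zeros_like(fake_pred))
    # Score reals against 1s, then average the two terms.
    real_pred = disc(real)
    real_loss = criterion(real_pred, torch.ones_like(real_pred))
    return (fake_loss + real_loss) / 2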
Example 31
    def test_menu(self):
        generator = Generator()
        space = MagicMock()
        generator.init_levels()

        generator.menu(space)

        self.assertTrue(space.clear.called)
Example 32
 def __init__(self, name, head):
     Generator.__init__(self, name, head)
     self.stimulusTimes = []
     self.currentStimulus = 0
Example 33
 def __init__(self, name, head, position=[0, 0, 0], mean=0, stddev=0):
     Generator.__init__(self, name, head, position)
     self.mean = mean
     self.stddev = stddev
Example 34
 def __init__(self):
     Generator.__init__(self, NameGenerator.filenames)
Example 35
    output_file = args['--output']
    ast = None
    try:
        with open(input_file, 'r') as f:
            Parser.build("Program")
            source = f.read()
            ast = Parser.parse(source)
    except IOError:
        print('Error opening file %s. Please check the file or '
              'the directory.' % input_file)
        sys.exit(1)

    if ast is None:
        error_list = list(Parser.error_list.keys())
        error_list.sort()
        for key in error_list:
            sys.stdout.write(Parser.error_list[key])
        sys.stdout.flush()
        sys.exit(-1)

    if args['-O']:
        ast = Optimizer.optimize(ast)

    try:
        with open(output_file, 'w') as f:
            f.write(Generator.generate(ast))
    except IOError:
        print('Error writing to file %s. Please check the file or '
              'the directory.' % output_file)
        sys.exit(1)
Example 36
 def __init__(self, name, head, position=[0, 0, 0], frequency=2, phaseShift=0):
     Generator.__init__(self, name, head, position)
     self.frequency = frequency
     self.phaseShift = phaseShift
     self.currentPhase = self.phaseShift
     self.lastTimePoint = 0
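The fields suggest a sinusoidal stimulus generator. A hypothetical sampling step showing how currentPhase and lastTimePoint would advance; the method name and update rule are assumptions, not taken from the source:

import math

class SineGenerator:
    """Hypothetical stand-in showing how the phase fields would be used."""
    def __init__(self, frequency=2, phaseShift=0):
        self.frequency = frequency
        self.currentPhase = phaseShift
        self.lastTimePoint = 0

    def sample(self, t):
        # Advance the phase by frequency * elapsed time, then emit sin(phase).
        self.currentPhase += 2.0 * math.pi * self.frequency * (t - self.lastTimePoint)
        self.lastTimePoint = t
        return math.sin(self.currentPhase)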