Example #1
def encrypt_file(binary):
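    # Converts the input bit string to a list of ints, XORs it with a
    # full-length keystream from generate(), then runs a 128-bit block pass in
    # which each enciphered block becomes the XOR key for the next block
    # (CBC-style chaining). generate(), logical_xor() and the math module are
    # assumed to be provided by the surrounding module.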
    temp_binary = [0] * len(binary)
    for i in range(len(binary)):
        temp_binary[i] = int(binary[i])

    key = generate(0.5, 0.5, 3, 0.5, math.pi / 2, len(temp_binary))

    for i in range(len(temp_binary)):
        temp_binary[i] = logical_xor(temp_binary[i], key[i])

    key = generate(0.5, 0.5, 3, 0.5, math.pi / 2, 256)
    for i in range(128):
        key[i] = key[128 + i]

    i = 0
    j = 0
    while (128 * i + j < len(temp_binary)):
        temp_binary[128 * i + j] = int(
            logical_xor(temp_binary[128 * i + j], key[j]))
        j += 1
        if (j >= 128):
            i += 1
            for j in range(128):
                key[j] = int(temp_binary[128 * (i - 1) + j])
            j = 0

    for i in range(len(temp_binary)):
        temp_binary[i] = int(temp_binary[i])
    return temp_binary
Example #2
def evaluate(data_iter, model,vecs,TEXT,LABELS,criterion,emb_dim):
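    # Runs the model over data_iter in eval mode: source and target tokens are
    # embedded with the pretrained vectors in `vecs`, the loss returned by
    # memoryEfficientLoss is accumulated, and the per-example average is printed
    # before generate() produces sample output.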
    model.eval()
    corrects, avg_loss, t5_corrects, rr = 0, 0, 0, 0
    for batch_count,batch in enumerate(data_iter):
        #print('avg_loss:', avg_loss)
        inp, target = batch.text, batch.label
        inp.data.t_()#, target.data.sub_(1)  # batch first, index align
        inp3d = torch.cuda.FloatTensor(inp.size(0),inp.size(1),emb_dim)
        for i in range(inp.size(0)):
          for j in range(inp.size(1)):
            inp3d[i,j,:] = vecs[TEXT.vocab.itos[inp[i,j].data[0]]]
        #if args.cuda:
        #    feature, target = feature.cuda(), target.cuda()

        outp = batch.label.t()
        outp3d = torch.cuda.FloatTensor(outp.size(0),outp.size(1),emb_dim)
        for i in range(outp.size(0)):
          for j in range(outp.size(1)):
            outp3d[i,j,:] = vecs[LABELS.vocab.itos[outp[i,j].data[0]]]

        preds, attns = model(Variable(inp3d),Variable(outp3d,requires_grad=False))
        loss,grad,numcorrect = memoryEfficientLoss(preds, batch.label, model.generate,criterion,eval=True)

        avg_loss += loss

    size = len(data_iter.dataset)
    avg_loss = avg_loss/size
    model.train()
    print("EVAL: ",avg_loss)
    generate(data_iter, model, vecs, TEXT, LABELS, 300)

    return avg_loss#, accuracy, corrects, size, t5_acc, t5_corrects, mrr);
def main():

    choice, alter_pos = greeting_message()

    if (choice == "2"):
        file_address = input("type the address of your file!\n")
        data_str, gen_str = getDataFromFile(file_address)
    elif (choice == "1"):
        data_str = input("enter the data message!\n")
        gen_str = input("enter the generator key!\n")
    else:
        print("wrong choice please try again!")
        choice, alter_pos = greeting_message()

    transmitted_data = ""
    verified_data = ""

    if (alter_pos == -1):
        transmitted_data = generator.generate(data_str, gen_str)
        addDataToFile(transmitted_data)
        verified_data = verifier.verifier(transmitted_data, gen_str)
        print(verified_data, end="\n")
        #print (int("5"))

    elif (alter_pos != -1):
        transmitted_data = generator.generate(data_str, gen_str)
        addDataToFile(transmitted_data)
        altered_transmitted_data = alter.alter(transmitted_data, alter_pos)
        verified_data = verifier.verifier(altered_transmitted_data, gen_str)
        print(verified_data, end="\n")
Example #4
def load_and_find_results(db_file_name, count, schema, schema_data):
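    # Generates `count` fake records into db_file_name, then prints the results
    # of several lookup strategies for a randomly chosen 'fio' value: a linear
    # scan, binary searches, and timed dictionary / custom hash-table lookups
    # built with the "good" and "bad" hash functions.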
    key = 'fio'
    generate(db_file_name, count, schema, schema_data)
    fp_list = load_fp_from_file(db_file_name)
    query_obj = random.choice(fp_list)
    query = getattr(query_obj, key)
    fp_map = defaultdict(list)
    fp_custom_map_good = HashTable()
    for el in fp_list:
        el.set_hash_type('good')
        fp_map[getattr(el, key)].append(el)
        fp_custom_map_good.add(el)
    print(linear_search(fp_list, key, query))
    print(sort_and_binary_seach(fp_list, key, query))
    print(binary_search(fp_list, key, query))
    print(check_time(fp_map.__getitem__)(query))
    print(check_time(fp_custom_map_good.get)(Hashes.good_hash(query)))
    fp_custom_map_bad = HashTable()
    for el in fp_list:
        el.set_hash_type('bad')
        fp_custom_map_bad.add(el)
    query_obj.set_hash_type('bad')
    print(check_time(fp_custom_map_bad.get)(Hashes.bad_hash(query)))


#load_and_find_results(DATABASE_FILE_NAME, 100, schema_office_worker, data_office_worker)
    def test_function_definition(self):
        tree = parse_statement("function foo()\n    bar = baz\n")
        processed = process_tree(tree)
        result = generate(processed, "js")
        result = result["code"]
        self.assertEqual(
            result["backend"],
            _wrap_back(
                "async function foo() {\n    var bar = baz;\n}\n\nmodule.exports = {\n\tfoo\n};\n"
            ))

        tree = parse_statement("function()\n    bar = baz\n")
        processed = process_tree(tree)
        result = generate(processed, "js")
        result = result["code"]
        self.assertEqual(result["backend"],
                         _wrap_back("async () => {\n    var bar = baz;\n}\n"))

        tree = parse_statement("function foo(bar, baz)\n    bar = baz\n")
        processed = process_tree(tree)
        result = generate(processed, "js")
        result = result["code"]
        self.assertEqual(
            result["backend"],
            _wrap_back(
                "async function foo(bar, baz) {\n    var bar = baz;\n}\n\nmodule.exports = {\n\tfoo\n};\n"
            ))
    def test_variables(self):
        tree = parse_statement("foo = 'abcd'\nbar = foo\n")
        processed = process_tree(tree)
        result = generate(processed, "js")
        result = result["code"]
        self.assertEqual(result["backend"],
                         _wrap_back("var foo = \"abcd\";\nvar bar = foo;\n"))

        tree = parse_statement("foo = 5.45\n")
        processed = process_tree(tree)
        result = generate(processed, "js")
        result = result["code"]
        self.assertEqual(result["backend"], _wrap_back("var foo = 5.45;\n"))

        tree = parse_statement("foo = 5\n")
        processed = process_tree(tree)
        result = generate(processed, "js")
        result = result["code"]
        self.assertEqual(result["backend"], _wrap_back("var foo = 5;\n"))

        tree = parse_statement("foo = 5\nfoo = foo ++\n")
        processed = process_tree(tree)
        result = generate(processed, "js")
        result = result["code"]
        self.assertEqual(result["backend"],
                         _wrap_back("var foo = 5;\nfoo = foo++;\n"))
    def test_function_in_jsx_map_array(self):
        tree = parse_statement(
            "#frontend\n<input\n\tonChange=function(event)\n\t\tfoo()\n\tvalue=\"bar\"\n/>\n"
        )
        processed = process_tree(tree)
        result = generate(processed, "js")
        result = result["code"]
        self.assertEqual(
            result["frontend"],
            _wrap_front(
                "new Component(\"input\", {\n    onChange: async (event) => {\n        await foo();\n    },\n    value: \"bar\",\n\n}, []);\n"
            ))

        tree = parse_statement(
            "foo = {\n\tonChange: function(event)\n\t\tfoo()\n\tvalue: \"bar\"\n"
        )
        processed = process_tree(tree)
        result = generate(processed, "js")
        result = result["code"]
        self.assertEqual(
            result["backend"],
            _wrap_back(
                "var foo = {\n    \"onChange\": async (event) => {\n        await foo();\n    },\n    \"value\": \"bar\",\n}\n"
            ))

        tree = parse_statement(
            "foo = [\n\tfunction(event)\n\t\tfoo()\n\t\"bar\"\n]\n")
        processed = process_tree(tree)
        result = generate(processed, "js")
        result = result["code"]
        self.assertEqual(
            result["backend"],
            _wrap_back(
                "var foo = [\n    async (event) => {\n        await foo();\n    },\n    \"bar\",\n\n];\n"
            ))
Example #8
def generate():
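    # Flask view: collects the selected (or required) files that are in an 'ok'
    # state plus the user's custom text, then either returns the generated
    # config as a downloadable "slicer.ini" or renders an HTML preview,
    # depending on the submitted `cmd`.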
    files = request.form.getlist("file[]")
    custom = request.form['custom']
    session['custom'] = custom
    valid_files = funcs.get_files()
    print("KD", valid_files)
    files = [x["path"] for x in filter(lambda x: (x['name'] in files or x['required']) and x['state'] == 'ok', valid_files)]
    # return repr(files)
    # return "OK"

    cmd = request.form['cmd']

    if cmd == "generate":
        cnt = generator.generate(files, custom)
        name = "slicer.ini"
        resp = make_response(cnt)
        resp.headers["Content-Disposition"] = "attachment; filename={0}".format(name)
        # return "A"
        return resp
    elif cmd == "preview cfg":
        cnt = generator.generate(files, custom, forSlicer=False)
        return Markup(cnt.replace("\n", "<br/>"))
    elif cmd == "preview":
        cnt = generator.generate(files, custom, forSlicer=True)
        return Markup(cnt.replace("\n", "<br/>"))
Example #9
def fun_del_recommend(user=None, book_id="", node_id="", relation_id="", want="del_recommend"):
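    # Loads the relation, rebuilds the catalog and article objects via
    # generate(), and, after a permission check, either removes the article /
    # sub-catalog from the catalog node or marks it as recommended.
    # Returns [code, message]: 0 on success, 1 on error.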
    if user is None:
        return [1, "请登陆!"]
    if book_id == "" or node_id == "" or relation_id == "":
        return [1, "参数错误!"]
    try:
        relation_obj = Relation(_id=relation_id)
        tmp_info = relation_obj.relation_set
        tmp_info_catalog = tmp_info[0]
        tmp_info_article = tmp_info[1]
        catalog_obj = generate(tmp_info_catalog[1], tmp_info_catalog[0])
        article_obj = generate(tmp_info_article[1], tmp_info_article[0])
        limit = catalog_obj.authority_verify(user)
        if want == "del_recommend":
            if test_auth(limit, A_DEL) is False:
                return [1, "您无权操作!"]
            article_type = article_obj.__class__.__name__
            if article_type == "Blog":
                article_obj.remove_from_catalog(catalog_obj, node_id, relation_obj)
            elif article_type == "Catalog":
                article_obj.remove_subcatalog(node_id, relation_obj)
            else:
                return [1, "尚不支持类型的该操作!"]
            return [0, "删除成功!"]
        elif want == "mark_recommend":
            if test_auth(limit, A_MANAGE) is False:
                return [1, "您无权设置!"]
            catalog_obj.spec_blog_to(node_id, relation_obj)
            return [0, "设置成功!"]
        else:
            return [1, "不支持当前操作!"]
    except Exception, err:
        logging.error(traceback.format_exc())
        logging.error("Del relation, id %s" % relation_id)
        return [1, "操作出错!"]
Example #10
def decode_file(binary):
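    # Intended inverse of encrypt_file() above: each 128-bit block is XORed
    # with the full-length keystream and with a block key seeded from the
    # second half of a 256-bit generate() output, the block key being re-seeded
    # from the previous block of the original (encrypted) input.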
    temp_binary = [0] * len(binary)
    for i in range(len(binary)):
        temp_binary[i] = int(binary[i])

    key = generate(0.5, 0.5, 3, 0.5, math.pi / 2, len(binary))
    vectorInit = generate(0.5, 0.5, 3, 0.5, math.pi / 2, 256)
    for i in range(128):
        vectorInit[i] = int(vectorInit[128 + i])

    i = 0
    j = 0
    while (128 * i + j < len(temp_binary)):
        temp_binary[128 * i + j] = int(
            logical_xor(temp_binary[128 * i + j], key[128 * i + j]))
        temp_binary[128 * i + j] = int(
            logical_xor(temp_binary[128 * i + j], vectorInit[j]))
        j += 1
        if (j >= 128):
            i += 1
            for j in range(128):
                vectorInit[j] = int(binary[128 * (i - 1) + j])
            j = 0

    for i in range(len(temp_binary)):
        temp_binary[i] = int(temp_binary[i])
    return temp_binary
Example #11
def init():
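    # Builds a new puzzle at the difficulty implied by `mode`, sets up the
    # pygame window (p is presumably pygame), draws the board, and tallies in
    # sub_grid how many of each digit 1-9 are still missing from the grid.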
    global remaining
    global solved
    global sub_grid
    global lvl
    lvl = 0.4 + 0.1 * mode
    sub_grid = [9] * 9
    generate(lvl)
    remaining = solver.count_zeros(grid)
    solved = generator_solved
    p.init()
    p.display.set_caption('Sudoku with solver')
    p.display.set_icon(p.image.load('icon.png'))
    screen.fill((204, 204, 204))
    draw_grid()
    draw_line()
    set_grid()
    draw_lvls()
    draw_instructions()
    for i in range(9):
        original[i] = grid[i].copy()
    for i in range(9):
        for j in range(9):
            sub_grid[j] -= grid[i].count(j + 1)
    draw_sub_grid()
Example #12
def main():
    try:
        print "Reading %s" % conf_file_path
        conf_file = open(conf_file_path, "r")
    except:
        print "Could not open configuration file. Exiting..."
        die()
    try:
        print "Parsing configuration file."
        conf = load(conf_file)
    except:
        print "Error parsing the cofiguration file. Exiting..."
        die()
    devices = []
    for device, properties in conf.items():
        k = dev(device, properties)
        devices.append(k)
    if "stop" in sys.argv:
        for device in devices:
            call(["sudo tc qdisc del dev %s root" % device.name], shell=True)
        die()
    print "Generating shell scripts"
    generator.generate(devices, file_directory)
    print "Shell scripts successfully generated."
    print "Checking command exists in cron jobs"
    check_cron()
    for device in devices:
        print "Configuring %s" % device.name
        call(["sudo bash %s/%s.sh" % (file_directory, device.name)],
             shell=True)
        print
    print "Done ! Exiting."
Example #13
def gen_client (templates):
	for name, value in ops.items():
		if name == 'getspec':
			# It's not real if it doesn't have a stub function.
			continue
		print(generate(templates['cbk'], name, cbk_subs))
		print(generate(templates['cont-func'], name, fop_subs))
		print(generate(templates['fop'], name, fop_subs))
Example #14
def gen_client (templates):
	for name, value in ops.iteritems():
		if name == 'getspec':
			# It's not real if it doesn't have a stub function.
			continue
		print(generate(templates['cbk'], name, cbk_subs))
		print(generate(templates['cont-func'], name, fop_subs))
		print(generate(templates['fop'], name, fop_subs))
Example #15
def summon(id):
    myPet = Pet()
    summon = generator.generate('creature')
    myPet.setStats(generator.generate('mc.name')['text'], summon['core'])
    message = generator.extract_text(summon) + ' Its name is {}.'.format(
        myPet.name)
    savePet(myPet, id)
    return message
Example #16
 def on_any_event(self, event):
     try:
         print("File changed, regenerating")
         os.chdir(self.base_dir)
         generate(self.in_path, self.out_path, self.template_dir)
         os.chdir(self.out_path)
     except:
         traceback.print_exc()
Example #17
def reset():
    generate(lvl)
    init()
    global won
    global remaining
    global prev_time
    won = False
    remaining = solver.count_zeros(grid)
    prev_time = p.time.get_ticks() // 1000
Example #18
def gen_defaults():
    for name in ops:
        if name in fd_data_modify_op_fop_template:
            print(generate(FD_DATA_MODIFYING_OP_FOP_CBK_TEMPLATE, name, cbk_subs))
            print(generate(FD_DATA_MODIFYING_RESUME_OP_FOP_TEMPLATE, name, fop_subs))
            print(generate(FD_DATA_MODIFYING_OP_FOP_TEMPLATE, name, fop_subs))
        elif name in loc_stat_op_fop_template:
            print(generate(LOC_STAT_OP_FOP_CBK_TEMPLATE, name, cbk_subs))
            print(generate(LOC_STAT_OP_FOP_TEMPLATE, name, fop_subs))
Example #19
def main():
    # argparse
    args = get_args()

    # Context Setting
    # Get context.
    from nnabla.ext_utils import get_extension_context
    logger.info("Running in %s" % args.context)
    ctx = get_extension_context(
        args.context, device_id=args.device_id)
    nn.set_default_context(ctx)

    model_path = args.model

    if args.train:
        # Data Loading
        logger.info("Initialing DataSource.")
        train_iterator = facade.facade_data_iterator(
            args.traindir,
            args.batchsize,
            shuffle=True,
            with_memory_cache=False)
        val_iterator = facade.facade_data_iterator(
            args.valdir,
            args.batchsize,
            random_crop=False,
            shuffle=False,
            with_memory_cache=False)

        monitor = nm.Monitor(args.logdir)
        solver_gen = S.Adam(alpha=args.lrate, beta1=args.beta1)
        solver_dis = S.Adam(alpha=args.lrate, beta1=args.beta1)

        generator = unet.generator
        discriminator = unet.discriminator

        model_path = train(generator, discriminator, args.patch_gan,
                           solver_gen, solver_dis,
                           args.weight_l1, train_iterator, val_iterator,
                           args.epoch, monitor, args.monitor_interval)

    if args.generate:
        if model_path is not None:
            # Data Loading
            logger.info("Generating from DataSource.")
            test_iterator = facade.facade_data_iterator(
                args.testdir,
                args.batchsize,
                shuffle=False,
                with_memory_cache=False)
            generator = unet.generator
            generate(generator, model_path, test_iterator, args.logdir)
        else:
            logger.error("Trained model was NOT given.")
Example #20
def gen_defaults ():
	for name in list(ops.keys()):
		print(generate(FAILURE_CBK_TEMPLATE, name, cbk_subs))
	for name in list(ops.keys()):
		print(generate(CBK_RESUME_TEMPLATE, name, cbk_subs))
	for name in list(ops.keys()):
		print(generate(CBK_TEMPLATE, name, cbk_subs))
	for name in list(ops.keys()):
		print(generate(RESUME_TEMPLATE, name, fop_subs))
	for name in list(ops.keys()):
		print(generate(FOP_TEMPLATE, name, fop_subs))
Example #21
def gen_defaults():
    for name in ops.iterkeys():
        print generate(FAILURE_CBK_TEMPLATE, name, cbk_subs)
    for name in ops.iterkeys():
        print generate(CBK_RESUME_TEMPLATE, name, cbk_subs)
    for name in ops.iterkeys():
        print generate(CBK_TEMPLATE, name, cbk_subs)
    for name in ops.iterkeys():
        print generate(RESUME_TEMPLATE, name, fop_subs)
    for name in ops.iterkeys():
        print generate(FOP_TEMPLATE, name, fop_subs)
Example #22
def gen_defaults ():
	for name in ops.iterkeys():
		print generate(FAILURE_CBK_TEMPLATE,name,cbk_subs)
	for name in ops.iterkeys():
		print generate(CBK_RESUME_TEMPLATE,name,cbk_subs)
	for name in ops.iterkeys():
		print generate(CBK_TEMPLATE,name,cbk_subs)
	for name in ops.iterkeys():
		print generate(RESUME_TEMPLATE,name,fop_subs)
	for name in ops.iterkeys():
		print generate(FOP_TEMPLATE,name,fop_subs)
Example #23
def gen_defaults():
    for name in ops:
        if name in fd_data_modify_op_fop_template:
            print generate(FD_DATA_MODIFYING_OP_FOP_CBK_TEMPLATE, name,
                           cbk_subs)
            print generate(FD_DATA_MODIFYING_RESUME_OP_FOP_TEMPLATE, name,
                           fop_subs)
            print generate(FD_DATA_MODIFYING_OP_FOP_TEMPLATE, name, fop_subs)
        elif name in loc_stat_op_fop_template:
            print generate(LOC_STAT_OP_FOP_CBK_TEMPLATE, name, cbk_subs)
            print generate(LOC_STAT_OP_FOP_TEMPLATE, name, fop_subs)
def gen_defaults():
    for name in list(ops.keys()):
        print(generate(FAILURE_CBK_TEMPLATE, name, cbk_subs))
    for name in list(ops.keys()):
        print(generate(CBK_RESUME_TEMPLATE, name, cbk_subs))
    for name in list(ops.keys()):
        print(generate(CBK_TEMPLATE, name, cbk_subs))
    for name in list(ops.keys()):
        print(generate(RESUME_TEMPLATE, name, fop_subs))
    for name in list(ops.keys()):
        print(generate(FOP_TEMPLATE, name, fop_subs))
    def test_boolean(self):
        tree = parse_statement("foo = false")
        processed = process_tree(tree)
        result = generate(processed, "js")
        result = result["code"]
        self.assertEqual(result["backend"], _wrap_back("var foo = false;\n"))

        tree = parse_statement("foo = true")
        processed = process_tree(tree)
        result = generate(processed, "js")
        result = result["code"]
        self.assertEqual(result["backend"], _wrap_back("var foo = true;\n"))
Example #26
def korona():
    model = MODELS['andrej_korona']

    tlong = generate(model, items=20, separator=' ')
    tshort = generate(model, items=12, separator='\n', max_chars=140)

    opus = Opus(conf=dict(korona=True), text_long=tlong, text_short=tshort)
    db.session.add(opus)
    db.session.commit()

    return render_template('korona.html', title='Koronavirusy',
                           opus=opus, hits=Opus.hits())
Example #27
    def export(self):

        """Export to a chips file"""

        dlg = wx.FileDialog(self, "Export", style = wx.FD_SAVE)
        if dlg.ShowModal() == wx.ID_OK:
            filename = dlg.GetPath()
            generator.generate(
                filename,
                self.netlist, 
                self.port_positions, 
                self.wires
            )
Example #28
def index():
    mixer = int(request.args.get('mixer', 1))
    model = MODEL_MIXER[mixer]

    tlong = generate(model, items=20, separator=' ')
    tshort = generate(model, items=12, separator='\n', max_chars=140)

    opus = Opus(conf=dict(mixer=mixer), text_long=tlong, text_short=tshort)
    db.session.add(opus)
    db.session.commit()

    return render_template('generator.html',
                           title='Hlavně neblábolit',
                           opus=opus,
                           hits=Opus.hits())
Example #29
def loop_daily_load(start_year, end_year, start_month, end_month, start_day, end_day):
    # Update the database as NASA adds data
    import MakeDailyCsv
    import LoadDailyMysql
    from generator import generate

    year_list = generate(start_year, end_year, 4)
    month_list = generate(start_month, end_month, 2)
    day_list = generate(start_day, end_day, 2)

    for year in year_list:
        for month in month_list:
            for day in day_list:
                MakeDailyCsv.make_daily_csv(year, month, day)
                LoadDailyMysql.Load_Daily_Mysql(year, month, day)
    def test_class_method_call(self):
        tree = parse_statement("foo = bar.baz(\n\n)\n")
        processed = process_tree(tree)
        result = generate(processed, "js")
        result = result["code"]
        self.assertEqual(result["backend"],
                         _wrap_back("var foo = await bar.baz(\n\n);\n"))

        tree = parse_statement("foo = bar.baz.biz.buzz(\n\n)\n")
        processed = process_tree(tree)
        result = generate(processed, "js")
        result = result["code"]
        self.assertEqual(
            result["backend"],
            _wrap_back("var foo = await bar.baz.biz.buzz(\n\n);\n"))
    def test_mysql(self):
        tree = parse_statement(
            "table = Table(\n\t\"table1\"\n\t[\n\t]\n\tTable.SOURCE_MYSQL\n)\n"
        )
        processed = process_tree(tree)
        output = generate(processed, "js")
        imports = output['external_imports']
        self.assertEqual(imports, [{"module": "mysql", "version": "2.18.1"}])

        tree = parse_statement(
            "table = Table(\n\t\"table1\"\n\t[\n\t]\n\tTable.SOURCE_FILE\n)\n")
        processed = process_tree(tree)
        output = generate(processed, "js")
        imports = output['external_imports']
        self.assertEqual(imports, [])
Example #32
def generator():
    """ This method generates a set of random number that obeys the benford law
        according to the description provided.
    """
    data = ''
    if request.method == 'POST':
        min = request.form['dataMin']
        max = request.form['dataMax']
        dataSize = request.form['dataSize']

        if min == "" or max == "" or dataSize== "":
            #flash('Please fill the data description !')
            return render_template('generate.html')
        else:
            min = int(min)
            max = int(max)
            dataSize = int(dataSize)
            result = gen.generate(max, min, dataSize)
            for k in range(len(result)):
                if data != '':
                    data += ","
                data += str(result[k])

            with open('upload/dataGenerate.csv', 'w') as f:
                f.write(data)
                f.close()
            return render_template('generate.html', result = result)
Example #33
def multiplicative():
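    # Generates a random packet stream, scrambles it, records error statistics
    # for the scrambled data, then descrambles it; the negation, additive and
    # shift examples below follow the same pattern.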
    data = generate(size, ones_p, packet_size)
    # print(data)
    multiplicativeScramble(data)
    broken_packets.append(stats(data, packet_size, ones_p, "multiplicative"))
    # print(data)
    multiplicativeDescramble(data)
Example #34
 def __afterMove(self):
     self.__print(generator.generate(self.map))
     self.status = judgeEnd.judgeEnd(self.map, self.end)
     if self.status == 2:
         print('\nWin!\n_____________________________')
     elif self.status == 1:
         print('\nGame Over!\n_____________________________')
Example #35
def negation():
    data = generate(size, ones_p, packet_size)
    # print(data)
    negationScramble(data)
    broken_packets.append(stats(data, packet_size, ones_p, "negation"))
    # print(data)
    negationDescramble(data)
Example #36
def additive():
    data = generate(size, ones_p, packet_size)
    # print(data)
    additiveScramble(data)
    broken_packets.append(stats(data, packet_size, ones_p, "additive"))
    # print(data)
    additiveDescramble(data)
Example #37
def write_ink_record(dest_name, start, end):
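    # Writes (end - start) synthetic ink samples from generator.generate() to a
    # TFRecord file, storing each sample's class index, ink shape and flattened
    # ink points as features.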
    filename = "%s.tfrecords" % (dest_name)
    writer = tf.python_io.TFRecordWriter(filename)
    num_sym = 0
    syms = 0
    num_not = 0
    nots = 0

    for i in range(start, end):
        if i % 1000 == 0:
            print(i)

        ink, class_index = generator.generate()

        feature = {
            'class_index':
            tf.train.Feature(int64_list=tf.train.Int64List(
                value=[class_index])),
            'shape':
            tf.train.Feature(int64_list=tf.train.Int64List(
                value=list(ink.shape))),
            'ink':
            tf.train.Feature(float_list=tf.train.FloatList(
                value=ink.flatten()))
        }
        example = tf.train.Example(features=tf.train.Features(feature=feature))
        writer.write(example.SerializeToString())

    writer.close()
Example #38
def mutate(parent, room_ids, divisor=10):
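    # Genetic-algorithm-style mutation: re-generates the room assignment for a
    # randomly sampled subset (at most len(parent) / divisor, rounded up) of
    # the classes in `parent`.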
    classes = sample(parent.keys(), randint(0,
                                            math.ceil(len(parent) / divisor)))
    for i in classes:
        parent[i] = generator.generate(room_ids)

    return parent
Example #39
def run_generator():
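    # Interactive loop: detects the pieces on the current layer, waits for a
    # key in the OpenCV window, and on 'k' saves the generated assembly JSON
    # and image before moving to a new layer ('l'), staying on the same layer,
    # or finishing ('x'); 'c' aborts.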
    current_layer = 0
    current_file = 0
    last_master = None
    while True:
        print "Getting pieces..."
        print "Current layer: " + str(current_layer)
        img, pieces = dt.get_pieces(current_layer)
        print "Confirm pieces with (K), (N) to try detection again or (C) to abort"
        key = cv.waitKey()

        if key == ord("k"):
            last_master, dct = gt.generate(pieces, current_layer, last_master)

            with open("Generated/assembly" + str(current_file) + ".json", "w") as file_w:
                json.dump(dct, file_w)

            cv.imwrite("Generated/assembly" + str(current_file) + ".jpg", img)

            json_strgs.append(json.dumps(dct))
            print "File saved, for a new layer press (L), for same layer press (N), to finalize press (X)"
            current_file += 1

            key = cv.waitKey()

            if key == ord("l"):
                current_layer += 1

            if key == ord("x"):
                return True

            continue

        if key == ord("c"):
            return False
Example #40
def shift():
    data = generate(size, ones_p, packet_size)
    # print(data)
    shiftScramble(data, packet_size)
    broken_packets.append(stats(data, packet_size, ones_p, "shift"))
    # print(data)
    shiftDescramble(data, packet_size)
Example #41
def gen_defaults():
    for name in ops:
        if name in utime_ops:
            print(generate(FOPS_CBK_COMMON_TEMPLATE, name, cbk_subs))
            print(generate(FOPS_COMMON_TEMPLATE, name, fop_subs))
        if name in utime_read_op:
            print(generate(FOPS_CBK_COMMON_TEMPLATE, name, cbk_subs))
            print(generate(FOPS_READ_TEMPLATE, name, fop_subs))
        if name in utime_write_op:
            print(generate(FOPS_CBK_COMMON_TEMPLATE, name, cbk_subs))
            print(generate(FOPS_WRITE_TEMPLATE, name, fop_subs))
        if name in utime_setattr_ops:
            print(generate(FOPS_CBK_COMMON_TEMPLATE, name, cbk_subs))
            print(generate(FOPS_SETATTR_TEMPLATE, name, fop_subs))
        if name in utime_copy_file_range_ops:
            print(generate(FOPS_CBK_COMMON_TEMPLATE, name, cbk_subs))
            print(generate(FOPS_COPY_FILE_RANGE_TEMPLATE, name, fop_subs))
Example #42
def gen_cases ():
	code = ""
	for name, value in ops.iteritems():
		if "journal" not in [ x[0] for x in value ]:
			continue
		# Add the CASE fragment for this fop.
		code += generate(fragments["CASE"],name,fop_subs)
	return code
Example #43
 def stream_pw():
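     # Streams the result page: yields the rendered template, then one
     # read-only <input> element per generated password.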
     yield (render_template('result.html', title='Result'))
     yield ('\n<ul class="centeredList">\n')
     for i in range(pw_count):
         curr_pw = generator.generate(pw_length, random)
         # print (curr_pw)
         yield ('  <input class="result" type="text" value=%s readonly onclick="this.select();">\n' % curr_pw)
     yield ('</ul>\n')
Example #44
def fun_get_node_info(user=None, book=None, node_id="0"):
    if book is None:
        raise Exception
    res_dict = {
        "book_id": book._id,
        "book_name": book.name,
        "node_id": node_id,
        "node_title": "",
        "node_section": "",
        "node_main": {},
        "node_articles": [],
        "node_catalogs": [],
        "node_main_script": [],
        "node_article_count": 0,
        "node_subcatalog_count": 0,
        "node_spec_count": 0,
    }
    node_info = book.get_node_dict(node_id)

    # print node_info.load_all()
    if node_info.load_all() == {}:
        raise Exception
    res_dict["node_title"] = node_info["title"]
    res_dict["node_section"] = node_info["section"]
    res_dict["node_article_count"] = node_info["article_count"]
    res_dict["node_subcatalog_count"] = node_info["subcatalog_count"]
    res_dict["node_spec_count"] = node_info["spec_count"]

    main_article = book.get_node_list(node_id, "main").load_all()
    all_article = book.get_node_list(node_id, "articles").load_all()
    # print 'all article relation ', all_article
    all_catalog = book.get_node_list(node_id, "catalogs").load_all()
    if len(main_article) != 0:
        # get main article, we need get releation, and
        try:
            main_relation = Relation(_id=main_article[-1])
        except Exception, err:
            logging.error(traceback.format_exc())
            logging.error("Relation not exist, %s" % main_article[-1])
        else:
            tmp_obj_and_type = main_relation.relation_set[1]
            AF_Object = generate(tmp_obj_and_type[1], tmp_obj_and_type[0])
            if AF_Object is not None:
                tmp_main_article = {
                    "article_type": AF_Object.__class__.__name__,
                    "article_id": AF_Object._id,
                    "article_view_body": AF_Object.view_body,
                    "article_author_id": AF_Object.author_id,
                    "article_author_name": AF_Object.author_name,
                    "article_group_id": AF_Object.group_id,
                    "article_father_id": AF_Object.father_id,
                    "article_father_type": AF_Object.father_type,
                    "article_release_time": AF_Object.release_time,
                    "article_title": AF_Object.name,
                    "article_relation_id": main_relation._id,
                }
                res_dict["node_main"] = [tmp_main_article]
                res_dict["node_main_script"] = fun_load_code_js(tmp_main_article["article_view_body"])
Example #45
def generate(cfg,ATTEMPTS,keepgoing=False):
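    # Retries generator.generate(cfg) until it returns a truthy result; gives
    # up after ATTEMPTS failures, or after 50 failures even when keepgoing is set.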
    fails = 0
    while keepgoing or fails < ATTEMPTS:
        s = generator.generate(cfg)
        if s:
            return s
        else:
            fails += 1
        print 'generation failed:', fails, '/', ATTEMPTS
        if fails > 50:
            break
    return False  # did not generate
def gen_functions ():
	code = ""
	for name, value in ops.iteritems():
		if "journal" not in [ x[0] for x in value ]:
			continue
		fop_subs[name]["@FUNCTION_BODY@"] = get_special_subs(value)
		# Print the FOP fragment with @FUNCTION_BODY@ in the middle.
		code += generate(fragments["FOP"], name, fop_subs)
	return code
Example #47
def gen_fdl ():
	entrypoints = []
	for name, value in ops.iteritems():
		if "journal" not in [ x[0] for x in value ]:
			continue
		len_code, ser_code = get_special_subs(value)
		fop_subs[name]["@LEN_CODE@"] = len_code[:-1]
		fop_subs[name]["@SER_CODE@"] = ser_code[:-1]
		print generate(LEN_TEMPLATE,name,fop_subs)
		print generate(SER_TEMPLATE,name,fop_subs)
		print generate(CBK_TEMPLATE,name,cbk_subs)
		print generate(CONTINUE_TEMPLATE,name,fop_subs)
		print generate(FOP_TEMPLATE,name,fop_subs)
		entrypoints.append(name)
	print "struct xlator_fops fops = {"
	for ep in entrypoints:
		print "\t.%s = fdl_%s," % (ep, ep)
	print "};"
Example #48
 def post(self):
     
     for i in range(1):
         initial, solution = generator.generate('Extreme')
         while generator.extremeSort(initial) == False:
             initial, solution = generator.generate('Extreme')
             
         latest = Puzzle.all().filter('level =', 'Extreme').order('-number').get()
         if latest:
             number = latest.number + 1
         else:
             number = 1
         
         puzzle = Puzzle()
         puzzle.start = initial
         puzzle.values = solution
         puzzle.number = number
         puzzle.level = 'Extreme'
         puzzle.put()
Example #49
def loop_daily_load(start_year, end_year, start_month,
                    end_month, start_day, end_day):
    # Update the database as NASA adds data
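    # For every (year, month, day) in the generated ranges, writes a CSV
    # joining the geocode table with that day's rainfall and loads it into
    # MySQL, updating a progress bar as it goes.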
    import Importdaily
    import LoadDailyMysql
    from generator import generate
    import progressbar
    import pandas as pd
    import datetime
    import numpy

    bar = progressbar.ProgressBar(maxval=365 * 5,
                                  widgets=[
                                      progressbar.Bar('=', '[', ']'),
                                      ' ', progressbar.Percentage()])
    bar.start()
    ii = 1

    year_list = generate(start_year, end_year, 4)
    month_list = generate(start_month, end_month, 2)
    day_list = generate(start_day, end_day, 2)

    geocodes = pd.read_csv('/Users/rsfletch/Geocode3.csv', quotechar='"',
                           names=['lat', 'lon', 'Lvl1', 'Lvl2', 'geo_id'])
    a = numpy.zeros((len(geocodes), 2))
    for year in year_list:
        for month in month_list:
            for day in day_list:
                a[:, 0] = Importdaily.Get_Daily_Rain(year, month, day)
                a[:, 1] = str(datetime.datetime(int(year), int(month),
                                                int(day)))
                b = pd.DataFrame(a)
                table = pd.concat([geocodes, b], axis=1)
                save_str = "/Volumes/" + \
                           "Seagate Backup Plus Drive/" + \
                           "Climate Data/csv/DailyRainfall/" + year + \
                           "_" + month + "_" + day + ".csv"
                table.to_csv(save_str, index=False, header=False)

                LoadDailyMysql.Load_Daily_Mysql(year, month, day)
                bar.update(ii)
                ii = ii + 1
    bar.finish()
Example #50
 def __create_image(self, inpt, hashfun):
     """Creates the avatar based on the input and
     the chosen hash function."""
     if hashfun not in generator.HASHES.keys():
         print ("Unknown or unsupported hash function. Using default: %s"
                % self.DEFAULT_HASHFUN)
         algo = self.DEFAULT_HASHFUN
     else:
         algo = hashfun
     return generator.generate(inpt, algo)
Example #51
def test_generate(data, regexp_type, expected=None, tail_handler=None,
                  tail_regexp_type=None):
    """`data`: list of strings used to generate the regular expression.
    `regexp_type`: "strict" | "lax"
    `expected`: if provided, string containing the expected regular expression.
    """
    exp = generator.generate(data, regexp_type, tail_handler, tail_regexp_type)
    logging.info(str(data) + " -> " + exp)
    if (None != expected):
        check_equal(expected, exp)
def main():
    from Tkinter import Tk
    from generator import generate

    root = Tk()
    root.title("Polyomino Panel")

    pp = PolyominoPanel(root, generate(5), 3)
    pp.pack()
 
    root.mainloop()  
Example #53
    def _create_duplicate_options(self, method, truth_values, invalid=False):
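        # For every template field required by `method`, builds a copy of the
        # base options with that field regenerated (optionally as an invalid
        # value), forcing the fields that must stay unique (slice name, sliver
        # URN, project name, member username, public key) to fresh values.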
        options = self._create_options(method, truth_values)
        testing = []
        for field in self.request_template:
            for test in method_mapping[method]:
                try:
                    if field[test] in truth_values:
                        options_dup = copy.copy(options)
                        options_dup[field['name']] = generator.generate(
                            field['type'],
                            invalid=invalid)
                        if 'SLICE_NAME' in options_dup.keys(): # 1. SLICE name must be unique, 2. Created SLICE cannot be deleted
                            options_dup['SLICE_NAME'] = generator.generate('string', invalid=invalid)
                        elif 'SLIVER_INFO_URN' in options_dup.keys(): # SLIVER_INFO_URN must be unique
                            options_dup['SLIVER_INFO_URN'] = generator.generate('urn', invalid=invalid)
                        elif 'PROJECT_NAME' in options_dup.keys(): # PROJECT_NAME must be unique
                            options_dup['PROJECT_NAME'] = generator.generate('string', invalid=invalid)
                        elif 'MEMBER_USERNAME' in options_dup.keys(): # MEMBER_USERNAME must be unique
                            options_dup['MEMBER_USERNAME'] = generator.generate('username', invalid=invalid)
                        elif 'KEY_PUBLIC' in options_dup.keys(): # KEY_PUBLIC must be unique
                            options_dup['KEY_PUBLIC'] = generator.generate('key', invalid=invalid)

                        testing.append(options_dup)
                except KeyError:
                    pass
        return testing
def main():
    conf = CConfiguration()
    conf.pzInputMxs = "/Users/Jing/Workspace/3DMotionDB/input/scenes/LivingRoom02/living_room_02.mxs"
    conf.pzOutputDir = "/Users/Jing/Workspace/3DMotionDB/output/exp030414"
    conf.pzOutConf = "/Users/Jing/Workspace/3DMotionDB/output/exp030414/configuration.xml"

    conf.obsType = CObservationType.TragetCentered
    conf.numbOfCameras = 2
    conf.numbOfTargets = 3
    conf.numbOfViews = 2

    conf.sceneType = CSceneType.BBoxConstrainted
    conf.motionType = CMotionType.Rotation
    conf.minFocalDistance = 0.4

    conf.fps = 24
    conf.xRes = 288
    conf.yRes = 192

    print("begin generation")
    generate(conf)
    print("finish generation")
Example #55
 def _create_options(self, method, truth_values, urn=None):
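     # Builds an options dict with a generated value for every template field
     # required by `method`, substituting the caller's URN for key/member URNs
     # when one is supplied.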
     options = {}
     for field in self.request_template:
         for test in method_mapping[method]:
             try:
                 if field[test] in truth_values:
                     if urn and self.name in ['key', 'member'] and field['name'] in ['KEY_MEMBER', 'MEMBER_URN']:
                         options[field['name']] = urn
                     else:
                         options[field['name']] = generator.generate(field['type'])
             except KeyError:
                 pass
     return options
def gen_server (templates):
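	# For every fop in fop_table, emits the complete / continue / fan-in /
	# dispatch / fop code from the matching templates, bracketed by #define /
	# #undef lines for the fop's flags, then prints the xlator_fops table.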
	fops_done = []
	for name in fop_table.keys():
		info = fop_table[name].split(",")
		kind = info[0]
		flags = info[1:]
		if ("fsync" in flags) or ("queue" in flags):
			flags.append("need_fd")
		for fname in flags:
			print "#define NSR_CG_%s" % fname.upper()
		print generate(templates[kind+"-complete"],name,cbk_subs)
		print generate(templates[kind+"-continue"],name,fop_subs)
		print generate(templates[kind+"-fan-in"],name,cbk_subs)
		print generate(templates[kind+"-dispatch"],name,fop_subs)
		print generate(templates[kind+"-fop"],name,fop_subs)
		for fname in flags:
			print "#undef NSR_CG_%s" % fname.upper()
		fops_done.append(name)
	# Just for fun, emit the fops table too.
	print("struct xlator_fops fops = {")
	for x in fops_done:
		print("	.%s = nsr_%s,"%(x,x))
	print("};")
Example #57
    def generate_polyominoes(self):
        n = int(self.cell_number_var.get())

        self.elapsed_time_var.set('')
        self.polyomino_number_var.set('')
        self.update_idletasks()

        start = datetime.now()
        polyominoes = generate(n)
        end = datetime.now()

        self.elapsed_time_var.set(end - start)
        self.polyomino_number_var.set(len(polyominoes))
        self.update_idletasks()
Example #58
def generate():
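    # Reads the settings from the GUI, generates a song with generator.generate()
    # using the configured chain length, track and instrument, then builds album
    # art from the output notes and displays it in the Tk window.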
    chainLength = c.d.chainLength.get()  # Get variables
    songLength = c.d.songLength.get()
    filename = c.d.inputFile.get()
    outputPath = c.d.outputFile.get()
    tracknum = c.d.track.get()
    instrument = c.d.instrument.get()
    tempo = c.d.tempo.get()
    inPattern = midi.read_midifile(filename)  # check if track is empty
    if isEmptyTrack(tracknum, inPattern):
        tracknum = findFirstNonEmptyTrack()
        c.d.track.set(tracknum)
    generator.generate(chainLength, songLength, filename, outputPath, tracknum,
                       instrument, tempo)  # generate song
    outPattern = midi.read_midifile(outputPath)  # generate Art
    trk = outPattern[0]
    generator.outputNotes(trk, 'Onotes.txt')
    albumArt.createAlbumArt('Onotes.txt')
    pic = Image.open('art.png')
    art = ImageTk.PhotoImage(pic)
    c.d.artWindow = Label(image=art)
    c.d.artWindow.grid(row=3, columnspan=4)
    c.d.artWindow.image = art
Example #59
def gen_fdl ():
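	# Like gen_fdl in Example #47, but consults selective_generate so that only
	# the listed function kinds (len, serialize, callback, continue, fop) are
	# emitted for fops appearing in that table; writev additionally gets
	# DESTAGE_ASYNC defined around its output.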
	entrypoints = []
	for name, value in ops.iteritems():
		if "journal" not in [ x[0] for x in value ]:
			continue

		# generate all functions for all the fops
		# except for the ones in selective_generate for which
		# generate only the functions mentioned in the
		# selective_generate table
		gen_funcs = "len,serialize,callback,continue,fop"
		if name in selective_generate:
			gen_funcs = selective_generate[name].split(",")

		len_code, ser_code = get_special_subs(value)
		fop_subs[name]["@LEN_CODE@"] = len_code[:-1]
		fop_subs[name]["@SER_CODE@"] = ser_code[:-1]
		if 'len' in gen_funcs:
			print generate(LEN_TEMPLATE,name,fop_subs)
		if 'serialize' in gen_funcs:
			print generate(SER_TEMPLATE,name,fop_subs)
		if name == 'writev':
			print "#define DESTAGE_ASYNC"
		if 'callback' in gen_funcs:
			print generate(CBK_TEMPLATE,name,cbk_subs)
		if 'continue' in gen_funcs:
			print generate(CONTINUE_TEMPLATE,name,fop_subs)
		if 'fop' in gen_funcs:
			print generate(FOP_TEMPLATE,name,fop_subs)
		if name == 'writev':
			print "#undef DESTAGE_ASYNC"
		entrypoints.append(name)
	print "struct xlator_fops fops = {"
	for ep in entrypoints:
		print "\t.%s = fdl_%s," % (ep, ep)
	print "};"
Example #60
 def post(self):
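     # Parses the submitted profile and additional-info JSON, starts resume
     # generation via generator.generate(), and mails the traceback before
     # re-raising if anything goes wrong.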
     template_name = self.get_argument('template', '')
     profile = self.get_argument('profile', '')
     additional_info = self.get_argument('additional-info', '')
     try:
       profDict = json.loads(profile)
       addinfo_dict = json.loads(additional_info)
       filename = generator.generate(profDict, addinfo_dict, template_name, callback=self.on_generate)
     except Exception as e:
       text = "Error occurred during processing:\n%s\nStacktrace:\n" % e
       text = text + traceback.format_exc()
       to_addr = "*****@*****.**"
       from_addr = "*****@*****.**"
       subject = "[ResumeBoss] [ERROR] Unexpected error"
       mail.mail(text, from_addr, to_addr, subject)
       raise