Example #1
0
File: aldo.py Project: AlbertCalmus/aldo
def improvise(message):
    print(" >>> " + message.from_user.first_name + " : " + message.text)
    if "📝" in message.text:
        db = Db(sqlite3.connect('booba.db'), Sql())
        generator = Generator('booba', db, Rnd())
        msg = generator.generate(' ') + "\n" + generator.generate(' ')
        bot.send_message(message.chat.id, msg)
Example #2
0
def generate(name, count):
    db = Db(sqlite3.connect(name + '.db'), Sql())
    generator = Generator(name, db, Rnd())
    markov_text = []
    for i in range(0, count):
        markov_text.append(generator.generate(WORD_SEPARATOR))

    return markov_text
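
Most of the snippets on this page follow the same Db / Generator / Rnd pattern; a minimal sketch of how the helper above might be called, assuming a chain database named 'booba.db' has already been built by the project's Parser and that WORD_SEPARATOR is a single space (the name and count are made up for illustration):
# Hypothetical caller for the generate() helper above.
for sentence in generate('booba', 3):
    print(sentence)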
Example #3
0
def pontificate(bot, trigger):
    print "Generating sentence..."
    markLen = [int(x) for x in trigger.split() if x.isdigit()]

    db = Db(sqlite3.connect(NAME + '.db'), Sql())
    gen = Generator(NAME, db, Rnd())

    for i in range(0, DEFAULT_LENGTH):
        bot.say(gen.generate(WORD_SEPARATOR))
Example #4
0
File: gen.py Project: jk-/gen
def main():
    generator = Generator()

    data = util.file_contents('app.gen')
    instructions = data.split("\n")

    for instruction in instructions:
        generator.add_instruction(instruction)

    generator.process()
Example #5
0
    def genMaze(self, size, alg, ID, name):
        size = int(size)
        maze = Generator(name, size, size)
        if alg == 'RB':
            time2gen = maze.reveseBacktracking()
        elif alg == 'BT':
            time2gen = maze.BinaryTree()
        else:
            return 'invalid input'
        maze.drawMaze()
        maze.saveMaze(time2gen)
Example #6
0
    def __init__(self, f, phase):
        self._file = f
        self._phase = phase
        self.success = True

        self.output = GenericOutput(f.name)

        self.lex = Scanner(f)
        self.fork = TokenForkWrapper(self.lex, self.output.token)
        self.syn = Parser(prodcution_handler=self.output, error_handler=self.output)
        self.sem = SemanticAnalyzer(output=self.output)
        self.gen = Generator()
Example #7
0
def generate_music(saved_dictionary):
    vocab_stoi = saved_dictionary['vocab']
    vocab_itos = saved_dictionary['vocab_itos']
    gen = Generator(len(vocab_stoi), 64)
    gen.load_state_dict(saved_dictionary['model_state_dict'])
    gen.eval()
    # store the ABC as a string
    song = sample_sequence(gen,
                           vocab_stoi,
                           vocab_itos,
                           500,
                           0.6,
                           output_file=False,
                           print_out=False)
    # write the ABC string into a file for conversions
    with open("./tempABC/song.abc", 'w') as writer:
        writer.write(song)
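
A sketch of how generate_music might be driven, assuming the checkpoint was written with torch.save and contains the keys read above; the file path is made up:
import torch

# Hypothetical checkpoint path; must contain 'vocab', 'vocab_itos'
# and 'model_state_dict' as used by generate_music above.
saved = torch.load("checkpoints/music_gan.pth", map_location="cpu")
generate_music(saved)  # writes the sampled ABC notation to ./tempABC/song.abc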
Example #8
0
    def test_generated_sequence_is_correct(self):
        self.db.count_values = [
            OrderedDict([('the', 2), ('a', 1)]),
            OrderedDict([('mat', 1), ('cat', 1)]),
            OrderedDict([('sat', 2)]),
            OrderedDict([('on', 1), ('under', 4)]),
            OrderedDict([('my', 2), ('the', 2)]),
            OrderedDict([('mat', 1), ('cat', 1)]),
            OrderedDict([('$', 1)])
        ]

        self.rnd.vals = [1, 2, 2, 1, 4, 1, 1]

        self.assertEqual(
            Generator('name', self.db, self.rnd).generate(' '),
            'the cat sat on the mat')
        self.assertEqual(
            self.db.get_word_count_args,
            [['^'], ['the'], ['cat'], ['sat'], ['on'], ['the'], ['mat']])
        self.assertEqual(self.rnd.maxints, [3, 2, 2, 5, 4, 2, 1])
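
The stubbed count_values, vals and maxints above imply that the Generator sums the follow-word counts for the current word, draws a 1-based random value up to that sum, and walks the counts cumulatively to pick the next word; a rough sketch of that selection step, not taken from the project itself:
def pick_word(counts, rnd_value):
    # counts: OrderedDict mapping candidate next word -> occurrence count
    # rnd_value: random draw assumed to lie in 1..sum(counts.values())
    running = 0
    for word, count in counts.items():
        running += count
        if rnd_value <= running:
            return word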
Example #9
0
def MarkovGenerator(sentence_list, depth, db_filepath, db=None, rnd=None):
    """Generator that generates new sentences from a list of sentences.
    Arguments:
        sentence_list       List of strings, each being a single sentence to learn from
        depth               Depth of analysis at which to build the Markov chain
        db_filepath         Path to file where sqlite database will be stored
        db (optional)       Db object (for mocking in unit tests)
        rnd (optional)      Rnd object (for mocking in unit tests)"""
    if not db:
        db = Db(sqlite3.connect(db_filepath), Sql())
        db.setup(depth)
    if not rnd:
        rnd = Rnd()

    parser = Parser(DB_NAME, db, SENTENCE_SEPARATOR).parse_list(sentence_list)
    generator = Generator(DB_NAME, db, rnd)
    while True:
        sentence = generator.generate(WORD_SEPARATOR).strip()
        if len(sentence) == 0:
            continue  # avoid generating the empty string
        else:
            yield sentence
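
A brief usage sketch based on the docstring above; the sentences and database path are invented:
sentences = ["the cat sat on the mat", "the dog sat under the chair"]
gen = MarkovGenerator(sentences, depth=2, db_filepath="chain.db")
for _ in range(3):
    print(next(gen))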
Example #10
0
def load_model():
    netG = Generator(1, feature_size).to(device)
    chkpt = torch.load("models/gan_model_2/_1_netG_37.pth")
    netG.load_state_dict(chkpt)
    netG.eval()
    return netG
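
A possible way to sample from the loaded network, reusing feature_size and device from the snippet; the latent shape passed to the generator is a guess, not taken from the project:
import torch

netG = load_model()
with torch.no_grad():
    noise = torch.randn(1, feature_size, device=device)  # assumed latent shape
    sample = netG(noise)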
Example #11
0
from condition import *
from customer import Customer
from gen import Generator

# Inputs - change as necessary
CUSTOMER_BANK_ACCOUNT_NUMBER = "13659275"
CUSTOMER_BANK_SORT_CODE = "23-05-33"
CUSTOMER_PREVIOUS_PINS = ["1948", "4729", "6758", "3648"]

# customer and generator construction
c = Customer(CUSTOMER_BANK_ACCOUNT_NUMBER, CUSTOMER_BANK_SORT_CODE,
             CUSTOMER_PREVIOUS_PINS)
gen = Generator()

# Add more conditions as necessary
gen.add_cond(MoreThan2Consec())
gen.add_cond(ConsecSeq())
gen.add_cond(PrevPins(depth=3))
gen.add_cond(InAccNum())
gen.add_cond(InSortCode())

# running the generator
print(gen.gen(c))
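
The condition classes themselves are not shown in this excerpt; a purely hypothetical sketch of what one such rule could look like if the generator calls a check-style method with the candidate PIN and the customer (the class name, method name and signature are guesses, not the project's API):
class NoRepeatedDigits:
    """Hypothetical rule: reject PINs in which any digit appears three or more times."""
    def check(self, pin, customer):
        return all(pin.count(digit) < 3 for digit in set(pin))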
Example #12
0
File: app.py Project: dewmal/lyrics_gen
                sent = nlp(sent)
                song_vec_sents.append(sent.vector)
        except:
            print(song)
        song_vec_sents = np.array(song_vec_sents)

        return song_vec_sents

    def __len__(self):
        return source_limit  # len(self.df)


dt_x = LyricsDataSet(lyrics).__getitem__(50)
print(dt_x.shape)

netG = Generator(1, feature_size).to(device)
netD = Descriminator(1, feature_size).to(device)

print(netG)
print(netD)

# criterion
bce = nn.BCELoss()

# optimizers
optimizerG = optim.Adam(netG.parameters(), lr=learning_rate, betas=(beta_1, 0.999))
optimizerD = optim.Adam(netD.parameters(), lr=learning_rate, betas=(beta_1, 0.999))

# misc
real_labels = torch.ones(batch_size, device=device)
fake_labels = torch.zeros(batch_size, device=device)
Example #13
0
from rnd import Rnd
import sys
import sqlite3
import codecs

SENTENCE_SEPARATOR = '_'
WORD_SEPARATOR = ' '

if __name__ == '__main__':
    args = sys.argv
    usage = 'Usage: %s (parse <name> <depth> <path to txt file>|gen <name> <count>)' % (
        args[0], )

    if (len(args) < 3):
        db = Db(sqlite3.connect('booba' + '.db'), Sql())
        generator = Generator('booba', db, Rnd())
        print(generator.generate(WORD_SEPARATOR))
        print(generator.generate(WORD_SEPARATOR))

    else:
        mode = args[1]
        name = args[2]

        if mode == 'parse':
            if (len(args) != 5):
                raise ValueError(usage)

            depth = int(args[3])
            file_name = args[4]

            db = Db(sqlite3.connect(name + '.db'), Sql())
Example #14
0
def generate(number, name):
    count = number
    db = Db(sqlite3.connect(name + '.db'), Sql())
    generator = Generator(name, db, Rnd())
    for i in range(0, count):
        yield generator.generate(WORD_SEPARATOR)
Example #15
0
def create_tweet():
	db = Db(sqlite3.connect(NAME + '.db'), Sql())
	generator = Generator(NAME, db, Rnd())
	tweet_candidate = generator.generate(WORD_SEPARATOR)
	return tweet_candidate if check_length(tweet_candidate)  else create_tweet()
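
The recursive retry above can, in principle, hit Python's recursion limit if valid-length candidates are rare; an equivalent iterative form using the same NAME, check_length and WORD_SEPARATOR as the snippet:
def create_tweet_iterative():
    db = Db(sqlite3.connect(NAME + '.db'), Sql())
    generator = Generator(NAME, db, Rnd())
    while True:
        tweet_candidate = generator.generate(WORD_SEPARATOR)
        if check_length(tweet_candidate):
            return tweet_candidate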
Example #16
0
import h5py

filename = "TestICs.0.hdf5"
n_DM = int(1e5)
n_gas = int(1e5)
M_halo = 1e12
M_disk = 1e10
R_NFW = 20
c_NFW = 40
R_gas = 10
max_gas = 30
Z_gas = 2
G = 4.302e-6  # kpc Ms km/s

print("Generating particles, this may take some time")
gen = Generator(n_DM, n_gas, M_halo, M_disk, R_NFW, c_NFW, R_gas, max_gas,
                Z_gas, G)

print("Writing to HDF5")
op = h5py.File(filename, 'w')

print("Writing head")
write_head(op, [n_gas, n_DM, 0, 0, 0, 0],
           [M_disk / n_gas, M_halo / n_DM, 0, 0, 0, 0],
           0,
           z=1)
print("Writing gas")
write_block(op, 0,
            np.array([gen.gas_x, gen.gas_y, gen.gas_z]).T,
            np.array([gen.gas_v_x, gen.gas_v_y, gen.gas_v_z]).T,
            np.arange(0, n_gas))
print("Writing dm")
Example #17
0
c_nfw = float(nfw['NFWc'])
max_gas = float(gas['MaxGas'])
max_star = float(star['MaxStar'])

ic_filename = config['GadgetFiles']['InitCondFile'] + ".hdf5"
ul_cm = float(config['Units']['UnitLength_in_cm'])
um_g = float(config['Units']['UnitMass_in_g'])
uv_cms = float(config['Units']['UnitVelocity_in_cm_per_s'])

G = 6.674e-8 * (1 / (ul_cm * (uv_cms**2) / um_g))

# Actually generate particles

print("Generating Particles...")
print("Gas...")
gen_gas = Generator(0, n_gas, M_halo, M_gas, R_nfw, c_nfw, R_gas, max_gas,
                    Z_gas, G)
print("Stars...")
gen_star = Generator(0, n_star, M_halo, M_star, R_nfw, c_nfw, R_star, max_gas,
                     Z_gas, G)

print("Writing IC File... {}".format(ic_filename))

op = h5py.File(ic_filename, 'w')
write_head(op, [n_gas, 0, 0, 0, n_star, 0],
           [M_gas / n_gas, 0, 0, 0, M_star / n_star, 0],
           0,
           z=1)

# Gas
write_block(op, 0,
            np.array([gen_gas.gas_x, gen_gas.gas_y, gen_gas.gas_z]).T,
Example #18
0
File: natrix.py Project: imihajlow/ccpu
        for _ in range(6):  # TODO add a flag to check if tree was modified
            t = CastTransformer().transform(t)
            t = SubscriptTransformer().transform(t)
            t = MemberAccessTransformer().transform(t)
            t = SizeofExprTransformer().transform(t)
            t = ConstTransformer(True).transform(t)

        lt = LiteralTransformer(ni)
        t = lt.transform(t)
        if args.tree:
            print()
            print("Tree after transform:")
            print(t.pretty())
        cg = CallGraph()
        cg.visit(t)
        g = Generator(cg, lt, ni, backend, not args.no_subsections, lit,
                      args.stack)
        args.o.write(format(g.generate(t)))
        args.o.write("\n")
    except NatrixError as e:
        file, line = lit.translatePosition(e.position)
        sys.stderr.write("Error: {}:{}: {}\n".format(file, line, e.msg))
        sys.exit(1)
    except VisitError as e:
        file, line = lit.translatePosition(e.orig_exc.position)
        sys.stderr.write("Error: {}:{}: {}\n".format(file, line,
                                                     e.orig_exc.msg))
        sys.exit(1)
    except LarkError as e:
        file, line = lit.translateLine(e.line)
        sys.stderr.write("Syntax error in {}:{} {}\n".format(
            file, line, str(e)))
Example #19
0
        args[0], )

    if (len(args) < 3):
        raise ValueError(usage)

    mode = args[1]
    name = args[2]

    if mode == 'parse':
        if (len(args) != 5):
            raise ValueError(usage)

        depth = int(args[3])
        file_name = args[4]

        db = Db(sqlite3.connect(name + '.db'), Sql())
        db.setup(depth)

        txt = codecs.open(file_name, 'r', 'utf-8').read()
        Parser(name, db, SENTENCE_SEPARATOR, WORD_SEPARATOR).parse(txt)

    elif mode == 'gen':
        count = int(args[3])
        db = Db(sqlite3.connect(name + '.db'), Sql())
        generator = Generator(name, db, Rnd())
        for i in range(0, count):
            print(generator.generate(WORD_SEPARATOR))

    else:
        raise ValueError(usage)
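
Taken together, the parse and gen branches imply a two-step command-line workflow; a sketch of the expected invocations, with the script name assumed:
# Assumed script name; arguments follow the usage string above.
#   python markov.py parse booba 2 corpus.txt   # build booba.db from corpus.txt
#   python markov.py gen booba 5                # print 5 generated sentences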
Example #20
0
    def __init__(self, cmd_input):
        self.cli = CLI(cmd_input)
        self.generator = Generator()
        self.fill_options()