Esempio n. 1
0
def simple(env):
    """Load a two-table environment, place one box on table1, and build a
    problem whose goal is to move it to table2.

    Args:
        env: simulation environment to populate.

    Returns:
        RealProblem with the movable object names, surfaces, and goal surfaces.
    """
    env.Load(os.path.join(ENVIRONMENTS_DIR, '2tables.xml'))

    # Materialize lists (not map/filter iterators): under Python 3 those are
    # one-shot, so `surfaces` would be exhausted after building surface_map.
    fixed_names = [get_name(body) for body in env.GetBodies()]
    tables = [body for body in env.GetBodies() if 'table' in get_name(body)]
    surfaces = [compute_surface(table) for table in tables]
    surface_map = {s.name: s for s in surfaces}

    robot = env.GetRobots()[0]
    set_manipulator_conf(robot.GetManipulator('leftarm'), REST_LEFT_ARM)
    open_gripper(robot.GetManipulator('leftarm'))
    set_manipulator_conf(robot.GetManipulator('rightarm'),
                         mirror_arm_config(robot, REST_LEFT_ARM))
    close_gripper(robot.GetManipulator('rightarm'))
    robot.SetDOFValues([.15], [robot.GetJointIndex('torso_lift_joint')])
    set_base_conf(robot, (0, 0, 0))
    robot.SetAffineTranslationLimits(*(2 * np.array([[-1, -1, 0], [1, 1, 0]])))

    objA = box_body(env, 'objA', .07, .05, .2, color=BLUE)
    env.Add(objA)
    # Rejection-sample until a valid placement on table1 is found.
    while True:
        pose = surface_map['table1'].sample_placement(objA)
        if pose is not None:
            set_pose(objA, pose)
            break

    goal_surfaces = {
        get_name(objA): 'table2',
    }

    # Movable = everything added after fixed_names was captured (i.e. objA).
    movable_names = [name for name in map(get_name, env.GetBodies())
                     if name not in fixed_names]

    return RealProblem(movable_names=movable_names,
                       surfaces=surfaces, goal_surfaces=goal_surfaces)
def extract_wallybody(dirpath, anns_path):
    """Crop foreground (body) and background patches from annotated images.

    Args:
        dirpath: directory containing the *.jpg images.
        anns_path: annotation file with header plus `fpath,x1,x2,y1,y2` lines.

    Returns:
        (fgs, bgs): vstacked numpy arrays of 80x80 foreground and background
        crops.
    """
    img_prc = ImageProcessing()

    paths = glob.glob('{}/*.jpg'.format(dirpath))

    # Collect bounding boxes of the images that contain a body.
    # (Use `with` so the annotation file is closed; the original leaked it.)
    body_dict = {}
    with open(anns_path, 'r') as anns:
        for line in anns.readlines()[1:]:  # skip the header line
            fpath, x1, x2, y1, y2 = line.split(',')
            x1, x2, y1, y2 = [int(ele.strip()) for ele in (x1, x2, y1, y2)]
            name = utils.get_name(fpath)
            body_dict.setdefault(name, []).append((x1, x2, y1, y2))

    fg_imgs_list = []
    bg_imgs_list = []
    for path in paths:
        name = utils.get_name(path)
        img = np.asarray(Image.open(path).convert('RGB'))
        # Extract every annotated body region from this image.
        if name in body_dict:
            for x1, x2, y1, y2 in body_dict[name]:
                fg_imgs, fg_coords = img_prc.guarantee_stride_cropping(
                    img, (400, 400), [x1, y1, x2, y2], (25, 25))
                if len(fg_imgs) == 0:
                    # Py3-compatible print (the original Py2 print statement
                    # is a syntax error under Python 3).
                    print(path, x2 - x1, y2 - y1)
                else:
                    fg_imgs = img_prc.resize_npImages(fg_imgs, (80, 80))
                    fg_imgs_list.append(fg_imgs)

                # Work on a copy and black out the body region so the
                # background crops taken below do not contain it.
                img = copy.deepcopy(img)
                cv2.rectangle(img, (x1, y1), (x2, y2), (0, 0, 0), -1)

        bg_imgs, bg_coords = img_prc.stride_cropping(img, 200, 200, 400, 400)
        bg_imgs = img_prc.resize_npImages(bg_imgs, (80, 80))
        bg_imgs_list.append(bg_imgs)

    # Stack per-image batches into single arrays. (The original also built a
    # raw-crop `fgs` list and then shadowed it here; that dead code is gone.)
    fgs = np.vstack(fg_imgs_list)
    bgs = np.vstack(bg_imgs_list)

    return fgs, bgs
Esempio n. 3
0
def crawler():
    """Crawl every configured artist page and download missing lyrics.

    Iterates config.FULL_URLS, lists the song links on each page, and writes
    each song's lyrics to `<LYRICS_FOLDER>/<writer>/<song>.txt` unless the
    file already exists. Returns early on IndexError, which signals the site
    has started blocking the scraper.
    """
    counter = 1
    for url_ref in config.FULL_URLS:
        resp = requests.get(url_ref)
        if resp.status_code != 200:
            LOG.warning(f"Unable to load: {url_ref}")
            continue

        _, name = get_name(url_ref)
        # Ensure folder exists (fixed typo: folter_path -> folder_path).
        folder_path = create_folder([config.LYRICS_FOLDER, name])
        # Get all links
        parsed_html = BeautifulSoup(resp.content, features='html.parser')
        lyrics_links = parsed_html.select('.listalbum-item a')
        LOG.info(f"Number of {name.upper()} songs: {len(lyrics_links)}")

        lyric_paths = [extract_link(link) for link in lyrics_links]

        for lyric_path in lyric_paths:
            try:
                writer, song_name = get_name(lyric_path)
                if name != writer:
                    # Song credited to another writer: store it under that
                    # writer's own folder instead.
                    alt_folder = create_folder(
                        [config.LYRICS_FOLDER, writer])
                    lyrics_file = alt_folder.joinpath(song_name + '.txt')
                else:
                    lyrics_file = folder_path.joinpath(song_name + '.txt')

                if not lyrics_file.is_file():
                    text = get_lyrics(lyric_path).strip()
                    LOG.info("Downloading (" + str(counter).zfill(3) +
                             f") [{writer}]: {song_name}")
                    counter += 1

                    with open(lyrics_file, "w") as f:
                        f.write(text)
                    # Randomized delay to stay under the site's rate limit.
                    time.sleep(config.CRAWLER_WAIT +
                               config.CRAWLER_WAIT * random.random())

            except IndexError:
                LOG.error(
                    f"Access denied while scraping: {lyric_path} \n"
                    f"Try increasing the waiting time.\n"
                    f"Finishing the scrapping for the moment. Try to access on your browser to unblock access"
                )
                return
            except Exception as err:
                print(f"ERROR: {lyric_path}: {err}")
Esempio n. 4
0
def run_gcPercent(fasta, twobit, path_to_results):
    """Compute 5-base-window GC percentage with hgGcPercent.

    Args:
        fasta: FASTA file whose name is passed as the hgGcPercent db argument.
        twobit: 2bit file (must exist); its name labels the output wig.
        path_to_results: directory where the wig file is written.

    Returns:
        Path of the installed wig file.
    """
    utils.check_existence_or_raise(twobit)
    wig = os.path.join(path_to_results,
                       'gc5Base.' + utils.get_name(twobit) + '.wig')
    # Write to a temp file first so the final wig appears atomically.
    tmp = utils.atomic_tmp_file(wig)
    params = [
        'hgGcPercent', '-wigOut', '-doGaps', '-win=5', '-file=' + tmp,
        '-verbose=0',
        utils.get_name(fasta), path_to_results
    ]
    # Pass the argv list directly: no shell features are used, and this is
    # robust to spaces/metacharacters in paths (the original joined the list
    # into a shell string, which is fragile and injection-prone).
    subprocess.call(params)
    utils.atomic_install(tmp, wig)
    return wig
Esempio n. 5
0
def run_sorting(blat_psl, results):
    """Sort a psl file by query name and strip its header/footer lines.

    Args:
        blat_psl: input psl file (must exist).
        results: output directory.

    Returns:
        Path of the sorted psl containing only data entries.
    """
    # Py3-compatible print (the original Py2 print statement is a syntax
    # error under Python 3).
    print('blat_psl is', blat_psl)
    utils.check_existence_or_raise(blat_psl)
    sorted_psl = os.path.join(results,
                              utils.get_name(blat_psl) + '.sorted.psl')
    tmp = utils.atomic_tmp_file(sorted_psl)
    # The shell is required here for the `>` output redirection.
    params = ['sort', '-k 10,10', blat_psl, '>', tmp]
    subprocess.call(" ".join(params), shell=True)
    utils.atomic_install(tmp, sorted_psl)
    sorted_psl_only_entries = os.path.join(
        results, utils.get_name(blat_psl) + '.sorted.only_entries.psl')
    tmp = utils.atomic_tmp_file(sorted_psl_only_entries)
    # Drop the 4 psl header lines and the trailing line, keeping data rows.
    subprocess.call('tail -n +5 ' + sorted_psl + '| head -n -1 > ' + tmp,
                    shell=True)
    utils.atomic_install(tmp, sorted_psl_only_entries)
    return sorted_psl_only_entries
Esempio n. 6
0
def get_sector_index_data():
    """Fetch KOSPI and KOSDAQ sector indices and dump them to a CSV."""
    kospi, kosdaq = get_index()

    # One DataFrame per market, then concatenate them for the export.
    frames = [
        pd.DataFrame({'index': market, 'name': get_name(market)})
        for market in (kospi, kosdaq)
    ]

    make_csv(pd.concat(frames), 'sector_index_')
Esempio n. 7
0
def main(args=None):
    """Parse CLI args, configure TensorFlow, build the model, and train it.

    Args:
        args: argument list; defaults to sys.argv[1:] read at call time.
            (The previous default `args=sys.argv[1:]` was evaluated once at
            import time, so later argv changes were ignored.)

    Returns:
        (model, name): the trained Keras model and its run name.
    """
    if args is None:
        args = sys.argv[1:]
    args, tf_args, generator_args, model_args, train_args, lr_args, nms_args = parse_args(
        args)
    from utils import set_tf_environ
    set_tf_environ(**vars(tf_args))

    # Deferred imports: the TF environment must be set before TF is loaded.
    import tensorflow as tf
    import keras.backend as K
    from utils import get_session, get_name, record_hyperparameters
    from model import build_model, model_path
    K.set_session(get_session())

    name = get_name(train_args.__dict__.pop('name'))
    log_dir, model_dir = model_path(name)
    print(name)
    writer = tf.summary.FileWriter(log_dir)
    record_hyperparameters(args, writer)

    train_generator, valid_generator = make_generators(**vars(generator_args))
    model = build_model(**vars(model_args), nms_args=vars(nms_args))
    # Persist the architecture so the run can be reloaded later.
    with open('%s/config.yml' % model_dir, 'w') as f:
        f.write(model.to_yaml())
    try:
        train(model,
              train_generator,
              valid_generator,
              name=name,
              writer=writer,
              lr=vars(lr_args),
              **vars(train_args))
    except KeyboardInterrupt:
        # Allow manual interruption; still return the partially trained model.
        pass

    return model, name
Esempio n. 8
0
def hello_world():
    """Handle curl-record submissions and render the index page.

    On POST, validates the submitted curl command and stores it in redis
    under the given hour key; always renders the index with success/fail
    logs and a per-hour summary of recorded names.
    """
    if request.method == 'POST':
        form = request.form
        try:
            # Reject anything that looks like command injection or does not
            # target the expected formDesignApi endpoint.
            if '.sh' in form['curl'] or 'formDesignApi/S/' not in form['curl']:
                flash('注入攻击?不存在的~', 'danger')
            elif form['curl'].startswith('curl') and '|' not in form['curl']:
                curl = form['curl'].replace('--data-raw', '--data')
                name = get_name(curl)
                r.hset(form['time'], name, curl)
                flash('add success', 'success')
                app.logger.info('{} record success'.format(name))
            else:
                flash('别乱输东西好吧= =', 'danger')
        except Exception as e:
            flash('凉凉,建议联系管理员,{}'.format(e), 'danger')

    # Build hour -> comma-joined record names, querying redis once per hour.
    # (The original dict comprehension called r.hgetall(foo) twice per hour.)
    dic = {}
    for hour in range(0, 24):
        names = ','.join(list(r.hgetall(hour).keys()))
        if names != '':
            dic[hour] = names
    return render_template('index.html',
                           success_log=r.hgetall('success'),
                           fail_log=r.hgetall('fail'),
                           info=dic)
Esempio n. 9
0
def active_client(host, port='8021', auth='ClueCon',
                  apps=None):
    '''A context manager which delivers an active `Client` containing a started
    `EventListener` with applications loaded that were passed in the `apps` map
    '''
    client = Client(
        host, port, auth, listener=get_listener(host, port, auth)
    )
    client.listener.connect()
    client.connect()
    # load app set
    if apps:
        for value, app in apps.items():
            client.load_app(app, on_value=value if value else
                            utils.get_name(app))
    # client setup/teardown
    client.listener.start()
    try:
        yield client
    finally:
        # Always unload apps and disconnect, even if the managed block
        # raised — otherwise the listener/client connections would leak.
        if apps:
            for value, app in apps.items():
                client.unload_app(app)

        client.listener.disconnect()
        client.disconnect()
Esempio n. 10
0
def get_start_end(file):
    """Interactively select the start/end seconds of a .fit activity.

    Plots cadence/power/heart_rate, asks the user for a start and end sample
    until confirmed, stores them via utils.set_info, writes the records to a
    sibling .csv, and plots a power-vs-heart_rate regression.
    """
    print(utils.get_name(file))
    result = {}
    fitfile = FitFile(file)

    # Get all data messages that are of type record
    for i, record in enumerate(fitfile.get_messages("record")):
        # Go through all the data entries in this record
        for record_data in record:
            if not str(record_data.name) in list(result.keys()):
                result[str(record_data.name)] = {}

            result[record_data.name][i] = record_data.value
        # Pad fields missing from this record so all columns stay aligned.
        result = fill_na(result, i)

    df = pd.DataFrame().from_dict(result)
    good = False
    # Loop until the user confirms the selected [start, end] window.
    while not good:
        ax = df.loc[:, ["cadence", "power", "heart_rate"]].plot()
        plt.show()
        start = int(input("start? "))
        end = int(input("end? "))
        ax = df.loc[start:end, ["cadence", "power", "heart_rate"]].plot()
        plt.show()
        good = input("good? (y/n) ") == "y"
    info = utils.get_info(file)
    info["start(sec)"] = str(start)
    info["end(sec)"] = str(end)
    utils.set_info(file, info)
    df.to_csv(file.replace(".fit", ".csv"))
    sns.lmplot(x="power", y="heart_rate", data=df)
Esempio n. 11
0
def cylinder(env):
    """Place a cylinder on table1 and build a problem moving it to table2.

    Args:
        env: simulation environment set up by initialize_two_tables.

    Returns:
        RealProblem describing the pick-and-place task.
    """
    from manipulation.bodies.bodies import mesh_cylinder_body

    surfaces = initialize_two_tables(env)
    # Materialize a list (not a map iterator): under Python 3 the iterator
    # would be exhausted by the first membership test below.
    fixed_names = [get_name(body) for body in env.GetBodies()]
    surface_map = {s.name: s for s in surfaces}

    # Alternative cylinder sizes kept for reference:
    #objA = mesh_cylinder_body(env, .03, .1, name='objA', color=BLUE)
    #objA = mesh_cylinder_body(env, .05, .05, name='objA', color=BLUE)
    #objA = mesh_cylinder_body(env, .265/2, .025, name='objA', color=BLUE) # Plate
    objA = mesh_cylinder_body(env, .2 / 2, .025, name='objA', color=BLUE)

    env.Add(objA)
    # Rejection-sample until a valid placement on table1 is found.
    while True:
        pose = surface_map['table1'].sample_placement(objA)
        if pose is not None:
            set_pose(objA, pose)
            break

    goal_surfaces = {get_name(objA): 'table2'}

    movable_names = [name for name in map(get_name, env.GetBodies())
                     if name not in fixed_names]

    return RealProblem(movable_names=movable_names,
                       surfaces=surfaces,
                       goal_surfaces=goal_surfaces)
Esempio n. 12
0
def apify(filename, tablename=None):
    """Import *filename* into the API database, replacing any existing table."""
    if not tablename:
        tablename = utils.get_name(filename)
    logger.info('Importing {0} to table {1}'.format(filename, tablename))
    # Drop, reload, then re-index so the table always matches the file.
    utils.drop_table(tablename)
    utils.load_table(filename, tablename)
    utils.index_table(tablename, config.CASE_INSENSITIVE)
    logger.info('Finished importing {0}'.format(filename))
Esempio n. 13
0
 def __init__(self, population_size=0):
     """Create a population of randomly aged, gendered, and named people."""
     self.pop = []
     for _ in range(population_size):
         person = Person(age=int(random.random() * 50),
                         gender=random.choice(['m', 'f']))
         # Name is drawn according to the person's gender.
         person.set_name(get_name(person.get_gender()))
         self.add_person(person)
Esempio n. 14
0
def active_client(host, port='8021', auth='ClueCon',
                  apps=None):
    '''A context manager which delivers an active `Client` containing a started
    `EventListener` with applications loaded that were passed in the `apps` map
    '''
    client = Client(
        host, port, auth, listener=get_listener(host, port, auth)
    )
    client.listener.connect()
    client.connect()
    # load app set
    if apps:
        for value, app in apps.items():
            client.load_app(app, on_value=value if value else
                            utils.get_name(app))
    # client setup/teardown
    client.listener.start()
    try:
        yield client
    finally:
        # Always unload apps and disconnect, even if the managed block
        # raised — otherwise the listener/client connections would leak.
        if apps:
            for value, app in apps.items():
                client.unload_app(app)

        client.listener.disconnect()
        client.disconnect()
Esempio n. 15
0
 def __init__(self, parameters=None, models_path=None, model_path=None):
     """
     Initialize the model. We either provide the parameters and a path where
     we store the models, or the location of a trained model.

     Exactly one mode applies:
       * parameters + models_path: create a model directory named after the
         parameters and persist them there.
       * model_path: reload parameters (and mappings) from an existing
         model directory.
     """
     if model_path is None:
         assert parameters and models_path
         # Create a name based on the parameters
         self.parameters = parameters
         self.name = get_name(parameters)
         # Model location
         model_path = os.path.join(models_path, self.name)
         self.model_path = model_path
         self.parameters_path = os.path.join(model_path, 'parameters.pkl')
         self.mappings_path = os.path.join(model_path, 'mappings.pkl')
         # Create directory for the model if it does not exist
         if not os.path.exists(self.model_path):
             os.makedirs(self.model_path)
         # Save the parameters to disk
         with open(self.parameters_path, 'wb') as f:
             cPickle.dump(parameters, f)
     else:
         assert parameters is None and models_path is None
         # Model location
         self.model_path = model_path
         self.parameters_path = os.path.join(model_path, 'parameters.pkl')
         self.mappings_path = os.path.join(model_path, 'mappings.pkl')
         # Load the parameters and the mappings from disk
         with open(self.parameters_path, 'rb') as f:
             self.parameters = cPickle.load(f)
         self.reload_mappings()
     # Registry of model sub-components, filled in by later setup calls.
     self.components = {}
Esempio n. 16
0
 def save(self, path):
     """Save every frame of each projection type under *path*.

     Creates *path* plus one sub-directory per projection type in DIRS, then
     writes each frame named `<basename(path)><index>`.
     """
     utils.make_dir(path)
     name = utils.get_name(path)
     # NOTE(review): the original mixed tabs and spaces on these two lines,
     # which is a TabError under Python 3; indentation normalized to spaces.
     for proj_type in DIRS:
         utils.make_dir(path + '/' + proj_type)
     for i, frame in enumerate(self.frames):
         frame.save(path, name + str(i))
Esempio n. 17
0
def main():
    """Prompt for an image path, match its face against the dataset, and
    display the closest match with a similarity percentage.
    """
    print("Image:")
    image = input("")

    features = finding_face_landmark.finding_face_landmark(image)
    # No face landmarks detected: nothing to match against.
    if (len(features) == 0):
        exit(0)

    data_file_name = "features.csv"
    X, Y, Q = utils.get_data(data_file_name, 2000)

    # Normalize dataset features using their own per-dimension min/max.
    x_min, x_max = utils.get_min_max(X)
    X = utils.normalize_features(x_min, x_max, X)

    test_file_name = "test.csv"
    T, P, L = utils.get_data_test(test_file_name, x_min, x_max, len(X), Q, Y)

    model_file_name = './my_test_model.ckpt'
    neural_network = n.Neural_Network(X, Y, model_file_name)
    # neural_network.training()
    # neural_network.test(T,P)

    # Normalize the query features with the same min/max as the dataset.
    features = utils.normalize_features(x_min, x_max, features)

    predict = neural_network.predict([features])
    image_path = Q[predict][0].strip()

    # NOTE(review): hard-coded Windows dataset location.
    metadata = 'C:\\ProjekatSoft\\wiki_crop\\wiki.mat'
    name = utils.get_name(image_path, metadata)

    percent = utils.get_percent(features, X[predict:predict + 1, :15][0])
    utils.show_image('C:\\ProjekatSoft\\wiki_crop\\' + image_path, name,
                     percent)
Esempio n. 18
0
    def reproduce(self):
        """Create offspring from eligible adult pairs for one generation."""
        attempts = int(self.size() * 8 / 10)
        skip_threshold = 0.4

        def _adults_of(sex):
            # Adults strictly between 20 and 60 are eligible parents.
            return [p for p in self.pop
                    if p.get_gender() == sex and 20 < p.get_age() < 60]

        males = _adults_of('m')
        females = _adults_of('f')
        if len(males) > 1 and len(females) > 1:
            for _ in range(attempts):
                if random.random() > skip_threshold:
                    father = random.choice(males)
                    mother = random.choice(females)
                    # Baby gets a random first name plus the father's surname.
                    baby = Person(get_name().split()[0] + " " +
                                  father.get_name().split()[-1],
                                  0,
                                  gender=random.choice(['m', 'f']))
                    self.add_person(baby)
Esempio n. 19
0
 def __init__(self, parameters=None, models_path=None, model_path=None):
     """
     Initialize the model; if a trained model already exists, load its
     parameters and mappings from disk.
     """
     if model_path is None:
         assert parameters and models_path
         # The model directory is named after the concrete parameters
         self.parameters = parameters
         self.name = get_name(parameters)
         # Model path
         model_path = os.path.join(models_path, self.name)
         self.model_path = model_path
         self.mappings_path = os.path.join(model_path, 'mappings.pkl')
         self.parameters_path = os.path.join(model_path, 'parameters.pkl')
         # Create a directory for the model if it does not already exist
         if not os.path.exists(self.model_path):
             os.makedirs(self.model_path)
         # Save the parameters locally
         with open(self.parameters_path, 'wb') as f:
             cPickle.dump(parameters, f)
     else:
         assert parameters is None and models_path is None
         # Model path
         self.model_path = model_path
         self.mappings_path = os.path.join(model_path, 'mappings.pkl')
         self.parameters_path = os.path.join(model_path, 'parameters.pkl')
         # Load the parameters and the mappings from disk
         with open(self.parameters_path, 'rb') as f:
             self.parameters = cPickle.load(f)
         self.reload_mappings()
Esempio n. 20
0
 def __init__(self, parameters=None, models_path=None, model_path=None):
     """
     Initialize the model. We either provide the parameters and a path where
     we store the models, or the location of a trained model.
     """
     if model_path is None:
         assert parameters and models_path
         # Create a name based on the parameters
         self.parameters = parameters
         self.name = get_name(parameters)
         # Model location
         model_path = os.path.join(models_path, self.name)
         self.model_path = model_path
         self.parameters_path = os.path.join(model_path, 'parameters.pkl')
         self.mappings_path = os.path.join(model_path, 'mappings.pkl')
         # Create directory for the model if it does not exist
         if not os.path.exists(self.model_path):
             os.makedirs(self.model_path)
         # Save the parameters to disk.
         # BUG FIX: cPickle.dump returns None, so the original
         # `self.parameters = cPickle.dump(...)` clobbered the parameters
         # assigned above with None.
         with open(self.parameters_path, 'wb') as f:
             cPickle.dump(parameters, f)
     else:
         assert parameters is None and models_path is None
         # Model location
         self.model_path = model_path
         self.parameters_path = os.path.join(model_path, 'parameters.pkl')
         self.mappings_path = os.path.join(model_path, 'mappings.pkl')
         # Load the parameters and the mappings from disk
         with open(self.parameters_path, 'rb') as f:
             self.parameters = cPickle.load(f)
         self.reload_mappings()
     # Registry of model sub-components, filled in by later setup calls.
     self.components = {}
def run(dir, maf, sizes_folder, species_file):
    """Run the maf -> grimm_synt -> grimm synteny pipeline (Python 2).

    Steps: convert the maf alignment to grimm_synt input, generate anchors,
    compute synteny blocks, render bigBed tracks, then run grimm on the
    resulting mgr_macro.txt.

    Args:
        dir: working/output directory.
        maf: multiple-alignment file to analyze.
        sizes_folder: folder of chromosome size files.
        species_file: file listing the species to include.
    """
    grimm_synt_input_file = os.path.join(dir, utils.get_name(maf)+'.grimm_synt')

    print utils.get_time()
    print 'converting maf to input for grimm...'
    params = ['./generate_input_grimm_synt', maf, sizes_folder, grimm_synt_input_file, species_file]
    subprocess.check_call(params)

    print utils.get_time()
    print 'generating anchors for grimm_synt...'
    anchors_folder = os.path.join(dir,'grimm_synt_anchors')
    utils.create_dir_if_not_exists(anchors_folder)
    params = ['grimm_synt', '-A', '-f', grimm_synt_input_file, '-d', anchors_folder]
    subprocess.check_call(params)
    print utils.get_time()
    print 'running grimm_synt...'
    grimm_synt_output = os.path.join(dir,'grimm_synt_output')
    utils.create_dir_if_not_exists(grimm_synt_output)
    # Alternative -m/-g parameter sets kept from earlier runs:
    #used this for comparing mhc regions
    #params = ['grimm_synt','-f', os.path.join(anchors_folder, 'unique_coords.txt'),'-d',grimm_synt_output, '-m 1000 -g 1000 -c']
    #params = ['grimm_synt','-f', os.path.join(anchors_folder, 'unique_coords.txt'),'-d',grimm_synt_output,'-m', '300000', '-g', '300000', '-c']
    params = ['grimm_synt','-f', os.path.join(anchors_folder, 'unique_coords.txt'),'-d',grimm_synt_output,'-m', '100000', '-g', '100000', '-c']
    subprocess.check_call(params)
    print 'synteny blocks are at',os.path.join(grimm_synt_output,'blocks.txt')
    print utils.get_time()
    print 'creating bigBed files...'
    create_bigBed(grimm_synt_output, sizes_folder, species_file)
    print utils.get_time()
    print 'running grimm...'
    params = ['grimm', '-f', os.path.join(grimm_synt_output,'mgr_macro.txt'), '-o', os.path.join(dir,'grimm.output')]
    subprocess.call(" ".join(params), shell=True)
    print 'grimm output is saved to', os.path.join(dir,'grimm.output')
    print utils.get_time()

    print 'done.'
Esempio n. 22
0
    def __init__(self,
                 vocab_size,
                 dim,
                 num_roles,
                 num_fillers,
                 use_bias=True,
                 name=''):
        """HRR word embedding: each word is a sum of role-bound fillers.

        Args:
            vocab_size: vocabulary size.
            dim: embedding dimensionality.
            num_roles: number of role vectors.
            num_fillers: number of filler basis vectors per role.
            use_bias: forwarded to the base embedding class.
            name: variable-scope name; defaults to a generated 'HRR_word' name.
        """
        name = get_name(name, 'HRR_word')
        super(HRRWordEmbedding, self).__init__(vocab_size,
                                               dim,
                                               use_bias=use_bias,
                                               name=name)
        self.num_roles = num_roles
        self.num_fillers = num_fillers

        with tf.variable_scope(name):
            # Role basis vectors, one per role.
            self.r_basis = tf.get_variable('r_basis',
                                           shape=[num_roles, dim],
                                           dtype=tf.float32)
            # Filler basis vectors, per role.
            self.f_basis = tf.get_variable('f_basis',
                                           shape=[num_roles, num_fillers, dim],
                                           dtype=tf.float32)
            # Per-word mixing coefficients over the filler bases.
            self.s = tf.get_variable(
                's',
                shape=[vocab_size, num_roles, num_fillers],
                dtype=tf.float32)
            # Per-word fillers: weighted sums of the filler bases.
            self.f = tf.reduce_sum(tf.expand_dims(self.s, axis=-1) *
                                   self.f_basis,
                                   axis=2,
                                   name='f')  # size: vs x nr x d
            self.rbf = circular_conv(self.r_basis, self.f,
                                     name='rbf')  # r-bind-f. size: vs x nr x d
            # replace the old weight
            self.weight = tf.reduce_sum(self.rbf, axis=1,
                                        name='weight')  # size: vs x d
Esempio n. 23
0
def apify(filename, tablename=None):
    """Drop, reload, and re-index the database table for *filename*."""
    tablename = tablename or utils.get_name(filename)
    logger.info('Importing {0} to table {1}'.format(filename, tablename))
    for step in (utils.drop_table,
                 lambda t: utils.load_table(filename, t),
                 lambda t: utils.index_table(t, config.CASE_INSENSITIVE)):
        step(tablename)
    logger.info('Finished importing {0}'.format(filename))
def run_faToBit(fasta, path_to_results):
    """Convert a FASTA file to an unmasked 2bit file.

    Args:
        fasta: input FASTA file (must exist).
        path_to_results: directory where the 2bit file is written.

    Returns:
        Path of the installed 2bit file.
    """
    utils.check_existence_or_raise(fasta)
    twobit = os.path.join(path_to_results, utils.get_name(fasta) + '.2bit')
    # Write to a temp file first so the output appears atomically.
    tmp = utils.atomic_tmp_file(twobit)
    params = ['faToTwoBit', '-noMask', fasta, tmp]
    # Argv list, no shell: robust to spaces/metacharacters in paths
    # (the original joined into a shell string with shell=True).
    subprocess.call(params)
    utils.atomic_install(tmp, twobit)
    return twobit
def run_wigToBigWig(fasta, wig, sizes, path_to_results):
    """Convert a GC-content wig file to bigWig.

    Args:
        fasta: FASTA file whose name labels the output bigWig.
        wig: input wig file (must exist).
        sizes: chromosome sizes file.
        path_to_results: directory where the bigWig is written.

    Returns:
        Path of the installed bigWig file.
    """
    utils.check_existence_or_raise(wig)
    bigWig = os.path.join(path_to_results,
                          utils.get_name(fasta) + '.gc5Base' + '.bw')
    tmp = utils.atomic_tmp_file(bigWig)
    params = ['wigToBigWig', wig, sizes, tmp]
    # Argv list, no shell: robust to spaces/metacharacters in paths.
    subprocess.call(params)
    utils.atomic_install(tmp, bigWig)
    return bigWig
Esempio n. 26
0
    def __init__(self, vocab_size, dim, name=''):
        """Plain embedding layer: a [vocab_size, dim] trainable weight matrix.

        Args:
            vocab_size: number of embedding rows.
            dim: embedding dimensionality.
            name: variable-scope name; defaults to a generated 'emb' name.
        """
        self.vocab_size = vocab_size
        self.dim = dim

        with tf.variable_scope(get_name(name, 'emb')):
            self.weight = tf.get_variable("weight",
                                          shape=[self.vocab_size, self.dim],
                                          dtype=tf.float32)
Esempio n. 27
0
    def __init__(self, dim, num_roles, name=''):
        """HRR chunk module bundling binding, encoding, and prediction layers.

        Args:
            dim: embedding dimensionality.
            num_roles: number of roles used for binding.
            name: variable-scope name; defaults to a generated 'HRR_chunk' name.
        """
        self.dim = dim
        self.num_roles = num_roles

        with tf.variable_scope(get_name(name, 'HRR_chunk')):
            self.binder = ChunkBindingLayer(dim, num_roles)
            self.encoder = ChunkEncodingLayer(num_roles)
            self.pred = ChunkPredictionLayer(dim)
Esempio n. 28
0
def run_pslToBigBed(blat_psl, sizes, results):
    """Convert a psl to bed, sort it, and build a bigBed from the result.

    Args:
        blat_psl: input psl file (must exist).
        sizes: chromosome sizes file for bedToBigBed.
        results: output directory.
    """
    utils.check_existence_or_raise(blat_psl)
    base = utils.get_name(blat_psl)

    blat_bed = os.path.join(results, base + '.bed')
    tmp = utils.atomic_tmp_file(blat_bed)
    # Argv lists, no shell: robust to spaces/metacharacters in paths
    # (the original joined the lists into shell strings with shell=True).
    subprocess.call(['pslToBed', blat_psl, tmp])
    utils.atomic_install(tmp, blat_bed)

    sorted_bed = os.path.join(results, base + '.sorted.bed')
    tmp = utils.atomic_tmp_file(sorted_bed)
    subprocess.call(['bedSort', blat_bed, tmp])
    utils.atomic_install(tmp, sorted_bed)

    bigBed = os.path.join(results, base + '.bigBed')
    tmp = utils.atomic_tmp_file(bigBed)
    subprocess.call(['bedToBigBed', sorted_bed, sizes, tmp])
    utils.atomic_install(tmp, bigBed)
Esempio n. 29
0
def run_blat(fasta, ncrna, results):
    """Align ncRNA sequences to a genome with blat.

    Args:
        fasta: genome FASTA file (must exist).
        ncrna: query ncRNA FASTA file.
        results: output directory.

    Returns:
        Path of the installed psl file.
    """
    utils.check_existence_or_raise(fasta)
    blat_psl = os.path.join(results, utils.get_name(fasta) + '.psl')
    tmp = utils.atomic_tmp_file(blat_psl)
    params = ['blat', "-q=rna", fasta, ncrna, tmp]
    # Argv list, no shell: robust to spaces/metacharacters in paths.
    subprocess.call(params)
    utils.atomic_install(tmp, blat_psl)
    return blat_psl
def run_gcPercent(fasta, twobit, path_to_results):
    """Compute 5-base-window GC percentage with hgGcPercent.

    Args:
        fasta: FASTA file whose name is passed as the hgGcPercent db argument.
        twobit: 2bit file (must exist); its name labels the output wig.
        path_to_results: directory where the wig file is written.

    Returns:
        Path of the installed wig file.
    """
    utils.check_existence_or_raise(twobit)
    wig = os.path.join(path_to_results,
                       'gc5Base.' + utils.get_name(twobit) + '.wig')
    tmp = utils.atomic_tmp_file(wig)
    params = ['hgGcPercent', '-wigOut', '-doGaps', '-win=5', '-file=' + tmp,
              '-verbose=0', utils.get_name(fasta), path_to_results]
    # Argv list, no shell: robust to spaces/metacharacters in paths.
    subprocess.call(params)
    utils.atomic_install(tmp, wig)
    return wig
Esempio n. 31
0
def add_weight(file):
    """Ask the user for the animal's weight and store it in the file info."""
    info = utils.get_info(file)
    kg = input("wat is het gewicht van " + utils.get_name(file) + " ? ")

    # An answer of zero means the weight is unknown.
    info["gewicht(kg)"] = "Onbekend" if str(kg) == "0" else kg
    utils.set_info(file, info)
Esempio n. 32
0
def run_faToBit(fasta, path_to_results):
    """Convert a FASTA file to an unmasked 2bit file.

    Args:
        fasta: input FASTA file (must exist).
        path_to_results: directory where the 2bit file is written.

    Returns:
        Path of the installed 2bit file.
    """
    utils.check_existence_or_raise(fasta)
    twobit = os.path.join(path_to_results, utils.get_name(fasta) + '.2bit')
    tmp = utils.atomic_tmp_file(twobit)
    params = ['faToTwoBit', '-noMask', fasta, tmp]
    # Argv list, no shell: robust to spaces/metacharacters in paths.
    subprocess.call(params)
    utils.atomic_install(tmp, twobit)
    return twobit
Esempio n. 33
0
def read_img_action(path):
    """Read all projection frames under *path* into a ProjectionAction."""
    # One frame file per entry found in the xy/ sub-directory.
    frame_count = len(utils.get_files(path + "xy/"))
    frame_files = ["act" + str(i) + ".png" for i in range(frame_count)]
    names = [utils.get_name(frame_file) for frame_file in frame_files]
    print(names)
    frames = [read_projection_frame(path, name) for name in names]
    return ProjectionAction(frames)
Esempio n. 34
0
def run_wigToBigWig(fasta, wig, sizes, path_to_results):
    """Convert a GC-content wig file to bigWig.

    Args:
        fasta: FASTA file whose name labels the output bigWig.
        wig: input wig file (must exist).
        sizes: chromosome sizes file.
        path_to_results: directory where the bigWig is written.

    Returns:
        Path of the installed bigWig file.
    """
    utils.check_existence_or_raise(wig)
    bigWig = os.path.join(path_to_results,
                          utils.get_name(fasta) + '.gc5Base' + '.bw')
    tmp = utils.atomic_tmp_file(bigWig)
    params = ['wigToBigWig', wig, sizes, tmp]
    # Argv list, no shell: robust to spaces/metacharacters in paths.
    subprocess.call(params)
    utils.atomic_install(tmp, bigWig)
    return bigWig
Esempio n. 35
0
def register_user(addr):
    """Register a fresh random user against the service at *addr*.

    Returns a dict with the generated login, password, and the session
    token returned by the /api/signin endpoint.
    """
    url = 'http://%s:%s/api/signin' % (addr, utils.get_port())
    headers = {'User-Agent': utils.get_user_agent()}
    # NOTE(review): verify=False disables TLS certificate checking —
    # presumably acceptable for a test/checker environment, but confirm.
    user = {"login": utils.get_name(), "password": utils.get_password()}
    r = ensure_success(
        lambda: requests.post(url, headers=headers, data=user, verify=False))

    # The response body is the session token.
    user["session"] = r.content.decode("UTF-8")
    return user
Esempio n. 36
0
 def __init__(self, vocab_size, dim, use_bias=True, name=''):
     """Tied input/output embedding; optionally adds a per-token output bias.

     Args:
         vocab_size: number of rows in the embedding matrix.
         dim: embedding dimensionality.
         use_bias: when True, create a [vocab_size] output bias variable.
         name: variable-scope name; defaults to a generated 'tied_emb' name.
     """
     name = get_name(name, 'tied_emb')
     super(TiedIOEmbedding, self).__init__(vocab_size, dim, name=name)
     self.use_bias = use_bias
     if self.use_bias:
         with tf.variable_scope(name):
             self.bias = tf.get_variable('bias',
                                         shape=[self.vocab_size],
                                         dtype=tf.float32)
Esempio n. 37
0
def gender(file):
    """Prompt until a valid sex ('m' or 'w') is stored in the file's info."""
    info = utils.get_info(file)
    # Keep asking until a valid answer has been recorded.
    while "sex" not in info:
        answer = input("what is the sex of " + utils.get_name(file) + "? (m/w) ")
        if answer in ("m", "w"):
            info["sex"] = answer
        else:
            print("please enter m or w ")
    utils.set_info(file, info)
Esempio n. 38
0
 def add_task(cls):
     """Prompt for a new task, persist it, and display it."""
     # Reuse an existing employee record when the name already exists.
     employee, _ = models.Employee.get_or_create(name=utils.get_name())
     new_task = models.Task.create(employee=employee,
                                   title=utils.get_title(),
                                   time=utils.get_time(),
                                   notes=utils.get_notes())
     new_task.show()
     input("The entry has been added. Press enter to return to the menu")
Esempio n. 39
0
 def get_sres_by_cypher(self, qry):
     '''Match/search a subgraph by running a Cypher query.

         Arguments:
             qry {[Str]} -- [The query cypher language]
         Returns:
             [List] -- [a List of Dictionaries(start_node, relationship, end_node)]
             NOTE(review): on failure the caught Exception object itself is
             returned instead of being raised -- callers must type-check.
     '''
     rs = []
     try:
         r_ori = g.run(qry)
         for r in r_ori:
             for _, v in r.items():
                 # get_name presumably yields the graph type name
                 # ('Node'/'Path'/relationship type) -- verify against utils.
                 _type = get_name(v)
                 if _type in ['Node']:
                     rs.append({
                         'type': 'Node',
                         'labels': list(v.labels),
                         'properties': dict(v),
                     })
                 elif _type in ['Path']:
                     # Walk the path and classify each element individually.
                     for p in walk(v):
                         _p_type = get_name(p)
                         if _p_type in ['Node']:
                             rs.append({
                                 'type': 'Node',
                                 'labels': list(p.labels),
                                 'properties': dict(p),
                             })
                         else:
                             rs.append({
                                 'type': 'Relationship',
                                 'labels': _p_type,
                                 'properties': dict(p),
                             })
                 else:
                     # Anything that is not a Node or Path is treated as a
                     # relationship.
                     rs.append({
                         'type': 'Relationship',
                         'labels': _type,
                         'properties': dict(v),
                     })
     except Exception as Err:
         return Err
     return rs
Esempio n. 40
0
 def unload_app(self, ns):
     """Unload all callbacks associated with a particular app
     namespace object
     """
     app = self._apps.pop(utils.get_name(ns))
     # Run the app's optional _finalize generator one step, if present.
     finalize = getattr(app, '_finalize', False)
     if finalize:
         try:
             next(finalize)
         except StopIteration:
             pass
     return self.listener.remove_callbacks(app.cid)
def run_filtering(sorted_psl, results):
    """Filter a sorted PSL alignment file with ``pslCDnaFilter``.

    Writes per-category statistics under ``<results>/filtering_stats``
    and atomically installs the filtered alignments as
    ``<name>.filtered.psl`` inside *results*.

    :param sorted_psl: path to the input (sorted) PSL file; must exist.
    :param results: output directory for the filtered file and stats.
    :returns: path of the installed filtered PSL file.
    """
    utils.check_existence_or_raise(sorted_psl)
    stats = os.path.join(results, 'filtering_stats')
    utils.create_dir_if_not_exists(stats)
    filtered_psl = os.path.join(results, utils.get_name(sorted_psl) + '.filtered.psl')
    tmp = utils.atomic_tmp_file(filtered_psl)
    # Pass an argument vector with shell=False (the default): the previous
    # " ".join(...) + shell=True form broke on paths containing spaces or
    # shell metacharacters and invoked a shell needlessly.
    params = ['pslCDnaFilter',
              '-localNearBest=0.1', '-ignoreNs', '-minCover=0.6', '-repsAsMatch',
              '-dropped=' + os.path.join(stats, 'dropped.out'),
              '-weirdOverlapped=' + os.path.join(stats, 'weird_overlapped.out'),
              '-alignStats=' + os.path.join(stats, 'align_stats.out'),
              '-statsOut=' + os.path.join(stats, 'overall_stats.out'),
              sorted_psl, tmp]
    subprocess.call(params)
    utils.atomic_install(tmp, filtered_psl)
    return filtered_psl
Esempio n. 42
0
	def reproduce(self):
		"""Pair off fertile adults and add newborn Person objects.

		Runs ``int(size * 8 / 10)`` mating attempts; each attempt
		succeeds with probability 0.6 when at least two fertile (age
		21-59) people of each gender exist.  A baby is named with a
		fresh first name plus the father's last name.
		"""
		attempts = int(self.size() * 8 / 10)
		skip_rate = 0.4

		adults = [p for p in self.pop if 20 < p.get_age() < 60]
		males = [p for p in adults if p.get_gender() == 'm']
		females = [p for p in adults if p.get_gender() == 'f']
		if len(males) > 1 and len(females) > 1:
			for _ in range(attempts):
				if random.random() > skip_rate:
					father = random.choice(males)
					mother = random.choice(females)
					first = get_name().split()[0]
					surname = father.get_name().split()[-1]
					baby = Person(first + " " + surname, 0,
						gender=random.choice(['m', 'f']))
					self.add_person(baby)
					#log.write("{0} is born from {1} and {2}\n".format(baby.get_name(), father.get_name(), mother.get_name()))
Esempio n. 43
0
File: tasks.py Progetto: 18F/autoapi
def apify(filename, tablename=None):
    """Import every file matching the glob pattern *filename* into the DB.

    Each matched file is loaded into a table named either *tablename*
    (only allowed when the pattern matches a single file) or a name
    derived from the file.  Any existing table of that name is dropped
    first; per-file load failures are logged and skipped.
    """
    try:
        matches = glob.glob(filename, recursive=True)
    except TypeError:  # recursive glob in Python 3.5+ only
        matches = glob.glob(filename)
    if tablename and len(matches) > 1:
        raise Exception("Can't specify a `tablename` for >1 file")
    for path in matches:
        table = tablename or utils.get_name(path)
        logger.info('Importing {0} to table {1}'.format(path, table))
        try:
            utils.drop_table(table)
        except sa.exc.OperationalError as e:
            logger.debug('DROP TABLE {} failed, may not exist?'.format(
                table))
            logger.debug(str(e))
        try:
            utils.load_table(path, table)
        except Exception as e:
            logger.error('Failed to load table from file {}'.format(path))
            logger.error(str(e))
        logger.info('Finished importing {0}'.format(path))
Esempio n. 44
0
def registration(request):
    """Complete OpenID signup by showing/processing the register form.

    Requires a verified OpenID in the session; otherwise redirects back
    to the login view, preserving the `next` redirect target.  Form
    initial data comes from SReg, with AX attributes filling any gaps.
    """
    openid = request.session.get('openid', None)
    next = request.GET.get(REDIRECT_FIELD_NAME, '')
    # `not openid` already covers None (falsy); the extra
    # `or openid is None` was redundant.
    if not openid:
        return HttpResponseRedirect(u"%s?%s" % (reverse('oid_login'), urllib.urlencode({REDIRECT_FIELD_NAME: urlquote(next)})))
    pinitial = openid.sreg
    logger.debug(openid.sreg)
    logger.debug(openid.ax_resp)
    if openid.ax_resp:
        # AX attributes only fill keys SReg did not already supply.
        for k, v in openid.ax_resp.items():
            if not pinitial.get(k):
                pinitial[k] = v
    logger.debug(u"Окончательные данные \n %s" % pinitial)
    initial = {}
    for k, v in pinitial.items():
        initial[get_name(k)] = v
    if request.method == 'POST':
        form = OpenidRegisterForm(request.POST)
    else:
        # NOTE(review): `initial` is passed positionally, i.e. as the
        # form's *data*, making the form bound on GET so is_valid() can
        # auto-register when the provider data is complete — presumably
        # intentional; confirm (otherwise it should be initial=initial).
        form = OpenidRegisterForm(initial)

    if form.is_valid():
        user = User.objects.create_user(form.cleaned_data['username'], form.cleaned_data['email'])
        user.backend = "authopenid.backends.OpenIDBackend"
        if user is not None:
            uassoc = UserAssociation(openid_url=str(openid), user_id=user.id)
            uassoc.save(send_email=False)
            cauth.login(request, user)
            return HttpResponseRedirect(urlquote(next))
    return render_to_response('authopenid/registration.html',
        {
            'form': form,
            REDIRECT_FIELD_NAME: next
         },
        request
    )
Esempio n. 45
0
	def __init__(self, name=None, age=0, genes=None, gender=None):
		"""Create a Person.

		:param name: full name; when omitted a fresh one is generated.
		:param age: starting age in years.
		:param genes: list of genes; a new list per instance by default.
		:param gender: 'm', 'f' or None.
		"""
		# Resolve defaults at call time: `name=get_name()` in the old
		# signature was evaluated once at class definition, giving every
		# default-named Person the *same* name, and `genes=[]` shared a
		# single mutable list across all instances.
		self.name = get_name() if name is None else name
		self.age = age
		self.genes = [] if genes is None else genes
		self.gender = gender
Esempio n. 46
0
    def load_app(self, ns, on_value=None, **prepost_kwargs):
        """Load annotated callbacks from a namespace and add them
        to this client's listener's callback chain.

        :param ns: A namespace-like object containing functions marked with
            @event_callback (can be a module, class or instance).
        :params str on_value: id key to be used for registering app callbacks
            with `EventListener`
        :returns: the 'consumer id' the app's callbacks are registered under.
        """
        listener = self.listener
        name = utils.get_name(ns)
        app = self._apps.get(name, None)
        if not app:
            # if handed a class, instantiate appropriately
            app = ns() if isinstance(ns, type) else ns
            prepost = getattr(app, 'prepost', False)
            if prepost:
                args, kwargs = utils.get_args(app.prepost)
                funcargs = tuple(weakref.proxy(getattr(self, argname))
                                 for argname in args if argname != 'self')
                ret = prepost(*funcargs, **prepost_kwargs)
                if inspect.isgenerator(ret):
                    # run init step
                    next(ret)
                    # keep the generator so unload_app can resume it later
                    # to run the app's teardown step
                    app._finalize = ret

            # assign a 'consumer id'
            cid = on_value if on_value else utils.uuid()
            self.log.info("Loading call app '{}' for listener '{}'"
                          .format(name, listener))
            icb, failed = 1, False
            # insert handlers and callbacks
            for ev_type, cb_type, obj in marks.get_callbacks(app):
                if cb_type == 'handler':
                    # TODO: similar unloading on failure here as above?
                    listener.add_handler(ev_type, obj)

                elif cb_type == 'callback':
                    # add default handler if none exists
                    if ev_type not in listener._handlers:
                        self.log.info(
                            "adding default session lookup handler for event"
                            " type '{}'".format(ev_type)
                        )
                        listener.add_handler(
                            ev_type,
                            listener.lookup_sess
                        )
                    added = listener.add_callback(ev_type, cid, obj)
                    if not added:
                        failed = obj
                        # roll back the callbacks registered so far
                        listener.remove_callbacks(cid, last=icb)
                        break
                    icb += 1
                    self.log.debug("'{}' event callback '{}' added for id '{}'"
                                   .format(ev_type, obj.__name__, cid))

            if failed:
                # Fixed message: the adjacent string literals previously
                # lacked a separating space ("...not a validcallback type").
                raise TypeError("app load failed since '{}' is not a valid "
                                "callback type".format(failed))
            # register locally
            self._apps[name] = app
            app.cid, app.name = cid, name

        return app.cid
Esempio n. 47
0
	def __init__(self, population_size=0):
		"""Seed the population with *population_size* random Persons.

		Each person starts with a uniform random age in [0, 50), a
		random gender, and a gender-appropriate generated name.
		"""
		self.pop = []
		for _ in range(population_size):
			person = Person(age=int(random.random() * 50),
				gender=random.choice(['m', 'f']))
			person.set_name(get_name(person.get_gender()))
			self.add_person(person)
Esempio n. 48
0
def read_im_action(path):
    """Build a FinalAction from every frame file found under path + "xy/"."""
    frame_files = utils.get_files(path + "xy/")
    frames = []
    for frame_path in frame_files:
        frame_name = utils.get_name(frame_path)
        frames.append(read_frame(path, frame_name))
    return FinalAction(frames)
Esempio n. 49
0
def read_final_action(path):
    """Read the projection action stored at *path* and wrap each frame's
    projections as FinalFrames inside a named FinalAction."""
    action_name = utils.get_name(path)
    print(action_name)  # progress trace, kept from the original
    projection_action = proj.read_img_action(path + "/", False)
    final_frames = [FinalFrame(fr.projections)
                    for fr in projection_action.frames]
    return FinalAction(action_name, final_frames)
Esempio n. 50
0
 def get_filename(self):
     """Return the basename of this object's "uri" value without its
     extension, falling back to the raw value if parsing fails.
     """
     value = self.get("uri")
     try:
         return os.path.splitext(utils.get_name(value))[0]
     except Exception:
         # Narrowed from a bare `except:`, which would also swallow
         # KeyboardInterrupt/SystemExit; keep only the intended
         # best-effort fallback for parse/lookup errors.
         return value
Esempio n. 51
0
from django.contrib.sites.models import Site
from django.shortcuts import get_object_or_404
from django.conf import settings

from features import get_model
from decorators import as_json
from utils import obj_to_dict, get_name


# Default set of fields serialized for API responses.  Plain strings are
# attribute names read directly from the object; 2-tuples pair a field
# name with a callable applied to the raw value (here: a site is reduced
# to its id, and a user is mapped through get_name).
DEFAULT_API_FIELDS = (
    'id',
    'title',
    'status',
    'description',
    ('site', lambda site: site.id),
    ('people', lambda user: get_name(user)),
    'is_ready',
    'progress',
    'is_launched',
    'is_stopped',
)


class JsonViewMeta(type):
    """Metaclass that wraps a view class's ``__call__`` with the
    ``as_json`` decorator at class-creation time."""

    def __new__(cls, name, bases, attrs):
        new = super(JsonViewMeta, cls).__new__(cls, name, bases, attrs)
        # NOTE(review): the wrapped function is assigned as a class
        # attribute; instance invocation normally looks __call__ up on the
        # type, so presumably these view classes/instances are invoked in a
        # way that hits this attribute — confirm against call sites.
        new.__call__ = as_json(new.__call__)
        return new

Esempio n. 52
0
from django.conf.urls import url

from music import views
from music.models import *

from utils import get_name

# Models handled by the shared generic views (bulk edit / delete / merge).
multi_models = [Language, Role, MusicOpusType, OpusType, VideoType]
# Models with their own dedicated per-model views.
single_models = [Artist, Opus, Music]

urlpatterns = []


# Generic URLs
# For each "multi" model, derive the singular/plural URL fragments from
# the model name via get_name and wire the shared multi_* views, passing
# the model class to the view through its kwargs.
for mod in multi_models:
    obj = get_name(mod)
    objects = get_name(mod, plural = True)
    urlpatterns.extend([
        url( r'^' + objects + r'/$' , views.multi_edit , {'Model' : mod }, name = objects + "_edit" ),
        url( r'^' + objects + r'/(?P<id>\d+)/$' , views.multi_delete , {'Model' : mod }, name = obj + "_del"  ),
        url( r'^' + objects + r'/(?P<id>\d+)/merge/$' , views.multi_merge , {'Model' : mod }, name = obj + "_merge" ),
        ])

for mod in single_models:
    obj = get_name(mod)
    objects = get_name(mod, plural = True)
    function = 'music.views.' + obj
    urlpatterns.extend([
        url( r'^' + objects + r'/$' , function + '_list' , name = objects + '_list' ),
        url( r'^' + objects + r'/new/$' , function + '_new' , name = obj + '_new' ),
        url( r'^' + objects + r'/delete/$' , function + '_delete' , name = obj + '_delete' ),