Example #1
 def clone(self, user, name):
   from clone import Clone
   clone = Clone(name, user, self)
   clone.save()
   with prefix_collections(clone.get_collection_name(), System, Node):
     self.save(force_insert=True)
   with prefix_collections(clone.get_working_copy_collection_name(), System, Node):
     self.save(force_insert=True)
   return clone
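A hedged usage sketch for this method; the system document instance and request.user are hypothetical stand-ins for whatever object and user the surrounding project supplies:

# Hypothetical call: 'system' and 'request.user' are assumptions, not from the source.
new_clone = system.clone(request.user, "staging-copy")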
Example #2
def in_namespace(target, ns_types, sync=True, *args):
    if any(ns_type not in NAMESPACES for ns_type in ns_types):
        raise ValueError('ns_type must be one of {0}'.format(
            ', '.join(NAMESPACES)
        ))

    cl = Clone(target, args, sum(NAMESPACES[x].flag for x in ns_types))
    if sync:
        cl.wait()
    return cl
Example #3
def build_clonelist(datadir,
                    analysisdir,
                    inductiondatadir,
                    pondseasondir,
                    ext=".bmp"):

    # input: paths to the image data, analysis output, induction metadata, and pond/season metadata directories

    clones = recursivedict()

    inductiondates = load_induction_data(inductiondatadir)
    pond_season_md = load_pond_season_data(pondseasondir)
    manual_scales = load_manual_scales(analysisdir)

    files = os.listdir(datadir)

    for f in files:

        if f.startswith("._"):
            continue

        elif f.endswith(ext) and f.startswith("full_"):

            filebase = f[5:]

            print "Adding " + f + " to clone list"
            imagetype, barcode, clone_id, treatment, replicate, rig, datetime = parse(
                f)

            if barcode is not None:

                induction = inductiondates.get(str(barcode))

                clones[barcode][datetime][imagetype] = Clone(
                    filebase, imagetype, barcode, clone_id, treatment,
                    replicate, rig, datetime, induction,
                    pond_season_md[clone_id]['pond'],
                    pond_season_md[clone_id]['id'],
                    pond_season_md[clone_id]['season'], datadir)

                if imagetype == "close":
                    clones[barcode][datetime][imagetype].pixel_to_mm = 1105.33
                try:
                    clones[barcode][datetime][
                        imagetype].pixel_to_mm = manual_scales[
                            clones[barcode].micro_filepath]
                except (KeyError, AttributeError):
                    pass

    return clones
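A hedged usage sketch; all four directory paths below are placeholders, not from the source:

# Hypothetical call: the directory paths are assumptions.
clones = build_clonelist("/data/raw", "/data/analysis", "/data/induction", "/data/pond_season")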
Example #4
def junctions2cloneset(handle, phred_encoding):
    """Converts a jnc.tsv file to a CloneSet object.
    
    Note: VJ annotation is lost.
    """
    cloneset = CloneSet()
    simple_clones = {}
    with handle:
        header = handle.readline().rstrip()
        assert header == "read_name\tvid\tjid\tnt_jnc\tq_jnc\tjnc_ve\tjnc_js\t\
nt_jnc_gr\tq_jnc_gr\tjnc_ve_gr\tjnc_js_gr\tv_score\tj_score"

        for line in handle:
            (read_name, vid, jid, nt_jnc, q_jnc, jnc_ve, jnc_js, nt_jnc_gr,
             q_jnc_gr, jnc_ve_gr, jnc_js_gr, v_score, j_score) = line.rstrip().split('\t')

            simple_clone = simple_clones.get(nt_jnc, [0, [0] * len(nt_jnc)])
            simple_clone[0] += 1  # increase clone's count
            q_vec = simple_clone[1]
            for i in range(len(nt_jnc)):
                q_vec[i] = max(q_vec[i], ord(q_jnc[i]) - phred_encoding)
            simple_clones[nt_jnc] = simple_clone
    for nt_jnc, (count, q_vec) in simple_clones.items():
        cloneset.add(Clone((nt_jnc, q_vec, count)))
    return cloneset
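A hedged usage sketch; the file name is a placeholder and the Phred+33 offset is an assumption about how the quality strings were encoded:

# Hypothetical call: file name and Phred offset (33, Sanger/Illumina 1.8+) are assumptions.
cloneset = junctions2cloneset(open("sample.jnc.tsv"), phred_encoding=33)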
Example #5
    def records_in(handle):
        def get_fieldnr(names, fields):
            if isinstance(names, str):
                names = [names]
            for name in names:
                if name in fields:
                    return fields.index(name)

        cloneset = CloneSet()
        with handle:
            header = handle.readline().rstrip().split("\t")
            nt_jnc_col = get_fieldnr(
                ["nt_jnc", "Junction nucleotide sequence"], header)
            orig_depth_col = get_fieldnr("orig_depth", header)
            depth_col = get_fieldnr("depth", header)
            qual_col = get_fieldnr("qvec", header)

            for line in handle:
                fields = line.rstrip().split("\t")
                nt_jnc = fields[nt_jnc_col]
                orig_depth = 0 if orig_depth_col is None else \
                        int(fields[orig_depth_col])
                depth = int(fields[depth_col])
                qual = [int(q) for q in fields[qual_col].split("|")]
                clone = Clone((nt_jnc, qual, depth),
                              refpos=0,
                              orig_count=orig_depth)
                cloneset.add(clone)
        return cloneset
Example #6
def clone_repo():
    repo = request.args.get('repo')
    if repo and repo in REPOSITORIES:
        clone_process = Clone(repo)
        if clone_process.errors:
            response = clone_process.errors
        else:
            response = jsonify(success=True)
        return response
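Note that this view falls through and returns None when repo is missing or not in REPOSITORIES, which Flask rejects at request time. A minimal guard-clause sketch for the top of the function; the error payload and the 404 status are assumptions, not from the source:

    # Hypothetical guard clause; payload shape and status code are assumptions.
    if not repo or repo not in REPOSITORIES:
        return jsonify(success=False, error="unknown repository"), 404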
Example #7
 def _generate_offspring(self):
     offspring = []
     for position in range(6):
         genes_seen = set()
         double = None
         result = None
         for clone in self.parents:
             gene = clone.get_genes()[position]
             if gene in genes_seen:
                 if double is None:
                     double = gene
                 elif double != gene:
                     if gene in RED_GENES:
                         if double in RED_GENES:
                             double = UNDEFINED_RED
                         else:
                             double = gene
                     elif double not in RED_GENES:
                         double = UNDEFINED_GREEN
                 result = double
             if result is None and gene in RED_GENES:
                 result = gene
             genes_seen.add(gene)
         if result is None:
             result = UNDEFINED_GREEN
         offspring.append(result)
     return Clone(offspring, self)
Example #8
def df_to_clonelist(df, datadir=None):

    # builds a nested dict of Clone objects, keyed by barcode and datetime, from a dataframe

    clones = recursivedict()
    clf = load_SVM()

    for index, row in df.iterrows():
        clone = Clone(row['filebase'],
                      'full',
                      row['barcode'],
                      row['cloneid'],
                      row['treatment'],
                      row['replicate'],
                      row['rig'],
                      row['datetime'],
                      row['inductiondate'],
                      row['pond'],
                      row['id'],
                      row['season'],
                      datadir,
                      clf=clf)

        for k in row.keys():
            try:
                setattr(clone, k, literal_eval(row[k]))
            except (ValueError, SyntaxError):
                setattr(clone, k, row[k])

        clones[str(row['barcode'])][str(row['datetime'])]['full'] = clone

    return clones
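A hedged usage sketch; the CSV path, its column layout, and the image directory are assumptions about the surrounding project:

# Hypothetical call: the file path, columns, and data directory are assumptions.
import pandas as pd

df = pd.read_csv("clone_metadata.csv")
clones = df_to_clonelist(df, datadir="/data/images")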
Example #9
def read_from_csv():
    all_clones = CloneStorage()
    with open("clones.tsv", 'r') as clones_file:
        clones_reader = csv.reader(clones_file, delimiter="\t", quotechar='"')
        for row in clones_reader:
            cloneString = row[0]
            all_clones.add_clone(Clone.fromString(cloneString))
    return all_clones
Example #10
def dfrow_to_clone(df, irow, params, datadir=None):

    row = df.iloc[irow]
    clone = Clone(row['filepath'], **params)

    for k, v in row.items():
        try:
            setattr(clone, k, literal_eval(str(v)))
        except (ValueError, SyntaxError):
            setattr(clone, k, v)

    return clone
Example #11
def main():

    # Construct the argument parser
    ap = argparse.ArgumentParser()

    # Add the arguments to the parser
    ap.add_argument("-u", "--url", required=True, help="Enter the url")
    ap.add_argument("-o", "--output", help="Return the resulting directory")
    ap.add_argument("-d", "--delay", help="Delay the requested time")

    # Get arguments
    args = vars(ap.parse_args())
    url = args['url']
    directory = args['output']
    if args['delay']:
        delay = int(args['delay'])
    else:
        delay = 0
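    # Note: 'directory' and 'delay' are parsed but not used further in this snippet.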

    os.system('clear')

    clone = Clone(url)
    clone.run()
Example #12
def main():
    # Create and connect clone
    clone = Clone()
    clone.subtree = SUBTREE
    clone.connect("tcp://localhost", 5556)
    clone.connect("tcp://localhost", 5566)

    try:
        while True:
            # Distribute as key-value message
            key = "%d" % random.randint(1, 10000)
            value = "%d" % random.randint(1, 1000000)
            clone.set(key, value, random.randint(0, 30))
            time.sleep(1)
    except KeyboardInterrupt:
        pass
Example #13
def main():
    # Create and connect clone
    clone = Clone()
    clone.subtree = SUBTREE.encode()
    clone.connect("tcp://localhost", 5556)
    clone.connect("tcp://localhost", 5566)

    try:
        while True:
            # Distribute as key-value message
            key = b"%d" % random.randint(1,10000)
            value = b"%d" % random.randint(1,1000000)
            clone.set(key, value, random.randint(0,30))
            time.sleep(1)
    except KeyboardInterrupt:
        pass
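Example #13 is essentially the same client as Example #12, differing only in that the subtree, keys, and values are passed as bytes (via .encode() and b"" literals), presumably because the underlying transport expects byte strings under Python 3.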
Example #14
def main():
    all_clones = read_from_csv()
    target = Clone.fromString("GGYYYY")
    find = FindSpecificSmart(all_clones, target, 20)
    find.run()
    find.print()