Example #1
def start_supervisor():
    """Start supervisor process."""
    conf = pjoin(cget('service_dir'), 'supervisor', 'config',
        'supervisord.conf')
    pname = cget('supervisor_process_id')
    show(yellow("Starting supervisor with id: %s." % pname))
    return sudo('supervisord --configuration="%s"' % conf)
Example #2
def ProcessDepFields(depfields,res,rtype,mode='real'):
    if len(depfields.comps.__dict__) == 0:
        return res
    else:
        oldlength = len(depfields.comps.__dict__)
        todelete = []
        for l in depfields.comps.__dict__:
            Resolved = ComputeDepType(res,depfields.comps.__getattribute__(l),rtype.poss)
            #print(show(Resolved))
            if 'not a label' in show(Resolved):  #Is this condition still used?
                pass
        elif Resolved is None:
                pass
            else:
                if mode=='real':
                    res.addfield(l,Resolved.in_poss(rtype.poss).create())
                    todelete.append(l)
                elif mode=='hyp':
                    res.addfield(l,Resolved.create_hypobj())
                    todelete.append(l)
                else:
                    print(mode+' not recognized as option for ProcessDepFields')
        for l in todelete:
            del depfields.comps.__dict__[l]
        if len(depfields.comps.__dict__) < oldlength:
            return ProcessDepFields(depfields,res,rtype,mode)
        else:
            if ttracing('create') or ttracing('create_hypobj'):
                print('Unresolved dependency in '+show(rtype))
            return None
Example #3
def start_supervisor(conf=None):
    """Start supervisor process."""
    if not conf:
        conf = pjoin(cget("service_dir"), "supervisor", "config", "supervisord.conf")
    pname = cget("supervisor_process_id")
    show(yellow("Starting supervisor with id: %s." % pname))
    return sudo('supervisord --configuration="%s"' % conf)
Example #4
def ensure_language(dbname, lang):
    """Ensures language exists."""
    sql_command = """
    CREATE OR REPLACE FUNCTION create_language_{0}() RETURNS BOOLEAN AS \$\$
        CREATE LANGUAGE {0};
        SELECT TRUE;
    \$\$ LANGUAGE SQL;

    SELECT CASE WHEN NOT
        (
            SELECT  TRUE AS exists
            FROM    pg_language
            WHERE   lanname = '{0}'
            UNION
            SELECT  FALSE AS exists
            ORDER BY exists DESC
            LIMIT 1
        )
    THEN
        create_language_{0}()
    ELSE
        FALSE
    END AS {0}_created;

    DROP FUNCTION create_language_{0}();
    """.format(
        lang
    )
    show(colors.yellow("Ensuring PostgreSQL language exists: %s"), lang)
    call_psql(sql_command, database=dbname)
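A minimal usage sketch, assuming the fabric context above (call_psql, show, colors) is already configured; the database name is illustrative:

# Hypothetical call: make sure plpgsql exists before loading stored procedures.
ensure_language("myproject_db", "plpgsql")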
Example #5
def ti_apply(Tf, Targ):
    if isinstance(Tf, FunType) \
        and Targ.subtype_of(Tf.comps.domain):
        return Tf.comps.range
    else:
        if ttracing('ti_apply'):
            print('Not a well-typed function application: '+ show(Tf) + show(Targ))
        return None
Example #6
def create_virtualenv():
    """Creates the virtualenv."""
    ve_dir = cget("virtualenv_dir")
    bin_path = pjoin(ve_dir, "bin")
    if not dir_exists(bin_path) or not exists(pjoin(bin_path, "activate")):
        show(yellow("Setting up new Virtualenv in: %s"), ve_dir)
        with settings(hide("stdout", "running")):
            run("virtualenv --distribute %s" % ve_dir)
Example #7
 def __init__(self,T,a):
     self.comps = Rec({'base_type':T, 'obj':a})
     self.witness_cache = []
     self.supertype_cache = []
     self.witness_conditions = [lambda x: show(x) == show(a) and T.query(x),\
                                lambda x: isinstance(a,LazyObj)\
                                          and show(x) == show(a.eval()) and T.in_poss(self.poss).query(x)]
     self.witness_types = []
     self.poss = ''
Example #8
    def summary(self,*vars):
        """Display a summary of all recorded notes, checks, records."""

        if vars:
            print("Values Used:")
            print("============")
            print()
            show(*vars,depth=1)
            
        var = self.var
        hd = 'Summary of'
        if var is not None:
            hd += ' '+var+' for '
        hd += self.__class__.__name__
        if self.title:
            hd += ': '+str(self.title)
        
        print()
        print(hd)
        print('=' * len(hd))
        print()
        if self._notes:
            print('Notes:')
            print('------')
            for txt in self._notes:
                print('    -',txt)
            print()
            
        if self._checks:
            print('Checks:')
            print('-------')
            width = max([len(l) for f,l,v,d in self._checks])
            for chk in self._checks:
                print(self.fmt_check(chk,width=width+2))
            print()
                
        hd = 'Values'
        if self.var:
            hd += ' of '+self.var
        hd += ':'
        print(hd)
        print('-'*len(hd))
        width = max([len(l) for l,v,d in self._record])
        
        govval = None
        if var:
            govval = self.selector([d[var] for l,v,d in self._record])
        for rec in self._record:
            print(self.fmt_record(rec,var=var,width=width+1,govval=govval,nsigfigs=self.nsigfigs))

        if govval is not None:
            print()
            h = 'Governing Value:'
            print('   ',h)
            print('   ','-'*len(h))
            print('      ','{0} = {1}'.format(var,(sfrounds(govval,self.nsigfigs) if isfloat(govval) else "{0!r}".format(govval))), self.units if self.units is not None else '')
Example #9
def merge_dep_types(f1,f2):
    if isinstance(f1,Type) and isinstance(f2,Type):
        return f1.merge(f2)
    elif isinstance(f1,Fun) and isinstance(f2,Fun):
        var = gensym('v')
        return Fun(var, f1.domain_type.merge(f2.domain_type),
                   merge_dep_types(f1.body.subst(f1.var,var),f2.body.subst(f2.var,var)))
    else:
        if ttracing('merge_dep_types'):
            print(show(f1)+' and '+show(f2)+' cannot be merged.')
        return None
Example #10
def shutdown():
    """Requests supervisor process and all controlled services shutdown."""
    ve_dir = cget("virtualenv_dir")
    activate = pjoin(ve_dir, "bin", "activate")
    show(yellow("Shutting supervisor down."))
    with prefix("source %s" % activate):
        with settings(hide("stderr", "stdout", "running"), warn_only=True):
            res = run_supevisordctl("shutdown all")
            if res.return_code != 2:
                msg = "Could not shutdown supervisor, process does not exists."
                show(yellow(msg))
Example #11
def subtype_of_dep_types(f1,f2):
    if isinstance(f1,Type) and isinstance(f2,Type):
        return f1.subtype_of(f2)
    elif isinstance(f1,Fun):
        f1inst = f1.app(f1.domain_type.create_hypobj())
        return subtype_of_dep_types(f1inst,f2)
    elif isinstance(f2,Fun):
        f2inst = f2.app(f2.domain_type.create_hypobj())
        return subtype_of_dep_types(f1,f2inst)
    else:
        if ttracing('subtype_of_dep_types'):
            print(show(f1)+ ' and '+show(f2)+' cannot be compared for subtyping.')
        return None
Example #12
def configure():
    """Configures the doc module.

    Creates doc_dir and copies documentation sources there.
    Formats and uploads scripts for building the documentation.
    Finally, loads django-sphinxdoc project from a fixture.

    Project fixture can be found in deployment/files/doc/fixture.json.

    """
    # Add extra context variables used by sphinx-doc and apidoc
    excluded = cget('autodoc_excluded_apps')
    excluded = ' '.join(excluded) if excluded else ''
    cset("autodoc_excluded_apps", excluded, force=True)
    cset("project_display_name_slug", slugify(cget('project_display_name')))

    # Ensure the doc folder exists
    user = cget('user')
    ddir = cget('doc_dir')
    source_doc_dir = pjoin(cget("project_dir"), "code", "docs")
    create_target_directories([ddir], "755", user)

    # Delete the content of the folder if it exists
    with settings(hide("running", "stdout", "stderr")):
        output = sudo('ls -A {doc_dir}'.format(doc_dir=ddir))
        if len(output) > 0:
            sudo("rm -r {doc_dir}/*".format(doc_dir=ddir))

    # Copy files to doc dir
    with settings(hide("running", "stdout")):
        run("cp -r {source}/* {dest}".format(source=source_doc_dir, dest=ddir))
    ensure_permissions(ddir, user=user, group=user, recursive=True)

    context = dict(env["ctx"])

    # Upload formatted build script
    scripts = ['make_apidoc.sh']
    local_dir = local_files_dir("doc")
    show(yellow("Uploading doc scripts: {0}.".format(' '.join(scripts))))
    for script_name in scripts:
        source = pjoin(local_dir, script_name)
        destination = pjoin(cget("script_dir"), script_name)
        upload_template_with_perms(source, destination, context, mode="755")

    # Upload formatted conf.py file
    show(yellow("Uploading formatted conf.py file."))
    conf_file = "conf_formatted.py"
    source = pjoin(cget("local_root"), "docs", 'source', conf_file)
    destination = pjoin(ddir, 'source', conf_file)
    upload_template_with_perms(source, destination, context, mode="755")
Example #13
 def appc_m(self,arg,M):
     if self.validate_arg_m(arg,M):
         return self.app(arg)
     else:
         if ttracing('appc_m'):
             print(self.show()+'('+show(arg)+'): badly typed function application')
         return None
Example #14
 def nu(self,assgn):
     if self.fixed_nu is None:
         res = PType_n(andpred,[self.comps.left,self.comps.right],assgn) # Meet types are realized neurally as PTypes, not as neural Meet types
         res.name = show(self)+'_n'
         return res
     else:
         return self.fixed_nu
Example #15
def occupancies_ref(ks,q):
    def remove(xs,x):
        xs_new = xs[:]
        xs_new.remove(x)
        return xs_new
    Z = float(sum(falling_fac(q,n)*esp(ks,n) for n in range(q+1)))
    print "Z:",Z
    return [(q*k*sum(show(falling_fac(q-1,n)*esp(remove(ks,k),n)) for n in range(q)))/Z for k in ks]
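The example depends on falling_fac and esp, which are not shown. A self-contained sketch under the usual definitions (falling_fac(q, n) as the falling factorial q(q-1)...(q-n+1), and esp(ks, n) as the elementary symmetric polynomial of degree n over ks):

from itertools import combinations
from functools import reduce
from operator import mul

def falling_fac(q, n):
    """Falling factorial q * (q-1) * ... * (q-n+1); equals 1 when n == 0."""
    return reduce(mul, (q - i for i in range(n)), 1)

def esp(ks, n):
    """Elementary symmetric polynomial of degree n in the values ks."""
    return sum(reduce(mul, comb, 1) for comb in combinations(ks, n))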
Example #16
def build():
    """Creates the documentation files and adds them to the database.

    The first step is to create automatic module documentation using apidoc.

    Next the documentation is built and added to the database using
    updatedoc management command from django-sphinxdoc.

    See doc/readme.rst for more details.

    Apidoc generating script resides in deployment/files/doc/make_apidoc.sh.
    Note: This script is project-specific.

    """
    show(yellow("Bulding documentation"))
    apidoc_script = pjoin(cget("script_dir"), "make_apidoc.sh")
    with settings(hide("running", "stdout"), warn_only=True):
        run(apidoc_script)
Example #17
def combine_dep_types(f1,f2):
    if isinstance(f1,Type) and isinstance(f2,Type):
        return f1.merge(f2)
    elif isinstance(f1,Fun) and isinstance(f2,Type):
        var = gensym('v')
        return Fun(var, f1.domain_type, combine_dep_types(f1.body.subst(f1.var,var),f2))
    elif isinstance(f1,Type) and isinstance(f2,Fun):
        var = gensym('v')
        return Fun(var, f2.domain_type, combine_dep_types(f1,f2.body.subst(f2.var,var)))
    elif isinstance(f1,Fun) and isinstance(f2,Fun):
        var1 = gensym('v')
        var2 = gensym('v')
        return Fun(var1, f1.domain_type,
                   Fun(var2, f2.domain_type, combine_dep_types(f1.body.subst(f1.var,var1),f2.body.subst(f2.var,var2))))
    else:
        if ttracing('combine_dep_types'):
            print(show(f1)+' and '+show(f2)+' cannot be combined.')
        return None
Example #18
 def type(self):
     if self.oplist[1] == '@':
         if 'types' in [x for x in dir(self.oplist[0])
                        if x in dir(self.oplist[2])]:
             return ti_apply(self.oplist[0].types[0],
                             self.oplist[2].types[0])
         else:
             pass
     else:
         print('Unable to compute type of ' + show(self))
         return None
Example #19
def configure():
    """Upload supervisor configuration files."""
    user = cget("user")
    # settings directories
    sdir = cset("supervisor_dir", pjoin(cget("service_dir"), "supervisor"))
    slogdir = cset("supervisor_log_dir", pjoin(cget("log_dir"), "supervisor"))
    cset("supervisor_process_base", cget("project_name").replace("-", "_"))
    cset("supervisor_process_id", "%s%s" % (cget("supervisor_process_base"), "_supervisor"))
    # create all dirs and log dirs
    dirs = ["", "config", cget("project_name")]
    dirs = [pjoin(sdir, l) for l in dirs]
    log_dirs = ["", cget("project_name"), "child_auto", "solr"]
    log_dirs = [pjoin(slogdir, l) for l in log_dirs]
    create_target_directories(dirs + log_dirs, "700", user)

    context = dict(env["ctx"])
    local_dir = local_files_dir("supervisor")
    dest_dir = pjoin(sdir, "config")

    confs = cget("supervisor_files")
    show(yellow("Uploading service configuration files: %s." % confs))
    for name in confs:
        source = pjoin(local_dir, name)
        destination = pjoin(dest_dir, name)
        if isdir(source):
            upload_templated_folder_with_perms(source, local_dir, dest_dir, context, mode="644", directories_mode="700")
        else:
            upload_template_with_perms(source, destination, context, mode="644")

    scripts = [
        "supervisorctl.sh",
        "supervisord.sh",
        "rabbitmq.sh",
        "celery-worker.sh",
        "supervisord-services.sh",
        "supervisorctl-services.sh",
    ]
    for script_name in scripts:
        source = pjoin(cget("local_root"), "deployment", "scripts", script_name)
        destination = pjoin(cget("script_dir"), script_name)
        upload_template_with_perms(source, destination, context, mode="755")
Example #20
 def pathvalue(self, path):
     splits=deque(path.split("."))
     if (len(splits) == 1):
         if splits[0] in dir(self):
             return self.__getattribute__(splits[0])
         else:
             if ttracing('pathvalue'):
                 print(splits[0]+' not a label in '+self.show())
             return None
     else:
         addr = splits.popleft()
         if addr not in dir(self):
             if ttracing('pathvalue'):
                 print('No attribute '+addr+' in '+show(self))
             return None
         elif 'pathvalue' not in dir(self.__getattribute__(addr)):
             if ttracing('pathvalue'):
                 print('No paths into '+show(self.__getattribute__(addr)))
             return None
         else:
             return self.__getattribute__(addr).pathvalue(".".join(splits))
Example #21
def main_likelihood_experiment():
    for tf in Escherichia_coli.tfs:
        print(tf)
        sites = getattr(Escherichia_coli,tf)
        tols = [10**-i for i in range(7)]
        print "mono"
        mono_lls = [show(mono_likelihood(sites,tol=show(tol))) for tol in tols]
        print "di"
        di_lls = [show(di_likelihood(sites,tol=show(tol))) for tol in tols]
        plt.close()
        plt.plot(tols,mono_lls,label="Mono")
        plt.plot(tols,di_lls,label="Di")
        plt.xlabel("Tolerance")
        plt.ylabel("Log Likelihood")
        plt.semilogx()
        plt.legend()
        #plt.title("Mono- vs. Di-nucleotide Log-Likelihood in %s sites" % tf)
        fmt_string = "%s, site length:%s,num sites:%s,motif ic:%1.2f" % (tf,len(sites[0]),len(sites),motif_ic(sites))
        plt.title(fmt_string)
        plt.savefig("%s_mono_vs_di_ll_w_pseudocount.png" % tf,dpi=300)
        plt.close()
Example #22
 def query(self, a):
     if a in self.witness_cache: return True
     elif isinstance(a,HypObj) and show(self) in showall(a.types):
         return True
     elif isinstance(a,HypObj) and forsome(a.types,
                                           lambda T: show(self) in showall(T.supertype_cache)):
         return True
     elif isinstance(a, LazyObj):
         if isinstance(a.eval(), LazyObj):
             return a.eval().type().subtype_of(self)
         else:
             return self.query(a.eval())
     elif forsome(self.witness_types, lambda T: T.in_poss(self.poss).query(a)):
         self.witness_cache.append(a)
         return True
     else: 
         if some_condition(self.witness_conditions,a):
             self.witness_cache.append(a)
             return True
         else:
             return False
Example #23
 def nu(self,assgn):
     if self.fixed_nu is None:
         res = PType_n(orpred,[self.comps.left,self.comps.right],assgn) # Join types are realized neurally as PTypes, not as neural Join types
         res.name = show(self)+'_n'
         # def join_judgmnt_type_n(s,an):
         #     T1 = MeetType_n(or_n.nu.pred,Ta)
         #     T2 = MeetType_n(s,InhibitType_n(Ta))
         #     return StringType_n([T1,T2])
         # res.judgmnt_type_n = join_judgmnt_type_n.__get__(self.nu,self.nu.__class__)
         return res
     else:
         return self.fixed_nu
Example #24
 def show(self):
     s = ""
     for kvp in self.comps.__dict__.items():           
         if s == "":
             s = s + kvp[0] + " : "
         else:
             s = s + ", "+kvp[0] + " : "
         
          if isinstance(kvp[1], RecType):
              s = s + kvp[1].show()
          else:
              s = s + show(kvp[1])
     return "{"+s+"}"
Example #25
def hessian_experiment(trials=1000):
    sigma0 =   1
    mu0    = -10
    Ne0    =   5
    L0     =  10
    n0     =  50
    # n,L are fixed; vary sigma, mu, Ne.
    ic0 = sample_ic(sigma0,mu0,Ne0,L0,n0)
    epsilon = 0.01
    f = lambda x,y,z: show((sample_ic(sigma0+x,mu0+y,Ne0+z,L0,n0,trials=trials)-ic0)**2)
    hessian = compute_hessian(f,(sigma0,mu0,Ne0),epsilon=0.1)
    lambs, vs = np.linalg.eig(hessian)
    return lambs, vs
Example #26
def configure():
    """Upload supervisor configuration files."""
    user = cget('user')
    # settings directories
    sdir = cset('supervisor_dir', pjoin(cget('service_dir'), 'supervisor'))
    slogdir = cset('supervisor_log_dir', pjoin(cget('log_dir'), 'supervisor'))
    cset("supervisor_process_base", cget('project_name').replace('-', '_'))
    cset("supervisor_process_id",
        '%s%s' % (cget('supervisor_process_base'), '_supervisor'))
    # create all dirs and log dirs
    dirs = ['', 'config', cget('project_name')]
    dirs = [pjoin(sdir, l) for l in dirs]
    log_dirs = ['', cget('project_name'), 'child_auto', 'solr']
    log_dirs = [pjoin(slogdir, l) for l in log_dirs]
    create_target_directories(dirs + log_dirs, "700", user)

    context = dict(env["ctx"])
    local_dir = local_files_dir("supervisor")
    dest_dir = pjoin(sdir, 'config')

    confs = cget("supervisor_files")
    show(yellow("Uploading service configuration files: %s." % confs))
    for name in confs:
        source = pjoin(local_dir, name)
        destination = pjoin(dest_dir, name)
        if isdir(source):
            upload_templated_folder_with_perms(source, local_dir, dest_dir,
                context, mode="644", directories_mode="700")
        else:
            upload_template_with_perms(
                source, destination, context, mode="644")

    scripts = ['supervisorctl.sh', 'supervisord.sh']
    for script_name in scripts:
        source = pjoin(cget("local_root"), 'deployment', 'scripts', script_name)
        destination = pjoin(cget("script_dir"), script_name)
        upload_template_with_perms(source, destination, context, mode="755")
Example #27
def reload(conf=None):
    """Start or restart supervisor process."""
    ve_dir = cget("virtualenv_dir")
    activate = pjoin(ve_dir, "bin", "activate")
    show(yellow("Reloading supervisor."))
    with prefix("source %s" % activate):
        with settings(hide("stderr", "stdout", "running"), warn_only=True):
            res = run_supevisordctl("reload", conf=conf)
            if res.return_code != 0:
                show(yellow("Supervisor unavailable, starting new process."))
                res = start_supervisor(conf=conf)
                if res.return_code != 0:
                    show(red("Error starting supervisor!."))
Example #28
def update_virtualenv():
    """Updates virtual Python environment."""
    ve_dir = cget("virtualenv_dir")
    activate = pjoin(ve_dir, "bin", "activate")
    user = cget("user")
    cache = cget("pip_cache")

    show(yellow("Updating Python virtual environment."))
    show(green("Be patient. It may take a while."))

    for req in cget('pip_requirements'):
        requirements = pjoin(remote_files_dir('requirements'), req)
        show(yellow("Processing requirements file: %s" % requirements))
        with settings(warn_only=True, sudo_prefix=SUDO_PREFIX):
            with prefix("source %s" % activate):
                sudo("pip install --no-input --download-cache=%s"
                    " --requirement %s --log=/tmp/pip.log" % (
                        cache, requirements), user=user)

    show(yellow("Linking python-q4 to local virtualenv."))
    for package in ['PyQt4', 'sip.so']:
        copy_to_virtualenv(package)
Example #29
def update_virtualenv():
    """Updates virtual Python environment."""
    ve_dir = cget("virtualenv_dir")
    activate = pjoin(ve_dir, "bin", "activate")
    cache = cget("pip_cache")

    show(yellow("Updating Python virtual environment."))
    show(green("Be patient. It may take a while."))

    for req in cget('pip_requirements'):
        requirements = pjoin(remote_files_dir('requirements'), req)
        show(yellow("Processing requirements file: %s" % requirements))
        with settings(warn_only=True):
            with prefix("source %s" % activate):
                run("pip install --no-input --download-cache=%s"
                    " --requirement %s --log=/tmp/pip.log" %
                    (cache, requirements))
Example #30
 def show(self):
     return '^'.join([show(i) for i in self.comps.types])
Example #31
                    o = keras.layers.Concatenate(axis=-1)(os)

                o = keras.layers.Conv2D(filters=filter_num * layer_dict[l], kernel_size=3, strides=1, padding="SAME")(o)
                o = keras.layers.BatchNormalization()(o)
                o = keras.layers.LeakyReLU(0.2)(o)

                o_dict[l] = o
    if version == "v1":
        o = keras.layers.Conv2D(256, kernel_size=3, strides=1, padding="SAME")(o_dict[img_shape[0]])
    elif version == "v2":
        os = []
        for l, o in o_dict.items():
            o_ = keras.layers.Conv2DTranspose(filters=filter_num * layer_dict[l], kernel_size=3, strides=img_shape[0]//l, padding="SAME")(o)
            o_ = keras.layers.BatchNormalization()(o_)
            o_ = keras.layers.LeakyReLU(0.2)(o_)
            os.append(o_)
        o = keras.layers.Concatenate(axis=-1)(os)
        o = keras.layers.Conv2D(256, kernel_size=3, strides=1, padding="SAME")(o)
    else:
        raise Exception("wrong version")
    return keras.Model(inputs=inputs, outputs=o)


dataset = process_numpy(img_data, seg_data, batch_size=8)
model = get_model()
model.summary()
keras.utils.plot_model(model, show_shapes=True)
model.compile(optimizer=keras.optimizers.Adam(learning_rate=1e-4), loss=keras.losses.SparseCategoricalCrossentropy(from_logits=True), metrics=CustomIoU(num_classes=256))
model.fit(dataset, epochs=200)
show(seg_data[:16], model(img_data[:16]))
Example #32
 def show(self):
     return '\n' + self.name + ':\n' + '_' * 45 + '\n' + '\n'.join([
         show(i) + ': ' + show(self.model[i].witness_cache)
         for i in self.model
     ]) + '\n' + '_' * 45 + '\n'
Example #33
def main(args):

    parser = Parser(args.grammar, args.expand_binaries)
    print(
        'Grammar rules:',
        f'{parser.grammar.num_lexical_rules:,} lexical,',
        f'{parser.grammar.num_unary_rules:,} unary,',
        f'{parser.grammar.num_binary_rules:,} binary.'
    )

    if args.infile:

        print(f'Predicting trees for tokens in `{args.infile}`.')
        print(f'Writing trees to file `{args.outfile}`...')

        if args.parallel:
            trees = predict_from_file_parallel(
                parser, args.infile, args.num_lines, args.tokenize)
        else:
            trees = predict_from_file(
                parser, args.infile, args.num_lines, args.tokenize)

        with open(args.outfile, 'w') as fout:
            print('\n'.join(trees), file=fout)

        if args.show:
            show(args.outfile)

        print('Evaluating bracket score...')
        if args.goldfile:
            try:
                evalb(args.evalb_dir, args.outfile, args.goldfile, args.result, args.ignore_empty)
                if args.show:
                    show(args.result)
            except Exception:
                exit('Could not evaluate trees. Maybe you did not parse the entire file?')

        print(f'Finished. Results saved to `{args.result}`.')

    elif args.treefile:
        num_trees = 10 if args.num_lines is None else args.num_lines

        parses = predict_from_trees(parser, args.treefile)

        fscores = []
        for i in range(num_trees):
            gold, pred, prec, rec, fscore = next(parses)
            fscores.append(fscore)

            print(f'Tree {i}, f1={fscore:.3f}.')
            print()
            print('Gold:')
            gold.pretty_print()
            print()
            print('Pred:')
            pred.pretty_print()
            print()

        print()
        print('All F1 =', ' '.join([f'{fscore:.3f}' for fscore in fscores]))
        print('Avg F1 = ', sum(fscores) / len(fscores))

    elif args.syneval:
        syneval(parser, args.syneval, args.outfile, parallel=args.parallel, short=args.short)

    else:
        if args.sent:
            sentence = tokenize.word_tokenize(args.sent)
        else:
            # Demo: use a default test-sentence with gold tree.
            sentence, gold = SENT.split(), GOLD

        print('Parsing sentence...')
        start = time.time()
        tree, score = parser.parse(sentence, use_numpy=args.use_numpy)
        elapsed = time.time() - start
        tree.un_chomsky_normal_form()

        print('Predicted.')
        print()
        tree.pretty_print()
        print('Logprob:', score)
        print()

        if not args.sent:
            gold = Tree.fromstring(gold)
            prec, recall, fscore = parser.evalb(
                gold.pformat(margin=np.inf), tree.pformat(margin=np.inf))
            print('Gold.')
            gold.pretty_print()
            print(f'Precision = {prec:.3f}')
            print(f'Recall = {recall:.3f}')
            print(f'F1 = {fscore:.3f}')
            print()

        print(f'Parse-time: {elapsed:.3f}s.')

        if args.perplexity:
            perplexity = parser.perplexity(sentence)
            print('Perplexity:', round(perplexity, 2))
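For reference, a hedged sketch of the argument parser this main() expects, reconstructed from the attributes it reads off args; flag names and defaults are assumptions, not the original CLI:

import argparse

def build_arg_parser():
    # Reconstructed from attribute accesses in main(); defaults are guesses.
    p = argparse.ArgumentParser()
    p.add_argument('grammar')
    p.add_argument('--expand-binaries', dest='expand_binaries', action='store_true')
    p.add_argument('--infile')
    p.add_argument('--outfile', default='out.trees')
    p.add_argument('--goldfile')
    p.add_argument('--treefile')
    p.add_argument('--syneval')
    p.add_argument('--sent')
    p.add_argument('--num-lines', dest='num_lines', type=int)
    p.add_argument('--tokenize', action='store_true')
    p.add_argument('--parallel', action='store_true')
    p.add_argument('--short', action='store_true')
    p.add_argument('--show', action='store_true')
    p.add_argument('--use-numpy', dest='use_numpy', action='store_true')
    p.add_argument('--perplexity', action='store_true')
    p.add_argument('--evalb-dir', dest='evalb_dir')
    p.add_argument('--result', default='result.txt')
    p.add_argument('--ignore-empty', dest='ignore_empty', action='store_true')
    return p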
Example #34
def logtype_t(x, c):
    if ttracing('learn_witness_type'):
        print(show(x) + ' is a logical type and cannot learn new conditions')
Example #35
from utils import show, ROI, displayLineImage, averageSlopeIntercept
from filters import rgbToGray, gaussBlur, cannyEdge, combineImg
from transform import HoughTransform
import numpy as np
import cv2
import matplotlib.pyplot as plt

if __name__ == '__main__':
    # Path to image
    img_path = r"img/test_img.jpg"

    # Read Image
    original_img = cv2.imread(img_path)

    # Show original image
    show(original_img, tag="Original Image")

    #Copy original image
    copyImg = np.copy(original_img)

    #GrayScale image
    grayImg = rgbToGray(copyImg)
    show(grayImg, "Gray Scale Image")
    cv2.imwrite("output/gray_image.png", grayImg)

    #Smoothening Gray image with Gaussian Blur
    gBlur = gaussBlur(grayImg)
    show(gBlur, "Gaussian Blur Image")
    cv2.imwrite("output/gaussian_blur_image.png", gBlur)
    #Canny edge detection
    canny = cannyEdge(gBlur)
Example #36
 def show(self):
     return show(self.comps.base_type) + '_' + show(self.comps.obj)
Example #37
 def show(self):
     return '[' + show(self.comps.base_type) + ']'
Example #38
 def show(self):
     return self.comps.pred.name + '(' + ', '.join(
         [show(x) for x in self.comps.args]) + ')'
Example #39
import piggyphoto, pygame
import argparse
import utils
from cam_tool import cam_tool

parser = argparse.ArgumentParser()
parser.add_argument('--cam_model', type=str, help="Camera model")
parser.add_argument('--file_name', type=str, help="file name to save picture")
parser.add_argument('--depth', type=int, default=0, help="to use depth map")
args = parser.parse_args()

cam = cam_tool(args.cam_model)

i = 0
filename = args.file_name
while not utils.quit_pressed():
    cam.capture(filename + '.jpg', args.depth)
    utils.show(filename + '.jpg')

    for event in pygame.event.get():
        if event.type == pygame.KEYDOWN and event.key == pygame.K_SPACE:
            cam.capture(filename + '_' + str(i) + '.jpg', args.depth)
            i += 1

Example #40
 def show(self):
     return '"' + ' '.join([show(i) for i in self.items]) + '"'
Example #41
 def show(self):
     return show(self.rec) + '.' + show(self.path)
Example #42
def showmodel(m=_M):
    return show(m)
Example #43
 def show(self):
     return '\n' + self.name + ':\n' + '_' * 45 + '\n' + '\n'.join([
         show(i) + ': ' + show(self.model[i].witness_cache)
         for i in self.model if i not in ['Ty', 'Re', 'RecTy']
     ]) + '\n' + '_' * 45 + '\n'
Example #44
#!/usr/bin/env python

import numpy as np
import torch
from net import Net
from utils import device, show

weights_path = "deeper-network-epoch-18-718.74-val-loss-0.9430666666666667-val-accuracy.pt"
net = Net()
net.load_state_dict(torch.load(weights_path))
net.to(device)
net.eval()

X_test = np.load("test_x.npy")
for im in X_test:
    im[im < 220] = 0

show(X_test[0])
X_test = torch.from_numpy(X_test).unsqueeze(1).unsqueeze(1).to(device)
y_preds = []

with torch.no_grad():
    for i, im in enumerate(X_test):
        y_preds.append([int(i), int(torch.argmax(net(im)))])

# Write the predictions CSV with the header "ID,label"
np.savetxt("predictions7.csv", y_preds, delimiter=",", fmt="%d", header="ID,label", comments="")
Example #45
def train(generator, discriminator, criterion, optim_g, optim_d, dataloader,
          fixed_noise, config):
    try:
        os.makedirs(config['res_path'])
    except FileExistsError:
        pass
    # Lists to keep track of progress
    img_list = []
    g_losses = []
    d_losses = []
    g_accs = []
    iters = 0
    device = get_device()
    logging.info("Starting Training Loop...")

    # Real/Fake convention
    epochs = config["epochs"]
    for epoch in range(epochs):
        n_batches = 0
        g_acc = []
        g_loss = 0.0
        d_loss = 0.0
        for i, (data, _) in enumerate(dataloader, 0):
            g_correct_preds = 0
            # Part I: Update D network - maximize log(D(x)) + log(1-D(G(z)))
            discriminator.zero_grad()

            # Put data to device
            data = data.to(device)
            batch_size = data.size(0)
            one = torch.tensor(-1, dtype=torch.float).to(device)
            # 6. Soft and Noisy Labels
            real_label = 0.5 * torch.rand((batch_size, ), device=device) + 0.7
            fake_label = 0.3 * torch.rand((batch_size, ), device=device)

            # Forward pass (real) data batch through discriminator

            output_r = discriminator(data).view(-1)
            g_correct_preds += get_correct_count(output_r)
            if config["wasserstein"]:
                errD_real = torch.mean(output_r)
            else:
                errD_real = criterion(output_r, real_label)
                errD_real.backward()
            D_x = output_r.mean().item()

            # Train with all-fake batch
            # Generate new batch of latent vectors
            noise = torch.randn((batch_size, config["nz"], 1, 1),
                                device=device)
            fake_images = generator(noise)

            # Classify all fake images
            output_f = discriminator(fake_images.detach()).view(-1)
            g_correct_preds += (config["batch_size"] -
                                get_correct_count(output_f))
            if config["wasserstein"]:
                errD_fake = torch.mean(output_f)
            else:
                errD_fake = criterion(output_f, fake_label)
                errD_fake.backward()
            D_G_z1 = output_f.mean().item()

            # Add gradients from the real images and the fake images
            if config["wasserstein"]:
                errD = -errD_real + errD_fake
            else:
                errD = errD_real + errD_fake

            # Update Discriminator
            optim_d.step()

            # Part II: Update G Network: Maximize log(D(G(z)))
            generator.zero_grad()
            # discriminator.eval()
            output = discriminator(fake_images).view(-1)
            if config["wasserstein"]:
                # following the implementation of Wasserstein loss found in
                # https://github.com/martinarjovsky/WassersteinGAN/blob/master/main.py
                errG = -torch.mean(output)
                errG.backward()
            else:
                errG = criterion(output, real_label)
                errG.backward()
            D_G_z2 = output.mean().item()
            optim_g.step()
            # discriminator.train()

            if i % 50 == 0:
                logging.info(
                    "[{}/{}][{}/{}] Loss_D: {}, Loss_G: {}, D(x): {}, D(G(z)): {}/{}"
                    .format(epoch, epochs, i, len(dataloader), errD.item(),
                            errG.item(), D_x, D_G_z1, D_G_z2))

            g_loss += errG.item()
            d_loss += errD.item()
            g_acc.append(g_correct_preds / (config['batch_size'] * 2))
            iters += 1
            n_batches += 1

        g_accs.append(np.average(g_acc))
        g_losses.append(g_loss / n_batches)
        d_losses.append(d_loss / n_batches)
        logging.info("===========================================")
        logging.info("Discriminator accuracy at epoch {}: {}".format(
            epoch, g_accs[-1]))
        logging.info("Generator loss at epoch {}: {}".format(
            epoch, g_losses[-1]))
        logging.info("Disriminator loss at epoch {}: {}".format(
            epoch, d_losses[-1]))
        logging.info("===========================================")

        # Check how the generator is doing by saving G's output on fixed_noise
        with torch.no_grad():
            fake = generator(fixed_noise).detach().cpu()
        show(vutils.make_grid(fake, padding=2, normalize=True), epoch, config)

        # Generate plot
        logging.info("Generating plot to loss_graph.png")
        generate_lineplot((g_losses, d_losses), "Loss",
                          config["res_path"] + "loss_graph.png")

    columns = [
        "Epoch", "Generator Loss", "Discriminator Loss", "Generator Accuracy"
    ]
    df = pd.DataFrame(columns=columns)

    for i in range(epochs):
        df.loc[len(df)] = [i, g_losses[i], d_losses[i], g_accs[i]]
    logging.info("Writing results to res.csv")
    df.to_csv(config["res_path"] + "res.csv")
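A minimal sketch of the config dictionary this loop reads; only the keys actually accessed above, with illustrative values:

config = {
    "res_path": "results/",   # output directory for images, plots, and res.csv
    "epochs": 25,             # number of training epochs
    "wasserstein": False,     # True switches to the Wasserstein losses
    "nz": 100,                # latent vector size
    "batch_size": 128,        # used for the discriminator accuracy bookkeeping
}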
Example #46
 def show(self):
     return 'lambda ' + self.var + ':' + self.domain_type.show() + ' . ' + show(self.body)
Example #47
def canny(image, high_thres=90, low_thres=30):
    show(image, "origin")
    height = len(image)
    width = len(image[0])

    # gauss filter
    gauss = np.zeros((5, 5))
    for i in range(5):
        for j in range(5):
            x = i - 2
            y = j - 2
            sigma = 1
            gauss[i][j] = np.exp(-(x * x + y * y) /
                                 (2 * sigma * sigma)) / \
                          (2 * np.arccos(-1) * sigma * sigma)
    image = conv(image, gauss)
    show(image, "after gauss")

    # Sobel operator
    gx = np.array([[-1, -2, -1],
                   [0, 0, 0],
                   [1, 2, 1]])
    gy = np.array([[-1, 0, 1],
                   [-2, 0, 2],
                   [-1, 0, 1]])
    hx = conv(image, gx)
    hy = conv(image, gy)
    h = np.sqrt(np.power(hx, 2.0) + np.power(hy, 2.0))
    h = np_value_range(h)
    h = normalize(h)
    show(h, "calc gradient")

    # maximum filter
    res = np.zeros((height, width))
    for i in range(1, height - 1):
        for j in range(1, width - 1):
            hx_ij = hx[i][j]
            hy_ij = hy[i][j]
            # up, down can be exchanged
            up = -1
            down = -1

            if hx_ij == 0:  # [90 270]
                up = h[i - 1][j]
                down = h[i + 1][j]
            elif hy_ij == 0:  # [0 180]
                up = h[i][j + 1]
                down = h[i][j - 1]
            else:
                if hy_ij <= 0:
                    hx_ij = -hx_ij
                    hy_ij = -hy_ij
                if hx_ij > hy_ij:  # [0, 45]
                    c = hy_ij / hx_ij
                    up = h[i][j + 1] * c + (1 - c) * h[i - 1][j + 1]
                    down = h[i][j - 1] * c + (1 - c) * h[i + 1][j - 1]
                elif 0 < hx_ij < hy_ij:  # [45, 90]
                    c = 1 - 1 / (hy_ij / hx_ij)
                    up = h[i - 1][j + 1] * c + (1 - c) * h[i - 1][j]
                    down = h[i + 1][j - 1] * c + (1 - c) * h[i + 1][j]
                elif 0 < -hx_ij < hy_ij:  # [90, 135]:
                    c = 1 / (-hy_ij / hx_ij)
                    up = h[i - 1][j] * c + (1 - c) * h[i - 1][j - 1]
                    down = h[i + 1][j] * c + (1 - c) * h[i + 1][j + 1]
                else:  # [135, 180]
                    c = 1 - (-hy_ij / hx_ij)
                    up = h[i - 1][j - 1] * c + (1 - c) * h[i][j - 1]
                    down = h[i + 1][j + 1] * c + (1 - c) * h[i][j + 1]

            if h[i][j] >= up and h[i][j] >= down:
                res[i][j] = h[i][j]
            else:
                res[i][j] = 0
    show(res, "filter")

    # 2-threshold filter
    final = np.zeros((height, width))
    for i in range(height):
        for j in range(width):
            if res[i][j] >= high_thres:
                final[i][j] = 255
            elif res[i][j] <= low_thres:
                final[i][j] = 0
            else:
                final[i][j] = 1
    change = True
    previous = final.copy()
    show(final, "before linking")
    while change:
        change = False
        for i in range(height):
            for j in range(width):
                if previous[i][j] == 1:
                    states = [previous[_i][_j] == 255
                              for _i in range(max(i - 1, 0), min(i + 2, height))
                              for _j in range(max(j - 1, 0), min(j + 2, width))]
                    if True in states:
                        final[i][j] = 255
                        change = True
                else:
                    final[i][j] = previous[i][j]
        show(final, "after linking")
    show(final, "after linking")

    save("canny-fig")
    return final, "canny.jpg"
Example #48
def run_supevisordctl(command, conf=None):
    """Start supervisor process."""
    if not conf:
        conf = pjoin(cget("service_dir"), "supervisor", "config", "supervisord.conf")
    show(yellow("Running supervisorctrl: %s." % command))
    return sudo('supervisorctl --configuration="%s" %s' % (conf, command))
Example #49
def detect_bricks(
    image, black_threshold, brick_min_area, brick_circularity_range, show_images, dump_images
):
    """
    Compute coordinates of brick centers.
    """

    # Convert the image to grayscale.
    image_gray = cv2.cvtColor(image, cv2.COLOR_RGB2GRAY)

    # Make the image binary by thresholding it with the black_threshold value.
    _, image_binary = cv2.threshold(image_gray, black_threshold, 255, cv2.THRESH_BINARY_INV)

    # Find all closed contours on the binary image.
    contours, _ = cv2.findContours(image_binary, cv2.RETR_LIST, cv2.CHAIN_APPROX_NONE)

    # Filter contours by area and circularity to leave only brick contours.
    brick_contours = []
    brick_contours_indices = []  # For debug only.
    contour_index = -1  # For debug only.
    for contour in contours:
        contour_index += 1
        area = cv2.contourArea(contour)
        if area < brick_min_area:
            continue
        perimeter = cv2.arcLength(contour, closed=True)
        circularity = 4.0 * np.pi * area / (perimeter ** 2.0)
        if brick_circularity_range[0] < circularity < brick_circularity_range[1]:
            brick_contours.append(contour)
            brick_contours_indices.append(contour_index)

    # For each brick contour get the center of its bounding box.
    bricks = []
    for contour in brick_contours:
        box_x, box_y, box_width, box_height = cv2.boundingRect(contour)
        bricks.append((box_x + box_width // 2, box_y + box_height // 2))

    if show_images or dump_images:
        #contours_colors = [[255, 255, 0]] * len(contours)
        contours_colors = 255 * np.random.rand(len(contours), 3)
        image_contours = cv2.merge((image_gray,) * 3)
        for i in range(len(contours)):
            image_contours = cv2.drawContours(image_contours, contours, i, contours_colors[i], -1)
        image_brick_contours = cv2.merge((image_gray,) * 3)
        for i in brick_contours_indices:
            image_brick_contours = cv2.drawContours(image_brick_contours, contours, i, contours_colors[i], -1)
        image_bricks = cv2.merge((image_gray,) * 3)
        for brick in bricks:
            image_bricks = cv2.drawMarker(image_bricks, brick, (255, 255, 0), cv2.MARKER_CROSS, thickness=2)

        if show_images:
            show(image, '1. image', 1)
            show(image_gray, '2. image_gray', 1)
            show(image_binary, '3. image_binary', 1)
            show(image_contours, '4. image_contours', 1)
            show(image_brick_contours, '5. image_brick_contours', 1)
            show(image_bricks, '6. image_bricks', 0)

        if dump_images:
            dump(image, '1_image')
            dump(image_gray, '2_image_gray')
            dump(image_binary, '3_image_binary')
            dump(image_contours, '4_image_contours')
            dump(image_brick_contours, '5_image_brick_contours')
            dump(image_bricks, '6_image_bricks')

    return bricks
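A usage sketch with illustrative tuning values; the thresholds and circularity range are assumptions, not values from the original project:

import cv2

image = cv2.cvtColor(cv2.imread("bricks.jpg"), cv2.COLOR_BGR2RGB)  # hypothetical input
centers = detect_bricks(
    image,
    black_threshold=60,
    brick_min_area=500,
    brick_circularity_range=(0.6, 0.9),
    show_images=False,
    dump_images=False,
)
print(centers)  # list of (x, y) brick centers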
Example #50
 def show(self):
     return show(self.oplist)
Example #51
 def show(self):
     return '^'.join([show(i) for i in self.items])
Example #52
print(Ind.judge('j'))

print(Ind.judge('n'))

print(Ind.witness_cache)

print(Ind.query('h'))

print(Ind.create())

run = Pred('run', [Ind])

p = PType(run, ['j'])

print(show(p))

print(p.validate())

p.create()

print(p.witness_cache)

print(Ind.judge_nonspec())

print(p.query_nonspec())

print(p.judge_nonspec())

man = Pred('man', [Ind])
Example #53
 def show(self):
     return show(self.comps.base_type)+'+'
Example #54
 def judge_nonspec(self, n=1, max=None):
     if ttracing('judge_nonspec'):
         print(show(self) + ' is a variable type and cannot be judged')
Example #55
def AttValRecType(T,l):
    if isinstance(T, RecType):
        return T.comps.__getattribute__(l)
    else:
        print('AttValRecType not defined on '+show(T)+' (not a record type)')
        return None
Example #56
            optimizer.step()

            train_loss += loss.data.item()
            count += 1

        # ===================log========================
        train_loss /= count
        if epoch % show_every == 0:

            val = val_loss(model, test_loader, hidden_size, train=True)

            print('epoch [{}/{}], loss:{:.4f}, val:{:.4f}, train_loss:{:.4f}'.
                  format(epoch + 1, num_epochs, loss.data.item(),
                         val.data.item(), train_loss))
            pic = to_img(output.cpu().data)
            show(pic[0][0])

    torch.save(model.state_dict(), teacher_fname)

else:
    # load teacher model
    checkpoint = torch.load(teacher_fname)
    model.load_state_dict(checkpoint)
"""
------- Sample 2D latent code -------
"""

model.eval()
N = 10  # number of images per size
range_ = 2  # range of exploration
Example #57
def LabelsRecType(T):
    if isinstance(T, RecType):
        return T.comps.__dict__.keys()
    else:
        print('LabelsRecType not defined on '+ show(T) +' (not a record type)')
        return None
Example #58
import pickle

from utils import show, save, calibrate, interpolate, adaptive_median

with open("potato.pickle", "rb") as f:
    image = pickle.load(f, encoding="latin1")

image = calibrate(image)  # Compensate for fixed-pattern noise
image = adaptive_median(image)  # Reduce gaussian noise and remove impulses
image = interpolate(image, 3,
                    mode="median")  # downsample to fit 'normal' y-scale

show(image, r=(0, 1.2))
Example #59
def equal(x, y):
    return show(x) == show(y)
Example #60
            calculate coordinates
            filter with NMS
            draw"""
        start_time = datetime.datetime.now()
        data, prior = self.r()
        with torch.no_grad():
            confi, offset = self.onet(data.cuda())
        confi = confi.cpu().numpy().flatten()
        offset = offset.cpu().numpy()

        offset, prior, confi = offset[confi >= 0.999], prior[
            confi >= 0.999], confi[confi >= 0.999]

        offset, landmarks = offset[:, :4], offset[:, 4:]
        offset, landmarks = utils.transform(offset, landmarks, prior)

        boxes = np.hstack(
            (offset, np.expand_dims(confi,
                                    axis=1), landmarks))  # combine offsets with confidence scores, then apply NMS
        boxes = utils.NMS(boxes, threshold=0.4, ismin=True)
        end_time = datetime.datetime.now()
        print("ONet cost {}ms".format(
            (end_time - start_time).microseconds / 1000))
        return boxes


if __name__ == "__main__":
    FILE = "F:/MTCNN/test/video2.mp4"
    FUNC = Test
    utils.show(FILE, FUNC, 20)