Example #1
File: grid.py Project: chbrown/tsa
def corpus_sandbox(analysis_options):
    print('Exploring SB-5 corpus')
    session = create_session()
    sb5b_documents = session.query(Document).join(Source).\
        filter(Source.name == 'sb5b').all()

    print('Found %d documents' % len(sb5b_documents))

    rows = [dict(
        label=document.label,
        inferred=bool(document.details.get('Inferred')),
        source=document.details.get('Source', 'NA')) for document in sb5b_documents]
    df = pd.DataFrame.from_records(rows)

    # df_agg = df.groupby(['label', 'inferred'])

    # df.pivot_table(values=['label'], rows=['inferred'], aggfunc=[len])
    df.pivot_table(rows=['label', 'inferred'], aggfunc=[len])
    df.pivot_table(rows=['label', 'source'], aggfunc=[len])
    df.pivot_table(rows=['source'], aggfunc=[len])
    # df_agg.plot(x='train', y='accuracy')

    for document in sb5b_documents:
        # 'weareohio' in document.document.lower(), .document
        print(document.details.get('Source'), document.label)

    IPython.embed()
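The pattern above (build up local state, then call IPython.embed() so everything in scope can be inspected interactively) reduces to a minimal sketch; the records below are illustrative stand-ins, not data from the project above:

import IPython
import pandas as pd

# Build some state worth exploring.
records = [{'label': 'FOR', 'inferred': True},
           {'label': 'AGAINST', 'inferred': False}]
df = pd.DataFrame.from_records(records)

# Drop into a shell; `records` and `df` are available for ad-hoc queries.
IPython.embed()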
Example #2
def impute_data(df, cohort):

    #import IPython
    #IPython.embed()

    if isinstance(df, str):
        df = ml.read_data(df)

    #########################
    ## IMPUTE MISSING DATA ##
    #########################
    print "Imputing missing data..."

    # change msam to missing if msam_NA == 1
    nanList = ['g6_g6msam_nan', 'g7_g7msam_nan', 'g8_g8msam_nan', 'g9_g8msam_nan']
    varList = [['g6_g6msam_Advanced', 'g6_g6msam_Basic', 'g6_g6msam_Proficient'],
               ['g7_g7msam_Advanced', 'g7_g7msam_Basic', 'g7_g7msam_Proficient'],
               ['g8_g8msam_Advanced', 'g8_g8msam_Basic', 'g8_g8msam_Proficient'],
               ['g9_g8msam_Advanced', 'g9_g8msam_Basic', 'g9_g8msam_Proficient']]
    for nacol, colList in zip(nanList, varList):
        for col in colList:
            df.loc[df[nacol] == 1, col] = np.nan 


    # predict missing data using any available data
    wordList = ['absrate', 'mapr', 'msam_Advanced', 'msam_Basic', 'msam_Proficient', 'mobility', 'nsusp', 'mpa', 'tardyr', 'psatm', 'psatv', 'retained']
    for word in wordList:
        colList = [col for col in df.columns if word in col]
        rowMean = df[colList].mean(axis=1)
        for col in colList:
            print(df[col].value_counts(dropna=False))
            df[col] = df[col].fillna(rowMean)  # fill NaNs from the row mean; inplace fillna on a .loc slice is unreliable
            print(df[col].value_counts(dropna=False))


    '''
    ############################
    # IMPUTE NEIGHBORHOOD DATA #
    ############################

    print "Imputing missing school neighborhood data..."

    ## Fill missing school neighborhood data
    print "Fixing neighborhood columns..."
    neighborhood_cols = ['suspensionrate',  'mobilityrateentrantswithdra',  'attendancerate',   'avg_class_size',   'studentinstructionalstaffratio',   'dropoutrate',  'grade12documenteddecisionco',  'grade12documenteddecisionem',  'grade12documenteddecisionmi',  'grad12docdec_col_emp', 'graduationrate',   'studentsmeetinguniversitysyste',   'Est_Households_2012',  'Est_Population_2012',  'Med_Household_Income_2012',    'Mean_Household_Income_2012',   'Pop_Below_Poverty_2012',   'Percent_Below_Poverty_2012',   'Pop_Under18_2012', 'Under18_Below_Poverty_2012',   'Under18_Below_Poverty_Percent_2012',   'Housholds_on_Food_stamps_with_Children_Under18_2012',  'Housholds_Pop_on_Food_Stamps_2012',    'Pop_BlackAA_2012', 'Pop_White_2012',   'Bt_18_24_percent_less_than_High_School_2012',  'Bt_18_24_percent_High_School_2012',    'Bt_18_24_percent_Some_College_or_AA_2012', 'Bt_1824_percent_BA_or_Higher_2012',    'Over_25_percent_less_than_9th_grade_2012', 'Over_25_percent_9th_12th_2012',    'Over_25_percent_High_School_2012', 'Over_25__percent_Some_College_No_Deg_2012',    'Over_25_percent_AA_2012',  'Over_25_percent_Bachelors_2012',   'Over_25_percent_Graduate_or_Professionals_2012']
    ml.replace_with_mean(df, neighborhood_cols)
    '''

    #summary = ml.summarize(df)
    #print summary.T
    #ml.print_to_csv(summary.T, 'updated_summary_stats_vertical.csv')

    return_file = '/mnt/data2/education_data/mcps/DATA_DO_NOT_UPLOAD/imputed_data_cohort' + str(cohort) + '.csv'
    ml.print_to_csv(df, return_file)

    #IPython.embed()

    print "Done!"
    import IPython
    IPython.embed()
    return df
Example #3
def transform_data():
    from solaris.run import load_data
    from sklearn.externals import joblib

    data = load_data('data/data.pkl')

    kringing = PertubatedKriging()
    #kringing = PertubatedSpline()

    data['description'] = '%r: %r' % (kringing, kringing.est)
    print(data['description'])

    print('_' * 80)
    print(kringing)
    print()

    for key in ['train', 'test']:
        print('_' * 80)
        print('transforming %s' % key)
        print()
        X = data['X_%s' % key]

        X = kringing.fit_transform(X)
        data['X_%s' % key] = X

    print()
    print('dumping data')
    joblib.dump(data, 'data/interp10_data.pkl')
    IPython.embed()
Example #4
def ipython(user_ns=None):
    try:
        import IPython
        from IPython.config.loader import Config
    except ImportError:
        return simple_repl(user_ns=user_ns)
    defns = {'os':os, 're':re, 'sys':sys}
    if not user_ns:
        user_ns = defns
    else:
        defns.update(user_ns)
        user_ns = defns

    c = Config()
    c.InteractiveShellApp.exec_lines = [
        'from __future__ import division, absolute_import, unicode_literals, print_function',
        ]
    c.TerminalInteractiveShell.confirm_exit = False
    c.PromptManager.in_template = (r'{color.LightGreen}calibre '
            '{color.LightBlue}[{color.LightCyan}%s{color.LightBlue}]'
            r'{color.Green}|\#> '%get_version())
    c.PromptManager.in2_template = r'{color.Green}|{color.LightGreen}\D{color.Green}> '
    c.PromptManager.out_template = r'<\#> '
    c.TerminalInteractiveShell.banner1 = BANNER
    c.PromptManager.justify = True
    c.TerminalIPythonApp.ipython_dir = ipydir
    os.environ['IPYTHONDIR'] = ipydir

    c.InteractiveShell.separate_in = ''
    c.InteractiveShell.separate_out = ''
    c.InteractiveShell.separate_out2 = ''

    c.PrefilterManager.multi_line_specials = True

    IPython.embed(config=c, user_ns=user_ns)
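Example #4 targets the pre-4.0 IPython API: IPython.config.loader and PromptManager were removed in later releases, with Config moving to traitlets.config. A minimal modern equivalent of the config-driven embed, as a sketch assuming IPython >= 4:

import IPython
from traitlets.config import Config  # formerly IPython.config.loader

c = Config()
c.TerminalInteractiveShell.confirm_exit = False

# user_ns seeds the shell's namespace, just as in the example above.
IPython.embed(config=c, user_ns={'answer': 42})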
Example #5
def user_console(user_email_address):
    with global_session_scope() as db_session:
        account = db_session.query(Account).filter_by(
            email_address=user_email_address).one()

        if account.provider == 'eas':
            banner = """
        You can access the account instance with the 'account' variable.
        """
        else:
            with writable_connection_pool(account.id, pool_size=1).get()\
                    as crispin_client:
                if account.provider == 'gmail' \
                        and 'all' in crispin_client.folder_names():
                    crispin_client.select_folder(
                        crispin_client.folder_names()['all'][0],
                        uidvalidity_cb)

                banner = """
        You can access the crispin instance with the 'crispin_client' variable,
        and the account instance with the 'account' variable.

        IMAPClient docs are at:

            http://imapclient.readthedocs.org/en/latest/#imapclient-class-reference
        """

        IPython.embed(banner1=banner)
Example #6
def _aggregate_batch(data_holder, use_list=False):
    size = len(data_holder[0])
    result = []
    for k in range(size):
        if use_list:
            result.append(
                [x[k] for x in data_holder])
        else:
            dt = data_holder[0][k]
            if type(dt) in [int, bool]:
                tp = 'int32'
            elif type(dt) == float:
                tp = 'float32'
            else:
                try:
                    tp = dt.dtype
                except AttributeError:
                    raise TypeError("Unsupported type to batch: {}".format(type(dt)))
            try:
                result.append(
                    np.asarray([x[k] for x in data_holder], dtype=tp))
            except Exception as e:  # noqa
                logger.exception("Cannot batch data. Perhaps they are of inconsistent shape?")
                if isinstance(dt, np.ndarray):
                    s = pprint.pformat([x[k].shape for x in data_holder])
                    logger.error("Shape of all arrays to be batched: " + s)
                try:
                    # open an ipython shell if possible
                    import IPython as IP; IP.embed()    # noqa
                except ImportError:
                    pass
    return result
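The guard used above, which tries to embed but degrades gracefully when IPython is absent, is reusable on its own as a sketch:

def debug_shell():
    """Open an IPython shell at the call site if IPython is installed."""
    try:
        import IPython
        IPython.embed()
    except ImportError:
        pass  # no IPython available; continue without a shell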
Example #7
    def remove_convex(self, start_state=None, peg=None):
        left_peg = start_state.get_left_peg(peg)
        right_peg = start_state.get_right_peg(peg)
        new_contour_pegs = self.convex_hull(start_state, peg, left_peg, right_peg)

        pegs_to_be_added = []
        for elem in new_contour_pegs:
            if (elem != left_peg) and (elem != right_peg):
                pegs_to_be_added.append(elem)

        prev_inside = start_state.inside
        prev_contour = start_state.contour
        new_outside = start_state.outside[:]
        new_outside.append(peg)
        new_inside = []
        for elem in prev_inside:
            if elem not in pegs_to_be_added:
                new_inside.append(elem)
        list_of_new_pegs = []
        for elem in prev_contour.peg_order:
            if elem != peg:
                list_of_new_pegs.append(prev_contour.peg_dict[elem])
            else:
                for new_peg in pegs_to_be_added:
                    list_of_new_pegs.append(Peg(new_peg, True, 0))
        end_state = State(list_of_new_pegs, new_inside, new_outside)
        assert len(new_inside + new_outside + end_state.contour.peg_order) == 12
        IPython.embed()
        return end_state
Example #8
File: batchtps.py Project: rll/lfd
    def test_mapping_cost(
        self,
        other,
        bend_coef=DEFAULT_LAMBDA[1],
        outlierprior=1e-1,
        outlierfrac=1e-2,
        outliercutoff=1e-2,
        T=5e-3,
        norm_iters=DEFAULT_NORM_ITERS,
    ):
        mapping_err = self.mapping_cost(other, outlierprior, outlierfrac, outliercutoff, T, norm_iters)
        for i in range(self.N):
            ## compute error for 0 on cpu
            s_gpu = mapping_err[i]
            s_cpu = np.float32(0)
            xt = self.pts_t[i].get()
            xw = self.pts_w[i].get()

            yt = other.pts_t[i].get()
            yw = other.pts_w[i].get()

            ##use the trace b/c then numpy will use float32s all the way
            s_cpu += np.trace(xt.T.dot(xt) + xw.T.dot(xw) - 2 * xw.T.dot(xt))
            s_cpu += np.trace(yt.T.dot(yt) + yw.T.dot(yw) - 2 * yw.T.dot(yt))

            if not np.isclose(s_cpu, s_gpu, atol=1e-4):
                ## high err tolerance is b/c of difference in cpu and gpu precision?
                print "cpu and gpu sum sq differences differ!!!"
                ipy.embed()
                sys.exit(1)
Example #9
File: batchtps.py Project: rll/lfd
    def test_bending_cost(
        self,
        other,
        bend_coef=DEFAULT_LAMBDA[1],
        outlierprior=1e-1,
        outlierfrac=1e-2,
        outliercutoff=1e-2,
        T=5e-3,
        norm_iters=DEFAULT_NORM_ITERS,
    ):
        self.get_target_points(other, outlierprior, outlierfrac, outliercutoff, T, norm_iters)
        self.update_transform(bend_coef)
        bending_costs = self.bending_cost(bend_coef)
        for i in range(self.N):
            c_gpu = bending_costs[i]
            k_nn = self.kernels[i].get()
            w_nd = self.w_nd[i].get()
            c_cpu = np.float32(0)
            for d in range(DATA_DIM):
                r = np.dot(k_nn, w_nd[:, d]).astype(np.float32)
                r = np.float32(np.dot(w_nd[:, d], r))
                c_cpu += r
            c_cpu *= np.float32(bend_coef)
            if np.abs(c_cpu - c_gpu) > 1e-4:
                ## high err tolerance is b/c of difference in cpu and gpu precision?
                print("cpu and gpu bend costs differ!!!")
                ipy.embed()
                sys.exit(1)
Example #10
File: test.py Project: gkahn13/bsp
def pr2_flashlidar():
    env = rave.Environment()
    #env.Load('envs/testpr2sensors.env.xml')
    env.Load('envs/pr2-table.env.xml')
    
    env.SetViewer('qtcoin') 
    time.sleep(1)
    
    start_time = time.time()
    sensors = [s for s in env.GetSensors() if s.GetName().find("flashlidar") != -1]
    lidar = sensors[0]
    
    lidar.Configure(Sensor.ConfigureCommand.PowerOn)
    #lidar.Configure(Sensor.ConfigureCommand.RenderDataOn)
            
    while True:
        start_time = time.time()
        olddata = lidar.GetSensorData(Sensor.Type.Laser)
        while True:
            data = lidar.GetSensorData(Sensor.Type.Laser)
            if data.stamp != olddata.stamp:
                break
            time.sleep(0.1)
        print('Elapsed: {0}'.format(time.time() - start_time))
        break
    
    lidar.Configure(Sensor.ConfigureCommand.PowerOff)
    #lidar.Configure(Sensor.ConfigureCommand.RenderDataOff)
    
    IPython.embed()
Example #11
File: batchtps.py Project: rll/lfd
def check_update(ctx, b):
    ctx.tps_params[0] = ctx.default_tps_params.copy()
    ctx.update_ptrs()
    xt = ctx.pts_t[0].get()
    p_mat = ctx.proj_mats[b][0].get()
    o_mat = ctx.offset_mats[b][0].get()
    true_res = np.dot(p_mat, xt) + o_mat
    ctx.set_tps_params(ctx.offset_mats[b])
    o_gpu = ctx.tps_params[0].get()
    if not np.allclose(o_gpu, o_mat):
        print "setting tps params failed"
        diff = np.abs(o_mat - o_gpu)
        nz = np.nonzero(diff)
        print nz
        ipy.embed()
        sys.exit(1)
    ctx.update_transform(b)
    p1 = ctx.tps_params[0].get()
    if not np.allclose(true_res, p1):
        print "p1 and true res differ"
        print p1[:3]
        diff = np.abs(p1 - true_res)
        print np.max(diff)
        amax = np.argmax(diff)
        print amax
        nz = np.nonzero(diff)
        print nz[0]
        ipy.embed()
        sys.exit(1)
Example #12
File: test.py Project: gkahn13/bsp
def pr2_sensors():
    env = rave.Environment()
    #env.Load('robots/pr2-beta-sim.robot.xml')
    env.Load('envs/testpr2sensors.env.xml')
    r = env.GetRobots()[0]
    
    env.SetViewer('qtcoin') 
    
    ienablesensor = 0
    while True:
        start_time = time.time()
        sensors = env.GetSensors()
        for i,sensor in enumerate(sensors):
            if i==ienablesensor:
                sensor.Configure(Sensor.ConfigureCommand.PowerOn)
                sensor.Configure(Sensor.ConfigureCommand.RenderDataOn)
            else:
                sensor.Configure(Sensor.ConfigureCommand.PowerOff)
                sensor.Configure(Sensor.ConfigureCommand.RenderDataOff)
        print('showing sensor %s, try moving obstacles' % sensors[ienablesensor].GetName())
        if sensors[ienablesensor].Supports(Sensor.Type.Laser):
            # if laser, wait for the sensor data to be updated and then print it
            olddata = sensors[ienablesensor].GetSensorData(Sensor.Type.Laser)
            while True:
                data = sensors[ienablesensor].GetSensorData(Sensor.Type.Laser)
                if data.stamp != olddata.stamp:
                    break
                time.sleep(0.1)
            print('sensor data: ', data.ranges)
        #time.sleep(5)
        ienablesensor = (ienablesensor+1)%len(sensors)
        print('Elapsed: {0}'.format(time.time() - start_time))

    
    IPython.embed()
Example #13
File: test.py Project: gkahn13/bsp
def eih():
    env = rave.Environment()
    env.Load('data/testwamcamera.env.xml')
    
    env.SetViewer('qtcoin')
    
    ienablesensor = 0
    while True:
        sensors = env.GetSensors()
        for i,sensor in enumerate(sensors):
            if i==ienablesensor:
                sensor.Configure(Sensor.ConfigureCommand.PowerOn)
                sensor.Configure(Sensor.ConfigureCommand.RenderDataOn)
            else:
                sensor.Configure(Sensor.ConfigureCommand.PowerOff)
                sensor.Configure(Sensor.ConfigureCommand.RenderDataOff)
        print('showing sensor %s, try moving obstacles' % sensors[ienablesensor].GetName())
        if sensors[ienablesensor].Supports(Sensor.Type.Laser):
            # if laser, wait for the sensor data to be updated and then print it
            olddata = sensors[ienablesensor].GetSensorData(Sensor.Type.Laser)
            while True:
                data = sensors[ienablesensor].GetSensorData(Sensor.Type.Laser)
                if data.stamp != olddata.stamp:
                    break
                time.sleep(0.1)
            print('sensor data: ', data.ranges)
        time.sleep(5)
        ienablesensor = (ienablesensor+1)%len(sensors)

    
    IPython.embed()
Example #14
def fileToHash(filename):
    try:
        assert isfile(filename)
    except AssertionError:
        IPython.embed(simple_prompt=True)
    with open(filename, 'rb') as f:
        return sha256(f.read()).hexdigest()
Example #15
def main():
    """
    program starting point
    """
    shell = False
    atomic = False
    try:
        optlist, _ = getopt.getopt(sys.argv[1:], 'p:n:', ["shell", "atomic"])
        optdict = dict(optlist)
        prefix = optdict['-p']
        if '-' in prefix:
            raise ValueError('"-" cannot exist in prefix=%r' % prefix)
        num_workers = int(optdict['-n'])
        if "--shell" in optdict:
            shell = True
        if "--atomic" in optdict:
            atomic = True
    except Exception:
        print(traceback.format_exc())
        usage()
        sys.exit(1)
    orchestrator = Orchestrator(prefix, num_workers)
    if shell:
        # give me an IPython shell
        IPython.embed()
        return
    orchestrator.start(atomic)
Example #16
    def segment(self):
        # Specify segments and index
        exit = False
        i = 1
        while not exit:
            user_ret = input("Enter new segment name: ")
            self.map_index_labels[i] = str(user_ret)
            self.map_index_data[i] = []
            i += 1
            user_ret = input("Done with specifying segments?[y/n]")
            if user_ret == 'y':
                exit = True
        print("---------------- Collecting Segments ----------------------")
        exit = False
        while not exit:
            self.print_all_index_labels()
            index = int(input("Which index?"))
            start_frm = int(input("Starting frame?"))
            end_frm = int(input("End frame?"))
            new_segment = (start_frm, end_frm)
            segment_list = self.map_index_data[index]
            segment_list.append(new_segment)
            self.map_index_data[index] = segment_list
            user_ret = input("Done with specifying segments?[y/n]")
            if user_ret == 'y':
                exit = True
        IPython.embed()
Example #17
    def pause(self, message="Pause"):
        if hasattr(self, 'services'):
            self.services.start_interactive_mode()
        import IPython
        IPython.embed()
        if hasattr(self, 'services'):
            self.services.stop_interactive_mode()
Example #18
def ArrayToCode(array, codes, code_lengths):
    code_lengths = np.array(code_lengths, dtype=np.int32)
    total_length = np.sum(code_lengths[array])
    total_4bytes = (total_length - 1) // 32 + 1

    compressed_codes = np.zeros(total_4bytes, dtype=np.uint32)

    idx = 0
    shift = 0
    for i in range(len(array)):
        number = array[i]
        length = code_lengths[number]
        code = codes[number]
        while length > 0:
            eff = min(length, 32-shift)
            bits_to_write = code & ((1 << eff) - 1)
            if idx == total_4bytes:
                print(i)
                import IPython
                IPython.embed()
            compressed_codes[idx] += bits_to_write << shift

            code = code >> eff
            length -= eff
            idx += (shift + eff) // 32
            shift = (shift + eff) % 32

    assert idx * 32 + shift == total_length  # For debug

    return compressed_codes, total_length
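Example #18 uses embed() as a conditional breakpoint: the shell opens only when the write index is about to overflow the output buffer, with the loop variables still in scope. The bare pattern, sketched with a made-up invariant:

import IPython

def checked_store(buf, idx, value):
    # Embed only when the invariant is violated, as Example #18 does.
    if idx >= len(buf):
        IPython.embed()  # inspect buf, idx and value before failing
        raise IndexError(idx)
    buf[idx] = value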
Example #19
def test(model, dataset, weights_filepath=BEST_WEIGHT_FILE):

    model.load_weights(weights_filepath)

    train_iterator = dataset.iterator(batch_size=batch_size,
                                      num_batches=nb_test_batches)

    batch_x, batch_y = next(train_iterator)

    results_dir = 'results'
    if not os.path.exists(results_dir):
        os.makedirs(results_dir)

    pred = model._predict(batch_x)
    pred = pred.reshape(batch_size, patch_size, 1, patch_size, patch_size)



    #pred_as_b012c = pred.transpose(0, 3, 4, 1, 2)

    # for i in range(batch_size):
    #     v, t = mcubes.marching_cubes(pred_as_b012c[i, :, :, :, 0], 0.5)
    #     mcubes.export_mesh(v, t, results_dir + '/drill_' + str(i) + '.dae', 'drill')
    #     viz.visualize_batch_x(pred, i, str(i), results_dir + "/pred_" + str(i))
    #     viz.visualize_batch_x(batch_x, i, str(i), results_dir + "/input_" + str(i))
    #     viz.visualize_batch_x(batch_y, i, str(i), results_dir + "/expected_" + str(i))
    for i in range(batch_size):
        viz.visualize_batch_x_y_overlay(batch_x, batch_y, pred, i=i,  title=str(i))
        # viz.visualize_batch_x(pred, i, 'pred_' + str(i), )
        # viz.visualize_batch_x(batch_x, i,'batch_x_' + str(i), )
        # viz.visualize_batch_x(batch_y, i, 'batch_y_' + str(i), )


    import IPython
    IPython.embed()
Example #20
    def update_alpha_beta_gamma(self):  # Step 6
        # At the moment, we can only fix C0 = 0
        if self.parDict['CFit'] == 'fixed' and self.parDict['C0'] == 0.0:
            X = np.ones((self.N, 2), dtype=float)
            X[:, 1] = self.xi
            # Eqn (77)
            XTXinv = np.linalg.inv(np.dot(X.T, X))
            Sigma_chat = XTXinv * self.sigsqr
            # Eqn (76)
            chat = np.dot(np.dot(XTXinv, X.T), self.eta)
            # Eqn (75)
            self.alpha, self.beta = self.rng.multivariate_normal(chat, Sigma_chat)
            self.gamma = 0.
        else:
            # This doesn't work if any parameters are fixed
            X = np.ones((self.N, 3), dtype=float)
            X[:, 1] = self.xi
            X[:, 2] = self.z
            # Eqn (77)
            XTXinv = np.linalg.inv(np.dot(X.T, X))
            Sigma_chat = XTXinv * self.sigsqr
            # Eqn (76)
            chat = np.dot(np.dot(XTXinv, X.T), self.eta)
            # Eqn (75)
            try:
                self.alpha, self.beta, self.gamma = self.rng.multivariate_normal(chat, Sigma_chat)
            except Exception:
                print("multivariate_normal fail")
                IPython.embed()
                sys.exit()
Example #21
def _do_start_shell(config):
    # Import db handle, session and other useful stuff to the shell's scope
    db = None
    if isinstance(config, ServiceDaemon):
        db = config.get_main_store()

    # so that there is a db session handy in the shell
    session = db.Session()

    # these are just useful to have in a dev. shell
    import IPython, traceback, inspect, sys
    from pprint import pprint, pformat

    header = (
        "Database handle is:           db\n"
        "There's also an open session: session\n"
        "Imported packages:  traceback, inspect, sys\n"
        "Imported functions: pprint(), pformat()"
    )

    # start the kind of shell requested by user
    if config.shell:
        return IPython.embed(header=header)

    if config.ikernel:
        return IPython.embed_kernel()
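The two branches above behave quite differently: IPython.embed() opens a blocking shell on the current terminal, while IPython.embed_kernel() starts an in-process kernel that an external client can attach to with `jupyter console --existing`. A sketch of the kernel variant (the session object is a stand-in):

import IPython

session = object()  # stand-in for the db session prepared above

# Serve a kernel instead of a terminal shell; attach from another terminal.
IPython.embed_kernel(local_ns={'session': session})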
Example #22
def main():
    app = QApplication(sys.argv)
    app.quitOnLastWindowClosed = True
    form = PlotDialog(app)
    form.show()
    IPython.embed()
    app.exit()
Example #23
def test_window_distance(width, num_steps, plot=None):
    import sdf_file, obj_file
    np.random.seed(100)

    mesh_file_name = 'data/test/meshes/Co_clean.obj'
    sdf_3d_file_name = 'data/test/sdf/Co_clean.sdf'

    sdf = sdf_file.SdfFile(sdf_3d_file_name).read()
    mesh = obj_file.ObjFile(mesh_file_name).read()
    graspable = GraspableObject3D(sdf, mesh)

    grasp_axis = np.array([0, 1, 0])
    grasp_width = 0.1

    grasp1_center = np.array([0, 0, -0.025])
    grasp1 = g.ParallelJawPtGrasp3D(grasp1_center, grasp_axis, grasp_width)
    grasp2_center = np.array([0, 0, -0.030])
    grasp2 = g.ParallelJawPtGrasp3D(grasp2_center, grasp_axis, grasp_width)

    w1, w2 = graspable.surface_information(grasp1, width, num_steps)
    v1, v2 = graspable.surface_information(grasp2, width, num_steps)

    # IPython.embed()

    if plot:
        plot(w1.proj_win, num_steps)
        plot(w2.proj_win, num_steps)
        plot(v1.proj_win, num_steps)
        plot(v2.proj_win, num_steps)
        plt.show()

    IPython.embed()

    return
Example #24
def ipython_drop(msg, glo, loc):
    """
    Add this function in your code to drop to an ipython shell

    (provided that glVar.shell == True)

    msg: informative message to print when shell is invoked
    glo: globals()
    loc: locals()
    """
    import IPython
    banner = 'Dropping into IPython, type Ctrl-D to exit. ' + msg
    try:
        # Try pre-0.11 syntax first
        args = ['-pi1','In <\\#>: ','-pi2','   .\\D.: ',
                '-po','Out<\\#>: ','-nosep']
        ipshell = IPython.Shell.IPShellEmbed(
            args, 
            banner = banner,
            exit_msg = 'Leaving Interpreter, back to program.')
        ipshell(global_ns = glo, local_ns = loc)
    except AttributeError:
        # try the new syntax: post-0.11
        #from IPython.config.loader import Config
        #cfg = Config()
        # directly open the shell
        IPython.embed(user_ns=loc, banner2=banner)
Example #25
    def validExtend(self, start_config, end_config):

        segmentNum = 10
        IPython.embed()
        x1=numpy.linspace(start_config[0],end_config[0],segmentNum)
        x2=numpy.linspace(start_config[1],end_config[1],segmentNum)
        x3=numpy.linspace(start_config[2],end_config[2],segmentNum)
        x4=numpy.linspace(start_config[3],end_config[3],segmentNum)
        x5=numpy.linspace(start_config[4],end_config[4],segmentNum)
        x6=numpy.linspace(start_config[5],end_config[5],segmentNum)
        x7=numpy.linspace(start_config[6],end_config[6],segmentNum)

        for i in range(0, len(x1)):

            newconfig = [x1[i], x2[i], x3[i], x4[i], x5[i], x6[i], x7[i]]

            self.robot.SetActiveDOFValues(newconfig)
            env = self.robot.GetEnv()

            # check collision
            isCollision = env.CheckCollision(env.GetBodies()[0], env.GetBodies()[1])
            isSelfCollision = self.robot.CheckSelfCollision()

            if isCollision or isSelfCollision:
                return False

        return True
Example #26
    def cookie_save(self):
        t = time.strftime('%Y%m%dT%H%M')
        import IPython; IPython.embed()
        _f = '%s_cookiejar.pickle' % t
        with open(_f, 'wb') as f:
            pickle.dump(self.cj, f, pickle.HIGHEST_PROTOCOL)
        print('File saved as %s' % _f)
Example #27
def python_shell(options):
    logger = setup_logger("Robot", debug=options.verbose)

    def conn_callback(*args):
        sys.stdout.write(".")
        sys.stdout.flush()
        return True

    if options.shell == "ipython":
        import IPython
    else:
        import importlib
        sys.path.append(os.path.abspath(""))
        module_name, entrance_name = options.shell.rsplit(".", 1)
        module_instance = importlib.import_module(module_name)
        entrance = module_instance.__getattribute__(entrance_name)

    robot, device = connect_robot_helper(options.target, options.clientkey)

    if options.shell == "ipython":
        logger.info("----> READY")
        logger.info("""
      * Hint: Try 'robot?' and 'dir(robot)' to get more informations)\n""")
        IPython.embed()
        return 0
    else:
        return entrance(robot, device)
Example #28
    def do_merge(self):
        import cv2
        import numpy as np

        images = [cv2.imread(path.join(BUILD_FULL_PATH, graph)) for graph in self.graphs]
        shape = self.rows, self.cols
        try:
            widths = np.array([img.shape[1] for img in images]).reshape(shape).max(axis=0)
            heights = np.array([img.shape[0] for img in images]).reshape(shape).max(axis=1)
        except AttributeError:
            import IPython
            IPython.embed()
        width = widths.sum()
        height = heights.sum()

        merge_img = np.zeros((height, width, 3), np.uint8)

        cumsum_width = np.r_[0, np.cumsum(widths)]
        cumsum_height = np.r_[0, np.cumsum(heights)]

        for i, img in enumerate(images):
            row = i // self.cols
            col = i % self.cols
            y = cumsum_height[row]
            x = cumsum_width[col]
            h, w = img.shape[:2]
            merge_img[y:y+h, x:x+w] = img

        cv2.imwrite(path.join(BUILD_FULL_PATH, self.graphs[-1].replace(".png", ".merge.png")), merge_img)
Example #29
def expand_mlb_data(infile_data, pitcher=False, live=False):
  new_feature_dataframes = [infile_data]
  expanded_columns = []
  for feature_name, (func, columns) in get_expansion_targets(pitcher=pitcher, expanding_live=live):
    with Timer() as t:
      print('Expanding', feature_name, '(' + ', '.join(columns) + ')...')
      raw_data = [func(row) for index, row in infile_data.iterrows()]
      raw_columns = encode_names(feature_name, columns)
      expanded_columns += raw_columns
      try:
        new_feature_data = pd.DataFrame(raw_data,
                                      index=infile_data.index,
                                      columns=raw_columns)
      except AssertionError as ex:
        print('Debugging assertion error -- probably no data for some featurizer was loaded. '
              'I suspect Numberfire scraping needs to happen!')
        import IPython
        IPython.embed()
      except TypeError as ex:
        print('Debugging type error')
        import IPython
        IPython.embed()

      new_feature_dataframes.append(new_feature_data)
    print('  took %d seconds' % t.elapsed)
  expanded_data = pd.concat(new_feature_dataframes, axis=1)
  # After doing all of that concatenation the index is super weird so just reset it
  expanded_data.reset_index(drop=True, inplace=True)
  # Transform categorical variables to indicator variables -- but only for expanded discrete columns.
  # May need to tweak how this list is generated in the future.
  categorical_cols = [c for c in expanded_columns if expanded_data[c].dtype.name == 'object']
  expanded_discretized = pd.get_dummies(expanded_data, prefix_sep='=', columns=categorical_cols)
  return expanded_discretized
Example #30
def some_func(x, y):

    try:
        return x / y
    except:
        IPython.embed()
        raise
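Note the ordering here: embed() runs inside the except block before the bare raise, so x, y and the in-flight exception are all inspectable in the shell, and the original traceback still propagates once the shell exits.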
Example #31
def ipython_kw_matches(text):
    """Match named ITK object's named parameters"""
    import IPython
    import itk
    import re
    import inspect
    import itkTemplate
    regexp = re.compile(r'''
                    '.*?' |  # single quoted strings or
                    ".*?" |  # double quoted strings or
                    \w+     |  # identifier
                    \S  # other characters
                    ''', re.VERBOSE | re.DOTALL)
    ip = IPython.get_ipython()
    if "." in text:  # a parameter cannot be dotted
        return []
    # 1. Find the nearest identifier that comes before an unclosed
    # parenthesis e.g. for "foo (1+bar(x), pa", the candidate is "foo".
    if ip.Completer.readline:
        textUntilCursor = ip.Completer.readline.get_line_buffer()[:ip.Completer.readline.get_endidx()]
    else:
        # IPython >= 5.0.0, which is based on the Python Prompt Toolkit
        textUntilCursor = ip.Completer.text_until_cursor

    tokens = regexp.findall(textUntilCursor)
    tokens.reverse()
    iterTokens = iter(tokens)
    openPar = 0
    for token in iterTokens:
        if token == ')':
            openPar -= 1
        elif token == '(':
            openPar += 1
            if openPar > 0:
                # found the last unclosed parenthesis
                break
    else:
        return []
    # 2. Concatenate dotted names ("foo.bar" for "foo.bar(x, pa" )
    ids = []
    isId = re.compile(r'\w+$').match
    while True:
        try:
            ids.append(next(iterTokens))
            if not isId(ids[-1]):
                ids.pop()
                break
            if next(iterTokens) != '.':
                break
        except StopIteration:
            break
    # lookup the candidate callable matches either using global_matches
    # or attr_matches for dotted names
    if len(ids) == 1:
        callableMatches = ip.Completer.global_matches(ids[0])
    else:
        callableMatches = ip.Completer.attr_matches('.'.join(ids[::-1]))
    argMatches = []
    for callableMatch in callableMatches:
        # drop the .New at this end, so we can search in the class members
        if callableMatch.endswith(".New"):
            callableMatch = callableMatch[:-4]
        try:
            object = eval(callableMatch, ip.Completer.namespace)
            if isinstance(object, itkTemplate.itkTemplate):
                # this is a template - lets grab the first entry to search for
                # the methods
                object = list(object.values())[0]
            namedArgs = []
            isin = isinstance(object, itk.LightObject)
            if inspect.isclass(object):
                issub = issubclass(object, itk.LightObject)
            if isin or (inspect.isclass(object) and issub):
                namedArgs = [n[3:] for n in dir(object) if n.startswith("Set")]
        except Exception as e:
            print(e)
            continue
        for namedArg in namedArgs:
            if namedArg.startswith(text):
                argMatches.append(u"%s=" % namedArg)
    return argMatches
Example #32
#!venv/bin/python
from miniblog.utils import set_db_env_var
import IPython
from miniblog import app
from miniblog import db


IPython.embed()
Example #33
def main():
    """Main function"""

    # Parse the command line
    args = parse_args()
    # Initialize MPI
    rank, n_ranks = init_workers(args.distributed)

    # Load configuration
    config = load_config(args.config)
    output_dir = os.path.expandvars(config.get('output_dir', None))
    if rank == 0:
        os.makedirs(output_dir, exist_ok=True)
    else:
        output_dir = None

    # Setup logging
    config_logging(verbose=args.verbose, output_dir=output_dir)
    logging.info('Initialized rank %i out of %i', rank, n_ranks)
    if args.show_config and (rank == 0):
        logging.info('Command line config: %s' % args)
    if rank == 0:
        logging.info('Configuration: %s', config)
        logging.info('Saving job outputs to %s', output_dir)

    # Load the datasets
    train_data_loader, valid_data_loader = get_data_loaders(
        distributed=args.distributed, **config['data'])
    logging.info('Loaded %g training samples', len(train_data_loader.dataset))
    if valid_data_loader is not None:
        logging.info('Loaded %g validation samples',
                     len(valid_data_loader.dataset))

    # Load the trainer
    trainer = get_trainer(distributed=args.distributed,
                          output_dir=output_dir,
                          device=args.device,
                          **config['trainer'])
    # Build the model and optimizer
    trainer.build_model(**config.get('model', {}))
    if rank == 0:
        trainer.print_model_summary()

    # Run the training
    summary = trainer.train(train_data_loader=train_data_loader,
                            valid_data_loader=valid_data_loader,
                            **config['training'])
    if rank == 0:
        trainer.write_summaries()

    # Print some conclusions
    n_train_samples = len(train_data_loader.sampler)
    logging.info('Finished training')
    train_time = np.mean(summary['train_time'])
    logging.info('Train samples %g time %g s rate %g samples/s',
                 n_train_samples, train_time, n_train_samples / train_time)
    if valid_data_loader is not None:
        n_valid_samples = len(valid_data_loader.sampler)
        valid_time = np.mean(summary['valid_time'])
        logging.info('Valid samples %g time %g s rate %g samples/s',
                     n_valid_samples, valid_time, n_valid_samples / valid_time)

    # Drop to IPython interactive shell
    if args.interactive and (rank == 0):
        logging.info('Starting IPython interactive session')
        import IPython
        IPython.embed()

    if rank == 0:
        logging.info('All done!')
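Gating the shell behind a command-line flag, as above, keeps batch runs non-interactive while still letting a developer inspect the trained state on demand. The core of the pattern as a sketch (the flag name mirrors the example; the summary dict is a stand-in):

import argparse

parser = argparse.ArgumentParser()
parser.add_argument('--interactive', action='store_true')
args = parser.parse_args()

summary = {'train_time': [1.0, 2.0]}  # stand-in for real training output

if args.interactive:
    import IPython
    IPython.embed()  # summary and the rest of the locals are in scope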
Example #34
def log(msg):
    print('RECEIVE:', msg['FromUserName'], 'TYPE:', msg['Type'],
          'CONTENT:', msg['Content'])
    if msg['Type'] == 'Init':
        return
    IPython.embed()
Example #35
    def render_msg(self, msg):
        """ render a message, return the html block"""
        # TODO for chatroom, add nickname on avatar
        sender = u'you ' + msg.talker if not msg.isSend else 'me'
        format_dict = {'sender_label': sender,
                       'time': msg.createTime }
        if(not msg.isSend and msg.is_chatroom()):
            format_dict['nickname'] = '>\n       <pre align=\'left\'>'+msg.talker_nickname+'</pre'
        else:
            format_dict['nickname'] = ' '

        def fallback():
            template = TEMPLATES[TYPE_MSG]
            content = msg.msg_str()
            format_dict['content'] = self.smiley.replace_smileycode(content)
            return template.format(**format_dict)

        template = TEMPLATES.get(msg.type)
        if msg.type == TYPE_SPEAK:
            audio_str, duration = self.res.get_voice_mp3(msg.imgPath)
            format_dict['voice_duration'] = duration
            format_dict['voice_str'] = audio_str
            return template.format(**format_dict)
        elif msg.type == TYPE_IMG:
            # imgPath was original THUMBNAIL_DIRPATH://th_xxxxxxxxx
            imgpath = msg.imgPath.split('_')[-1]
            if not imgpath:
                logger.warn('No imgpath in an image message. Perhaps a bug in wechat.')
                return fallback()
            bigimgpath = self.parser.imginfo.get(msg.msgSvrId)
            fnames = [k for k in [imgpath, bigimgpath] if k]
            img = self.res.get_img(fnames)
            if not img:
                logger.warn("No image thumbnail found for {}".format(imgpath))
                return fallback()
            # TODO do not show fancybox when no bigimg found
            format_dict['img'] = (img, 'jpeg')
            return template.format(**format_dict)
        elif msg.type == TYPE_EMOJI or msg.type == TYPE_CUSTOM_EMOJI:
            if 'emoticonmd5' in msg.content:
                pq = PyQuery(msg.content)
                md5 = pq('emoticonmd5').text()
            else:
                md5 = msg.imgPath
                # TODO md5 could exist in both.
                # first is emoji md5, second is image2/ md5
                # can use fallback here.
            if md5:
                emoji_img, format = self.res.get_emoji_by_md5(md5)
                format_dict['emoji_format'] = format
                format_dict['emoji_img'] = emoji_img
            else:
                import IPython as IP; IP.embed()
            return template.format(**format_dict)
        elif msg.type == TYPE_LINK:
            content = msg.msg_str()
            # TODO show a short link with long href, if link too long
            if content.startswith(u'URL:'):
                url = content[4:]
                content = u'URL:<a target="_blank" href="{0}">{0}</a>'.format(url)
                format_dict['content'] = content
                return template.format(**format_dict)
        elif msg.type == TYPE_VIDEO_FILE:
            video = self.res.get_video(msg.imgPath)
            if video.endswith(".mp4"):
                video_str = get_file_b64(video)
                format_dict["video_str"] = video_str
                return template.format(**format_dict)
            elif video.endswith(".jpg"):
                # only has thumbnail
                image_str = get_file_b64(video)
                format_dict["img"] = (image_str, 'jpeg')
                return TEMPLATES[TYPE_IMG].format(**format_dict)
            # fallback
            format_dict['content'] = f"VIDEO FILE {msg.imgPath}"
            return TEMPLATES_FILES[TYPE_MSG].format(**format_dict)
        elif msg.type == TYPE_WX_VIDEO:
            # TODO: fetch video from resource
            return fallback()
        return fallback()
Example #36
if __name__ == "__main__":
    interactive = '-ni' not in sys.argv
    figpath = "wrench_cone.pdf"
    cone = pyfme.Cone(T, M)
    dag = pyfme.ReductionDAG(cone)
    dag.savefig(figpath)
    if interactive:
        print("First, open the file '%s' in your favorite PDF viewer." % figpath)
        input("Then, press [Enter] ")

    pivot_seq = [(0, 0), (1, 0), (2, 0), (3, 0), (4, 0), (5, 0)]
    for (node_id, pivot_col) in pivot_seq:
        if interactive:
            print("\nMake sure you refresh '%s' in your PDF viewer." % figpath)
            input("Press [Enter] to call pivot(%d, %d) " % (
                node_id, pivot_col))
        dag.pivot(node_id, pivot_col)
        dag.savefig(figpath)

    T = dag.nodes[-1].cone.get_matrix()
    assert T.shape[1] == 6  # output is a 6D wrench

    print "\nCalculations complete!"
    print "The resulting wrench cone has %d inequalities." % T.shape[0]
    print "Its formula is given by:"
    print repr(T)

    if IPython.get_ipython() is None:
        IPython.embed()
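The final guard above is worth noting: IPython.get_ipython() returns None when the code is not already running under IPython, so the script only embeds when launched as a plain Python process and never nests a shell inside an existing session. Reduced to a sketch:

import IPython

if IPython.get_ipython() is None:
    IPython.embed()  # plain `python script.py` run: open a shell
# else: already inside IPython/Jupyter, so do nothing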
Example #37
    def __init__(self, imageOrFilter, Label=False, Title=None):
        import tempfile
        import itk
        import os
        import platform
        # get some data from the environment
        command = os.environ.get("WRAPITK_SHOW2D_COMMAND")
        if command is None:
            if platform.system() == "Darwin":
                command = (
                    "open -a ImageJ -n --args -eval 'open(\"%(image)s\"); "
                    "run (\"View 100%%\"); rename(\"%(title)s\");'")
            else:
                command = (
                    "imagej %(image)s -run 'View 100%%' -eval "
                    "'rename(\"%(title)s\")' &")

        label_command = os.environ.get("WRAPITK_SHOW2D_LABEL_COMMAND")
        if label_command is None:
            if platform.system() == "Darwin":
                label_command = (
                    "open -a ImageJ -n --args -eval 'open(\"%(image)s\"); "
                    "run (\"View 100%%\"); rename(\"%(title)s\"); "
                    "run(\"3-3-2 RGB\");'")
            else:
                label_command = (
                    "imagej %(image)s -run 'View 100%%' -eval "
                    "'rename(\"%(title)s\")' -run '3-3-2 RGB' &")

        compress = os.environ.get(
            "WRAPITK_SHOW2D_COMPRESS",
            "true").lower() in ["on", "true", "yes", "1"]
        extension = os.environ.get("WRAPITK_SHOW2D_EXTENSION", ".tif")

        # use the tempfile module to get an unused file name and to put
        # the file at the right place
        self.__tmpFile__ = tempfile.NamedTemporaryFile(suffix=extension)
        # get an updated image
        img = output(imageOrFilter)
        img.UpdateOutputInformation()
        img.Update()
        if Title is None:
            # try to generate a title
            s = img.GetSource()
            if s:
                s = itk.down_cast(s)
                if hasattr(img, "GetSourceOutputIndex"):
                    o = '[%s]' % img.GetSourceOutputIndex()
                elif hasattr(img, "GetSourceOutputName"):
                    o = '[%s]' % img.GetSourceOutputName()
                else:
                    o = ""
                Title = "%s%s" % (s.__class__.__name__, o)
            else:
                Title = img.__class__.__name__
            try:
                import IPython
                ip = IPython.get_ipython()
                if ip is not None:
                    names = []
                    ref = imageOrFilter
                    if s:
                        ref = s
                    for n, v in ip.user_ns.items():
                        if isinstance(v, itk.LightObject) and v == ref:
                            names.append(n)
                    if names != []:
                        Title = ", ".join(names) + " - " + Title
            except ImportError:
                # just do nothing
                pass
        # change the LabelMaps to an Image, so we can look at them easily
        if 'LabelMap' in dir(itk) and img.GetNameOfClass() == 'LabelMap':
            # retrieve the biggest label in the label map
            maxLabel = img.GetNthLabelObject(
                img.GetNumberOfLabelObjects() - 1).GetLabel()
            # search for a filter to convert the label map
            lab = itk.LabelMapToLabelImageFilter.keys()
            label_image_type = sorted(
                params[1] for params in lab
                if params[0] == class_(img)
                and itk.NumericTraits[itk.template(params[1])[1][0]].max() >= maxLabel)[0]
            convert = itk.LabelMapToLabelImageFilter[
                img, label_image_type].New(img)
            convert.Update()
            img = convert.GetOutput()
            # this is a label image - force the parameter
            Label = True
        write(img, self.__tmpFile__.name, compress)
        # now run imview
        import os
        if Label:
            os.system(
                label_command %
                {"image": self.__tmpFile__.name, "title": Title})
        else:
            os.system(
                command %
                {"image": self.__tmpFile__.name, "title": Title})
Example #38
    def __init__(self,
                 args=None,
                 description=None,
                 epilog=None,
                 examples=None,
                 usage=None,
                 interspersed=False,
                 nr=None,
                 changeVersion=True,
                 exactNr=True,
                 subcommands=None,
                 inputApp=None,
                 localConfigurationFile=None,
                 findLocalConfigurationFile=None,
                 allowCurses=True,
                 **kwArgs):
        """
        :param description: description of the command
        :param epilog: text to be printed after the options-help
        :param examples: usage examples to be printed after the epilog
        :param usage: Usage
        :param interspersed: Is the command line allowed to be interspersed (options after the arguments)
        :param args: Command line arguments when using the Application as a 'class' from a script
        :param nr: Number of required arguments
        :param changeVersion: May this application change the version of OF used?
        :param exactNr: Must not have more than the required number of arguments
        :param subcommands: parse and use subcommands from the command line. Either True or a list with subcommands
        :param inputApp: Application with input data. Used to allow a 'pipe-like' behaviour if the class is used from a Script
        :param localConfigurationFile: Use this file (or list of files) as a local configuration
        :param findLocalConfigurationFile: Method to find a configuration file BEFORE the actual parameters are parsed
        :param allowCurses: This application can wrap the output in a curses-window
        """

        global _LocalConfigurationFile

        self.allowCurses = allowCurses
        self.cursesWindow = None

        if _LocalConfigurationFile is not None:
            configuration().addFile(_LocalConfigurationFile)

        if isinstance(localConfigurationFile, string_types):
            configuration().addFile(localConfigurationFile)
        elif localConfigurationFile is not None:
            for c in localConfigurationFile:
                configuration().addFile(c)

        if subcommands:
            self.subs = True
            if interspersed:
                self.error(
                    "Subcommand parser does not work with 'interspersed'")
            if subcommands == True:
                subcommands = []
            self.parser = SubcommandFoamOptionParser(args=args,
                                                     description=description,
                                                     epilog=epilog,
                                                     examples=examples,
                                                     usage=usage,
                                                     subcommands=subcommands)
            nr = None
            exactNr = False
        else:
            self.subs = False
            self.parser = FoamOptionParser(args=args,
                                           description=description,
                                           epilog=epilog,
                                           examples=examples,
                                           usage=usage,
                                           interspersed=interspersed)

        try:
            self.calledName = sys.argv[0]
        except AttributeError:
            self.calledName = "unknown"

        self.calledAsClass = (args is not None)
        if self.calledAsClass:
            try:
                self.calledName = self.__class__.__name__ + " used by " + sys.argv[
                    0]
            except AttributeError:
                self.calledName = self.__class__.__name__ + " used by unknown program"
            self.parser.prog = self.calledName
        elif not _LocalConfigurationFile and findLocalConfigurationFile:
            if args:
                usedArgs = args
            else:
                try:
                    usedArgs = sys.argv[1:]
                except AttributeError:
                    usedArgs = []
            _LocalConfigurationFile = findLocalConfigurationFile(usedArgs)
            if _LocalConfigurationFile and not path.exists(
                    _LocalConfigurationFile):
                # Fix functions that do not check for the existence
                _LocalConfigurationFile = None
            if _LocalConfigurationFile:
                configuration().addFile(_LocalConfigurationFile)

        self.generalOpts = None

        self.__appData = self.iDict()
        if inputApp:
            self.__appData["inputData"] = inputApp.getData()

        grp = OptionGroup(self.parser, "Default",
                          "Options common to all PyFoam-applications")

        if changeVersion:
            # the options are evaluated in Basics.FoamOptionParser
            grp.add_option(
                "--foamVersion",
                dest="foamVersion",
                default=None,
                help=
                "Change the OpenFOAM-version that is to be used. To get a list of know Foam-versions use the pyFoamVersion.py-utility"
            )
            if "WM_PROJECT_VERSION" in environ:
                grp.add_option("--currentFoamVersion",
                               dest="foamVersion",
                               const=environ["WM_PROJECT_VERSION"],
                               default=None,
                               action="store_const",
                               help="Use the current OpenFOAM-version " +
                               environ["WM_PROJECT_VERSION"])

            grp.add_option(
                "--force-32bit",
                dest="force32",
                default=False,
                action="store_true",
                help=
                "Forces the usage of a 32-bit-version if that version exists as 32 and 64 bit. Only used when --foamVersion is used"
            )
            grp.add_option(
                "--force-64bit",
                dest="force64",
                default=False,
                action="store_true",
                help=
                "Forces the usage of a 64-bit-version if that version exists as 32 and 64 bit. Only used when --foamVersion is used"
            )
            grp.add_option(
                "--force-debug",
                dest="compileOption",
                const="Debug",
                default=None,
                action="store_const",
                help=
                "Forces the value Debug for the WM_COMPILE_OPTION. Only used when --foamVersion is used"
            )
            grp.add_option(
                "--force-opt",
                dest="compileOption",
                const="Opt",
                default=None,
                action="store_const",
                help=
                "Forces the value Opt for the WM_COMPILE_OPTION. Only used when --foamVersion is used"
            )
            grp.add_option(
                "--force-system-compiler",
                dest="foamCompiler",
                const="system",
                default=None,
                action="store_const",
                help=
                "Force using a 'system' compiler (compiler installed in the system)"
            )
            grp.add_option(
                "--force-openfoam-compiler",
                dest="foamCompiler",
                const="OpenFOAM",
                default=None,
                action="store_const",
                help=
                "Force using a 'OpenFOAM' compiler (compiler installed in ThirdParty)"
            )
            grp.add_option(
                "--force-compiler",
                dest="wmCompiler",
                default=None,
                action="store",
                help="Overwrite value for WM_COMPILER (for instance Gcc47 ...)"
            )

        grp.add_option(
            "--psyco-accelerated",
            dest="psyco",
            default=False,
            action="store_true",
            help=
            "Accelerate the script using the psyco-library (EXPERIMENTAL and requires a separatly installed psyco)"
        )
        grp.add_option(
            "--profile-python",
            dest="profilePython",
            default=False,
            action="store_true",
            help=
            "Profile the python-script (not the OpenFOAM-program) - mostly of use for developers"
        )
        grp.add_option(
            "--profile-cpython",
            dest="profileCPython",
            default=False,
            action="store_true",
            help=
            "Profile the python-script (not the OpenFOAM-program) using the better cProfile library - mostly of use for developers"
        )
        grp.add_option(
            "--profile-hotshot",
            dest="profileHotshot",
            default=False,
            action="store_true",
            help=
            "Profile the python-script using the hotshot-library (not the OpenFOAM-program) - mostly of use for developers - DEPRECATED as this library will by removed from standard python and is no longer supported"
        )
        grp.add_option(
            "--profile-line-profiler",
            dest="profileLineProfiler",
            default=False,
            action="store_true",
            help=
            "Profile the python-script using the line_profiler-library (not the OpenFOAM-program) - mostly of use for developers - EXPERIMENTAL"
        )

        dbg = OptionGroup(
            self.parser, "Debugging",
            "Options mainly used for debugging PyFoam-Utilities")

        dbg.add_option(
            "--location-of-local-config",
            dest="locationOfLocalConfig",
            default=False,
            action="store_true",
            help=
            "Prints the location of the found LocalConfigPyFoam-file that is used (if any)"
        )
        dbg.add_option(
            "--traceback-on-error",
            dest="traceback",
            default=False,
            action="store_true",
            help=
            "Prints a traceback when an error is encountered (for debugging)")
        dbg.add_option(
            "--interactive-debugger",
            dest="interactiveDebug",
            default=False,
            action="store_true",
            help=
            "In case of an exception start the interactive debugger PDB. Also implies --traceback-on-error"
        )
        dbg.add_option(
            "--catch-USR1-signal",
            dest="catchUSR1Signal",
            default=False,
            action="store_true",
            help=
            "If the USR1-signal is sent to the application with 'kill -USR1 <pid>' the application ens and prints a traceback. If interactive debugging is enabled then the debugger is entered. Use to investigate hangups"
        )
        dbg.add_option("--also-catch-TERM-signal",
                       dest="alsoCatchTERMsignal",
                       default=False,
                       action="store_true",
                       help="In addition to USR1 catch the regular TERM-kill")
        dbg.add_option(
            "--keyboard-interrupt-trace",
            dest="keyboardInterrupTrace",
            default=False,
            action="store_true",
            help=
            "Make the application behave like with --catch-USR1-signal if <Ctrl>-C is pressed"
        )
        dbg.add_option(
            "--syntax-error-debugger",
            dest="syntaxErrorDebugger",
            default=False,
            action="store_true",
            help=
            "Only makes sense with --interactive-debugger: Do interactive debugging even when a syntax error was encountered"
        )
        dbg.add_option(
            "--i-am-a-developer",
            dest="developerMode",
            default=False,
            action="store_true",
            help=
            "Switch on all of the above options. Usually this makes only sense if you're developing PyFoam'"
        )
        dbg.add_option(
            "--interactive-after-execution",
            dest="interacticeAfterExecution",
            default=False,
            action="store_true",
            help=
            "Instead of ending execution drop to an interactive shell (which is IPython if possible)"
        )

        grp.add_option(
            "--dump-application-data",
            dest="dumpAppData",
            default=False,
            action="store_true",
            help=
            "Print the dictionary with the generated application data after running"
        )
        grp.add_option("--pickle-application-data",
                       dest="pickleApplicationData",
                       default=None,
                       action="store",
                       type="string",
                       help="""\
Write a pickled version of the application data to a file. If the
filename given is 'stdout' then the pickled data is written to
stdout. The usual standard output is then captured and added to the
application data as an entry 'stdout' (same for 'stderr'). Be careful
with this option for commands that generate a lot of output""")

        self.parser.add_option_group(grp)
        self.parser.add_option_group(dbg)

        if self.allowCurses:
            crs = OptionGroup(
                self.parser, "Curses",
                "Wrap and color output using the curses-library")
            crs.add_option("--curses-wrap",
                           dest="cursesWrap",
                           default=False,
                           action="store_true",
                           help="Switch on curses wrapping (if possible)")
            crs.add_option(
                "--output-buffer-curses",
                dest="outputBufferCurses",
                default=2000,
                action="store",
                type="int",
                help=
                "Number of lines that the curses buffer should store. Default: %default"
            )
            crs.add_option(
                "--sleep-time-end-curses",
                dest="sleepTimeEndCurses",
                default=0,
                action="store",
                type="int",
                help=
                "Number of seconds to sleep before dropping back onto the regular terminal when the command ended. Default: %default"
            )
            crs.add_option(
                "--no-powerline-font",
                dest="powerlineFont",
                default=True,
                action="store_false",
                help=
                "The current terminal does not use a powerline-font and therefor the delimiters look weird (Powerline only works with Python3 because it needs native Unicode-support)"
            )

            self.parser.add_option_group(crs)

        self.addOptions()
        self.parser.parse(nr=nr, exactNr=exactNr)
        ensureDynamicLibraries()
        if len(kwArgs) > 0:
            self.parser.processKeywordArguments(kwArgs)
        self.opts = self.parser.getOptions()
        if self.subs:
            self.cmdname = self.parser.cmdname

        if self.opts.locationOfLocalConfig:
            if _LocalConfigurationFile:
                print_("Local configuration found at", _LocalConfigurationFile)
            else:
                print_("No LocalConfigPyFoam-file found")

        if "WM_PROJECT_VERSION" not in environ:
            warning(
                "$WM_PROJECT_VERSION unset. PyFoam will not be able to determine the OpenFOAM-version and behave strangely"
            )
        if self.opts.developerMode:
            self.opts.syntaxErrorDebugger = True
            self.opts.keyboardInterrupTrace = True
            self.opts.alsoCatchTERMsignal = True
            self.opts.catchUSR1Signal = True
            self.opts.interactiveDebug = True
            self.opts.traceback = True

        if self.opts.interactiveDebug:
            sys.excepthook = lambda a1, a2, a3: pyFoamExceptionHook(
                a1, a2, a3, debugOnSyntaxError=self.opts.syntaxErrorDebugger)
            self.opts.traceback = True
        if self.opts.catchUSR1Signal:
            import signal
            signal.signal(signal.SIGUSR1, pyFoamSIG1HandlerPrintStack)
            if self.opts.alsoCatchTERMsignal:
                signal.signal(signal.SIGTERM, pyFoamSIG1HandlerPrintStack)
            self.opts.traceback = True

        if self.opts.keyboardInterrupTrace:
            import signal
            signal.signal(signal.SIGINT, pyFoamSIG1HandlerPrintStack)
            self.opts.traceback = True

        if self.opts.psyco:
            try:
                import psyco
                psyco.full()
            except ImportError:
                warning("No psyco installed. Continuing without acceleration")
        profOptions = sum([
            self.opts.profilePython, self.opts.profileCPython,
            self.opts.profileHotshot, self.opts.profileLineProfiler
        ])
        if profOptions > 0:
            if profOptions > 1:
                self.error(
                    "Only one profiling option can be specified at a time")
            print_("Running profiled")
            fnAdd = ""
            if self.opts.profilePython:
                import profile
            elif self.opts.profileCPython:
                import cProfile as profile
            elif self.opts.profileLineProfiler:
                import line_profiler
                profile = line_profiler.LineProfiler(self.run)
                import PyFoam.RunDictionary.SolutionDirectory
                profile.add_module(PyFoam.RunDictionary.SolutionDirectory)
                fnAdd = ".lineProfiler"
            else:
                import hotshot
            profileData = path.basename(sys.argv[0]) + fnAdd + ".profile"
            if self.opts.profilePython or self.opts.profileCPython:
                profile.runctx('self.run()', None, {'self': self}, profileData)
                print_("Reading python profile")
                import pstats
                stats = pstats.Stats(profileData)
            elif self.opts.profileLineProfiler:
                import inspect
                nr = profile.add_module(inspect.getmodule(self))
                self.warning("Adding", nr, "functions for line-profiling")
                profile.runctx('self.run()', None, {'self': self})
                profile.dump_stats(profileData)
                profile.print_stats(open(profileData + ".printed", "w"))
                stats = None
            else:
                profileData += ".hotshot"
                prof = hotshot.Profile(profileData)
                prof.runctx('self.run()', {}, {'self': self})
                print_("Writing and reading hotshot profile")
                prof.close()
                import hotshot.stats
                stats = hotshot.stats.load(profileData)
            if stats:
                stats.strip_dirs()
                stats.sort_stats('time', 'calls')
                stats.print_stats(20)

            self.parser.restoreEnvironment()
        else:
            try:
                doCurses = False
                if self.allowCurses:
                    if self.opts.cursesWrap:
                        if not sys.__stdout__.isatty():
                            self.warning(
                                "Stdout is not a terminal. Not using curses for output wrapping"
                            )
                        elif getattr(self.opts, "progress", False):
                            self.warning("Not using curses with progress")
                        elif getattr(self.opts, "silent", False):
                            self.warning("Not using curses with silent")
                        elif not hasCurses:
                            self.warning("Python has no curses library")
                        else:
                            doCurses = True

                if self.opts.pickleApplicationData == "stdout":
                    # Redirect output to memory
                    from PyFoam.ThirdParty.six.moves import StringIO

                    oldStdout = sys.stdout
                    oldStderr = sys.stderr
                    sys.stdout = StringIO()
                    sys.stderr = StringIO()
                    doCurses = False

                if doCurses:
                    result = cursesWrap(
                        self,
                        bufflen=self.opts.outputBufferCurses,
                        powerline=self.opts.powerlineFont,
                        endSleepTime=self.opts.sleepTimeEndCurses)
                else:
                    result = self.run()

                # do this at the earliest possible moment
                self.parser.restoreEnvironment()

                if self.opts.pickleApplicationData == "stdout":
                    # restore stdout
                    self.__appData["stdout"] = sys.stdout.getvalue()
                    self.__appData["stderr"] = sys.stderr.getvalue()
                    sys.stdout = oldStdout
                    sys.stderr = oldStderr

                if self.opts.pickleApplicationData:
                    from PyFoam.ThirdParty.six.moves import cPickle as pickle
                    if self.opts.pickleApplicationData == "stdout":
                        pick = pickle.Pickler(sys.stdout)
                    else:
                        pick = pickle.Pickler(
                            open(self.opts.pickleApplicationData, 'wb'))
                    pick.dump(self.__appData)
                    del pick
                if self.opts.dumpAppData:
                    import pprint
                    print_("Application data:")
                    printer = pprint.PrettyPrinter()
                    printer.pprint(self.__appData)

                if self.opts.interacticeAfterExecution:
                    print_("\nDropping to interactive shell ... ", end="")
                    ns = {}
                    ns.update(locals())
                    ns.update(globals())
                    try:
                        import IPython
                        print_("found IPython ...", end="")
                        if "embed" in dir(IPython):
                            print_("up-to-date IPython\n")
                            IPython.embed(user_ns=ns)
                        else:
                            print_("old-school IPython\n")
                            IPython.Shell.IPythonShellEmbed(argv="",
                                                            user_ns=ns)()

                    except ImportError:
                        print_("no IPython -> regular shell\n")
                        from code import InteractiveConsole
                        c = InteractiveConsole(ns)
                        c.interact()
                    print_("\nEnding interactive shell\n")
                return result
            except PyFoamException:
                e = sys.exc_info()[1]
                if self.opts.traceback or self.calledAsClass:
                    raise
                else:
                    self.errorPrint(str(e))
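The example above wires sys.excepthook so that an uncaught exception prints a traceback and, when interactive debugging is requested, drops into a debugger. A minimal standalone sketch of that pattern, using plain pdb rather than PyFoam's own hook:

# Sketch: route uncaught exceptions into a post-mortem debugger.
import sys
import traceback
import pdb

def debugger_hook(exc_type, exc_value, exc_tb):
    traceback.print_exception(exc_type, exc_value, exc_tb)
    pdb.post_mortem(exc_tb)  # inspect the frame where the exception occurred

sys.excepthook = debugger_hook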
Example #39
0
    elif hash_algo == "sha1":
        return sha1(rand_str).hexdigest()
    elif hash_algo == "sha224":
        return sha224(rand_str).hexdigest()
    elif hash_algo == "sha256":
        return sha256(rand_str).hexdigest()
    elif hash_algo == "sha384":
        return sha384(rand_str).hexdigest()
    else:
        return sha512(rand_str).hexdigest()

# serverHost, serverPort and the regex 'pattern' are defined earlier in the original script
sockobj = socket(AF_INET, SOCK_STREAM)
sockobj.connect((serverHost, serverPort)) 

# First response with greetings
data = sockobj.recv(1024).strip()
print data
hash_algo = re.search(pattern, data.split('\r\n')[-1]).group('hash')
rand_str = re.search(pattern, data.split('\r\n')[-1]).group('string')
sockobj.sendall(get_hash(hash_algo, rand_str) + '\r\n')

while True:
    data = sockobj.recv(1024).strip()
    print data
    try:
        hash_algo = re.search(pattern, data).group('hash')
        rand_str = re.search(pattern, data).group('string')
        sockobj.sendall(get_hash(hash_algo, rand_str) + '\r\n')
    except Exception:
        import IPython; IPython.embed()
    
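The if/elif chain reconstructed above can also be collapsed with hashlib.new, which looks the algorithm up by name; a sketch (note that under Python 3 the input must be bytes):

# Sketch: name-based hash dispatch via hashlib.new (Python 3 flavour).
import hashlib

def get_hash(hash_algo, rand_str):
    algo = hash_algo if hash_algo in hashlib.algorithms_available else "sha512"
    return hashlib.new(algo, rand_str.encode()).hexdigest()

print(get_hash("sha256", "challenge-string"))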
Example #40
0
            if isinstance(object, itkTemplate.itkTemplate):
                # this is a template - lets grab the first entry to search for
                # the methods
                object = object.values()[0]
            namedArgs = []
            isin = isinstance(object, itk.LightObject)
            if inspect.isclass(object):
                issub = issubclass(object, itk.LightObject)
            if isin or (inspect.isclass(object) and issub):
                namedArgs = [n[3:] for n in dir(object) if n.startswith("Set")]
        except Exception as e:
            print(e)
            continue
        for namedArg in namedArgs:
            if namedArg.startswith(text):
                argMatches.append(u"%s=" % namedArg)
    return argMatches

# install progress callback and custom completer if we are in ipython
# interpreter
try:
    import itkConfig
    import IPython
    if IPython.get_ipython():
        IPython.get_ipython().Completer.matchers.insert(0, ipython_kw_matches)
    # some cleanup
    del itkConfig, IPython
except (ImportError, AttributeError):
    # fail silently
    pass
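The matchers.insert(0, ...) call above is the generic recipe for plugging a custom completer into a running IPython session: a matcher receives the text being completed and returns candidate strings. A hedged sketch with a made-up matcher (newer IPython versions also expose a custom_matchers list for the same purpose):

# Sketch: register a toy completer matcher in a running IPython session.
import IPython

def greeting_matches(text):
    # offer a single made-up candidate whenever the word starts with "gre"
    return ["greeting="] if text.startswith("gre") else []

ip = IPython.get_ipython()
if ip is not None:  # only meaningful inside IPython
    ip.Completer.matchers.insert(0, greeting_matches)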
Example #41
0
def main():
    logging.info("dkm_api_sender_uart start")
    IPython.embed()
Example #42
0
def main(argv):
    parser = ArgumentParser(usage=__doc__.lstrip())
    parser.add_argument("--verbose",
                        "-v",
                        action="count",
                        default=1,
                        help="more verbosity")
    parser.add_argument(
        "--no-build",
        "-n",
        action="store_true",
        default=False,
        help="do not build the project (use system installed version)")
    parser.add_argument("--build-only",
                        "-b",
                        action="store_true",
                        default=False,
                        help="just build, do not run any tests")
    parser.add_argument("--doctests",
                        action="store_true",
                        default=False,
                        help="Run doctests in module")
    parser.add_argument(
        "--coverage",
        action="store_true",
        default=False,
        help=("report coverage of project code. HTML output goes "
              "under build/coverage"))
    parser.add_argument(
        "--gcov",
        action="store_true",
        default=False,
        help=("enable C code coverage via gcov (requires GCC). "
              "gcov output goes to build/**/*.gc*"))
    parser.add_argument("--lcov-html",
                        action="store_true",
                        default=False,
                        help=("produce HTML for C code coverage information "
                              "from a previous run with --gcov. "
                              "HTML output goes to build/lcov/"))
    parser.add_argument("--mode",
                        "-m",
                        default="fast",
                        help="'fast', 'full', or something that could be "
                        "passed to nosetests -A [default: fast]")
    parser.add_argument(
        "--submodule",
        "-s",
        default=None,
        help="Submodule whose tests to run (cluster, constants, ...)")
    parser.add_argument("--pythonpath",
                        "-p",
                        default=None,
                        help="Paths to prepend to PYTHONPATH")
    parser.add_argument("--tests",
                        "-t",
                        action='append',
                        help="Specify tests to run")
    parser.add_argument("--python",
                        action="store_true",
                        help="Start a Python shell with PYTHONPATH set")
    parser.add_argument("--ipython",
                        "-i",
                        action="store_true",
                        help="Start IPython shell with PYTHONPATH set")
    parser.add_argument("--shell",
                        action="store_true",
                        help="Start Unix shell with PYTHONPATH set")
    parser.add_argument("--debug",
                        "-g",
                        action="store_true",
                        help="Debug build")
    parser.add_argument("--parallel",
                        "-j",
                        type=int,
                        default=0,
                        help="Number of parallel jobs during build")
    parser.add_argument("--show-build-log",
                        action="store_true",
                        help="Show build output rather than using a log file")
    parser.add_argument("--bench",
                        action="store_true",
                        help="Run benchmark suite instead of test suite")
    parser.add_argument(
        "--bench-compare",
        action="store",
        metavar="COMMIT",
        help=("Compare benchmark results to COMMIT. "
              "Note that you need to commit your changes first!"))
    parser.add_argument("--raise-warnings",
                        default=None,
                        type=str,
                        choices=('develop', 'release'),
                        help="if 'develop', warnings are treated as errors")
    parser.add_argument("args",
                        metavar="ARGS",
                        default=[],
                        nargs=REMAINDER,
                        help="Arguments to pass to Nose, Python or shell")
    args = parser.parse_args(argv)

    if args.bench_compare:
        args.bench = True
        args.no_build = True  # ASV does the building

    if args.lcov_html:
        # generate C code coverage output
        lcov_generate()
        sys.exit(0)

    if args.pythonpath:
        for p in reversed(args.pythonpath.split(os.pathsep)):
            sys.path.insert(0, p)

    if args.gcov:
        gcov_reset_counters()

    if args.debug and args.bench:
        print("*** Benchmarks should not be run against debug "
              "version; remove -g flag ***")

    if not args.no_build:
        # we need the noarch path in case the package is pure python.
        site_dir, site_dir_noarch = build_project(args)
        sys.path.insert(0, site_dir)
        sys.path.insert(0, site_dir_noarch)
        os.environ['PYTHONPATH'] = site_dir + ':' + site_dir_noarch

    extra_argv = args.args[:]
    if extra_argv and extra_argv[0] == '--':
        extra_argv = extra_argv[1:]

    if args.python:
        # Debugging issues with warnings is much easier if you can see them
        print("Enabling display of all warnings")
        import warnings
        import types

        warnings.filterwarnings("always")
        if extra_argv:
            # Don't use subprocess, since we don't want to include the
            # current path in PYTHONPATH.
            sys.argv = extra_argv
            with open(extra_argv[0], 'r') as f:
                script = f.read()
            sys.modules['__main__'] = types.ModuleType('__main__')
            ns = dict(__name__='__main__', __file__=extra_argv[0])
            exec_(script, ns)
            sys.exit(0)
        else:
            import code
            code.interact()
            sys.exit(0)

    if args.ipython:
        # Debugging issues with warnings is much easier if you can see them
        print("Enabling display of all warnings and pre-importing mcfit")
        import warnings
        warnings.filterwarnings("always")
        import IPython
        import mcfit
        IPython.embed(user_ns={"mcfit": mcfit})
        sys.exit(0)

    if args.shell:
        shell = os.environ.get('SHELL', 'sh')
        print("Spawning a Unix shell...")
        os.execv(shell, [shell] + extra_argv)
        sys.exit(1)

    if args.coverage:
        dst_dir = os.path.join(ROOT_DIR, 'build', 'coverage')
        fn = os.path.join(dst_dir, 'coverage_html.js')
        if os.path.isdir(dst_dir) and os.path.isfile(fn):
            shutil.rmtree(dst_dir)
        extra_argv += ['--cover-html', '--cover-html-dir=' + dst_dir]

    if args.bench:
        # Run ASV
        items = extra_argv
        if args.tests:
            items += args.tests
        if args.submodule:
            items += [args.submodule]

        bench_args = []
        for a in items:
            bench_args.extend(['--bench', a])

        if not args.bench_compare:
            cmd = ['asv', 'run', '-n', '-e', '--python=same'] + bench_args
            os.chdir(os.path.join(ROOT_DIR, 'benchmarks'))
            os.execvp(cmd[0], cmd)
            sys.exit(1)
        else:
            commits = [x.strip() for x in args.bench_compare.split(',')]
            if len(commits) == 1:
                commit_a = commits[0]
                commit_b = 'HEAD'
            elif len(commits) == 2:
                commit_a, commit_b = commits
            else:
                p.error("Too many commits to compare benchmarks for")

            # Check for uncommitted files
            if commit_b == 'HEAD':
                r1 = subprocess.call(
                    ['git', 'diff-index', '--quiet', '--cached', 'HEAD'])
                r2 = subprocess.call(['git', 'diff-files', '--quiet'])
                if r1 != 0 or r2 != 0:
                    print("*" * 80)
                    print("WARNING: you have uncommitted changes --- "
                          "these will NOT be benchmarked!")
                    print("*" * 80)

            # Fix commit ids (HEAD is local to current repo)
            p = subprocess.Popen(['git', 'rev-parse', commit_b],
                                 stdout=subprocess.PIPE)
            out, err = p.communicate()
            commit_b = out.strip()

            p = subprocess.Popen(['git', 'rev-parse', commit_a],
                                 stdout=subprocess.PIPE)
            out, err = p.communicate()
            commit_a = out.strip()

            cmd = [
                'asv', 'continuous', '-e', '-f', '1.05', commit_a, commit_b
            ] + bench_args
            os.chdir(os.path.join(ROOT_DIR, 'benchmarks'))
            os.execvp(cmd[0], cmd)
            sys.exit(1)

    test_dir = os.path.join(ROOT_DIR, 'build', 'test')

    if args.build_only:
        sys.exit(0)
    elif args.submodule:
        modname = PROJECT_MODULE + '.' + args.submodule
        try:
            __import__(modname)
            test = sys.modules[modname].test
        except (ImportError, KeyError, AttributeError):
            print("Cannot run tests for %s" % modname)
            sys.exit(2)
    elif args.tests:

        def fix_test_path(x):
            # fix up test path
            p = x.split(':')
            p[0] = os.path.relpath(os.path.abspath(p[0]), test_dir)
            return ':'.join(p)

        tests = [fix_test_path(x) for x in args.tests]

        def test(*a, **kw):
            extra_argv = kw.pop('extra_argv', ())
            extra_argv = extra_argv + tests[1:]
            kw['extra_argv'] = extra_argv
            from mcfit.testing import Tester
            return Tester(tests[0]).test(*a, **kw)
    else:
        __import__(PROJECT_MODULE)
        test = sys.modules[PROJECT_MODULE].test

    # Run the tests under build/test
    try:
        shutil.rmtree(test_dir)
    except OSError:
        pass
    try:
        os.makedirs(test_dir)
    except OSError:
        pass

    cwd = os.getcwd()
    try:
        os.chdir(test_dir)
        result = test(args.mode,
                      verbose=args.verbose,
                      extra_argv=extra_argv,
                      doctests=args.doctests,
                      raise_warnings=args.raise_warnings,
                      coverage=args.coverage)
    finally:
        os.chdir(cwd)

    if result.wasSuccessful():
        sys.exit(0)
    else:
        sys.exit(1)
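One subtle step in the --python branch above is running the target script inside a freshly created __main__ module, so that "if __name__ == '__main__':" guards fire as if the script had been launched directly. A minimal sketch of just that step (the script path is illustrative):

# Sketch: execute a script as a fresh __main__ module.
import sys
import types

script_path = "some_script.py"  # illustrative
with open(script_path) as f:
    source = f.read()
sys.modules['__main__'] = types.ModuleType('__main__')
ns = dict(__name__='__main__', __file__=script_path)
exec(compile(source, script_path, 'exec'), ns)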
Example #43
0
def app_console(config, dbname):

    app = create_app(db)  # create_app/db come from the enclosing module

    app.config['TESTING'] = True
    app.config['WTF_CSRF_ENABLED'] = False

    # Needed for making the console work in app request context
    ctx = app.test_request_context()
    ctx.push()
    # app.preprocess_request()

    # The test client. You can do .get and .post on all endpoints
    client = app.test_client()

    get = client.get
    post = client.post
    put = client.put
    patch = client.patch
    delete = client.delete

    # Helper method for sending JSON POST.
    def jpost(url, **kwargs):
        return client.post(url,
                           data=json.dumps(kwargs, default=dthandler),
                           content_type="application/json")

    def jput(url, **kwargs):
        return client.put(url,
                          data=json.dumps(kwargs, default=dthandler),
                          content_type="application/json")

    def jpatch(url, **kwargs):
        return client.patch(url,
                            data=json.dumps(kwargs, default=dthandler),
                            content_type="application/json")

    def jread(resp):
        return json.loads(resp.data)

    def rget(url, **kwargs):
        return jread(get(url, **kwargs))

    def rpost(url, **kwargs):
        return jread(post(url, **kwargs))

    def rjpost(url, **kwargs):
        return jread(jpost(url, **kwargs))

    # Use this in your code as `with login() as c:` and you can use
    # all the methods defined on `app.test_client`
    @contextmanager
    def login(email="*****@*****.**", password="******"):
        client.post('/login', data={'email': email, 'password': password})
        yield
        client.get('/logout', follow_redirects=True)

    q = db.session.query
    add = db.session.add
    addall = db.session.add_all
    commit = db.session.commit
    delete = db.session.delete

    sitemap = app.url_map._rules_by_endpoint

    routes = {}
    endpoints = {}

    for rule in app.url_map._rules:
        routes[rule.rule] = rule.endpoint
        endpoints[rule.endpoint] = rule.rule
    try:
        import IPython
        IPython.embed()
    except Exception:
        import code
        code.interact(local=merge(locals(), globals()))
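The console above leans on two Flask testing facilities: pushing a test_request_context so request-bound code works, and a test_client for hitting endpoints without a running server. A self-contained sketch of that pattern (the app and route here are invented for illustration):

# Sketch: request context + test client, as used by app_console above.
from flask import Flask, jsonify

app = Flask(__name__)

@app.route("/ping")
def ping():
    return jsonify(ok=True)

ctx = app.test_request_context()
ctx.push()                                 # request-bound code now works
client = app.test_client()
print(client.get("/ping").get_json())      # {'ok': True}
ctx.pop()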
Example #44
0
def make_mesh_2d(all_lines, O, R, g, n_points_w=None):
    all_letters = np.concatenate([line.letters for line in all_lines])
    corners_2d = np.concatenate([letter.corners() for letter in all_letters]).T
    corners = image_to_focal_plane(corners_2d, O)
    t0s = np.full((corners.shape[1], ), np.inf, dtype=np.float64)
    corners_t, corners_XYZ = newton.t_i_k(R, g, corners, t0s)

    corners_X, _, corners_Z = corners_XYZ
    relative_Z_error = np.abs(g(corners_X) - corners_Z) / corners_Z
    # keep corners with small relative Z error, finite Z, and t < 0
    # (the original passed three arrays to one np.logical_and call, which
    # silently treats the third array as the `out` argument)
    keep = np.logical_and.reduce([relative_Z_error <= 0.02,
                                  np.abs(corners_Z) < 1e6,
                                  corners_t < 0])
    corners_XYZ = corners_XYZ[:, keep]
    corners_X, _, _ = corners_XYZ

    debug_print_points('corners.png', corners_2d)

    if lib.debug:
        try:
            import matplotlib.pyplot as plt
            ax = plt.axes()
            box_XY = Crop.from_points(corners_XYZ[:2]).expand(0.01)
            x_min, y_min, x_max, y_max = box_XY

            for y in np.linspace(y_min, y_max, 3):
                xs = np.linspace(x_min, x_max, 200)
                ys = np.full(200, y)
                zs = g(xs)
                points = np.stack([xs, ys, zs])
                points_r = inv(R).dot(points) + Of[:, np.newaxis]  # Of is defined in the enclosing module
                ax.plot(points_r[0], points_r[2])

            base_xs = np.array([corners[0].min(), corners[0].max()])
            base_zs = np.array([-3270.5, -3270.5])
            ax.plot(base_xs, base_zs)
            ax.set_aspect('equal')
            plt.savefig('dewarp/camera.png')
        except Exception as e:
            print(e)
            import IPython
            IPython.embed()

    if g.split():
        meshes = [
            make_mesh_2d_indiv(all_lines,
                               corners_XYZ[:, corners_X <= g.T],
                               O,
                               R,
                               g,
                               n_points_w=n_points_w),
            make_mesh_2d_indiv(all_lines,
                               corners_XYZ[:, corners_X > g.T],
                               O,
                               R,
                               g,
                               n_points_w=n_points_w),
        ]
    else:
        meshes = [
            make_mesh_2d_indiv(all_lines,
                               corners_XYZ,
                               O,
                               R,
                               g,
                               n_points_w=n_points_w)
        ]

    for i, mesh in enumerate(meshes):
        # debug_print_points('mesh{}.png'.format(i), mesh, step=20)
        pass

    return meshes
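The corner filtering in the function above boils down to NumPy boolean-mask column selection: build one boolean array per condition, combine them elementwise, and index the second axis. A tiny sketch with made-up numbers:

# Sketch: boolean-mask column selection, as in the corners_XYZ filtering above.
import numpy as np

pts = np.array([[0.0, 1.0, 2.0],
                [9.0, 8.0, 7.0]])           # shape (2, n_points)
keep = (pts[0] <= 1.0) & (pts[1] > 7.5)     # elementwise AND of conditions
print(pts[:, keep])                         # only the columns passing both tests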
Example #45
0
def main(argv):
    IPython.start_ipython([
        "--no-banner",
        "--no-autoindent",
    ])
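Unlike IPython.embed(), which nests a shell inside the current frame, IPython.start_ipython boots a full IPython application; as Example #57 further down shows, it also accepts a user_ns for preloading names. A small sketch:

# Sketch: start a full IPython app with a preloaded namespace.
import IPython

IPython.start_ipython(argv=["--no-banner"], user_ns={"answer": 42})
# inside the shell, `answer` is available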
Example #46
0
def transfer_partial_weights(state_dict_other,
                             obj,
                             submodule=0,
                             prefix=None,
                             add_prefix=''):
    print('Transferring weights...')

    if 0:
        print('\nStates source\n')
        for name, param in state_dict_other.items():
            print(name)
        print('\nStates target\n')
        for name, param in obj.state_dict().items():
            print(name)

    own_state = obj.state_dict()
    copyCount = 0
    skipCount = 0
    paramCount = len(own_state)

    for name_raw, param in state_dict_other.items():
        if isinstance(param, torch.nn.Parameter):
            # backwards compatibility for serialized parameters
            param = param.data
        if prefix is not None and not name_raw.startswith(prefix):
            #print("skipping {} because of prefix {}".format(name_raw, prefix))
            continue

        # remove the path of the submodule from which we load
        name = add_prefix + ".".join(name_raw.split('.')[submodule:])

        if name in own_state:
            if hasattr(own_state[name],
                       'copy_'):  #isinstance(own_state[name], torch.Tensor):
                #print('copy_ ',name)
                if own_state[name].size() == param.size():
                    own_state[name].copy_(param)
                    copyCount += 1
                else:
                    print(
                        'Invalid param size(own={} vs. source={}), skipping {}'
                        .format(own_state[name].size(), param.size(), name))
                    skipCount += 1

            elif hasattr(own_state[name], 'copy'):
                own_state[name] = param.copy()
                copyCount += 1
            else:
                print(
                    'training.utils: Warning, unhandled element type for name={}, name_raw={}'
                    .format(name, name_raw))
                print(type(own_state[name]))
                skipCount += 1
                IPython.embed()
        else:
            skipCount += 1
            print('Warning, no match for {}, ignoring'.format(name))
            #print(' since own_state.keys() = ',own_state.keys())

    print(
        'Copied {} elements, {} skipped, and {} target params without source'.
        format(copyCount, skipCount, paramCount - copyCount))
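For the common case of transferring only parameters whose names and shapes match, recent PyTorch can do much of the work above with load_state_dict(..., strict=False), provided size-mismatched tensors are filtered out first (those still raise even when strict is off). A hedged sketch:

# Sketch: partial weight transfer with a shape filter plus strict=False.
import torch.nn as nn

src = nn.Sequential(nn.Linear(4, 8), nn.ReLU(), nn.Linear(8, 2))
dst = nn.Sequential(nn.Linear(4, 8), nn.ReLU(), nn.Linear(8, 3))  # head differs

dst_state = dst.state_dict()
transferable = {k: v for k, v in src.state_dict().items()
                if k in dst_state and v.size() == dst_state[k].size()}
result = dst.load_state_dict(transferable, strict=False)
print(result.missing_keys)  # parameters that kept their initial values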
print(f"matplotlib backend: {matplotlib.get_backend()}")
print(f"matplotlib config file: {matplotlib.matplotlib_fname()}")
print(f"matplotlib config dir: {matplotlib.get_configdir()}")
plt.close("all")

# try to set separate window plotting
if "inline" in matplotlib.get_backend():
    print("Plotting is set to inline at the moment:", end=" ")

    if "ipykernel" in matplotlib.get_backend():
        print("backend is ipykernel (IPython?)")
        print("Trying to set backend to separate window:", end=" ")
        import IPython

        IPython.get_ipython().run_line_magic("matplotlib", "")
    else:
        print("unknown inline backend")

print("continuing with this plotting backend", end="\n\n\n")

# set styles
try:
    # installed with "pip install SciencePLots" (https://github.com/garrettj403/SciencePlots.git)
    # gives quite nice plots
    plt_styles = ["science", "grid", "bright", "no-latex"]
    plt.style.use(plt_styles)
    print(f"pyplot using style set {plt_styles}")
except Exception as e:
    print(e)
    print("setting grid and only grid and legend manually")
Example #48
0
def split_back(network: Network, part: AnyStr) -> None:
    """
    implement the refinement step. split back part from the union node it was
    grouped into into a separated node
    :param network: Network
    :param part: Str of the name of the original node that is part of the union
    """
    # assume that layer_index is in [2, ..., L-1] (L = num of layers)
    try:
        layer_index = int(part.split("_")[1])
    except IndexError:
        debug_print("IndexError in core.test_refinement.step.split_back()")
        import IPython
        IPython.embed()

    layer = network.layers[layer_index]
    next_layer = network.layers[layer_index + 1]
    prev_layer = network.layers[layer_index - 1]

    part2node_map = network.get_part2node_map()
    union_node = network.name2node_map[part2node_map[part]]

    parts = union_node.name.split("+")
    other_parts = [p for p in parts if p != part]
    if not other_parts:
        return

    part_node = ARNode(name=part,
                       ar_type=union_node.ar_type,
                       activation_func=union_node.activation_func,
                       in_edges=[],
                       out_edges=[],
                       bias=network.orig_name2node_map[part].bias)
    bias = sum([
        network.orig_name2node_map[other_part].bias
        for other_part in other_parts
    ])

    other_parts_node = ARNode(name="+".join(other_parts),
                              ar_type=union_node.ar_type,
                              activation_func=union_node.activation_func,
                              in_edges=[],
                              out_edges=[],
                              bias=bias)

    splitting_nodes = [part_node, other_parts_node]

    for splitting_node in splitting_nodes:
        # print("splitting_node.name={}".format(splitting_node.name))
        for next_layer_node in next_layer.nodes:
            group_a = splitting_node.name.split("+")
            group_b = next_layer_node.name.split("+")
            # print("call 1 - group_a")
            # print(group_a)
            # print("call 1 - group_b")
            # print(group_b)
            out_edge_weight = calculate_weight_of_edge_between_two_part_groups(
                network=network, group_a=group_a, group_b=group_b)

            if out_edge_weight is not None:
                out_edge = Edge(splitting_node.name, next_layer_node.name,
                                out_edge_weight)
                splitting_node.out_edges.append(out_edge)
                next_layer_node.in_edges.append(out_edge)
            # fill_zero_edges(network)
        for prev_layer_node in prev_layer.nodes:
            group_a = prev_layer_node.name.split("+")
            group_b = splitting_node.name.split("+")
            # print("call 2 - group_a")
            # print(group_a)
            # print("call 2 - group_b")
            # print(group_b)
            in_edge_weight = calculate_weight_of_edge_between_two_part_groups(
                network=network, group_a=group_a, group_b=group_b)
            if in_edge_weight is not None:
                in_edge = Edge(prev_layer_node.name, splitting_node.name,
                               in_edge_weight)
                splitting_node.in_edges.append(in_edge)
                prev_layer_node.out_edges.append(in_edge)
            # fill_zero_edges(network)
        layer.nodes.append(splitting_node)
        fill_zero_edges(network)
    network.remove_node(union_node, layer_index)
    network.generate_name2node_map()
Example #49
0
 def start_ipython(sim_class):
     IPython.embed()
Example #50
0
def hook():
    #for debugging
    import IPython
    IPython.embed()
    exit(0)
Example #51
0
import numpy as np
from tensorpack.tfutils.varmanip import dump_chkpt_vars
from tensorpack.utils import logger
import argparse

if __name__ == '__main__':
    parser = argparse.ArgumentParser()
    parser.add_argument('model')
    parser.add_argument('--dump', help='dump to an npz file')
    parser.add_argument('--shell',
                        action='store_true',
                        help='start a shell with the params')
    args = parser.parse_args()

    if args.model.endswith('.npy'):
        params = np.load(args.model, encoding='latin1').item()
    elif args.model.endswith('.npz'):
        params = dict(np.load(args.model))
    else:
        params = dump_chkpt_vars(args.model)
    logger.info("Variables in the model:")
    logger.info(str(params.keys()))

    if args.dump:
        assert args.dump.endswith('.npz'), args.dump
        np.savez(args.dump, **params)  # np.save takes a single array; savez writes the named arrays to .npz

    if args.shell:
        # params is a dict. play with it
        import IPython as IP
        IP.embed(config=IP.terminal.ipapp.load_default_config())
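A dump written with np.savez as above comes back as a mapping of array names to arrays; a small sketch of reloading it (filename illustrative):

# Sketch: reload an .npz params dump as a plain dict.
import numpy as np

params = dict(np.load("params.npz"))
for name, arr in params.items():
    print(name, arr.shape)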
Example #52
0
write_keypoint_table()




def plot_baseline_resnet_vs_inception():
    dfbb = {
        "ResNet18": "data/_analysis/R6-20190315T030959.log/logdata.csv",
        "InceptionV3": "data/_analysis/A6e-20190222T103123.log/logdata.csv",
    }
    dfbb = pd.concat({k: pd.read_csv(v) for k, v in dfbb.items()}, sort=False)\
        .query('perf and al_iter == 0 and epoch <= 150')\
        .droplevel(1).set_index('epoch', append=True).unstack(level=0)

    ax = dfbb['val_acc'].plot()
    ax.legend(loc='center right')
    table = pd.plotting.table(
        ax, dfbb['val_acc'].describe().round(4).loc[['max', 'min']],
        loc='lower center', colWidths=[0.3, 0.3, 0.3], alpha=1)
    table.auto_set_font_size(False)

    ax.set_xlabel('Epoch')
    ax.set_ylabel('Test Accuracy')
    f = ax.figure
    f.suptitle("Test Accuracy vs Epoch")
    #  f.tight_layout(rect=[0, 0.03, 1, 0.95])
    f.savefig(join(analysis_dir, "baselines_acc_vs_epoch.png"))
#  plot_baseline_resnet_vs_inception()

import IPython ; IPython.embed() ; import sys ; sys.exit()
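The semicolon one-liner above is the classic ad-hoc breakpoint. Since Python 3.7 the built-in breakpoint() does the same job, and setting PYTHONBREAKPOINT=IPython.embed in the environment makes it open an IPython shell instead of pdb. Sketch:

# Sketch: run with PYTHONBREAKPOINT=IPython.embed to get an IPython shell here.
def compute(x):
    y = x * 2
    breakpoint()  # dispatches to the callable named in PYTHONBREAKPOINT
    return y

compute(21)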
Example #53
0
    def get_explanation(self, image):
        # Initialize beam
        iters = 0
        ind = 0
        c1, h1 = self.sess.run([self.sy_zero_initial_state1], feed_dict={
                        self.sy_batchsize_explanation: 1
                    })[0]
        c2, h2 = self.sess.run([self.sy_zero_initial_state2], feed_dict={
                        self.sy_batchsize_explanation: 1
                    })[0]

        state1, state2, probs = self.sess.run([self.sy_lstm_states1, self.sy_lstm_states2, self.sy_sentence_word_probs],
                        feed_dict={
                            self.sy_X: image,
                            self.sy_rnn_inputs: np.array([ind] + [0]*(self.max_length-1), dtype=np.int32)[None],
                            self.sy_seq_lengths: np.array([1], dtype=np.int32),
                            self.sy_c_state1: c1,
                            self.sy_h_state1: h1,
                            self.sy_c_state2: c2,
                            self.sy_h_state2: h2,
                            self.sy_batchsize_explanation: 1,
                            self.keep_prob: 1.0
                        })
        c1, h1 = state1
        c2, h2 = state2
        ipy.embed()  # ipy: "import IPython as ipy" in the original module
        beam = np.argsort(probs[0,0,:])[::-1][:self.beam_width]
        beam_probs = [probs[0,0,:][ind] for ind in beam]
        hypothesis = [([ind], math.log(prob), c1, h1, c2, h2) for ind, prob in zip(beam, beam_probs)]

        # Beam Search
        num_completed = 0
        seen_sentences = []
        while 1:
            iters += 1
            new_hypothesis = []
            for i, datum in enumerate(hypothesis):
                indices, prob, c1, h1, c2, h2  = datum
                last_ind = indices[-1] # Get last index used in hypothesis
                if self.id_to_word[last_ind] == ".":
                    if datum not in seen_sentences:
                        num_completed = num_completed + 1 # Keep track of effective beam width (if it reaches 0 are done)
                        seen_sentences.append(datum) # Keep track of sentences you have seen to know when to stop search
                    new_hypothesis.append(datum) # Keep in beam
                    continue
                state1, state2, probs = self.sess.run([self.sy_lstm_states1, self.sy_lstm_states2, self.sy_sentence_word_probs],
                                feed_dict={
                                    self.sy_X: image,
                                    self.sy_rnn_inputs: np.array([ind] + [0]*(self.max_length-1), dtype=np.int32)[None],
                                    self.sy_seq_lengths: np.array([1], dtype=np.int32),
                                    self.sy_c_state1: c1,
                                    self.sy_h_state1: h1,
                                    self.sy_c_state2: c2,
                                    self.sy_h_state2: h2,
                                    self.sy_batchsize_explanation: 1,
                                    self.keep_prob: 1.0
                                })
                c1, h1 = state1
                c2, h2 = state2
                beam = np.argsort(probs[0,0,:])[::-1][:self.beam_width]
                beam_probs = [probs[0,0,:][ind] for ind in beam]
                new_beam = [(indices + [ind], prob+math.log(prob_new), c1, h1, c2, h2) for ind, prob_new in zip(beam, beam_probs)]
                new_hypothesis.extend(new_beam)

            # new_hypothesis contains all complete sentences or max length reached -> return the best result
            if num_completed == self.beam_width or iters == self.max_length:
                indices, prob, c1, h1, c2, h2 = sorted(new_hypothesis, key= lambda tup: tup[1]/(math.pow((5+len(tup[0])), self.len_norm_coeff) / math.pow(6, self.len_norm_coeff)))[::-1][0]
                sentence = " ".join([self.id_to_word[ind] for ind in indices])
                if '.' in sentence:
                    sentence = sentence[:-2]+"." # Move period
                else:
                    sentence = sentence+"." # Append period
                    indices.append(self.word_to_id['.']) # Append period
                return sentence, indices

            # Take top beam_width results from new hypothesis (normalized by length)
            hypothesis = sorted(new_hypothesis, key= lambda tup: tup[1]/(math.pow((5+len(tup[0])), self.len_norm_coeff) / math.pow(6, self.len_norm_coeff)))[::-1][:self.beam_width]
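Both sorted() calls above rank hypotheses by the GNMT-style length-normalized score: log-probability divided by ((5 + length) / 6) ** alpha. Factoring that out makes the ranking easier to read; a sketch with made-up numbers:

# Sketch: the length normalization used in both sorted() calls above.
import math

def normalized_score(log_prob, length, alpha):
    penalty = math.pow(5 + length, alpha) / math.pow(6, alpha)
    return log_prob / penalty

hyps = [(-1.2, 4), (-2.0, 9)]  # (log_prob, length), illustrative
ranked = sorted(hyps, key=lambda t: normalized_score(t[0], t[1], 0.7),
                reverse=True)
print(ranked)  # best hypothesis first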
Example #54
0
def shell():
    import IPython

    IPython.start_ipython([])
Example #55
0
        surface, ctx, size, n = utils.drawing.setup_cairo(
            N=args.numpoints,
            scale=SCALE,
            background=[0, 0, 0, 1],
            cartesian=True,
            font_size=FONT_SIZE)
        assert (size == SIZE)
        assert (n == N)

        ctx.set_source_rgba(0, 0, 0, 1)
        utils.drawing.drawRect(ctx, 0, 0, size, size)

    if args.loaddcel and exists("{}.dcel".format(dcel_filename)):
        logging.info("Loading DCEL")
        theDCEL = utils.dcel.DCEL.loadfile(dcel_filename)
        IPython.embed(simple_prompt=True)
    else:
        logging.info("Initialising DCEL")
        #make the bbox
        bbox = np.array([0, 0, SIZE, SIZE])
        theDCEL = utils.dcel.DCEL(bbox=bbox)

    #------------------------------
    # def MAIN LOOP
    #------------------------------
    if not args.static:
        logging.info("Generating")
        for x in range(args.timesteps):
            logging.info("Step: {}".format(x))
            should_quit = tick(theDCEL, x)
Example #56
0
# loading data
import json
import urllib as requestlib

# building frames
import pandas as pd
from pandas import DataFrame, Series

# plotting
import numpy as np
import matplotlib.pyplot as matplt

# get_ipython().magic(u'matplotlib inline')
try:
    import IPython
    shell = IPython.get_ipython()
    shell.enable_matplotlib(gui='qt')
except (ImportError, AttributeError):
    pass

# In[2]:

hourly_temp_view = "http://127.0.0.1:5984/temperature_data/_design/temperature_data/_view/temp_by_hour?group_level=1"
hourly_count_view = "http://127.0.0.1:5984/temperature_data/_design/temperature_data/_view/count_by_hour?group_level=1"

# temperature_request = requestlib.urlopen(hourly_temp_view)
# temperature_response = temperature_request.read()
# temperature_data = json.loads(temperature_response.decode())
# temperature_data = temperature_data["rows"]

# count_request = requestlib.urlopen(hourly_count_view)
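The commented-out block sketches the intended flow: fetch a CouchDB view over HTTP, parse the JSON, and build a DataFrame from its rows. A Python 3 version of the same steps, reusing the view URL defined above:

# Sketch (Python 3): fetch a CouchDB view and frame its rows.
import json
from urllib.request import urlopen

with urlopen(hourly_temp_view) as resp:          # view URL defined above
    temperature_rows = json.loads(resp.read().decode())["rows"]
temperature_df = DataFrame(temperature_rows)     # pandas imported above
print(temperature_df.head())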
Example #57
0
def run(appname,
        plain=False,
        import_models=False,
        startfile=None,
        bpython=False,
        python_code=False,
        cronjob=False):
    """
    Start interactive shell or run Python script (startfile) in web2py
    controller environment. appname is formatted like:

    - a : web2py application name
    - a/c : exec the controller c into the application environment
    """

    (a, c, f, args, vars) = parse_path_info(appname, av=True)
    errmsg = 'invalid application name: %s' % appname
    if not a:
        die(errmsg)
    adir = os.path.join('applications', a)

    if not os.path.exists(adir):
        if sys.stdin and not sys.stdin.name == '/dev/null':
            confirm = raw_input(
                'application %s does not exist, create (y/n)?' % a)
        else:
            logging.warning('application does not exist and will not be created')
            return
        if confirm.lower() in ['y', 'yes']:

            os.mkdir(adir)
            w2p_unpack('welcome.w2p', adir)
            for subfolder in [
                    'models', 'views', 'controllers', 'databases', 'modules',
                    'cron', 'errors', 'sessions', 'languages', 'static',
                    'private', 'uploads'
            ]:
                subpath = os.path.join(adir, subfolder)
                if not os.path.exists(subpath):
                    os.mkdir(subpath)
            db = os.path.join(adir, 'models/db.py')
            if os.path.exists(db):
                data = fileutils.read_file(db)
                data = data.replace('<your secret key>',
                                    'sha512:' + web2py_uuid())
                fileutils.write_file(db, data)

    if c:
        import_models = True
    extra_request = {}
    if args:
        extra_request['args'] = args
    if vars:
        extra_request['vars'] = vars
    _env = env(a,
               c=c,
               f=f,
               import_models=import_models,
               extra_request=extra_request)
    if c:
        pyfile = os.path.join('applications', a, 'controllers', c + '.py')
        pycfile = os.path.join('applications', a, 'compiled',
                               "controllers_%s_%s.pyc" % (c, f))
        if ((cronjob and os.path.isfile(pycfile))
                or not os.path.isfile(pyfile)):
            exec(read_pyc(pycfile), _env)
        elif os.path.isfile(pyfile):
            execfile(pyfile, _env)
        else:
            die(errmsg)

    if f:
        exec('print( %s())' % f, _env)
        return

    _env.update(exec_pythonrc())
    if startfile:
        try:
            ccode = None
            if startfile.endswith('.pyc'):
                ccode = read_pyc(startfile)
                exec(ccode, _env)
            else:
                execfile(startfile, _env)

            if import_models:
                BaseAdapter.close_all_instances('commit')
        except Exception as e:
            print(traceback.format_exc())
            if import_models:
                BaseAdapter.close_all_instances('rollback')
    elif python_code:
        try:
            exec(python_code, _env)
            if import_models:
                BaseAdapter.close_all_instances('commit')
        except Exception as e:
            print(traceback.format_exc())
            if import_models:
                BaseAdapter.close_all_instances('rollback')
    else:
        if not plain:
            if bpython:
                try:
                    import bpython
                    bpython.embed(locals_=_env)
                    return
                except:
                    logger.warning('import bpython error; trying ipython...')
            else:
                try:
                    import IPython
                    if IPython.__version__ > '1.0.0':
                        IPython.start_ipython(user_ns=_env)
                        return
                    elif IPython.__version__ == '1.0.0':
                        from IPython.terminal.embed import InteractiveShellEmbed
                        shell = InteractiveShellEmbed(user_ns=_env)
                        shell()
                        return
                    elif IPython.__version__ >= '0.11':
                        from IPython.frontend.terminal.embed import InteractiveShellEmbed
                        shell = InteractiveShellEmbed(user_ns=_env)
                        shell()
                        return
                    else:
                        # following 2 lines fix a problem with
                        # IPython; thanks Michael Toomim
                        if '__builtins__' in _env:
                            del _env['__builtins__']
                        shell = IPython.Shell.IPShell(argv=[], user_ns=_env)
                        shell.mainloop()
                        return
                except:
                    logger.warning(
                        'import IPython error; use default python shell')
        enable_autocomplete_and_history(adir, _env)
        code.interact(local=_env)
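Stripped of the web2py plumbing and the legacy IPython version branches, the shell-selection logic above reduces to: try IPython, fall back to the stdlib console. A hedged sketch:

# Sketch: IPython shell with a plain-Python fallback (cf. the branches above).
import code

def interactive_shell(namespace):
    try:
        import IPython
        IPython.start_ipython(argv=[], user_ns=namespace)
    except ImportError:
        code.interact(local=namespace)

interactive_shell({"answer": 42})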
Example #58
0
def iterate(g, theta, q, mu, h, epsilon, N_states_under_fermi_surface):

    def plot_fermi_surface():
        import pylab as pl
        first_band_energies = (np.sort(E_k_n, axis=-1)[:, :, 0] - mu)
        second_band_energies = (np.sort(E_k_n, axis=-1)[:, :, 1] - mu)
        third_band_energies = (np.sort(E_k_n, axis=-1)[:, :, 2] - mu)
        import warnings
        # Supress unicode warning from matplotlib:
        with warnings.catch_warnings():
            pl.figure(figsize=(16,8))
            pl.subplot(231)
            pl.title('$E_{k}$')
            pl.xlabel('$k_x$')
            pl.ylabel('$k_y$')
            warnings.simplefilter("ignore")
            KX = reduced_kx*q*np.ones((N_kx, N_ky))
            KY = reduced_ky*q*np.ones((N_kx, N_ky))
            pl.contour(KX, KY, first_band_energies, 30)
            CS = pl.contour(KX, KY, first_band_energies, 0, colors='k', linewidths=3)
            pl.clabel(CS, inline=1, fontsize=20, fmt=r'$\mu$')
            pl.axhline(q/2, color='k', linestyle='--')
            pl.grid(True)

            pl.subplot(232)
            CS = pl.contour(KX, KY, second_band_energies, 10)
            pl.clabel(CS, inline=1, fontsize=10)
            pl.grid(True)

            pl.subplot(233)
            CS = pl.contour(KX, KY, third_band_energies, 10)
            pl.clabel(CS, inline=1, fontsize=10)
            pl.grid(True)

            x_origin_index = np.where(reduced_ky==0)[1][0]
            y_origin_index = np.where(reduced_kx==0)[0][0]
            kx = q * reduced_kx[:, 0]
            ky = q * reduced_ky[0, :]
            pl.subplot(234)
            for i in range(3):
                pl.plot(kx, E_k_n[:, y_origin_index, i])
                pl.ylabel('$E_{k_x}$')
                pl.xlabel('$k_x$')
                pl.axhline(mu, color='k', linestyle='--')
                pl.grid(True)
            pl.axis(xmin=kx.min(), xmax=kx.max())

            pl.subplot(235)
            for i in range(3):
                pl.plot(ky, E_k_n[x_origin_index, :, i])
                pl.ylabel('$E_{k_y}$')
                pl.xlabel('$k_y$')
                pl.axhline(mu, color='k', linestyle='--')
                pl.grid(True)
            pl.axis(xmin=ky.min(), xmax=ky.max())
            pl.show()

    global i

    print('(N_kx, N_ky):'.rjust(15),'(%d, %d)'%(N_kx, N_ky))
    print(    'N_states:'.rjust(15), N_states_under_fermi_surface)
    print(           'g:'.rjust(15), g)
    print(       'theta:'.rjust(15), theta)
    print(            '='.rjust(15), theta/pi, 'pi')
    print(  "initial mu:".rjust(15), mu)
    print("   initial q:".rjust(15), q)

    i = 0

    start_time = time.time()
    time_of_last_print = time.time() - 10
    while True:
        try:
            # Construct the Hamiltonian, which has shape (N_kx, N_ky, 3, 3):
            H_k =  construct_H_k(epsilon, h)

            # Diagonalise it to give an array of shape (N_kx, N_ky, 3) of
            # energy eigenvalues, and an array of shape (N_kx, N_ky, 3, 3) of
            # eigenvectors, or equivalently of rotation matrices that
            # diagonalise each Hamiltonian.
            E_k_n, U_k = eigh(H_k)

            dkx = (reduced_kx[1, 0] - reduced_kx[0, 0]) * q
            dky = (reduced_ky[0, 1] - reduced_ky[0, 0]) * q
            N_states_under_fermi_surface = pi / (dkx * dky)

            density = N_states_under_fermi_surface*dkx*dky/(4*pi**2)

            # Compute new guesses of q, h and epsilon:
            new_q, new_mu = compute_q_and_mu(E_k_n, q, N_states_under_fermi_surface)
            new_epsilon = compute_epsilon(E_k_n, U_k, new_mu, new_q, g, theta)
            new_h = compute_h(E_k_n, U_k, new_mu, new_q, g, theta)

            convergence = abs((mu - new_mu)/mu)
            q += RELAXATION_PARAMETER*(new_q - q)
            mu += RELAXATION_PARAMETER*(new_mu - mu)
            h += RELAXATION_PARAMETER*(new_h - h)
            epsilon += RELAXATION_PARAMETER*(new_epsilon - epsilon)
            i += 1

            # if not i % 10:
            #     plot_fermi_surface()
            now = time.time()
            if (now - time_of_last_print > PRINT_INTERVAL or True) or (convergence < CONVERGENCE_THRESHOLD):
                print('\n  loop iteration:', i)
                print('    time per step:', round(1000*(now - start_time)/i, 2), 'ms')
                print('    convergence:', convergence)
                print("    mu", mu)
                print("    q", q)
                print("    N_states:", N_states_under_fermi_surface)
                print('    density', density)
                time_of_last_print = now

                plot_fermi_surface()

                if convergence < CONVERGENCE_THRESHOLD:
                    print('time taken:', time.time() - start_time)
                    plot_fermi_surface()
                    return q, mu, h, epsilon, E_k_n, U_k
        except KeyboardInterrupt:
            import IPython
            IPython.embed()
            break
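Each update in the loop above is an under-relaxed fixed-point step: the new estimate is blended into the old one as x += lambda * (f(x) - x), trading speed for stability. A scalar sketch of the same scheme:

# Sketch: under-relaxed fixed-point iteration, as in the q/mu/h/epsilon updates.
RELAXATION_PARAMETER = 0.5
CONVERGENCE_THRESHOLD = 1e-12

f = lambda x: (x + 2.0 / x) / 2.0   # fixed point at sqrt(2)
x = 1.0
while True:
    new_x = f(x)
    if abs((x - new_x) / x) < CONVERGENCE_THRESHOLD:
        break
    x += RELAXATION_PARAMETER * (new_x - x)
print(x)  # ~1.4142135623730951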
Example #59
0
 def gaz_callback(self, gaz):
     rospy.loginfo(gaz)
     IPython.embed()
Example #60
0
def rrdesi(options=None, comm=None):
    """Estimate redshifts for DESI targets.

    This loads distributed DESI targets from one or more spectra grouping
    files and computes the redshifts.  The outputs are written to a redrock
    scan file and a DESI redshift catalog.

    Args:
        options (list): optional list of commandline options to parse.
        comm (mpi4py.Comm): MPI communicator to use.

    """
    global_start = elapsed(None, "", comm=comm)

    parser = argparse.ArgumentParser(description="Estimate redshifts from"
                                     " DESI target spectra.")

    parser.add_argument("-t",
                        "--templates",
                        type=str,
                        default=None,
                        required=False,
                        help="template file or directory")

    parser.add_argument("-o",
                        "--output",
                        type=str,
                        default=None,
                        required=False,
                        help="output file")

    parser.add_argument("--zbest",
                        type=str,
                        default=None,
                        required=False,
                        help="output zbest FITS file")

    parser.add_argument("--targetids",
                        type=str,
                        default=None,
                        required=False,
                        help="comma-separated list of target IDs")

    parser.add_argument("--mintarget",
                        type=int,
                        default=None,
                        required=False,
                        help="first target to process in each file")

    parser.add_argument("-n",
                        "--ntargets",
                        type=int,
                        required=False,
                        help="the number of targets to process in each file")

    parser.add_argument("--nminima",
                        type=int,
                        default=3,
                        required=False,
                        help="the number of redshift minima to search")

    parser.add_argument("--allspec",
                        default=False,
                        action="store_true",
                        required=False,
                        help="use individual spectra instead of coadd")

    parser.add_argument("--ncpu",
                        type=int,
                        default=None,
                        required=False,
                        help="DEPRECATED: the number of multiprocessing"
                        " processes; use --mp instead")

    parser.add_argument(
        "--mp",
        type=int,
        default=0,
        required=False,
        help="if not using MPI, the number of multiprocessing"
        " processes to use (defaults to half of the hardware threads)")

    parser.add_argument("--debug",
                        default=False,
                        action="store_true",
                        required=False,
                        help="debug with ipython (only if communicator has a "
                        "single process)")

    parser.add_argument("infiles", nargs='*')

    # Parse the explicit options list when called programmatically (e.g. from
    # a test or wrapper); otherwise fall back to sys.argv.
    args = None
    if options is None:
        args = parser.parse_args()
    else:
        args = parser.parse_args(options)

    if args.ncpu is not None:
        print('WARNING: --ncpu is deprecated; use --mp instead')
        args.mp = args.ncpu

    comm_size = 1
    comm_rank = 0
    if comm is not None:
        comm_size = comm.size
        comm_rank = comm.rank

    # Check arguments- all processes have this, so just check on the first
    # process

    if comm_rank == 0:
        if args.debug and comm_size != 1:
            print("--debug can only be used if the communicator has one "
                  " process")
            sys.stdout.flush()
            if comm is not None:
                comm.Abort()

        if (args.output is None) and (args.zbest is None):
            parser.print_help()
            print("ERROR: --output or --zbest required")
            sys.stdout.flush()
            if comm is not None:
                comm.Abort()
            else:
                sys.exit(1)

        if len(args.infiles) == 0:
            print("ERROR: must provide input files")
            sys.stdout.flush()
            if comm is not None:
                comm.Abort()
            else:
                sys.exit(1)

        if (args.targetids is not None) and ((args.mintarget is not None)
                or (args.ntargets is not None)):
            print("ERROR: cannot select targets by both ID and range")
            sys.stdout.flush()
            if comm is not None:
                comm.Abort()
            else:
                sys.exit(1)

    # Translate the command-line selections into what DistTargetsDESI expects:
    # either an explicit list of target IDs, or a (first, count) range applied
    # to each input file.
    targetids = None
    if args.targetids is not None:
        targetids = [int(x) for x in args.targetids.split(",")]

    n_target = None
    if args.ntargets is not None:
        n_target = args.ntargets

    first_target = None
    if args.mintarget is not None:
        first_target = args.mintarget
    elif n_target is not None:
        first_target = 0

    # Multiprocessing processes to use if MPI is disabled.
    mpprocs = 0
    if comm is None:
        mpprocs = get_mp(args.mp)
        print("Running with {} processes".format(mpprocs))
        if "OMP_NUM_THREADS" in os.environ:
            nthread = int(os.environ["OMP_NUM_THREADS"])
            if nthread != 1:
                print("WARNING:  {} multiprocesses running, each with "
                      "{} threads ({} total)".format(mpprocs, nthread,
                                                     mpprocs * nthread))
                print("WARNING:  Please ensure this is <= the number of "
                      "physical cores on the system")
        else:
            print("WARNING:  using multiprocessing, but the OMP_NUM_THREADS")
            print("WARNING:  environment variable is not set- your system may")
            print("WARNING:  be oversubscribed.")
        sys.stdout.flush()
    elif comm_rank == 0:
        print("Running with {} processes".format(comm_size))
        sys.stdout.flush()

    # Run the pipeline inside a try block so that a failure on any rank aborts
    # the whole MPI job rather than leaving the other ranks deadlocked.
    try:
        # Load and distribute the targets
        if comm_rank == 0:
            print("Loading targets...")
            sys.stdout.flush()

        start = elapsed(None, "", comm=comm)

        # Load the targets.  If comm is None, then the target data will be
        # stored in shared memory.
        targets = DistTargetsDESI(args.infiles,
                                  coadd=(not args.allspec),
                                  targetids=targetids,
                                  first_target=first_target,
                                  n_target=n_target,
                                  comm=comm)

        # Get the dictionary of wavelength grids
        dwave = targets.wavegrids()

        stop = elapsed(start, "Read and distribution of {} targets"\
            .format(len(targets.all_target_ids)), comm=comm)

        # Read the template data

        dtemplates = load_dist_templates(dwave,
                                         templates=args.templates,
                                         comm=comm,
                                         mp_procs=mpprocs)

        # Compute the redshifts, including both the coarse scan and the
        # refinement.  This function only returns data on the rank 0 process.

        start = elapsed(None, "", comm=comm)

        scandata, zfit = zfind(targets,
                               dtemplates,
                               mpprocs,
                               nminima=args.nminima)

        stop = elapsed(start, "Computing redshifts took", comm=comm)

        # Write the outputs

        if args.output is not None:
            start = elapsed(None, "", comm=comm)
            if comm_rank == 0:
                write_zscan(args.output, scandata, zfit, clobber=True)
            stop = elapsed(start, "Writing zscan data took", comm=comm)

        if args.zbest:
            start = elapsed(None, "", comm=comm)
            if comm_rank == 0:
                zbest = zfit[zfit['znum'] == 0]

                # Remove extra columns not needed for zbest
                zbest.remove_columns(['zz', 'zzchi2', 'znum'])

                # Change to upper case like DESI
                for colname in zbest.colnames:
                    if colname.islower():
                        zbest.rename_column(colname, colname.upper())

                write_zbest(args.zbest, zbest, targets.fibermap)

            stop = elapsed(start, "Writing zbest data took", comm=comm)

    except Exception:
        exc_type, exc_value, exc_traceback = sys.exc_info()
        lines = traceback.format_exception(exc_type, exc_value, exc_traceback)
        lines = ["Proc {}: {}".format(comm_rank, x) for x in lines]
        print("".join(lines))
        sys.stdout.flush()
        if comm is not None:
            comm.Abort()

    global_stop = elapsed(global_start, "Total run time", comm=comm)

    if args.debug:
        import IPython
        IPython.embed()

    return
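Because rrdesi() accepts an explicit options list, it can be driven
programmatically as well as from the command line. A hedged usage sketch
(the import path is assumed from redrock's layout, and all file names are
placeholders, not real data):

from redrock.external.desi import rrdesi   # assumed import path

# Run serially (comm=None), letting redrock pick multiprocessing defaults.
rrdesi(options=[
    "--templates", "rrtemplates/",   # placeholder template directory
    "--output", "zscan.h5",          # redrock scan file
    "--zbest", "zbest.fits",         # DESI redshift catalog
    "spectra-64-1234.fits",          # placeholder input spectra file
])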