Example #1
def derotate_image_forloop(startNum,stopNum,dateString):
    for f in range(startNum,stopNum+1): # loop over filenames
        print('----------------------------------------')
        print('Derotating image '+"{:0>5d}".format(f)+'...')

        t = TicToc() # create instance of timer
        t.tic() # start timer

        # retrieve image
        image, header = fits.getdata(calibrated_trapezium_data_stem+
                                     'step02_dewarped/'+
                                     'lm_'+dateString+'_'+
                                     "{:0>5d}".format(f)+
                                     '.fits',
                                     0,
                                     header=True)
        # find PA from header
        pa = header['LBT_PARA']
    
        # derotate
        image_derot = rot(image, -pa, [1024,1024], order=3, pivot=False) # axis coord here is just a dummy
    
        # save 
        fits.writeto(calibrated_trapezium_data_stem+
                     'step03_derotate/'+
                     'lm_'+dateString+'_'+
                     "{:0>5d}".format(f)+
                     '.fits',
                     image_derot, header, overwrite=False)

        t.toc()
        print('----------------------------------------')
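The function above leans on names defined elsewhere in its repository (astropy's fits, the rot derotation helper, and the calibrated_trapezium_data_stem path). Stripped of those, the per-file timing pattern it demonstrates reduces to this minimal, self-contained sketch (pytictoc assumed):

# Minimal sketch of the per-item timing pattern; the squaring stands in
# for the real work (image derotation).
from pytictoc import TicToc

def process_items(items):
    for item in items:
        t = TicToc()   # create a fresh timer per item
        t.tic()        # start the timer
        _ = item ** 2  # placeholder for the real work
        t.toc()        # prints 'Elapsed time is x.xxxxxx seconds.'

process_items(range(3))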
Example #2
def poll_for_valid_message(consumer,
                           expected_file_identifier=b"f142",
                           timeout=15.0):
    """
    Polls the subscribed topics by the consumer and checks the buffer is not empty or malformed.
    Skips connection status messages.

    :param consumer: The consumer object
    :param expected_file_identifier: The schema id we expect to find in the message
    :param timeout: give up if we haven't found a message with expected_file_identifier after this length of time
    :return: Tuple of the message payload and the key
    """
    timer = TicToc()
    timer.tic()
    while timer.tocvalue() < timeout:
        msg = consumer.poll(timeout=1.0)
        assert msg is not None
        if msg.error():
            raise MsgErrorException("Consumer error when polling: {}".format(
                msg.error()))

        if expected_file_identifier is None:
            return msg.value(), msg.key()
        else:
            message_file_id = msg.value()[4:8]
            assert (
                expected_file_identifier == message_file_id
                or message_file_id == b"ep00"
            ), f"Expected message to have schema id of {expected_file_identifier}, but it has {message_file_id}"
            if message_file_id == b"f142":
                return LogData.LogData.GetRootAsLogData(msg.value(),
                                                        0), msg.key()
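The Kafka specifics aside, this snippet shows a deadline loop driven by tocvalue(). A self-contained sketch of just that pattern, with the hypothetical names poll_until and predicate standing in for the consumer logic:

from pytictoc import TicToc

def poll_until(predicate, timeout=15.0):
    timer = TicToc()
    timer.tic()
    while timer.tocvalue() < timeout:  # give up once the deadline passes
        result = predicate()
        if result is not None:
            return result
    return None  # deadline reached without a valid result

print(poll_until(lambda: "message", timeout=1.0))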
Example #3
def accuracyCalculator(dataSetLoader, crrNeuralNet):
    '''
    Calculate and return the accuracy on the given data set using the current trained neural net.

    Args:
        dataSetLoader: the given data set loader
        crrNeuralNet: the current trained neural net

    Returns:
        accuracyForCurrentNet: accuracy on the given data set achieved by the trained net at the current epoch

    '''

    numOfCorrectPrediction = 0
    numOfTotalDataPoints = 0
    timerObj = TicToc()
    timerObj.tic()
    with torch.no_grad():
        for crrData in dataSetLoader:
            crrInputData, crrLabels = crrData
            predictedOutputs = crrNeuralNet.feedForward(crrInputData)
            _, predictedLabels = torch.max(predictedOutputs.data, 1)
            numOfTotalDataPoints += crrLabels.size(0)
            numOfCorrectPrediction += (
                predictedLabels == crrLabels).sum().item()

    accuracyForCurrentNet = numOfCorrectPrediction / numOfTotalDataPoints
    print('Accuracy of the network on the images (train or test): %d %%' %
          (100 * numOfCorrectPrediction / numOfTotalDataPoints))
    timerObj.toc()
    return accuracyForCurrentNet
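A self-contained version of the timed, gradient-free accuracy loop, with random tensors standing in for the real data loader and network:

import torch
from pytictoc import TicToc

timer = TicToc()
timer.tic()
correct, total = 0, 0
with torch.no_grad():
    for _ in range(5):                        # stand-in for dataSetLoader
        labels = torch.randint(0, 10, (32,))  # fake batch of labels
        outputs = torch.randn(32, 10)         # stand-in for feedForward()
        _, predicted = torch.max(outputs, 1)
        total += labels.size(0)
        correct += (predicted == labels).sum().item()
print('Accuracy: %.2f %%' % (100 * correct / total))
timer.toc()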
Example #4
def __merge_knnr_data(self, knnsr_data):
    if self.verbosity == 2:
        print("Combining KNNSR data")
    self.views = {}
    ptids = self.data.get_ptids()
    tictoc = TicToc()
    tictoc.tic()
    self.valid_ptids = []
    for m in self.data.get_modalities():
        self.views[m] = {}
        for t in knnsr_data[m].keys():
            xy_measurement = self.data.getXY(
                ptids, m, target=t, split=False, impute_ptids=self.train_ptids
            )
            modality_ts = knnsr_data[m][t]
            if not isinstance(modality_ts, self.data.backend.DataFrame):
                modality_ts = self.data.backend.DataFrame(modality_ts)
            view = self.merge_view(xy_measurement, modality_ts)
            self.views[m][t] = view
            self.valid_ptids += view[TADPOLEData.PTID].values.tolist()
    if self.verbosity == 2:
        print("Merging KNNSR data took %f" % tictoc.tocvalue())
    self.valid_ptids = list(set(self.valid_ptids))
    if self.verbosity == 2:
        print("%i patients in set" % len(self.valid_ptids))
Example #5
def main(individuals_number, crossover, mutation):

    global individuals
    individuals = individuals_number
    global crossover_type
    crossover_type = crossover
    global mutation_type_random
    mutation_type_random = mutation

    print_variable_info()

    t = TicToc()
    t.tic()

    # Initialize a population
    population = initialize()

    # Calculate the fitness of each individual and store it in ordered
    # pairs of the form (5, [1,2,1,1,1,4,1,8,9,4,1])
    population = [(fitnes(i), i) for i in population]

    initial_population = sorted(population, reverse=True)

    # Evolves the population
    for i in range(generations):
        population = selection_and_reproduction(population)
        population = mutate(population)

    # Print the results
    print_results(initial_population, population)

    t.toc()
Example #6
async def oneloop(dut):
    """perf oneloop test"""

    t = TicToc()
    tb = settings()

    await reset(dut)

    clkobj = Clock(dut.clk, tb.period, 'us')
    cocotb.fork(clkobj.start())

    t.tic()
    k = 0
    for cycle in range(tb.dinArate * tb.npoints):
        await RisingEdge(dut.clk)
        if (cycle % tb.dinArate) == 0:
            k = k + 1
            dut.dinA <= 1
            if (k % 100 == 0):
                dut._log.info("Sim progress...{} %".format(
                    int(100 * float(k) / tb.npoints)))
        else:
            dut.dinA <= 0

        if (cycle % tb.dinBrate) == 0:
            dut.dinB <= 1
        else:
            dut.dinB <= 0

    t.toc()
    print(t.elapsed)
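This test reads t.elapsed after toc(); pytictoc's toc() stores the measured value in that attribute in addition to printing it. A minimal sketch of that pattern outside the simulator:

from pytictoc import TicToc

t = TicToc()
t.tic()
total = sum(range(10**6))  # placeholder for the simulation loop
t.toc()           # prints 'Elapsed time is x.xxxxxx seconds.'
print(t.elapsed)  # the same value, stored as an attribute by toc()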
Example #7
def main(individuals_number):

    global individuals
    individuals = individuals_number
    print_variable_info()

    t = TicToc()
    t.tic()

    # Initialize a population
    population = initialize()

    # Calculate the fitness of each individual and store it in ordered
    # pairs of the form (5, [1,2,1,1,1,4,1,8,9,4,1])
    population = [(calculate_fitness(i), i) for i in population]

    initial_population = sorted(population, key=lambda x: x[0])

    # Evolves the population
    for i in range(generations):
        print("→→", i)
        population = selection_and_reproduction(population)
        print("←←")

    # Print the results
    print_results(initial_population, population)

    t.toc()
Example #8
def run_tsp(difficulty, file_name):
    t = TicToc()
    all_cities = my_utils.generate_cities_dict(difficulty)
    cities_map = cities_visualitation.CitiesMap(all_cities)
    tsp = TravelingSalesmanProblem(all_cities, (0, ), ())
    t.tic()
    best_known_solution_value = 0.0

    if difficulty == 'big':
        best_known_solution_value = params.big_best_known_solution
    elif difficulty == 'medium':
        best_known_solution_value = params.medium_best_known_solution
    elif difficulty == 'small':
        best_known_solution_value = params.small_best_known_solution
    tsp_result, msg = search.astar_search(tsp,
                                          best_known_solution_value,
                                          display=True)
    print(msg)
    t.toc()
    cities_list = cities_visualitation.get_normalized_cities_identification(
        tsp_result.state)
    cities_list_evaluation = (-1) * tsp.value(tsp_result.state)
    cities_map.show_map(list(tsp_result.state))
    cities_map.save_plot(file_name)
    my_utils.save_results(file_name, cities_list_evaluation, cities_list, msg)
Example #9
def _throw(self):
    dice = None
    if self.method == 1:  # insert from keyboard
        flag = True
        while flag:
            padding = " " * (len(str(self.max_ind)) - len(str(self.ind)))
            str_input = "throw " + padding + str(self.ind) + "/" + str(
                self.max_ind) + ", result: "
            inp = input(str_input)
            if all(c in "0123456789" for c in inp) and inp != "":
                dice = int(inp) - 1
                if dice in range(self.base):
                    flag = False
                else:
                    print("invalid result, insert again")
            else:
                print("invalid result, insert again")
    if self.method == 2:  # dice result always 2
        dice = 1
        padding = " " * (len(str(self.max_ind)) - len(str(self.ind)))
        print("throw " + padding + str(self.ind) + "/" +
              str(self.max_ind) + ", result: " + str(dice + 1))
    if self.method == 3:  # throw pressing keyboard
        t = TicToc()
        t.tic()
        input("Press enter to throw")
        dice = int(t.tocvalue() * 10**8) % self.base
        padding = " " * (len(str(self.max_ind)) - len(str(self.ind)))
        print("throw " + padding + str(self.ind) + "/" +
              str(self.max_ind) + ", result: " + str(dice + 1))
    return dice
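Method 3 above derives a pseudo-random throw from the sub-second timing of a keypress. The idea in isolation, as a short sketch:

from pytictoc import TicToc

base = 6  # a six-sided die
t = TicToc()
t.tic()
input('Press enter to throw')
dice = int(t.tocvalue() * 10**8) % base  # keypress timing as entropy source
print('result:', dice + 1)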
Example #10
def main():

    t = TicToc()

    t.tic()

    with open("/home/ubuntu/sbmd/station", "rb") as f:
        fileobj = pickle.load(f)

    statit = fileobj[2][int(sys.argv[1])]
    statit = np.flip(statit)

    credfile = "/home/ubuntu/sbmd/dwh.cfg"

    config = configparser.ConfigParser()
    config.read(credfile)

    s3k = config['AWS']['KEY']
    s3ks = config['AWS']['SECRET']

    pool = mp.Pool(mp.cpu_count())

    [pool.apply(load_trains_all, args=(co, s3k, s3ks)) for co in statit]

    ind = sys.argv[1]
    logging.info(f"Gathered conn data succesfully with index {ind}")

    t.toc()
Example #11
def run_single():
    tictoc = TicToc()
    tictoc.tic()
    result = [eval(rs) for rs in range(CYCLES)]  # 'eval' is presumably a repo-local function, not the builtin
    elapsed = tictoc.tocvalue()
    print("Single: {}".format(elapsed))
    return result
Example #12
def main():
    #Create the Matrix to be tri-diagonalized
    #n = 12                       #Size of input matrix (nxn)
    #A = SymmMat(n)              #Input matrix. (Hermitian)

    #Hamiltonian of tV Model for L = 4, N = 2, ell=2
    A = -1.0 * np.array(
        ((0, 1, 0, 1, 0, 0), (1, 0, 1, 0, 1, 1), (0, 1, 0, 1, 0, 0),
         (1, 0, 1, 0, 1, 1), (0, 1, 0, 1, 0, 0), (0, 1, 0, 1, 0, 0)))

    #A = -1.0*np.array(((0,0,1,0),
    #                   (0,0,1,0),
    #                   (1,1,0,1),
    #                   (0,0,1,0)))

    #Change print format to decimal instead of scientific notation
    np.set_printoptions(formatter={'float_kind': '{:f}'.format})

    #Transform the matrix A to tridiagonal form via Lanczos
    T = LanczosTri(A)

    #Find Eigenvalues for Real, Symmetric, Tridiagonal Matrix via QR Iteration
    t2 = TicToc()
    t2.tic()
    lam = IPI(T, maxiter=50000)
    t2.toc()
    print("Eigs(T): ", lam)

    #Get eigenpairs of untransformed hermitian matrix A and time the process using blackbox function
    t1 = TicToc()
    t1.tic()
    e_gs_A, gs_A = eigsh(A, k=1, which='SA', maxiter=1000)
    #e_gs_A = NSI(A,maxiter=1000)
    t1.toc()
    print("Eigs(A): ", e_gs_A[0])
Example #14
def run_dt_solver(traces, subsetSize=config.DT_SUBSET_SIZE, txtFile="treeRepresentation.txt", strategy=config.DT_SAMPLING_STRATEGY, decreaseRate=config.DT_DECREASE_RATE,\
                  repetitionsInsideSampling=config.DT_REPETITIONS_INSIDE_SAMPLING, restartsOfSampling=config.DT_RESTARTS_OF_SAMPLING, q = None, encoder=DagSATEncoding,):

    #try:
    config.encoder = encoder
    separateProcess = q is not None
    ab = AtomBuilder()
    ab.getExamplesFromTraces(traces)
    samplingStrategy = strategy
    t = TicToc()
    t.tic()
    (atoms, atomTraceEvaluation) = ab.buildAtoms(sizeOfPSubset=subsetSize, strategy = samplingStrategy, sizeOfNSubset=subsetSize, probabilityDecreaseRate=decreaseRate,\
                  numRepetitionsInsideSampling=repetitionsInsideSampling, numRestartsOfSampling = restartsOfSampling)
    fb = DTFormulaBuilder(features=ab.atoms,
                          data=ab.getMatrixRepresentation(),
                          labels=ab.getLabels())
    fb.createASeparatingFormula()
    timePassed = t.tocvalue()
    atomsFile = "atoms.txt"
    treeTxtFile = txtFile
    ab.writeAtomsIntoFile(atomsFile)

    numberOfUsedPrimitives = fb.numberOfNodes()
    fb.tree_to_text_file(treeTxtFile)
    #    return (timePassed, len(atoms), numberOfUsedPrimitives)
    if separateProcess:
        q.put([timePassed, len(atoms), numberOfUsedPrimitives])
    else:
        return [timePassed, len(atoms), numberOfUsedPrimitives]
Example #15
def rws_test():
    size = 10000
    selection = 1000
    random_state = RandomState()
    probs = random_state.uniform(size=size)
    probs /= sum(probs)

    random_state.seed(5)

    def standard_method():
        t.tic()
        result = []
        cum_probs = np.cumsum(probs)
        for _ in range(selection):
            r = random_state.random()
            for i in range(size):
                if r <= cum_probs[i]:
                    result.append(i)
                    break
        return result

    def numpy_method():
        return random_state.choice(size, size=selection, replace=True, p=probs)

    t = TicToc()
    t.tic()
    result_standard_method = standard_method()
    elp_std = t.tocvalue(restart=True)
    result_numpy_method = numpy_method()
    elp_np = t.tocvalue()
    print('standard: {}'.format(elp_std))
    print('numpy: {}'.format(elp_np))
    print(result_numpy_method)
    print(result_standard_method)
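The test above restarts a single timer between the two measurements via tocvalue(restart=True). A minimal sketch of that idiom:

from pytictoc import TicToc

t = TicToc()
t.tic()
a = sum(i * i for i in range(10**5))      # first method under test
elapsed_first = t.tocvalue(restart=True)  # read and restart the clock
b = sum(range(10**5))                     # second method under test
elapsed_second = t.tocvalue()
print('first: {}'.format(elapsed_first))
print('second: {}'.format(elapsed_second))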
Example #16
def run_parallel():
    tictoc = TicToc()
    tictoc.tic()
    result = Parallel(n_jobs=JOBS,
                      prefer=None)(delayed(eval)(rs) for rs in range(CYCLES))
    elapsed = tictoc.tocvalue()
    print("Parallel: {}".format(elapsed))
    return result
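run_single (Example #11) and run_parallel rely on repo-level JOBS, CYCLES, and eval definitions. A self-contained sketch of the same serial-vs-joblib comparison, with a hypothetical work function and illustrative constants:

from joblib import Parallel, delayed
from pytictoc import TicToc

CYCLES = 100
JOBS = 2

def work(n):
    return sum(i * i for i in range(n * 100))

t = TicToc()
t.tic()
serial = [work(n) for n in range(CYCLES)]
print("Single: {}".format(t.tocvalue(restart=True)))
parallel = Parallel(n_jobs=JOBS)(delayed(work)(n) for n in range(CYCLES))
print("Parallel: {}".format(t.tocvalue()))
assert serial == parallel  # both orderings produce the same results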
Example #17
def run_dt_solver(
        traces,
        subsetSize=config.DT_SUBSET_SIZE,
        txtFile="treeRepresentation.txt",
        strategy=config.DT_SAMPLING_STRATEGY,
        decreaseRate=config.DT_DECREASE_RATE,
        repetitionsInsideSampling=config.DT_REPETITIONS_INSIDE_SAMPLING,
        restartsOfSampling=config.DT_RESTARTS_OF_SAMPLING,
        q=None,
        encoder=DagSATEncoding,
        misclassification=0,
        timeout=float("inf"),
        record_result=dict(),  # output; note: a mutable default dict is shared across calls
):

    #try:
    config.encoder = encoder
    separate_process = q is not None
    ab = AtomBuilder()
    ab.getExamplesFromTraces(traces)
    samplingStrategy = strategy
    t = TicToc()
    t.tic()
    (atoms, atomTraceEvaluation) = ab.buildAtoms(
        sizeOfPSubset=subsetSize,
        strategy=samplingStrategy,
        sizeOfNSubset=subsetSize,
        probabilityDecreaseRate=decreaseRate,
        numRepetitionsInsideSampling=repetitionsInsideSampling,
        numRestartsOfSampling=restartsOfSampling,
        timeout=timeout - t.tocvalue(),
    )

    fb = DTFormulaBuilder(
        features=ab.atoms,
        data=ab.getMatrixRepresentation(),
        labels=ab.getLabels(),
        stoppingVal=misclassification,
        # timeout=timeout-t.tocvalue(), #TODO
    )
    fb.createASeparatingFormula()
    timePassed = t.tocvalue()
    atomsFile = "atoms.txt"
    treeTxtFile = txtFile
    ab.writeAtomsIntoFile(atomsFile)

    numberOfUsedPrimitives = fb.numberOfNodes()
    fb.tree_to_text_file(treeTxtFile)
    fb.tree_to_dot_file("atoms.dot")
    record_result['formulaTree'] = fb.tree_to_DecisionTreeFormula()
    #    return (timePassed, len(atoms), numberOfUsedPrimitives)
    if separate_process:
        q.put([timePassed, len(atoms), numberOfUsedPrimitives])
    else:
        return [timePassed, len(atoms), numberOfUsedPrimitives]
Example #18
def rxn_csv(format, name, dataset_dir):
    """
    Load SBML, interpret as reaction graph, set features from csv.
    :param format:
    :param name:
    :param dataset_dir:
    :return:
    """
    if cfg.dataset.interpretation != 'reaction_graph' or cfg.dataset.format != 'SBML' \
            or cfg.dataset.node_attr_file is None:
        return None

    nw = Network.from_sbml(name)
    t = TicToc()
    t.tic()
    if cfg.dataset.max_node_degree is not None:
        nw = nw.limit_node_degrees(cfg.dataset.max_node_degree)
    if cfg.dataset.max_edge_degree is not None:
        nw = nw.limit_edge_degrees(cfg.dataset.max_edge_degree)
    nw = nw.limit_to_largest_component()  # returns igraph.Graph instead of
    # biomodels.Network
    nw = nw.bipartite_projection(which=Network.hyperedge_t)

    import csv
    csv_path = files('data').joinpath(cfg.dataset.node_attr_file)
    attrs: dict = {}
    with open(csv_path) as csvDataFile:
        csvReader = csv.reader(csvDataFile)
        # the number of features is the number of columns minus one for the rxn id
        for row in csvReader:
            # row[0] is the rxn id; all other columns are treated as features
            floats = [float(v) for v in row[1:]]
            attrs[row[0]] = floats

    for node in nw.vs:
        node['node_label'] = 0  # TODO GG breaks if no node_label is set
        if node['name'] in attrs:
            node['has_feature'] = True
            # for comparison experiments, consider the same subgraph but without node
            # features
            if cfg.dataset.use_node_feature is False:
                node['node_feature'] = torch.tensor([0]).to(torch.float)
            else:
                node['node_feature'] = torch.tensor(attrs[node['name']]).to(
                    torch.float)

    # consider the induced subgraph of those reactions with non-zero features
    # TODO select only nodes which have features set,
    #   or simply save their ids in the loop above
    nw_sub = nw.induced_subgraph(
        nw.vs.select(has_feature=True))  # list of node ids
    # nw_sub = nw

    dsG = deepsnap.graph.Graph(nw_sub.to_networkx())
    return [dsG]
Example #19
class ElapsedTime(object):
    """Measure the elapsed time between Tic and Toc"""
    def __init__(self):
        self.t = TicToc()
        self.t.tic()

    def elapsed(self):
        _elapsed = self.t.tocvalue()
        d = timedelta(seconds=_elapsed)
        logger.debug('< {} >'.format(d))
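Hypothetical usage of the ElapsedTime helper, with the module-level names it assumes (TicToc, timedelta, logger) made explicit:

import logging
from datetime import timedelta
from pytictoc import TicToc

logging.basicConfig(level=logging.DEBUG)
logger = logging.getLogger(__name__)

timer = ElapsedTime()  # tic() runs in __init__
sum(range(10**6))      # some work to measure
timer.elapsed()        # logs the elapsed time as a timedelta, e.g. '< 0:00:00.012345 >'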
Example #20
def wrapper(*args, **kwargs):
    setup_logging()
    load_dotenv(find_dotenv())
    logger.info("Loaded environment variables")
    logger.info(f"Starting {main.__name__}() in {sys.argv[0]}")
    t = TicToc()
    t.tic()
    main(*args, **kwargs)
    logger.info(f"Finished {main.__name__}() in "
                f"{timedelta(seconds=np.ceil(t.tocvalue()))}")
Example #21
def perform_cv(args):
    splitter = KFold(args.folds, random_state=0, shuffle=True)
    data = TADPOLEData(
        data=args.modality_path + args.data_file,
        modality_path=args.modality_path,
        modality_k=args.modality_k,
    )

    # Split the ptids into n folds
    ptids = data.get_ptids(min_time_points=2, target=args.target)
    print("Total patients in CV: %i" % len(ptids))
    t = TicToc()
    print("CV mode %s" % args.mode)
    sys.stdout.flush()
    predictions = []
    modality_ranks = []
    for fold, (train_index, test_index) in enumerate(splitter.split(ptids)):
        print("Fold %i/%i" % (fold, args.folds - 1))
        sys.stdout.flush()
        train_ptids = [ptids[i] for i in train_index]
        test_ptids = [ptids[i] for i in test_index]
        aug_data = AugmentedTADPOLEData(
            data, args.precomputed_path + "merged_%i.p" % fold, train_ptids)
        model = SMNSR(aug_data,
                      n_jobs=args.cpus,
                      forecast=False,
                      mode=args.mode)
        print("Fitting model")
        t.tic()
        model.fit(train_ptids)
        print("Trainig took %s seconds" % t.tocvalue())
        print("Performing forecasting")
        sys.stdout.flush()
        # Fetch known target values for the patients
        y = aug_data.getY(test_ptids, target=args.target)
        prediction_definition = y[[TADPOLEData.PTID, TADPOLEData.C_MONTH]]
        print("Patients with more than one measurement in fold %i: %i" %
              (fold, y[TADPOLEData.PTID].unique().shape[0]))

        y_hat = model.predict(prediction_definition, target=args.target)
        prediction = y.merge(
            y_hat,
            left_on=[TADPOLEData.PTID, TADPOLEData.C_MONTH],
            right_on=[TADPOLEData.PTID, TO],
        )
        predictions.append(prediction)
        modality_ranks.append(model.ranked_modalities)

    predictions = pd.concat(predictions, ignore_index=True)
    with open(args.output_path + args.result_file_name, "wb") as file:
        pickle.dump(predictions, file)
    evaluate_predictions(predictions, data)

    return predictions
Example #22
def compute(p, listCouples, distanceMin, listOfExclusion=None):
    """
    compute computes the distances between all Links provided, and return them
    :param p: the pybullet library engine
    :param listCouples: the list of couples [[bodyA, linkX], [bodyB, linkY]] between which the computation shall be
    done
    :param distanceMin: distance over which there is no computation
    :param listOfExclusion:
    Format = [ [["BodyA", "LinkA"],["BodyB", "LinkB"]] , ...]
    :return: a tuple containing the couple between which the distance was computed, the points (World coordinates)
    between which the distance was computed, and the distance computed
    """
    if listOfExclusion is not None and len(listOfExclusion) > 0:
        raise NotImplementedError

    t_forLoop = TicToc()
    distCouples = []
    lineCouples = []
    # [[[1, 0], [2, 0]]]
    t_forLoop.tic()
    for couple in listCouples:
        bodyA = couple[0][0]
        linkIndexA = couple[0][1]
        bodyB = couple[1][0]
        linkIndexB = couple[1][1]
        closestPointsVec = p.getClosestPoints(bodyA=bodyA,
                                              bodyB=bodyB,
                                              distance=distanceMin,
                                              linkIndexA=linkIndexA,
                                              linkIndexB=linkIndexB)
        distTemp = 99999
        fromTemp = (0, 0, 0)
        toTemp = (0, 0, 0)
        if len(closestPointsVec) > 0:
            for c in closestPointsVec:
                distCur = round(c[8])  # c[8] is the contact distance
                if distCur < distanceMin and distCur < distTemp:
                    # keep the closest point seen so far
                    distTemp = distCur
                    fromTemp = c[5]
                    toTemp = c[6]

        distCouples.append(distTemp)
        lineCouples.append([fromTemp, toTemp])

    #t_forLoop.toc("TICTOC -> Time elapsed for computing (For-loop) => ")

    assert len(listCouples) == len(
        distCouples), "The lengths of vectors are not equal"
    assert len(listCouples) == len(
        lineCouples), "The lengths of vectors are not equal"

    return [listCouples, lineCouples, distCouples]
Example #23
def get_finite_witness(f,
                       trace_length=5,
                       operators=[
                           encodingConstants.G, encodingConstants.F,
                           encodingConstants.LAND, encodingConstants.LOR,
                           encodingConstants.ENDS, encodingConstants.LNOT,
                           encodingConstants.BEFORE,
                           encodingConstants.STRICTLY_BEFORE,
                           encodingConstants.UNTIL
                       ],
                       wall_locations=[],
                       water_locations=None,
                       robot_position=None,
                       items_locations=None,
                       testing=False):

    t = TicToc()
    solvingTic = TicToc()
    t.tic()
    all_variables = [str(v) for v in f.getAllVariables()]

    fg = SATOfLTLEncoding(f,
                          trace_length,
                          0,
                          operators=None,
                          literals=all_variables,
                          wall_positions=wall_locations,
                          water_locations=water_locations,
                          robot_position=robot_position,
                          items_locations=items_locations,
                          testing=testing)
    fg.encodeFormula()
    stats_log.debug("creation time was {}".format(t.tocvalue()))
    solvingTic.tic()
    solverRes = fg.solver.check()
    stats_log.debug("solving time was {}".format(solvingTic.tocvalue()))

    if solverRes == sat:
        solverModel = fg.solver.model()

        (cex_trace, init_world, path) = fg.reconstructWitnessTrace(solverModel)
        return (cex_trace, init_world, path)
    elif solverRes == unknown:
        return constants.UNKNOWN_SOLVER_RES
    else:
        # logging.debug(solverRes)
        # pdb.set_trace()
        if constants.DEBUG_UNSAT_CORE is True:
            filename = "debug_files/unsatCore"
            os.makedirs(os.path.dirname(filename), exist_ok=True)
            with open(filename, "w") as unsat_core_file:
                unsat_core_file.write(str(fg.solver.unsat_core()))
        return "unsat"
Example #24
def upload_dcm_files(workers, accession_number=None):
    global R, instance_manifest, compress

    pool = Pool(processes=workers)
    initial_size = orthanc.size()
    instance_manifest = orthanc.do_get('instances')
    initial_count = len(instance_manifest)

    logging.info("-----------------------------------")
    logging.info("DCM Pre-Index File Uploader")
    logging.info("-----------------------------------")
    logging.info("  Workers:      {}".format(workers))

    if compress:
        logging.info("  J2K Compress: ON")

    t = TicToc()
    t.tic()

    if accession_number:
        # Upload single accession
        fps = Q.sget(accession_number)
        logging.info("  Upload:       Accession {}".format(accession_number))
    else:
        # Upload _all_
        fps = R.keys()
        logging.info("  Upload:       All data")

    pool.map(upload_dcm_file, fps, 20)

    toc_ = float(t.tocvalue())

    final_size = orthanc.size()
    instance_manifest = orthanc.do_get('instances')
    final_count = len(instance_manifest)

    count = final_count - initial_count
    upload_mb = final_size - initial_size

    n_per_sec = float(count) / toc_
    time40m = 40000000.0 / (n_per_sec or 1) / 60 / 60

    mb_per_sec = float(upload_mb) / toc_
    time15t = 15000000.0 / (mb_per_sec or 1) / 60 / 60

    logging.info("  Time:         {} sec".format(toc_))
    logging.info("  Num uploaded: {}".format(count))
    logging.info("  Num/sec:      {}".format(n_per_sec))
    logging.info("  Hrs for 40M:  {}".format(time40m))
    logging.info("  MB uploaded:  {}".format(upload_mb))
    logging.info("  MB/sec:       {}".format(mb_per_sec))
    logging.info("  Hrs for 15TB: {}".format(time15t))
    logging.info("-----------------------------------")
Example #25
    async def ocr_translate(request):
        tic_toc = TicToc()

        # Read the image from the web request
        tic_toc.tic()
        image_bytes = await request.read()
        image = imread(image_bytes, pilmode="RGB")
        tic_toc.toc("Read image in")

        results = await get_ocr_results(ctx.ocr, image, ctx.max_height)

        return web.json_response({"results": results})
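toc() accepts a custom message prefix, as used above; pytictoc appends the elapsed seconds to it. A tiny sketch:

from pytictoc import TicToc

t = TicToc()
t.tic()
data = bytes(10**6)     # stand-in for reading the request body
t.toc('Read image in')  # prints 'Read image in x.xxxxxx seconds.'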
Example #26
def _find_matches_in_database(db_value_finder, potential_values):
    matches = []
    tic_toc = TicToc()
    tic_toc.tic()
    print(f'Find potential candidates "{potential_values}" in database {db_value_finder.database}')
    try:
        matching_db_values = db_value_finder.find_similar_values_in_database(potential_values)
        matches = list(map(lambda v: v[0], matching_db_values))
    except Exception as e:
        print(f"!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! Error executing a query by the database finder. Error: {e}")

    tic_toc.toc()
    return matches
Example #27
def derotate_image_forloop(dateString):

    # obtain the list of files which have been dewarped, and need to de-rotated
    # according to the PA in the FITS headers
    asterism_frames_directory_retrieve = str(
        config["data_dirs"]["DIR_ASTERISM_DEWARP"])
    asterism_frames_pre_derot_names = list(
        glob.glob(os.path.join(asterism_frames_directory_retrieve, "*.fits")))

    for f in range(
            0, len(asterism_frames_pre_derot_names)):  # loop over filenames
        print('----------------------------------------')

        t = TicToc()  # create instance of timer
        t.tic()  # start timer

        # retrieve image
        image, header = fits.getdata(asterism_frames_pre_derot_names[f],
                                     0,
                                     header=True)
        #print(header)

        try:
            # find PA from header
            pa = float(header['LBT_PARA'])  # np.float is removed in NumPy >= 1.24

            # derotate
            image_derot = rot(im=image,
                              angle=-pa,
                              axis=[1024, 1024],
                              order=3,
                              pivot=False)  # axis coord here is just a dummy

            #print(angle)
            print('Derotating image ' +
                  str(os.path.basename(asterism_frames_pre_derot_names[f])) +
                  '...')

            # save
            fits.writeto(str(config["data_dirs"]["DIR_ASTERISM_DEROT"] + \
                         "derotated_" + \
                         os.path.basename(asterism_frames_pre_derot_names[f])),
                         image_derot, header, overwrite=True)

            t.toc()
            print('----------------------------------------')

        except KeyError:
            print("Frame " + str(os.path.basename(asterism_frames_pre_derot_names[f])) + \
                  " has no parallactic angle. Maybe header keyword being sought is wrong?")
Example #28
async def get_ocr_results(ocr, image, max_height):
    """Runs OCR on a given image and returns the recognized text,
    text as pinyin, position and dictionary translations."""
    tic_toc = TicToc()

    # Determine the ratio from detection coords to image coords.
    # Downscale if the height exceeds the max height.
    image_to_screen = [1, 1]
    if image.shape[0] > max_height:
        tic_toc.tic()
        orig_shape = image.shape
        image = resize(image, height=max_height)
        image_to_screen = [
            orig_shape[0] / image.shape[0], orig_shape[1] / image.shape[1]
        ]
        tic_toc.toc("Downscaled image in")

    # Detect sentences in image
    tic_toc.tic()
    print("Image shape:", image.shape, "dtype", image.dtype)
    result, _ = await _awaitable(ocr.run, image)
    sentences = [{"text": r[1], "position": r[0][:2]} for r in result.values()]
    tic_toc.toc("OCR in", restart=True)

    # Translate the detected sentences and store results
    results = []
    for sentence in sentences:
        orig_text = sentence["text"]
        if contains_chinese(orig_text):
            pinyin_text = get_pinyin(orig_text)
            translations = get_all_phrase_translations(orig_text)
            translation_text = "\n".join([
                "%s (%s): %s" % (t[0], get_pinyin(t[0]), ", ".join(t[1]))
                for t in translations
            ])

            position = (int(sentence["position"][0] * image_to_screen[0]),
                        int((sentence["position"][1] * image_to_screen[1]) +
                            20))

            results.append({
                "text": orig_text,
                "position": position,
                "pinyin_text": pinyin_text,
                "translation_text": translation_text
            })

    tic_toc.toc("Translate in")

    return results
Example #29
    def train_epoch(self, epoch, X_iter, verbose=0):
        t = TicToc()
        t.tic()  # pytictoc's tic() returns None; elapsed time is read via tocvalue()
        epoch_loss = 0.0
        num_batches = 5

        for idx, inputs in enumerate(X_iter):
            inputs = inputs['input']
            batch_size = inputs.shape[0]

            # Convert to tensors and move to device
            inputs = torch.tensor(inputs).to(self.device)

            # Train batch and get batch loss
            batch_loss = self.train_batch(inputs)
            # Update epoch loss given als batch loss
            epoch_loss += batch_loss

            if verbose != 0:
                print(
                    '[{}] Epoch: {} #batches {}/{}, loss: {:.8f}, learning rates: {:.6f}/{:.6f}'
                    .format(datetime.timedelta(seconds=int(t.tocvalue())),
                            epoch + 1, idx + 1, num_batches,
                            (batch_loss / ((idx + 1) * batch_size)),
                            self.encoder_lr, self.decoder_lr),
                    end='\r')

        return epoch_loss
Example #30
    def integrateFittedPeakIntensity(self, spaDict=""):
        inTimer = TicToc()
        inTimer.tic()

        if spaDict == "":
            spaDict = self.SPAResultRawDict

        for frameID, frameDict in spaDict.items():
            numberOfSpot = frameDict["numberOfSpot"]
            totalIntensity = 0

            for spotID in range(int(numberOfSpot)):
                spotDict = frameDict[str(spotID)]

                Am = spotDict["Am"]
                xCenter = spotDict["xCenter"]
                yCenter = spotDict["yCenter"]
                sigma_x = spotDict["sigma_x"]
                sigma_y = spotDict["sigma_y"]
                theta = spotDict["theta"]

                xUpperLimit = xCenter + self.halfCropRange
                xLowerLimit = xCenter - self.halfCropRange

                yUpperLimit = yCenter + self.halfCropRange
                yLowerLimit = yCenter - self.halfCropRange

                spotDict["integratedIntensity"], spotDict["integratedIntensityError"] = dblquad(
                    lambda x, y: fitFunc.gauss2D(x, y, Am, xCenter, yCenter, sigma_x, sigma_y, theta), yLowerLimit,
                    yUpperLimit, lambda x: xLowerLimit, lambda x: xUpperLimit)

                totalIntensity += spotDict["integratedIntensity"]
            frameDict["totalIntensity"] = totalIntensity

            if int(frameID) % 50 == 0:
                print("Integrating Frame ID:", frameID, end=', ')
                inTimer.toc()

        for frameID, frameDict in spaDict.items():
            numberOfSpot = frameDict["numberOfSpot"]
            for spotID in range(int(numberOfSpot)):
                spotDict = frameDict[str(spotID)]
                spotDict["integratedIntensityRatio"] = spotDict["integratedIntensity"] / frameDict["totalIntensity"]

        self.SPAResultRawDict = spaDict

        return self.SPAResultRawDict
Example #31
def run_solver(
    *,
    q=None,
    encoder=DagSATEncoding,
    **solver_args,
):
    separate_process = q is not None

    t = TicToc()
    t.tic()
    results = get_models(encoder=encoder, **solver_args)
    time_passed = t.tocvalue()

    if separate_process:
        q.put([results, time_passed])
    else:
        return [results, time_passed]
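Examples #14, #17, and #31 share a convention: when a multiprocessing queue is supplied, the results (together with the tocvalue() timing) are put on the queue instead of returned. A self-contained sketch of that pattern under that assumption:

import multiprocessing as mp
from pytictoc import TicToc

def timed_task(q=None):
    t = TicToc()
    t.tic()
    result = sum(range(10**5))  # placeholder workload
    if q is not None:
        q.put([result, t.tocvalue()])  # report back to the parent process
    else:
        return [result, t.tocvalue()]

if __name__ == '__main__':
    q = mp.Queue()
    p = mp.Process(target=timed_task, args=(q,))
    p.start()
    print(q.get())       # [result, time_passed] from the child
    p.join()
    print(timed_task())  # a direct call returns the list instead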