Example #1
def main3():
    logging.basicConfig(format='%(asctime)s : %(levelname)s : %(message)s', level=logging.INFO)
    
    path = '/home/gujt/work/data/Aquant2-ctree/0/'
    exe = Execute()
    exe.build_model()
    #exe.load_model()
    #exit()  
    count = 0
    for dir in os.walk(path):
        for file in dir[2]:
            print 'read.',file
            f = open(path+file)
            line1 = f.readline()
            start = now()
            while line1:
                if count > 10:
                    exit()
                count += 1
                line2 = f.readline()
                lines = [line1, line2]
                try:
                    exe.train_sentence(lines)
                except Exception:
                    print '[BUG]', line2
                finally:
                    line1 = f.readline()
                    print '[',count,']', (now()-start),'s'
                    start = now()
                            
            f.close()
    
    print 'train ok.'
    exe.save_weights('../data/weights.bin') 
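
Most snippets in this collection call now() without showing its import; Examples #12, #16 and #42 import it explicitly as "from time import time as now". A minimal import header this first snippet would additionally need (an assumption, since the original file's imports are not shown; Execute is project-specific):

import os
import logging
from time import time as now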
Example #2
    def distinct_patterns_over_windows(self, window_sizes=None, trials=None, save_couplings=False, remove_zeros=False):
        """
        Returns tuple: counts, entropies [, couplings]
        counts, entropies: arrays of size 2 x T x WSizes
        (0: empirical from model sample, 1: dynamics from learned model on sample)
        
        Parameters
        ----------
        window_sizes : Type, optional
            Description (default None)
        trials : Type, optional
            Description (default None)
        save_couplings : bool, optional
            Description (default False)
        remove_zeros : bool, optional
            Description (default False)
        
        Returns
        -------
        Value : Type
            Description
        """
        if window_sizes is None:
            window_sizes = [1]
        trials = trials or range(self._original_spikes.T)
        counts = np.zeros((2, len(trials), len(window_sizes)))
        #entropies = np.zeros((2, len(trials), len(window_sizes)))

        couplings = {}

        tot_learn_time = 0

        for ws, window_size in enumerate(window_sizes):
            couplings[window_size] = []

            for c, trial in enumerate(trials):
                hdlog.info("Trial %d | ws %d" % (trial, window_size))

                self._window_size = window_size

                t = now()
                self.fit(trials=[trial], remove_zeros=remove_zeros)
                diff = now() - t
                hdlog.info("[%1.3f min]" % (diff / 60.))
                tot_learn_time += diff

                if save_couplings:
                    couplings[window_size].append(self._learner.network.J.copy())

                self.chomp()
                #entropies[0, c, ws] = self._raw_patterns.entropy()
                counts[0, c, ws] = len(self._raw_patterns)
                #entropies[1, c, ws] = self._hopfield_patterns.entropy()
                counts[1, c, ws] = len(self._hopfield_patterns)

        hdlog.info("Total learn time: %1.3f mins" % (tot_learn_time / 60.))
        self._learn_time = tot_learn_time
        if save_couplings:
            return counts, couplings
        return counts
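
A hedged usage sketch for the method above (the receiver spikes_model and its construction are hypothetical; the array layout is taken from the code, where row 0 holds raw pattern counts and row 1 the counts under the learned Hopfield dynamics):

counts = spikes_model.distinct_patterns_over_windows(window_sizes=[1, 2, 4])
raw_ws1 = counts[0, :, 0]       # raw distinct patterns at window size 1, per trial
hopfield_ws1 = counts[1, :, 0]  # distinct patterns after Hopfield dynamics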
Example #3
File: cli.py Project: Yelp/pgctl
 def __locked_handle_service_change_state(
     self,
     state,
     service,
     start_time,
 ):
     """Handles a state change for a service and returns whether
     the state change was successful,
     """
     check_time = now()
     try:
         service.assert_()
     except PgctlUserMessage as error:
         state_change_result = self.__locked_handle_state_change_exception(
             state,
             service,
             error,
             start_time,
             check_time,
         )
         return state_change_result
     else:
         # TODO: debug() takes a lambda
         debug('loop: check_time %.3f', now() - check_time)
         if self._should_display_state(state):
             pgctl_print(state.strings.changed, service.name)
         service.service.message(state)
         return StateChangeResult.SUCCESS
Example #4
 def your_call_is_important_to_us():
     if pyclosure['t0']:
         how_long = " (%.0f secs now)" % (now() - pyclosure['t0'])
     else:
         pyclosure['t0'] = now()
         how_long = ""
     the_logger.info("Waiting for %s%s..." % (what, how_long))
Example #5
def deep_search(task, timeout=10*60):
    state = State.from_task(task)
    solution = Solution(task)
    solution.metrics = state.estimator.metrics
    deadline = now() + timeout

    # Python has no tail-recursion optimization and, moreover, limits the
    # recursion depth, so deep searches have to be written as explicit loops.

    while state:  # negating the initial state yields None, which ends the loop
        solution.steps += 1
        if not state.is_feasible:
            state = state.parent
            continue

        if state.is_all_covered():
            solution.store_result(state)
            state = state.negate()  # try to deselect the current set or rollback to the parent state
            continue

        if state.get_optimistic_cost() >= solution.best_cost:
            if now() > deadline:  # this branch is reached often enough to stop in time,
                                  # but not on every iteration, so the clock is not polled too often
                return solution
            state = state.negate()  # try to deselect the current set or rollback to the parent state
            continue

        state = state.next_child()

    solution.proven_as_optimal = True  # no timeout was hit, so the whole tree was explored
    return solution
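
The recursion-depth remark above is easy to verify; CPython's default limit is what forces the explicit loop:

import sys

print(sys.getrecursionlimit())  # typically 1000; a naively recursive deep_search would overflow this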
Example #6
 def write_func(ident):
     with lock.write_lock():
         enter_time = now()
         time.sleep(WORK_TIMES[ident % len(WORK_TIMES)])
         exit_time = now()
         start_stops.append((lock.WRITER, enter_time, exit_time))
         time.sleep(NAPPY_TIME)
Example #7
def count_or_show_by_generator(gen, count_enable, row_count, col_count):
    """
    gen: a generator returned by find_solutions_*
    count_enable: bool, only count solutions/configurations, don't show them
    """
    if count_enable:
        print('Calculating, please wait... (Control+C to cancel)')
        tm0 = now()
        try:
            solution_count = sum(1 for _ in gen)
        except KeyboardInterrupt:
            print('\nGoodbye')
            return
        delta = now() - tm0
        print('Number of Unique Configurations: %s' % solution_count)
        print('Running Time: %.4f seconds' % delta)
    else:
        print('Found Configurations:\n')
        for board in gen:
            print(format_board(board, row_count, col_count))
            try:
                input('Press Enter to see the next, Control+C to exit')
            except KeyboardInterrupt:
                print('\nGoodbye')
                break
Example #8
def main():
    global verbose, f, db, co, ldap, auth, start

    parser = argparse.ArgumentParser()
    parser.add_argument('-v', "--verbose", action="count", default=0)
    parser.add_argument('-m', "--mail-file")
    parser.add_argument('-s', "--spread", default=ldapconf('MAIL', 'spread', None))
    parser.add_argument('-i', "--ignore-size", dest="max_change", action="store_const", const=100)
    parser.add_argument('-a', "--no-auth-data", dest="auth", action="store_false", default=True)
    args = parser.parse_args()

    verbose = args.verbose
    auth = args.auth

    db = Factory.get('Database')()
    co = Factory.get('Constants')(db)

    start = now()
    curr = now()

    if verbose:
        logger.debug("Loading the EmailLDAP module...")
    ldap = Factory.get('EmailLDAP')(db)
    if verbose:
        logger.debug("  done in %d sec." % (now() - curr))

    spread = args.spread
    if spread is not None:
        spread = map_spreads(spread, int)

    f = ldif_outfile('MAIL', args.mail_file, max_change=args.max_change)
    get_data(spread)
    end_ldif_outfile('MAIL', f)
Example #9
	def __init__(sz, start_position, r_winch, c, looptime, armflag):
			
		# Lengths set the initial configuration of the system.
		# Lengths: array 1x4 [L0, L1, L2, L3]
		# Create the spiral zipper object.
		# Physical notes:
		# sz ckbot direction: CCW subtracts tether and CW adds/releases tether.
		sz.startposition = start_position
		sz.start_detected = False
		sz.entered = False
		sz.tether_subtract_CCW = True
		sz.looptime = looptime  #control speed to be enforced
		sz.timeold = 0
		sz.timeold2 = 0

		sz.goal_prev = start_position
		sz.goal_start = now()
		sz.goal_stop = now()
		sz.target_achieved = [0, 0]
		sz.target_reached = False
		sz.P = 10*np.eye(2)   # System covariance: trust in the initial process-model conditions
		sz.Q = 0.01*np.eye(2) # System noise covariance: what we think the initial process noise is
		sz.R = 1*np.eye(2)    # Sensor noise covariance: what we think the sensor noise is

		got_port = [1, 1, 1]

		ser = [0, 0, 0]

		try:
			ser[0] = serial.Serial('/dev/ttyACM0', 57600)
		except Exception, e:
			print "No Arduino on ACM0"
			print str(e)
			got_port[0] = 0
Example #10
def compare_find_solutions_time():
    """
    run and compare the running time of 3 implementations of find_solutions
    """

    row_count, col_count, count_by_symbol = input_problem()

    time_list = []

    func_list = (
        find_solutions_s,
        find_solutions_r,
        find_solutions_q,
        find_solutions_s,
        find_solutions_r,
        find_solutions_q,
    )

    for func in func_list:  # pylint!
        tm0 = now()
        for _ in func(row_count, col_count, count_by_symbol):
            pass
        delta = now() - tm0
        time_list.append(delta)
        print('%.4f seconds   (%s)' % (delta, func))
Example #11
def pb47():
    n = 0
    n1 = []
    n2 = []
    n3 = []
    print(now())
    primes = simpleseive(200000)
    print(now())
    while n < 200000:
        m = n
        n4 = []
        for p in primes:
            #print(m,p)
            if p > m:
                break
            if m % p == 0:
                while m % p == 0:
                    m = m // p  # integer division (m / p would produce a float in Python 3)
                    #print(m,p)
                n4.append(p)
                if len(n4) == 5:
                    break
        if len(n4) == len(n3) == len(n2) == len(n1) == 4:
            print(n-3, n-2, n-1, n, n*(n-1)*(n-2)*(n-3))
            return
        #print(n1,n2,n3,n4)
        n1, n2, n3 = n2, n3, n4  # shift the window; copies are unnecessary since the names are rebound
        n += 1
    print(now())
Example #12
def wait_net_service(host, port, timeout=None):
    import socket
    from time import sleep, time as now
    log('Waiting for web server: ' + host + ':' + str(port))

    s = socket.socket()
    if timeout:
        end = now() + timeout

    while True:
        try:
            if timeout:
                if now() > end:
                    log('ERROR! Network sockets connect waiting timeout!')
                    return False

            s.connect((host, port))

        except socket.timeout:
            sleep(0.1)
        except socket.error:
            sleep(0.1)

        else:
            s.close()
            return True
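
A usage sketch for wait_net_service above (host and port are placeholders, and the log() helper it calls is assumed to be defined):

if wait_net_service('127.0.0.1', 8080, timeout=30):
    print('server is up')
else:
    print('timed out waiting for the server')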
Example #13
def change_status(target, status):
    target_name = target.getName()
    old_status = None
    if target_name in busy_players:
        value = busy_players[target_name]
        time_left = value[1] - now()
        if time_left > 0:
            msg(target, "&cYou must wait %.2fs until you can change your status" % time_left)
            return
        old_status = value[0]

    if old_status is status:
        if status is True:
            msg(target, "&cYou are already SUPER busy")
        elif status is False:
            msg(target, "&cYou are already busy")
        else:
            msg(target, "&cYou weren't busy yet")
        return

    busy_players[target_name] = (status, now() + busy_status_change_timeout)
    if status is True:
        broadcast(None, target.getDisplayName() + " &7is now SUPER busy")
    elif status is False:
        broadcast(None, target.getDisplayName() + " &7is now busy")
    else:
        broadcast(None, target.getDisplayName() + " &7is not busy anymore")
Example #14
 def run(self):
     while 1:
         sleep(self.expected - now())
         if now() > self.expected:
             message = "WARNING: Message not processed for {0} seconds, giving up\n".format(self.timeout)
             sys.stderr.write(message)
             os.kill(os.getpid(), 9)
Example #15
def main():
    path = input("path:")
    all_size = {}  
    total_file = 0 
    total_delete = 0 
    start = now()  
    for file in os.listdir(path):  
        total_file += 1  
        real_path = os.path.join(path, file)  
        if os.path.isfile(real_path):
            size = os.stat(real_path).st_size  
            name_and_md5 = [real_path, '']  
            if size in all_size.keys():  
                new_md5 = getmd5(real_path)  
                if all_size[size][1] == '':  
                    all_size[size][1] = getmd5(all_size[size][0])  
                if new_md5 in all_size[size]:  
                    print 'Delete:', file
                    total_delete += 1
                else:  
                    all_size[size].append(new_md5)  
            else:  
                all_size[size] = name_and_md5  
    end = now()  
    time_last = end - start  
    print 'Total files:', total_file
    print 'Deleted:', total_delete
    print 'Time cost:', time_last, 'seconds'
Example #16
def wait_net_service(server, port, timeout=None):
    """ Wait for network service to appear
        @param timeout: in seconds, if None or 0 wait forever
        @return: True or False, if timeout is None may return only True or
                 throw unhandled network exception
    """

    s = socket.socket()
    if timeout:
        from time import time as now
        # time module is needed to calc timeout shared between two exceptions
        end = now() + timeout

    while True:
        try:
            if timeout:
                next_timeout = end - now()
                if next_timeout < 0:
                    return False
                else:
                    s.settimeout(next_timeout)

            s.connect((server, port))

        except socket.timeout, err:
            # this exception occurs only if timeout is set
            if timeout:
                return False

        except socket.error, err:
            # catch timeout exception from underlying network library
            # this one is different from socket.timeout
            pass

        else:
            # restored from the otherwise-identical Example #17:
            # close the socket and report success once connect() succeeds
            s.close()
            return True
Example #17
def _wait_net_service(server, port, timeout=None):
    """ Wait for network service to appear
        @param timeout: in seconds, if None or 0 wait forever
        @return: True or False, if timeout is None may return only True or
                 throw unhandled network exception
    """
    s = socket.socket()
    if timeout:
        end = now() + timeout

    while True:
        try:
            if timeout:
                next_timeout = end - now()
                if next_timeout < 0:
                    return False
                else:
                    s.settimeout(next_timeout)

            s.connect((server, port))

        except socket.timeout:
            # this exception occurs only if timeout is set
            if timeout:
                return False

        except socket.error:
            pass
        else:
            s.close()
            return True
Example #18
def main():
    path = u'D:\\test2'
    all_md5 = {}
    all_size = {}
    total_file = 0
    total_delete = 0
    start = now()
    print("start")
    for file in os.listdir(path):
        total_file += 1
        real_path = os.path.join(path, file)
        if os.path.isfile(real_path):
            size = os.stat(real_path).st_size
            name_and_md5 = [real_path, '']
            if size in all_size.keys():
                print('found')
                new_md5 = getmd5(real_path)
                if all_size[size][1] == '':
                    all_size[size][1] = getmd5(all_size[size][0])
                if new_md5 in all_size[size]:
                    print("DELETE:" + file)
                    os.remove(path+'\\'+file)
                    total_delete += 1
                else:
                    all_size[size].append(new_md5)
            else:
                all_size[size] = name_and_md5
    end = now()
    time_last = end - start
    print('TOTAL NUMBER:', total_file)
    print('DELETED NUMBER:', total_delete)
    print('TIME COST:', time_last, 'SEC')
Example #19
    def __init__(self, test_data_fn):
        start = now()
        if os.path.isfile(test_data_fn):
            print("Reading test data...")
            self.prepop_rows, self.idens, self.props, self.rows = \
                pickle.load(open(test_data_fn, 'rb'))
        else:
            print("Generating test data...")
            random.seed(4)  # 4 chosen by fair dice roll.  Guaranteed to be random
            forms = [gen_random_form() for x in range(NUM_FORMS)]
            # FIXME:  don't use random.choice!!! Super duper slow
            self.prepop_rows = flatten(_rows_from_tufo(gen_random_tufo(random.choice(forms)))
                                       for x in range(NUM_PREEXISTING_TUFOS))
            tufos = [gen_random_tufo(random.choice(forms)) for x in range(NUM_TUFOS)]
            self.idens = [t[0] for t in tufos]
            self.props = [get_random_keyval(t[1]) for t in tufos]
            random.shuffle(self.idens)
            random.shuffle(self.props)

            self.rows = flatten(_rows_from_tufo(x) for x in tufos)
            pickle.dump((self.prepop_rows, self.idens, self.props, self.rows),
                        open(test_data_fn, 'wb'))

        print("Test data generation took: %.2f" % (now() - start))
        print('addRows: # Tufos:%8d, # Rows: %8d' % (NUM_TUFOS, len(self.rows)))
        print('len count: small:%d, medium:%d, large:%d, huge:%d' %
              (small_count, medium_count, large_count, huge_count))
Example #20
def main():
    path = raw_input("Path: ")
    all_size = {}
    total_file = 0
    total_delete = 0
    start = now()

    for file in os.listdir(path):
        total_file += 1
        real_path = os.path.join(path, file)
        if os.path.isfile(real_path):
            filesize = os.stat(real_path).st_size
            name_md5 = [real_path, '']
            if filesize in all_size.keys():
                new_md5 = getmd5(real_path)
                if all_size[filesize][1] == '':
                    all_size[filesize][1] = getmd5(all_size[filesize][0])
                if new_md5 in all_size[filesize]:
                    total_delete += 1
                    os.remove(real_path)
                    print 'Delete ', file
                else:
                    all_size[filesize].append(new_md5)
            else:
                all_size[filesize] = name_md5
    end = now()
    time_last = end - start

    print 'File total: ', total_file
    print 'Del  total: ', total_delete
    print 'Time consuming: ', time_last, 's'
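
Examples #15, #18 and #20 all rely on a getmd5() helper that is never shown; a minimal sketch (chunked reading is an assumption, chosen so large files are not loaded into memory at once):

import hashlib

def getmd5(filename):
    md5 = hashlib.md5()
    with open(filename, 'rb') as f:
        for chunk in iter(lambda: f.read(8192), b''):
            md5.update(chunk)
    return md5.hexdigest()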
Example #21
 def any_unknown(self, message, *args, **kwargs):
     found = False
     key = (message, repr(args), repr(kwargs))
     try:
         value = self._cache.peek(key)
     except KeyError:
         pass
     else:
         found = True
         if not self._cache.hasExpired(key):
             raise StopIteration(value)
     if self._backoffStarted:
         if self._backoffStarted + self._backoffTimeout < now():
             self._backoffStarted = None
         elif found:
             raise StopIteration(value)
         else:
             raise BackoffException()
     try:
         value = yield self.any.unknown(message, *args, **kwargs)
         self._cache[key] = value
     except (SystemExit, KeyboardInterrupt, AssertionError):
         raise
     except Exception, e:
         if self._backoffTimeout and isinstance(e, TimeoutException):
             self._backoffStarted = now()
             if not (self._returnCachedValueInCaseOfException and found):
                 raise BackoffException()
         if not (self._returnCachedValueInCaseOfException and found):
             raise
Example #22
def make_the_timeseries():
    print('Read and filter data')
    # Load data from the star
    time, flux = kic.getdata(ID, kernelsize, quarter, sigma, noisecut)
    #time = time[:((len(time)+1)//2)]
    #flux = flux[:((len(flux)+1)//2)]
    assert len(time) == len(flux)

    # Calculate and print Nyquist-frequency
    dt = np.diff(time)
    nyquist = 1 / (2 * np.median(dt))
    print('Nyquist frequency: %s µHz' % str(nyquist))

    # Plot the time series
    """
    plt.figure()
    plt.plot(time, flux, 'k.')
    plt.xlabel(r'Relative time [Ms]')
    plt.ylabel(r'Photometry')
    plt.xlim([np.amin(time), np.amax(time)])
    plt.savefig('%s_time.pdf' % (starname), bbox_inches='tight')
    """

    # Save data in textfile
    print('Write %d entries to %s' % (len(time), ts))
    timerStart = now()

    savenpz(ts, np.transpose([time, flux]))

    elapsedTime = now() - timerStart
    print('Took %.2f s' % elapsedTime)
Example #23
def generate_test_cases(cases=5):
    logfile = os.path.join(os.path.expanduser("~"), "sn", "py", "spikes", "qn", "bigcases.h5")
    h5 = h5py.File(logfile, "w")
    t = 0.0

    for i in range(cases):
        n = 2000 + 0 * np.random.randint(10, 1000)
        a = np.random.randn(n)
        x = np.random.randn(n) * 3
        A = power(n, 2)

        args = (A, a)
        lam = np.random.rand() * 0.01
        m = 8
        maxit = 100

        tic = now()
        res = lbfgs.bfgsl1(f, x, lam=lam, args=args, debug=False, log=None)
        t += now() - tic

        subgroup = h5.create_group("case-%03d" % i)
        subgroup.create_dataset("A", data=A)
        subgroup.create_dataset("a", data=a)
        subgroup.create_dataset("x", data=x)
        subgroup.create_dataset("r", data=res)
        subgroup.create_dataset("lam", data=np.array([lam]))

    print "Computation time: ", t
    h5.close()
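
A readback sketch for the HDF5 file written above (assuming the same logfile path):

import h5py

with h5py.File(logfile, 'r') as h5:
    for name, grp in h5.items():   # 'case-000', 'case-001', ...
        A = grp['A'][...]
        a = grp['a'][...]
        lam = grp['lam'][0]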
Example #24
    def _wait_until_ready(self, timeout=5, raise_if_not_ready=True):
        # we're only ready when it's possible to connect to the CrossBar
        # over TCP - so let's just try it.
        end = now() + timeout
        ready = False

        while not ready:
            timeout = end - now()
            if timeout < 0:
                if raise_if_not_ready:
                    raise ConnectionError(
                        'Failed to connect to CrossBar over {}: {}:{}'.format(
                            self.ipv, self.host, self.port)
                    )
                else:
                    return ready

            try:
                self.try_connection()
            except ConnectionError:
                pass
            else:
                ready = True

        return ready
Example #25
    def __locked_change_state(self, state):
        """the critical section of __change_state"""
        pgctl_print(state.strings.changing, commafy(self.service_names))
        services = [state(service) for service in self.services]
        failed = []
        start_time = now()
        while services:
            for service in services:
                try:
                    service.change()
                except Unsupervised:
                    pass  # handled in state assertion, below
            for service in tuple(services):
                check_time = now()
                try:
                    service.assert_()
                except PgctlUserMessage as error:
                    if timeout(service.name, error, state.strings.change, start_time, service.get_timeout(), check_time):
                        services.remove(service)
                        failed.append(service.name)
                else:
                    # TODO: debug() takes a lambda
                    debug('loop: check_time %.3f', now() - check_time)
                    pgctl_print(state.strings.changed, service.name)
                    service.service.message(state)
                    services.remove(service)

            time.sleep(self.poll)

        return failed
Example #26
def find_best_lin(train,test):
	best_r=0
	T=[]
	for pen in [0.1,0.5,1,10,50]:
		start=now()
		clf=svm.LinearSVC(C=pen,class_weight="auto")
		clf.fit(train[:,2:18],train[:,1])
		finish=now()
		T.append(finish-start)
		scores=clf.predict(test[:,2:18])
		print pen
		scaled_score=scores
		# for i in range(len(scores)):
		# 	scaled_score[i]=(scores[i]-min(scores))/(max(scores)-min(scores))

		fpr, tpr, thresholds = roc_curve(test[:,1], scaled_score)
		roc_auc = auc(fpr, tpr)
		print roc_auc
		r_score=clf.score(test[:,2:18],test[:,1])
		if best_r<roc_auc:
			best_clf=clf
			best_r=roc_auc
			best_pen=pen
			best_scores=scaled_score
	return best_pen,best_r,best_clf,best_scores,T
Example #27
	def get_CR_delta(sz):
		#compare previous value and current value for modules
		# returns the delta in radians; assumes CCW is positive, with CCW defined facing the servo front
		#get current readings
		#Assume the current value of the sensor is stored and the previous value is correct.
		timeelapsed = now() - sz.timeold
		diff = [0.0,0.0,0.0,0.0]
		#diff = [0.0, 0.0]
		for i in range(4):
			diff[i] = sz.theta_curr[i]-sz.theta_prev[i]
			if abs(diff[i])>sz.limit_t: #calculate valid delta and switch signs to be correct direction
				if sz.theta_curr[i] >= sz.theta_prev[i]:# diff is +ve, so the solution should be -ve
					diff[i] = ((sz.max_t-sz.theta_curr[i])+(sz.theta_prev[i]-sz.min_t)+1)*(-1)
				else: # diff is negative, therefore the solution should be positive
					diff[i] = ((sz.max_t-sz.theta_prev[i])+(sz.theta_curr[i]-sz.min_t)+1)
			else:  #tether length update based on IMU only

				diff[i] = diff[i] #valid calculation

		#print "motor position change "
		#print np.array(diff)#*sz.radian_const
		
		sz.timeold = now()
		if timeelapsed < .2:
			return np.array(diff)*sz.radian_const # convert ticks to radians
		else:
			return [0.0,0.0,0.0,0.0]
Example #28
def oneclass_best_rbf(train_oneclass,test):
	best_r=0
	best_gamma=0
	T=[]
	S=[]
	#for para in [2e-6,2e-3,0.02,0.05,0.1,0.2,1]:
	#for para in [2e-12,2e-8,2e-6,2e-4,0.02]:
	for para in [2e-6]:
		clf_oneclass=svm.OneClassSVM(kernel='rbf',gamma=para)
		start=now()
		clf_oneclass.fit(train_oneclass[:,2:18])
		finish=now()
		T.append(finish-start)
		print para
		scores=clf_oneclass.decision_function(test[:,2:18])
		fpr, tpr, thresholds = roc_curve(test[:,1], scores)
		roc_auc = auc(fpr, tpr)
		print roc_auc
		#score=0.0
		#for i in range(np.shape(test)[0]):
			#if ((predict[i]==-1.0)&(test[i,1]==0)) or ((predict[i]==1.0)&(test[i,1]==1)):
				#score=score+1.0

		#score=score/np.shape(test)[0]
		#S.append(score)

		if best_r<roc_auc:
			best_r=roc_auc
			best_gamma=para
			best_clf=clf_oneclass

	return best_gamma,best_r,T,best_clf,S
Example #29
	def writeSynFile(self, altIndexList):
		"""
		Build .syn file
		"""
		if not altIndexList:
			return

		log.info("Sorting %s synonyms..." % len(altIndexList))
		t0 = now()

		altIndexList.sort(
			key=lambda x: sortKeyBytes(x[0])
		)
		# 28 seconds with old sort key (converted from custom cmp)
		# 0.63 seconds with my new sort key
		# 0.20 seconds without key function (default sort)

		log.info("Sorting %s synonyms took %.2f seconds" % (
			len(altIndexList),
			now() - t0,
		))
		log.info("Writing %s synonyms..." % len(altIndexList))
		t0 = now()
		with open(self._filename+".syn", "wb") as synFile:
			synFile.write(b"".join([
				b_alt + b"\x00" + intToBinStr(wordIndex, 4)
				for b_alt, wordIndex in altIndexList
			]))
		log.info("Writing %s synonyms took %.2f seconds" % (
			len(altIndexList),
			now() - t0,
		))
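
The "converted from custom cmp" note above refers to wrapping an old-style cmp function as a key, e.g. with functools.cmp_to_key; that wrapper allocates an object per element, which is one reason the purpose-built key was so much faster. A small illustrative sketch (the cmp function here is hypothetical):

from functools import cmp_to_key

items = ['b', 'a', 'c']
items.sort(key=cmp_to_key(lambda a, b: (a > b) - (a < b)))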
Example #30
 def wrapped(*args, **kwargs):
     start = now()
     result = func(*args, **kwargs)
     end = now()
     ms_delta = (end - start) * 1000
     print "Execution time: {0}ms".format(ms_delta)
     return result
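
wrapped() above reads like the inner function of a timing decorator; a self-contained Python 3 sketch of the full pattern (the decorator name timed is an assumption):

from time import time as now

def timed(func):
    def wrapped(*args, **kwargs):
        start = now()
        result = func(*args, **kwargs)
        ms_delta = (now() - start) * 1000
        print("Execution time: {0}ms".format(ms_delta))
        return result
    return wrapped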
Example #31
    def __init__(self,
                 numbering_mode=None,
                 pin_rs=None,
                 pin_rw=None,
                 pin_e=None,
                 pins_data=None,
                 pin_backlight=None,
                 backlight_mode='active_low',
                 backlight_enabled=True,
                 cols=20,
                 rows=4,
                 dotsize=8,
                 charmap='A02',
                 auto_linebreaks=True,
                 compat_mode=False):
        """
        Character LCD controller.

        The default pin numbers are based on the BOARD numbering scheme (1-26).

        You can save 1 pin by not using RW. Set ``pin_rw`` to ``None`` if you
        want this.

        :param pin_rs: Pin for register select (RS). Default: ``15``.
        :type pin_rs: int
        :param pin_rw: Pin for selecting read or write mode (R/W). Set this to
            ``None`` for read only mode. Default: ``18``.
        :type pin_rw: int
        :param pin_e: Pin to start data read or write (E). Default: ``16``.
        :type pin_e: int
        :param pins_data: List of data bus pins in 8 bit mode (DB0-DB7) or in 4
            bit mode (DB4-DB7) in ascending order. Default: ``[21, 22, 23, 24]``.
        :type pins_data: list of int
        :param pin_backlight: Pin for controlling backlight on/off. Set this to
            ``None`` for no backlight control. Default: ``None``.
        :type pin_backlight: int
        :param backlight_mode: Set this to either ``active_high`` or ``active_low``
            to configure the operating control for the backlight. Has no effect if
            pin_backlight is ``None``
        :type backlight_mode: str
        :param backlight_enabled: Whether the backlight is enabled initially.
            Default: ``True``. Has no effect if pin_backlight is ``None``
        :type backlight_enabled: bool
        :param numbering_mode: Which scheme to use for numbering of the GPIO pins,
            either ``GPIO.BOARD`` or ``GPIO.BCM``. Default: ``GPIO.BOARD`` (1-26).
        :type numbering_mode: int
        :param rows: Number of display rows (usually 1, 2 or 4). Default: ``4``.
        :type rows: int
        :param cols: Number of columns per row (usually 16 or 20). Default ``20``.
        :type cols: int
        :param dotsize: Some 1 line displays allow a font height of 10px.
            Allowed: ``8`` or ``10``. Default: ``8``.
        :type dotsize: int
        :param charmap: The character map used. Depends on your LCD. This must
            be either ``A00`` or ``A02``. Default: ``A02``.
        :type charmap: str
        :param auto_linebreaks: Whether or not to automatically insert line
            breaks. Default: ``True``.
        :type auto_linebreaks: bool
        :param compat_mode: Whether to run additional checks to support older LCDs
            that may not run at the reference clock (or keep up with it).
        :type compat_mode: bool

        """
        # Configure compatibility mode
        self.compat_mode = compat_mode
        if compat_mode:
            self.last_send_event = now()

        # Set attributes
        if numbering_mode == GPIO.BCM or numbering_mode == GPIO.BOARD:
            self.numbering_mode = numbering_mode
        else:
            raise ValueError(
                'Invalid GPIO numbering mode: numbering_mode=%s, '
                'must be either GPIO.BOARD or GPIO.BCM.\n'
                'See https://gist.github.com/dbrgn/77d984a822bfc9fddc844f67016d0f7e '
                'for more details.' % numbering_mode)
        if pin_rs is None:
            raise ValueError('pin_rs is not defined.')
        if pin_e is None:
            raise ValueError('pin_e is not defined.')

        if len(pins_data) == 4:  # 4 bit mode
            self.data_bus_mode = c.LCD_4BITMODE
            block1 = [None] * 4
        elif len(pins_data) == 8:  # 8 bit mode
            self.data_bus_mode = c.LCD_8BITMODE
            block1 = pins_data[:4]
        else:
            raise ValueError('There should be exactly 4 or 8 data pins.')
        block2 = pins_data[-4:]
        self.pins = PinConfig(rs=pin_rs,
                              rw=pin_rw,
                              e=pin_e,
                              d0=block1[0],
                              d1=block1[1],
                              d2=block1[2],
                              d3=block1[3],
                              d4=block2[0],
                              d5=block2[1],
                              d6=block2[2],
                              d7=block2[3],
                              backlight=pin_backlight,
                              mode=numbering_mode)
        self.backlight_mode = backlight_mode

        # Call superclass
        super(CharLCD, self).__init__(cols,
                                      rows,
                                      dotsize,
                                      charmap=charmap,
                                      auto_linebreaks=auto_linebreaks)

        # Set backlight status
        if pin_backlight is not None:
            self.backlight_enabled = backlight_enabled
Example #32
False       incremental     4       0.790
False       incremental     5       3.087
switched to benchmarking search function
False       incremental     3       0.035
False       incremental     4       0.698
False       incremental     5       2.558
'''

if __name__ == '__main__':
    difficultPosition = '''
r . b q . . . r
p p p p n k p p
. . n b . p . .
. . . . p . . .
. . P . N . . .
P . . P B N . .
. P . . P P P P
R . . Q K B . R'''
    test_board = [line for line in difficultPosition.replace(' ', '').split()]
    test_board.reverse()
    startTime = now()
    # main([test_board], 50, 0)

    _possible_moves = moves(test_board, True)
    _possible_moves.sort(key=lambda x: x[1], reverse=True)
    bestMove = search(_possible_moves, True, 2)
    print('{:.3f}'.format(now() - startTime))
    print(total_moves)
    print('\n'.join(' '.join(piece for piece in row)
                    for row in bestMove.__reversed__()) + '\n')
Example #33
    def writeGeneral(self) -> None:
        """
		Build StarDict dictionary in general case.
		Every item definition may consist of an arbitrary number of articles.
		sametypesequence option is not used.
		"""
        dictMark = 0
        altIndexList = []  # list of tuples (b"alternate", entryIndex)

        dictFile = open(self._filename + ".dict", "wb")
        idxFile = open(self._filename + ".idx", "wb")
        indexFileSize = 0

        t0 = now()
        wordCount = 0
        defiFormatCounter = Counter()
        if not isdir(self._resDir):
            os.mkdir(self._resDir)

        entryIndex = -1
        while True:
            entry = yield
            if entry is None:
                break
            if entry.isData():
                entry.save(self._resDir)
                continue
            entryIndex += 1

            entry.detectDefiFormat()  # call no more than once
            defiFormat = entry.defiFormat
            defiFormatCounter[defiFormat] += 1
            if defiFormat not in ("m", "h", "x"):
                log.error(f"invalid defiFormat={defiFormat}, using 'm'")
                defiFormat = "m"

            words = entry.l_word  # list of strs
            word = words[0]  # str
            defi = self.fixDefi(entry.defi, defiFormat)
            # defi is str

            for alt in words[1:]:
                altIndexList.append((alt.encode("utf-8"), entryIndex))

            b_dictBlock = (defiFormat + defi).encode("utf-8") + b"\x00"
            dictFile.write(b_dictBlock)
            blockLen = len(b_dictBlock)

            b_idxBlock = word.encode("utf-8") + b"\x00" + \
             uint32ToBytes(dictMark) + \
             uint32ToBytes(blockLen)
            idxFile.write(b_idxBlock)

            dictMark += blockLen
            indexFileSize += len(b_idxBlock)

            wordCount += 1

        dictFile.close()
        idxFile.close()
        if not os.listdir(self._resDir):
            os.rmdir(self._resDir)
        log.info(f"Writing dict file took {now()-t0:.2f} seconds")
        log.debug("defiFormatsCount = " +
                  pformat(defiFormatCounter.most_common()))

        self.writeSynFile(altIndexList)
        self.writeIfoFile(
            wordCount,
            indexFileSize,
            len(altIndexList),
        )
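
uint32ToBytes() is not shown in this snippet; the StarDict .idx format stores offsets and sizes as 32-bit integers in network byte order (big-endian), so a plausible sketch is:

import struct

def uint32ToBytes(n):
    return struct.pack('>I', n)  # unsigned 32-bit, big-endian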
Example #34
def _animation(f1):
    global s, app
    # Open socket
    try:
        s = socket(AF_INET, SOCK_DGRAM)
        s.bind(("", APRIL_DATA_PORT))
        s.setblocking(0)
        # Server socket
        srv = socket(AF_INET, SOCK_STREAM)
        srv.bind(("0.0.0.0", 8080))
        srv.listen(2)
    except:
        app.stop()
        raise
    client = None
    logfile = None
    # Axes for arena display
    ax = array(
        [min(ref[:, 0]),
         max(ref[:, 0]),
         min(ref[:, 1]),
         max(ref[:, 1])]) * 1.2
    # Allowed tag IDS
    allow = set(corners + waypoints + ROBOT_TAGID)
    # Array size; must store all allowed tags
    N = max(allow) + 1
    # Array holding all point locations
    pts = zeros((N, 4, 3), dtype=float)
    # Indicator array for points that are assumed static
    #   and whose locations will be lowpass filtered
    statics = zeros(N, dtype=bool)
    statics[corners + waypoints] = True
    # Legend for point update indicator strings
    lbl = array(list(".+ld:*LD"))
    # (CONST) Reference locations for tag corners
    ang0 = array([-1 + 1j, 1 + 1j, 1 - 1j, -1 - 1j]) * -1j
    # (CONST) Point on a circle
    circ = exp(1j * linspace(0, 2 * pi, 16))
    ### Initial values for variables
    # Configure sensor line-types
    sensorF = Sensor(':om', lw=2)
    sensorB = Sensor(':oc', lw=2)
    # Last message received from TagStreamer
    msg = None
    # Last waypoint visited
    M = 0
    # Dynamic zoom scale for robot view
    zoom = None
    # Last homography
    prj = None
    # Start time
    T0 = now()
    # Time last waypoint message was sent
    lastWay = T0 - WAY_RATE - 1
    # Number of messages received
    nmsg = 0
    #
    ### MAIN LOOP ###
    #
    while len(waypoints) > M + 1:  # continue until goal is reached
        #
        ### Read data from April
        #
        try:
            while True:
                # read data as fast as possible
                msg = s.recv(1 << 16)
                nmsg += 1
        except SocketError, se:
            # until we've run out; last message remains in m
            pass
        # make sure we got something
        if not msg:
            continue
        # Parse tag information from UDP packet
        dat = json_loads(msg)
        msg = ''
        # Collect allowed tags
        h = empty_like(pts)
        h[:] = nan
        for d in dat:
            nm = d['i']
            if not nm in allow:
                continue
            #if ~isnan(h[nm,0,0]):
            #  print '(dup)',
            p = asarray(d['p']) / 100
            h[nm, :, :2] = p
            h[nm, :, 2] = 1
        #
        # at this point, all observed tag locations are in the dictionary h
        #
        ### Update pts array
        #
        # Tags seen in this frame
        fidx = ~isnan(h[:, 0, 0])
        # Tags previously unseen
        uidx = isnan(pts[:, 0, 0])
        # Tags to update directly: non static, or static and first time seen
        didx = (uidx & fidx) | ~statics
        if any(didx):
            pts[didx, ...] = h[didx, ...]
        # Tags to update with lowpass: static, seen and previously seen
        lidx = fidx & statics & ~uidx
        if any(lidx):
            pts[lidx, ...] *= (1 - alpha)
            pts[lidx, ...] += alpha * h[lidx, ...]
        # Print indicator telling operator what we did
        progress("%7.2f %5d  " % (now() - T0, nmsg) +
                 lbl[didx + 2 * lidx + 4 * fidx].tostring(),
                 sameLine=True)
        #
        # Collect the corner tags and estimate homography
        nprj = None
        try:
            roi = array([mean(pts[nm], 0) for nm in corners])
            # Homography mapping roi to ref
            nprj = fitHomography(roi, ref)
        except KeyError, ck:
            progress("-- missing corner %s" % str(c))
Example #35
def train():
    tok_path = get_tokenizer()
    model, vocab = get_mxnet_kogpt2_model(ctx=ctx)
    # tok = SentencepieceTokenizer(tok_path, num_best=0, alpha=0)

    data = pd.read_csv('Chatbot_data/ChatbotData.csv')

    max_len = opt.max_seq_len
    train_set = ChatDataset(data, tok_path, vocab, max_len=max_len)
    batch_size = opt.batch_size

    train_dataloader = mx.gluon.data.DataLoader(train_set,
                                                batch_size=batch_size,
                                                num_workers=5,
                                                shuffle=True)
    kogptqa = KoGPT2Chat(model)
    kogptqa.hybridize()

    # softmax cross entropy loss for classification
    loss_function = gluon.loss.SoftmaxCrossEntropyLoss()
    loss_function.hybridize()

    num_epochs = opt.num_epoch
    lr = 5e-5
    trainer = gluon.Trainer(kogptqa.collect_params(), 'bertadam', {
        'learning_rate': lr,
        'epsilon': 1e-8,
        'wd': 0.01
    })
    # Do not apply weight decay to LayerNorm and bias parameters.
    for _, v in kogptqa.collect_params('.*beta|.*gamma|.*bias').items():
        v.wd_mult = 0.0
    params = [
        p for p in kogptqa.collect_params().values() if p.grad_req != 'null'
    ]
    # learning rate warmup
    accumulate = opt.accumulate
    step_size = batch_size * accumulate if accumulate else batch_size
    num_train_examples = len(train_set)
    num_train_steps = int(num_train_examples / step_size * num_epochs)
    warmup_ratio = 0.1
    num_warmup_steps = int(num_train_steps * warmup_ratio)
    step_num = 0
    all_model_params = kogptqa.collect_params()

    log_interval = 50
    neg = -1e18
    # Set grad_req if gradient accumulation is required
    if accumulate and accumulate > 1:
        for p in params:
            p.grad_req = 'add'

    start_train = time.time()
    max_gpu_load = 0
    gpu_memory = 0
    for epoch_id in range(num_epochs):
        step_loss = 0
        for batch_id, (token_ids, mask, label) in enumerate(train_dataloader):
            if GPUtil.getGPUs()[0].load > max_gpu_load:
                max_gpu_load = GPUtil.getGPUs()[0].load
            if step_num < num_warmup_steps:
                new_lr = lr * step_num / num_warmup_steps
            else:
                non_warmup_steps = step_num - num_warmup_steps
                offset = non_warmup_steps / (num_train_steps -
                                             num_warmup_steps)
                new_lr = lr - offset * lr
            trainer.set_learning_rate(new_lr)
            with mx.autograd.record():
                # load data to CPU or GPU
                token_ids = token_ids.as_in_context(ctx)
                mask = mask.as_in_context(ctx)
                label = label.as_in_context(ctx)
                # forward computation
                out = kogptqa(token_ids)
                masked_out = nd.where(
                    mask.expand_dims(axis=2).repeat(repeats=out.shape[2],
                                                    axis=2), out,
                    neg * nd.ones_like(out))
                # loss for responses excluding MASK and PAD
                ls = loss_function(masked_out, label).sum() / mask.sum()
            # backward computation
            ls.backward()
            if not accumulate or (batch_id + 1) % accumulate == 0:
                trainer.allreduce_grads()
                nlp.utils.clip_grad_global_norm(params, 1)
                trainer.update(accumulate if accumulate else 1)
                step_num += 1
                if accumulate and accumulate > 1:
                    # set grad to zero for gradient accumulation
                    all_model_params.zero_grad()
            step_loss += ls.asscalar()
            if GPUtil.getGPUs()[0].load > max_gpu_load:
                max_gpu_load = GPUtil.getGPUs()[0].load
            gpu_memory = GPUtil.getGPUs()[0].memoryUsed
            if step_num % log_interval == 0 and step_num > 0:
                print(
                    '[Epoch {} Batch {}/{}] loss={:.4f}, lr={:.10f}, train ppl={:.3f}'
                    .format(epoch_id + 1, batch_id + 1, len(train_dataloader),
                            step_loss / log_interval, trainer.learning_rate,
                            math.exp(step_loss / log_interval)))
                step_loss = 0
    print(1234)
    log_train(gpu_memory, max_gpu_load, start_train, num_epochs, batch_size)
    logging.info('saving model file to {}'.format(opt.model_params))
    kogptqa.save_parameters(opt.model_params)
Example #36
 def render_time(self, ctx, data):
     ctx.remember(now(), itodo.ITimer)
     return ctx.tag
Example #37
    def writeGeneral(self) -> None:
        """
		Build StarDict dictionary in general case.
		Every item definition may consist of an arbitrary number of articles.
		sametypesequence option is not used.
		"""
        dictMark = 0
        altIndexList = []  # list of tuples (b"alternate", wordIndex)

        dictFile = open(self._filename + ".dict", "wb")
        idxFile = open(self._filename + ".idx", "wb")
        indexFileSize = 0

        t0 = now()
        wordCount = 0
        defiFormatCounter = Counter()
        if not isdir(self._resDir):
            os.mkdir(self._resDir)

        entryI = -1
        for entry in self._glos:
            if entry.isData():
                entry.save(self._resDir)
                continue
            entryI += 1

            words = entry.getWords()  # list of strs
            word = words[0]  # str
            defis = entry.getDefis()  # list of strs

            entry.detectDefiFormat()  # call no more than once
            defiFormat = entry.getDefiFormat()
            defiFormatCounter[defiFormat] += 1
            if defiFormat not in ("m", "h"):
                defiFormat = "m"

            b_dictBlock = b""

            for alt in words[1:]:
                altIndexList.append((alt.encode("utf-8"), entryI))

            b_dictBlock += (defiFormat + defis[0]).encode("utf-8") + b"\x00"

            for altDefi in defis[1:]:
                b_dictBlock += (defiFormat + altDefi).encode("utf-8") + b"\x00"

            dictFile.write(b_dictBlock)

            blockLen = len(b_dictBlock)
            b_idxBlock = word.encode("utf-8") + b"\x00" + \
             intToBinStr(dictMark, 4) + \
             intToBinStr(blockLen, 4)
            idxFile.write(b_idxBlock)

            dictMark += blockLen
            indexFileSize += len(b_idxBlock)

            wordCount += 1

        dictFile.close()
        idxFile.close()
        if not os.listdir(self._resDir):
            os.rmdir(self._resDir)
        log.info(f"Writing dict file took {now()-t0:.2f} seconds")
        log.debug("defiFormatsCount = " +
                  pformat(defiFormatCounter.most_common()))

        self.writeSynFile(altIndexList)
        self.writeIfoFile(wordCount, indexFileSize, len(altIndexList))
Example #38
File: ui.py Project: ilius/starcal2
def saveLiveConfLoop():
    tm = now()
    if tm - lastLiveConfChangeTime > saveLiveConfDelay:
        saveLiveConf()
        return False  ## Finish loop
    return True  ## Continue loop
Example #39
def run():
    t0 = now()

    # parse command line options
    known_args, beam_args = runtime_args()

    # BigQuery utility
    bq_utils = BigQueryUtils()

    options = PipelineOptions(beam_args)
    options.view_as(SetupOptions).save_main_session = True

    with beam.Pipeline(options=options) as p:
        rows = (p
                | beam.io.ReadFromText(known_args.input, skip_header_lines=1)
                | beam.ParDo(BeamReadCSV(header_cols=FLIGHTS_CSV_COLUMNS))
                | beam.ParDo(BeamTransformRecords(),
                             date_fmt='%Y-%m-%d',
                             time_fmt='%H%M'))

        # load the routes table into a lookup dict
        sql = f"""select airline, src, dest from {known_args.routes_table}"""
        routes = bq_utils.execute_as_dict(sql,
                                          keycols=['airline', 'src', 'dest'])

        # lookup routes
        rows, routes_rejects, missing_routes = (
            rows
            | beam.ParDo(BeamLookupRoute(), routes=routes).with_outputs(
                'rejects', 'missing_routes', main='main'))

        # write parquet output files
        output = (rows
                  | beam.io.WriteToParquet(
                      os.path.join(known_args.output, 'flights'),
                      schema=datamodel_flights_parquet_schema(),
                      file_name_suffix='.parquet'))

        # write missing routes to another output as CSV
        output_routes = (
            missing_routes
            | "gbr" >> beam.GroupByKey()  # calculate distinct missing routes
            | "missing_routes_csv" >> beam.Map(
                lambda e: ','.join(list(e[0]))
            )  # csv output the key (e[0] of key value tuple) which is (airline,src,dest)
            | "missing_routes_out" >> beam.io.WriteToText(
                os.path.join(known_args.output, 'rejects/missing-routes'),
                file_name_suffix='.csv',
                header='airline,src,dest'))

        # alternative: write (simple) newline delimited json output files
        #              a very flexible output file format for bigquery and other big data tools
        #              much slower to write and larger in size than binary formats such as Parquet, ORC, or Avro
        #              but provides flexibility over schema for smaller data files
        #              larger files should use Avro, Parquet, or ORC: Avro provides the fastest write speeds, while
        #              Parquet and ORC provide faster read performance for analytical queries

        # output = (rows
        #           | beam.Map(lambda e: {k: v if k != 'flight_date' else v.strftime('%Y-%m-%d') for k, v in e.items()})  # convert flight_date back to string type for json conversion
        #           | beam.Map(lambda e: json.dumps(e))  # json dump row
        #           | beam.io.WriteToText(os.path.join(known_args.output, 'flights'),
        #                                 file_name_suffix='.json')
        #           )

    logger.info("beam pipiline completed.")

    # create bigquery external table and insert into bq flights table
    bq_utils.create_external_table(known_args.flights_ext_table,
                                   source_uris=os.path.join(
                                       known_args.output, "flights*.parquet"),
                                   source_format='PARQUET',
                                   delete_if_exists=True)

    # create and replace existing bigquery flights table
    bq_utils.create_table(known_args.flights_table,
                          schema=datamodel_flights_bigquery_schema(),
                          delete_if_exists=True)

    # insert into table as select (itas) statement
    sql = f"""
        INSERT INTO `{known_args.flights_table}`
        SELECT
          a.day_of_week,
          a.flight_date,
          a.airline,
          a.tailnumber,
          a.flight_number,
          a.src,
          a.src_city,
          a.src_state,
          a.dest,
          a.dest_city,
          a.dest_state,
          PARSE_TIME('%H:%M:%S', a.departure_time) as departure_time,
          PARSE_TIME('%H:%M:%S', a.actual_departure_time) as actual_departure_time,
          a.departure_delay,
          a.taxi_out,
          PARSE_TIME('%H:%M:%S', a.wheels_off) as wheels_off,
          PARSE_TIME('%H:%M:%S', a.wheels_on) as wheels_on,
          a.taxi_in,
          PARSE_TIME('%H:%M:%S', a.arrival_time) as arrival_time,
          PARSE_TIME('%H:%M:%S', a.actual_arrival_time) as actual_arrival_time,
          a.arrival_delay,
          a.cancelled,
          a.cancellation_code,
          a.flight_time,
          a.actual_flight_time,
          a.air_time,
          a.flights,
          a.distance,
          a.airline_delay,
          a.weather_delay,
          a.nas_delay,
          a.security_delay,
          a.late_aircraft_delay,
          -- CONCAT(a.flight_date, '_', a.airline, '_', a.flight_number) AS flightDate_airline_flightNumber
        FROM
          `{known_args.flights_ext_table}`  a
        """
    # insert records from parquet external table into final bq managed flights table
    r = bq_utils.execute(sql)

    logger.info(f"total time: {(now() - t0):,.6f} secs")
Example #40
 def was_unused_last(self, seconds):
     return now() > self.__last_access + seconds
Example #41
 def refresh(self):
     """
     Set the time of last access to now.
     """
     self.__last_access = now()
     return self
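
Examples #40 and #41 look like two methods of the same cache-entry class; a minimal self-contained sketch of that assumed class:

from time import time as now

class Entry(object):
    def __init__(self):
        self.__last_access = now()

    def refresh(self):
        """Set the time of last access to now."""
        self.__last_access = now()
        return self

    def was_unused_last(self, seconds):
        return now() > self.__last_access + seconds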
Example #42
def power(time, flux, minfreq, maxfreq, step, chunksize):
    """
    This function returns the power spectrum of the desired star.
    Arguments:
        - 'time': Time in megaseconds from the time series analysis.
        - 'flux': Photometry data from the time series analysis.
        - 'minfreq': The lower bound for the frequency interval
        - 'maxfreq': The upper bound for the frequency interval
        - 'step': The spacing between frequencies.
        - 'chunksize': Define a chunk for the least mean square Fourier
                       in order to save time.
    """

    # Import modules
    import numpy as np
    from time import time as now

    # Generate cyclic frequencies
    freq = np.arange(minfreq, maxfreq, step)

    # Generate list to store the calculated power
    power = np.zeros((len(freq), 1))

    # Convert frequencies to angular frequencies
    nu = 2 * np.pi * freq

    # Iterate over the frequencies
    timerStart = now()

    # After this many frequencies, print progress info
    print_every = 75e6 // len(time)

    # Ensure chunksize divides print_every
    print_every = (print_every // chunksize) * chunksize

    for i in range(0, len(nu), chunksize):
        # Define chunk
        j = min(i + chunksize, len(nu))
        rows = j - i

        if i % print_every == 0:
            # Info-print
            elapsedTime = now() - timerStart
            if i == 0:
                totalTime = 462 / 6000 * len(nu)
            else:
                totalTime = (elapsedTime / i) * len(nu)

            print("Progress: %.2f%% (%d..%d of %d)  "
                  "Elapsed: %.2f s  Estimated total: %.2f s" %
                  (np.divide(100.0 * i,
                             len(nu)), i, j, len(nu), elapsedTime, totalTime))
        """
        The outer product is calculated. This way, the product between
        time and ang. freq. will be calculated elementwise; one column
        per frequency. This is done in order to save computing time.
        """
        nutime = np.outer(time, nu[i:j])
        """
        An array with the measured flux is made so it has the same size
        as "nutime", since we want to multiply the two.
        """
        fluxrep = np.repeat(flux[:, np.newaxis], repeats=rows, axis=1)

        # The Fourier subroutine
        sin_nutime = np.sin(nutime)
        cos_nutime = np.cos(nutime)

        s = np.sum(sin_nutime * fluxrep, axis=0)
        c = np.sum(cos_nutime * fluxrep, axis=0)
        ss = np.sum(sin_nutime**2, axis=0)
        cc = np.sum(cos_nutime**2, axis=0)
        sc = np.sum(sin_nutime * cos_nutime, axis=0)

        alpha = ((s * cc) - (c * sc)) / ((ss * cc) - (sc**2))
        beta = ((c * ss) - (s * sc)) / ((ss * cc) - (sc**2))

        power[i:j] = np.reshape(alpha**2 + beta**2, (rows, 1))

    power = power.reshape(-1, 1)
    freq = freq.reshape(-1, 1)
    elapsedTime = now() - timerStart
    print('Computed power spectrum with chunk size %d in %.2f s' %
          (chunksize, elapsedTime))

    return (freq, power)
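
A usage sketch for power() with synthetic data (every argument choice here is an assumption):

import numpy as np

t = np.linspace(0, 1, 4000)     # time stamps
y = np.sin(2 * np.pi * 50 * t)  # a pure tone at frequency 50
freq, p = power(t, y, minfreq=1, maxfreq=100, step=0.1, chunksize=500)
print(freq[np.argmax(p)])       # peaks near 50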
Example #43
        'connected': timing_connected_layer,
        'dropout': timing_dropout_layer,
        'input': timing_input_layer,
        'logistic': timing_logistic_layer,
        'maxpool': timing_maxpool_layer,
        'route': timing_route_layer,
        'shortcut': timing_shortcut_layer,
        'shuffler': timing_shuffler_layer,
        'softmax': timing_softmax_layer,
        'upsample': timing_upsample_layer,
        'yolo': timing_yolo_layer
    }

    timing = []
    for input_shape in input_shapes:

        tic = now()
        times = timing_layers[args.layer](input_shape)
        toc = now()

        print('Elapsed time: {:.3f} sec'.format(toc - tic))

        timing += [pd.DataFrame(times)]

    timing = pd.concat(timing)

    if args.out:
        timing.to_csv(args.out, sep=',', header=True, index=False)
    else:
        print(timing)
Ejemplo n.º 44
0
	def updateLine(self):
		self.label.set_label(self.lines[self.index])
		# Start off-screen: left of the window for RTL text, else at the right edge
		self.startXpos = -self.label.width if self.label.rtl else screenWidth
		self.startTime = now()
Ejemplo n.º 45
0
    def writeCompact(self, defiFormat):
        """
		Build StarDict dictionary with sametypesequence option specified.
		Every item definition consists of a single article.
		All articles have the same format, specified in defiFormat parameter.

		Parameters:
		defiFormat - format of article definition: h - html, m - plain text
		"""
        dictMark = 0
        altIndexList = []  # list of tuples (b"alternate", wordIndex)

        dictFile = open(self._filename + ".dict", "wb")
        idxFile = open(self._filename + ".idx", "wb")
        indexFileSize = 0

        t0 = now()
        wordCount = 0
        if not isdir(self._resDir):
            os.mkdir(self._resDir)

        entryI = -1
        for entry in self._glos:
            if entry.isData():
                entry.save(self._resDir)
                continue
            entryI += 1

            words = entry.getWords()  # list of strs
            word = words[0]  # str
            defis = entry.getDefis()  # list of strs

            b_dictBlock = b""

            for alt in words[1:]:
                altIndexList.append((alt.encode("utf-8"), entryI))

            b_dictBlock += (defis[0]).encode("utf-8")

            for altDefi in defis[1:]:
                b_dictBlock += b"\x00" + (altDefi).encode("utf-8")

            dictFile.write(b_dictBlock)

            blockLen = len(b_dictBlock)
            b_idxBlock = word.encode("utf-8") + b"\x00" + \
             intToBinStr(dictMark, 4) + \
             intToBinStr(blockLen, 4)
            idxFile.write(b_idxBlock)

            dictMark += blockLen
            indexFileSize += len(b_idxBlock)

            wordCount += 1

        dictFile.close()
        idxFile.close()
        if not os.listdir(self._resDir):
            os.rmdir(self._resDir)
        log.info(f"Writing dict file took {now()-t0:.2f} seconds")
        log.debug("defiFormat = " + pformat(defiFormat))

        self.writeSynFile(altIndexList)
        self.writeIfoFile(
            wordCount,
            indexFileSize,
            len(altIndexList),
            defiFormat,
        )
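
The intToBinStr helper used above is not shown in this excerpt. A minimal sketch, assuming the 4-byte big-endian (network byte order) encoding that the StarDict .idx format specifies for offsets and sizes:

import struct

def intToBinStr(n, size):
    # Pack an unsigned integer into `size` big-endian bytes
    if size == 4:
        return struct.pack(">I", n)
    raise ValueError("unsupported size: %d" % size)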
Ejemplo n.º 46
0
Archivo: ui.py Proyecto: ilius/starcal2
def updateFocusTime(*args):
    global focusTime
    focusTime = now()
Ejemplo n.º 47
0
 def did_complete(self, order):
     print "Completed order " + ("matches"
                                 if order == self.pending_order else
                                 "does not match") + " the pending order"
     self.executed_orders.append([order, now()])
     self.pending_order = None
Ejemplo n.º 48
0
 def log(*args):
     if not printed:
         print()
         printed.append(1)
     print("\t ->", now(), *args)
Ejemplo n.º 49
0
 def render_end(self, ctx, data):
     return ctx.tag["%.3f" % (now() - itodo.ITimer(ctx))]
Ejemplo n.º 50
0
# Initializes all necessary parameters to run model
session = tf.InteractiveSession()  # Session to run model
train_writer = tf.summary.FileWriter('convNetSummary',
                                     session.graph)  # File to write reporting
# Handler for simple initialization or restoration of save files
saver = tf.train.Saver()
# Coordinator to enqueue records with FixedLengthRecordReader
tf.train.start_queue_runners()

# Initializes value of variable appropriately
cifar10_model.load_variable_value(saver, session, reset_variables)

to_print_1 = '%d, train CE: %g, train ACC: %g.'
to_print_2 = '%d, train CE: %g, train ACC: %g. Inputs per second: %0.3f. Seconds per batch: %0.3f.'
start_time = now()
for index in range(100000):

    # Runs one training/summary step
    _, cross_entropy_value, accuracy_value, summary = session.run(
        [train_step, train_cross_entropy, train_accuracy, merged])

    # Periodically tests evolution in accuracy for test set and training set
    if index % progress_report_interval == 0:

        # Saves current state of weights
        saver.save(session, variable_save_file)

        if index == 0:

            print(to_print_1 % (index, cross_entropy_value, accuracy_value))
Ejemplo n.º 51
0
 # If robot not seen --> we're done
 if isnan(zc[ROBOT_TAGID]):
     yield
     continue
 #
 ### robot tag related updates
 #
 # robot tag corners
 rbt = z[ROBOT_TAGID, ...]
 # robot heading angle phasor
 ang = mean((rbt - zc[ROBOT_TAGID]) / ang0)
 ang /= abs(ang)
 # If logging data --> put into log
 if logfile is not None:
     lo = [
         now(), zc[ROBOT_TAGID].real, zc[ROBOT_TAGID].imag, ang,
         int(zc[waypoints[M]].real),
         int(zc[waypoints[M]].imag)
     ]
     logfile.write(", ".join(["%.3f" % x for x in lo]) + "\n")
 # indicate robot
 a1.plot(zc[ROBOT_TAGID].real, zc[ROBOT_TAGID].imag, '*r', ms=15)
 #
 ### robot relative view
 #
 a2 = f1.add_subplot(122)
 #
 # Show the waypoints
 c = zc[waypoints]
 vc = ~isnan(c)
 cr = (c - zc[ROBOT_TAGID]) / ang
Ejemplo n.º 52
0
 def render_starttimer(self, ctx, data):
     ctx.remember(now(), ITimer)
     return ctx.tag
Ejemplo n.º 53
0
    def writeCompact(self, defiFormat):
        """
		Build StarDict dictionary with sametypesequence option specified.
		Every item definition consists of a single article.
		All articles have the same format, specified in defiFormat parameter.

		Parameters:
		defiFormat - format of article definition: h - html, m - plain text
		"""
        dictMark = 0
        altIndexList = []  # list of tuples (b"alternate", entryIndex)

        dictFile = open(self._filename + ".dict", "wb")
        idxFile = open(self._filename + ".idx", "wb")
        indexFileSize = 0

        t0 = now()
        wordCount = 0
        if not isdir(self._resDir):
            os.mkdir(self._resDir)

        entryIndex = -1
        while True:
            entry = yield
            if entry is None:
                break
            if entry.isData():
                entry.save(self._resDir)
                continue
            entryIndex += 1

            words = entry.l_word  # list of strs
            word = words[0]  # str
            defi = self.fixDefi(entry.defi, defiFormat)
            # defi is str

            for alt in words[1:]:
                altIndexList.append((alt.encode("utf-8"), entryIndex))

            b_dictBlock = defi.encode("utf-8")
            dictFile.write(b_dictBlock)
            blockLen = len(b_dictBlock)

            b_idxBlock = word.encode("utf-8") + b"\x00" + \
             uint32ToBytes(dictMark) + \
             uint32ToBytes(blockLen)
            idxFile.write(b_idxBlock)

            dictMark += blockLen
            indexFileSize += len(b_idxBlock)

            wordCount += 1

        dictFile.close()
        idxFile.close()
        if not os.listdir(self._resDir):
            os.rmdir(self._resDir)
        log.info(f"Writing dict file took {now()-t0:.2f} seconds")

        self.writeSynFile(altIndexList)
        self.writeIfoFile(
            wordCount,
            indexFileSize,
            len(altIndexList),
            defiFormat=defiFormat,
        )
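
The method above is a generator-based coroutine: it suspends at "entry = yield" and expects the caller to push entries in, then signal completion with None. A hypothetical driver (writer and entries are invented names for illustration):

writer_gen = writer.writeCompact("h")
next(writer_gen)               # advance to the first yield
for entry in entries:
    writer_gen.send(entry)
try:
    writer_gen.send(None)      # breaks the loop and finalizes the files
except StopIteration:
    pass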
Ejemplo n.º 54
0
 def render_stoptimer(self, ctx, data):
     start = ITimer(ctx)
     return ctx.tag['%s' % (now()-start)]
Ejemplo n.º 55
0
    def _wait(self, parsed_args):
        from time import time as now, sleep

        min_score = parsed_args.score
        delay = parsed_args.delay
        deadline = now() + delay
        descr = []
        ko = -1
        exc_msg = ("Timeout ({0}s) while waiting for the services to get a "
                   "score >= {1}, {2}")

        def maybe_unlock(allsrv):
            if not parsed_args.unlock:
                return
            if not allsrv:
                return
            self.app.client_manager.conscience.unlock_score(allsrv)

        def check_deadline():
            if now() > deadline:
                if ko < 0:
                    msg = exc_msg.format(delay, min_score,
                                         "proxy and/or conscience not ready")
                else:
                    msg = exc_msg.format(delay, min_score,
                                         "still %d are not." % ko)
                for srv in descr:
                    if srv['score'] < min_score:
                        self.log.warn("%s %s %s", srv['type'],
                                      srv.get('id', None), srv['score'])
                raise Exception(msg)

        interval = _sleep_interval(0.0, 1.0, 2.0, 4.0)
        types = parsed_args.types
        if not parsed_args.types:
            while True:
                check_deadline()
                sleep(next(interval))

                try:
                    types = self.app.client_manager.conscience.service_types()
                    break
                except OioNetworkException as exc:
                    self.log.debug("Proxy error: %s", exc)
                except ServiceBusy as exc:
                    self.log.debug("Conscience busy: %s", exc)

        interval = _sleep_interval(0.0, 1.0, 2.0, 4.0)
        while True:
            check_deadline()
            maybe_unlock(descr)
            sleep(next(interval))

            descr = []
            ko = -1
            try:
                for typ in types:
                    tmp = self.app.client_manager.conscience.all_services(typ)
                    for srv in tmp:
                        srv['type'] = typ
                    descr += tmp
            except OioNetworkException as exc:
                self.log.debug("Proxy error: %s", exc)
                continue
            except ServiceBusy as exc:
                self.log.debug("Conscience busy: %s", exc)
                continue

            ko = len([s['score'] for s in descr if s['score'] < min_score])
            if ko > 0:
                self.log.debug("Still %d services down", ko)
                continue

            # If a minimum has been specified, let's check we have enough
            # services
            if parsed_args.count:
                ok = len([s for s in descr if s['score'] >= min_score])
                if ok < parsed_args.count:
                    self.log.debug("Only %d services up", ok)
                    continue

            # No service down, and enough services, we are done.
            for srv in descr:
                yield srv['type'], srv['addr'], srv['score']
            return
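
The _sleep_interval helper used above is not shown in this excerpt. A plausible sketch, assuming it yields the given delays in order and then repeats the last one forever (a simple capped backoff):

def _sleep_interval(*delays):
    for delay in delays:
        yield delay
    while True:
        yield delays[-1]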
Ejemplo n.º 56
0
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/lgpl.txt>.
# Also available in /usr/share/common-licenses/LGPL on Debian systems
# or /usr/share/licenses/common/LGPL/license.txt on ArchLinux
import time
from time import localtime, strftime
from time import time as now

from gi.repository.GObject import timeout_add
from gi.repository import GdkPixbuf

from scal3.ui_gtk import *
from scal3.ui_gtk.font_utils import *

# Milliseconds until just past the next wall-clock second boundary; presumably
# used with timeout_add to schedule the clock label refreshes
time_rem = lambda: int(1000 * (1.01 - now() % 1))


class ClockLabel(gtk.Label):
    def __init__(self, bold=False, seconds=True, selectable=False):
        gtk.Label.__init__(self)
        self.set_use_markup(True)
        self.set_selectable(selectable)
        self.bold = bold
        self.seconds = seconds
        self.running = False
        #self.connect('button-press-event', self.button_press)
        self.start()  #???

    def start(self):
        self.running = True
Ejemplo n.º 57
0
 def _wait(self):
     """Rate limit the number of send events."""
     end = self.last_send_event + COMPAT_MODE_WAIT_TIME
     while now() < end:
         pass
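
The loop above busy-waits, burning CPU until the deadline passes. A gentler sketch of the same rate limit, assuming sleep is imported from the time module alongside now (an alternative, not the original implementation):

 def _wait(self):
     """Rate limit the number of send events."""
     remaining = self.last_send_event + COMPAT_MODE_WAIT_TIME - now()
     if remaining > 0:
         sleep(remaining)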
Ejemplo n.º 58
0
def runMCMCmodel(args):
    """
  Simulate the survey data and run the MCMC luminosity calibration model.

  Parameters
  ----------

  args - Command line arguments
  """
    mcmcParams = args['mcmcString']
    surveyParams = args['surveyString']
    priorParams = args['priorsString']

    maxIter = int(mcmcParams[0])
    burnIter = int(mcmcParams[1])
    thinFactor = int(mcmcParams[2])
    walkerFactor = int(mcmcParams[3])

    minParallax = float(surveyParams[1])
    maxParallax = float(surveyParams[2])
    meanAbsoluteMagnitude = float(surveyParams[3])
    varianceAbsoluteMagnitude = float(surveyParams[4])

    if surveyParams[5] == 'Inf':
        magLim = np.Inf
    else:
        magLim = float(surveyParams[5])

    simulatedSurvey = U.UniformDistributionSingleLuminosity(
        int(surveyParams[0]),
        float(surveyParams[1]),
        float(surveyParams[2]),
        float(surveyParams[3]),
        float(surveyParams[4]),
        surveyLimit=magLim)
    #simulatedSurvey.setRandomNumberSeed(53949896)
    simulatedSurvey.generateObservations()
    numberOfStarsInSurvey = simulatedSurvey.numberOfStarsInSurvey

    # Calculate initial guesses for the true parallaxes and absolute magnitudes of the stars.
    clippedObservedParallaxes = simulatedSurvey.observedParallaxes.clip(
        minParallax, maxParallax)
    initialAbsMagGuesses = simulatedSurvey.observedMagnitudes + 5.0 * np.log10(
        clippedObservedParallaxes) - 10.0
    meanAbsoluteMagnitudeGuess = initialAbsMagGuesses.mean()

    # Initial guesses for hyper parameters (mean absolute magnitude and sigma^2)
    #
    # Mean absolute magnitude uniform on (meanAbsMagLow, meanAbsMagHigh)
    meanAbsMagLow = float(priorParams[0])
    meanAbsMagHigh = float(priorParams[1])
    # Variance has 1/x distribution with lower and upper limit as prior
    varianceLow = float(priorParams[2])
    varianceHigh = float(priorParams[3])
    varianceInit = (varianceHigh - varianceLow) / (np.log(varianceHigh) -
                                                   np.log(varianceLow))

    initialParameters = np.concatenate(
        (np.array([meanAbsoluteMagnitudeGuess, varianceInit]),
         clippedObservedParallaxes, initialAbsMagGuesses))

    # Parameters for emcee ln-posterior function
    posteriorDict = {
        'minParallax': minParallax,
        'maxParallax': maxParallax,
        'muLow': meanAbsMagLow,
        'muHigh': meanAbsMagHigh,
        'varLow': varianceLow,
        'varHigh': varianceHigh
    }
    observations = np.concatenate((simulatedSurvey.observedParallaxes,
                                   simulatedSurvey.observedMagnitudes))
    observationalErrors = inverseVariance(
        np.concatenate(
            (simulatedSurvey.parallaxErrors, simulatedSurvey.magnitudeErrors)))

    # MCMC sampler parameters
    ndim = 2 * numberOfStarsInSurvey + 2
    nwalkers = walkerFactor * ndim

    # Generate initial positions for each walker
    initialPositions = [np.empty((ndim)) for i in xrange(nwalkers)]
    initialPositions[0] = initialParameters
    for i in xrange(nwalkers - 1):
        ranMeanAbsMag = np.random.rand() * (meanAbsMagHigh -
                                            meanAbsMagLow) + meanAbsMagLow
        ranVariance = random_oneOverX(varianceLow, varianceHigh, 1)
        ranParallaxes = np.zeros_like(clippedObservedParallaxes)
        for j in xrange(numberOfStarsInSurvey):
            #if (i<nwalkers/2):
            ranParallaxes[j] = clippedObservedParallaxes[
                j] + simulatedSurvey.parallaxErrors[j] * np.random.randn()
            #else:
            #  ranParallaxes[j]=random_oneOverXFourth(minParallax,maxParallax,1)
        ranAbsMag = np.sqrt(ranVariance) * np.random.randn(
            numberOfStarsInSurvey) + ranMeanAbsMag
        initialPositions[i + 1] = np.concatenate(
            (np.array([ranMeanAbsMag, ranVariance]),
             ranParallaxes.clip(minParallax, maxParallax), ranAbsMag))

    print '** Building sampler **'
    sampler = emcee.EnsembleSampler(
        nwalkers,
        ndim,
        UniformSpaceDensityGaussianLFBookemcee,
        threads=4,
        args=[posteriorDict, observations, observationalErrors])
    # burn-in
    print '** Burn in **'
    start = now()
    pos, prob, state = sampler.run_mcmc(initialPositions, burnIter)
    print '** Finished burning in **'
    print '                Time (s): ', now() - start
    print 'Median acceptance fraction: ', np.median(
        sampler.acceptance_fraction)
    print(
        'Acceptance fraction IQR: {0}'.format(
            np.percentile(sampler.acceptance_fraction, 25)) +
        ' -- {0}'.format(np.percentile(sampler.acceptance_fraction, 75)))
    correlationTimes = sampler.acor
    print 'Autocorrelation times: '
    print '  Mean absolute magnitude: ', correlationTimes[0]
    print '  Variance absolute magnitude: ', correlationTimes[1]
    print '  Median for parallaxes: ', np.median(
        correlationTimes[2:numberOfStarsInSurvey + 2])
    print '  Median for magnitudes: ', np.median(
        correlationTimes[numberOfStarsInSurvey + 2:])
    print
    # final chain
    sampler.reset()
    start = now()
    print '** Starting sampling **'
    sampler.run_mcmc(pos, maxIter, rstate0=state, thin=thinFactor)
    print '** Finished sampling **'
    print '                Time (s): ', now() - start
    print 'Median acceptance fraction: ', np.median(
        sampler.acceptance_fraction)
    print(
        'Acceptance fraction IQR: {0}'.format(
            np.percentile(sampler.acceptance_fraction, 25)) +
        ' -- {0}'.format(np.percentile(sampler.acceptance_fraction, 75)))
    correlationTimes = sampler.acor
    print 'Autocorrelation times: '
    print '  Mean absolute magnitude: ', correlationTimes[0]
    print '  Variance absolute magnitude: ', correlationTimes[1]
    print '  Median for parallaxes: ', np.median(
        correlationTimes[2:numberOfStarsInSurvey + 2])
    print '  Median for magnitudes: ', np.median(
        correlationTimes[numberOfStarsInSurvey + 2:])

    # Extract the samples of the posterior distribution
    chain = sampler.flatchain

    # Point estimates of mean Absolute Magnitude and its standard deviation.
    meanAbsoluteMagnitudeSamples = chain[:, 0].flatten()
    varAbsoluteMagnitudeSamples = chain[:, 1].flatten()
    estimatedAbsMag = meanAbsoluteMagnitudeSamples.mean()
    errorEstimatedAbsMag = meanAbsoluteMagnitudeSamples.std()
    estimatedVarMag = varAbsoluteMagnitudeSamples.mean()
    errorEstimatedVarMag = varAbsoluteMagnitudeSamples.std()
    print "emcee estimates"
    print "mu_M={:4.2f}".format(estimatedAbsMag) + " +/- {:4.2f}".format(
        errorEstimatedAbsMag)
    print "sigma^2_M={:4.2f}".format(estimatedVarMag) + " +/- {:4.2f}".format(
        errorEstimatedVarMag)

    # Plot results

    # MAP estimates
    muDensity = gaussian_kde(meanAbsoluteMagnitudeSamples)
    mapValueMu = scipy.optimize.fmin(lambda x: -1.0 * muDensity(x),
                                     np.median(meanAbsoluteMagnitudeSamples),
                                     maxiter=1000,
                                     ftol=0.0001)

    varDensity = gaussian_kde(varAbsoluteMagnitudeSamples)
    mapValueVar = scipy.optimize.fmin(lambda x: -1.0 * varDensity(x),
                                      np.median(varAbsoluteMagnitudeSamples),
                                      maxiter=1000,
                                      ftol=0.0001)

    fig = plt.figure(figsize=(12, 8.5))
    fig.add_subplot(2, 2, 1)
    x = np.linspace(meanAbsoluteMagnitudeSamples.min(),
                    meanAbsoluteMagnitudeSamples.max(), 500)
    plt.plot(x, muDensity(x), 'k-')
    plt.axvline(meanAbsoluteMagnitude, linewidth=2, color="red")
    plt.xlabel("$\\mu_M$")
    plt.ylabel("$P(\\mu_M)$")

    fig.add_subplot(2, 2, 2)
    x = np.linspace(varAbsoluteMagnitudeSamples.min(),
                    varAbsoluteMagnitudeSamples.max(), 500)
    plt.plot(x, varDensity(x), 'k-')
    plt.axvline(varianceAbsoluteMagnitude, linewidth=2, color="red")
    plt.xlabel("$\\sigma^2_M$")
    plt.ylabel("$P(\\sigma^2_M)$")

    fig.add_subplot(2, 2, 3)
    plt.hexbin(meanAbsoluteMagnitudeSamples,
               varAbsoluteMagnitudeSamples,
               C=None,
               bins='log',
               cmap=cm.gray_r)
    plt.xlabel("$\\mu_M$")
    plt.ylabel("$\\sigma^2_M$")

    plt.figtext(0.55,
                0.4,
                "$\\widetilde{\\mu_M}=" + "{:4.2f}".format(estimatedAbsMag) +
                "$ $\\pm$ ${:4.2f}$".format(errorEstimatedAbsMag),
                ha='left')
    plt.figtext(
        0.75, 0.4, "$\\mathrm{MAP}(\\widetilde{\\mu_M})=" +
        "{:4.2f}".format(mapValueMu[0]) + "$")
    plt.figtext(0.55,
                0.35,
                "$\\widetilde{\\sigma^2_M}=" +
                "{:4.2f}".format(estimatedVarMag) +
                "$ $\\pm$ ${:4.2f}$".format(errorEstimatedVarMag),
                ha='left')
    plt.figtext(
        0.75, 0.35, "$\\mathrm{MAP}(\\widetilde{\\sigma^2_M})=" +
        "{:4.2f}".format(mapValueVar[0]) + "$")

    titelA = ("$N_\\mathrm{stars}" + "={0}".format(numberOfStarsInSurvey) +
              "$, True values: $\\mu_M={0}".format(meanAbsoluteMagnitude) +
              "$, $\\sigma^2_M={0}".format(varianceAbsoluteMagnitude) + "$")
    titelB = ("Iterations = {0}".format(maxIter) +
              ", Burn = {0}".format(burnIter) +
              ", Thin = {0}".format(thinFactor))
    plt.suptitle(titelA + "\\quad\\quad " + titelB)

    titelC = []
    titelC.append("MCMC sampling with emcee")
    titelC.append("$N_\\mathrm{walkers}" + "={0}".format(nwalkers) +
                  "$, $N_\\mathrm{dim}" + "={0}".format(ndim) + "$")
    plt.figtext(0.55, 0.15, titelC[0], horizontalalignment='left')
    plt.figtext(0.60, 0.10, titelC[1], horizontalalignment='left')

    priorInfo = []
    priorInfo.append(
        "Prior on $\\mu_M$: flat $\\quad{0}".format(meanAbsMagLow) +
        "<\\mu_M<{0}".format(meanAbsMagHigh) + "$")
    priorInfo.append(
        "Prior on $\\sigma^2_M$: $1/\\sigma^2_M\\quad{0}".format(varianceLow) +
        "<\\sigma^2_M<{0}".format(varianceHigh) + "$")

    plt.figtext(0.55, 0.25, priorInfo[0], horizontalalignment='left')
    plt.figtext(0.55, 0.20, priorInfo[1], horizontalalignment='left')

    if (args['pdfOutput']):
        plt.savefig('luminosityCalibrationResultsEmcee.pdf')
    elif (args['pngOutput']):
        plt.savefig('luminosityCalibrationResultsEmcee.png')
    elif (args['epsOutput']):
        plt.savefig('luminosityCalibrationResultsEmcee.eps')
    else:
        plt.show()
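
The random_oneOverX helper used above is not shown in this excerpt. A minimal sketch, assuming it draws n samples from a density proportional to 1/x on [low, high] by inverse-CDF sampling, as the "Variance has 1/x distribution" comment suggests:

def random_oneOverX(low, high, n):
    # F(x) = ln(x/low) / ln(high/low); inverting gives x = low * (high/low)**u
    u = np.random.rand(n)
    return low * (high / low) ** u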
Ejemplo n.º 59
0
    def new_experiment(self, **experiment_info):
        #
        # load from file
        #
        self.ensure_loaded()

        # add basic data to the experiment
        # there are 3 levels:
        # - self.collection_keeper.local_data (root)
        # - self.experiment_keeper
        # - self.experiment_info_keeper
        self.experiment_keeper = self.collection_keeper.sub_record_keeper(
            experiment_number=self.
            prev_experiment_local_data["experiment_number"] +
            1 if not self.prev_experiment_local_data["had_error"] else
            self.prev_experiment_local_data["experiment_number"],
            error_number=self.prev_experiment_local_data["error_number"] + 1,
            had_error=True,
            experiment_start_time=now(),
        )
        # create experiment record keeper
        if len(experiment_info) == 0:
            self.experiment_info_keeper = self.experiment_keeper
        else:
            self.experiment_info_keeper = self.experiment_keeper.sub_record_keeper(
                **experiment_info)

        def save_experiment(_, error, traceback):
            # mutate the root one based on having an error or not
            no_error = error is None
            experiment_info = self.experiment_keeper.local_data
            experiment_info["experiment_end_time"] = now()
            experiment_info["experiment_duration"] = experiment_info[
                "experiment_end_time"] - experiment_info[
                    "experiment_start_time"]
            if no_error:
                experiment_info["had_error"] = False
                experiment_info["error_number"] = 0

            # refresh the all_record_keepers dict
            # especially after mutating the self.experiment_keeper.local_data
            # (this ends up acting like a set, but keys are based on mutable values)
            self.record_keepers = {
                super_hash(each_value): each_value
                for each_value in self.record_keepers.values()
            }

            #
            # save to file
            #
            # ensure folder exists
            import os
            os.makedirs(os.path.dirname(self.file_path), exist_ok=True)
            self.prev_experiment_local_data = self.experiment_keeper.local_data
            data = (self.collection_keeper.local_data,
                    self.experiment_keeper.local_data, self.record_keepers,
                    self._records)
            # update self in case multiple experiments are run without re-reading the file
            print("Saving " + str(len(self._records)) + " records")
            large_pickle_save(data, self.file_path)
            print("Records saved to: " + self.file_path)

            # re-throw the error
            if not no_error:
                print(
                    f'There was an error when running an experiment. Experiment collection: "{self.collection_name}"'
                )
                print(f'However, the partial experiment data was saved')
                experiment_number = self.experiment_keeper.local_data[
                    "experiment_number"]
                error_number = self.experiment_keeper.local_data[
                    "error_number"]
                print(
                    f'This happened on:\n    dict(experiment_number={experiment_number}, error_number={error_number})'
                )
                raise error

        return Experiment(experiment_info_keeper=self.experiment_info_keeper,
                          save_experiment=save_experiment)
Ejemplo n.º 60
0
	def animateStart(self):
		self.updateLine()
		self.startTime = now()
		self.animateUpdate()