def _merge_friends(self, friend1, friend2):
        print('These friends seem to be equal.', file=sys.stderr)
        print('''First friend:
{}
_______________
Second friend:
{}
'''.format(str(friend1), str(friend2)))

        if get_input('Do you want to merge them?') != 'y':
            return None
        print('\n', file=sys.stderr)

        # Start from a copy of friend1, then resolve field differences below.
        result = Friend(**vars(friend1))
        for attr in vars(friend1):
            if getattr(friend1, attr) != getattr(friend2, attr):
                if getattr(friend1, attr) == '':
                    setattr(result, attr, getattr(friend2, attr))
                elif getattr(friend2, attr) == '':
                    setattr(result, attr, getattr(friend1, attr))
                else:
                    print('Difference in {} field:\n'.format(
                        self.csv_fields[attr]))
                    print('User1 value: {} | User2 value: {}\n'.format(
                        getattr(friend1, attr), getattr(friend2, attr)))
                    res = get_input('What field is preferable?', ('1', '2'))
                    if res == '2':
                        setattr(result, attr, getattr(friend2, attr))
        sys.stdout.write('____________________\n')
        return result
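
Nearly every snippet on this page calls a project-local get_input helper, and its signature varies from project to project. For the prompt-and-validate flavour used above, a minimal sketch might look like the following (an illustration of the assumed contract, not any project's actual implementation):

import sys

def get_input(prompt, options=None, default=None):
    # Hypothetical helper: prompt on stderr, read an answer from stdin,
    # and re-ask until it is one of `options` (when options are given).
    while True:
        print(prompt, end=' ', file=sys.stderr, flush=True)
        answer = input().strip() or default
        if not options or answer in options:
            return answer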
Example #2
def prepare_input(tp, params, my_units=None):
    n_units = tp.shape[0]
    dt = params['dt_rate'] # [ms] time step for the non-homogenous Poisson process 

    time = np.arange(0, params['t_stimulus'], dt)

    if my_units is None:
        my_units = xrange(n_units)
    else:
        my_units = xrange(my_units[0], my_units[1])

    n_cells = len(my_units)
    L_input = np.zeros((n_cells, time.shape[0]))
#    offset = 100
    for i_time, time_ in enumerate(time):
        if (i_time % 100 == 0):
            print "t:", time_
#        i_time += offset
#        i_time = min(i_time, max(i_time, len(time)-1))
        L_input[:, i_time] = utils.get_input(tp[my_units, :], params, time_/params['t_sim'])

    for i_, unit in enumerate(my_units):
        output_fn = params['input_rate_fn_base'] + str(unit) + '.dat'
        print 'output_fn:', output_fn
        np.savetxt(output_fn, L_input[i_, :])
Example #3
	def get_maxlink(self, url):
		response = self.fetch(url)
		result = pw = None
		if not response:
			print 'No response'
		elif response.status == 302:
			result = response.headers['location']
		elif response.status == 200:
			if re.search('<title>.*Lỗi 404.*</title>|"index-404"', response.body):
				# "The file you requested does not exist!"
				utils.mess(u'Tập tin quý khách yêu cầu không tồn tại!', 'Fshare.vn')
				result = 'fail'
			elif 'sử dụng nhiều địa chỉ IP' in response.body:
				# "You are using multiple IP addresses to download!"
				utils.mess(u'Quý khách đang sử dụng nhiều địa chỉ IP để tải xuống!', 'Fshare.vn', 10000)
				result = 'fail'
			elif re.search('<i class="fa fa-star">', response.body):
				utils.mess('Your Fshare acc is FREE', 'Fshare.vn')

			if re.search('class="fa fa-lock"', response.body):
				# The file is password-protected; "Please enter: file password"
				pw = utils.get_input(u'Hãy nhập: Mật khẩu tập tin')
				if pw:
					try:
						data = {'fs_csrf': utils.xsearch('value="(.+?)" name="fs_csrf"', response.body),
							'DownloadForm[pwd]': pw, 'ajax': 'download-form',
							'DownloadForm[linkcode]': url.split('/')[4]}
						response = self.fetch('https://www.fshare.vn/download/get', data).json
					except Exception:
						response = {}

					if not response:
						utils.mess(u'Get maxspeed link fail!', 'Fshare.vn')
						result = 'fail'
					elif response.get('url'):
						result = response.get('url')
					elif response.get('DownloadForm_pwd'):
						# "Incorrect password!"
						utils.mess(u'Mật khẩu không chính xác!', 'Fshare.vn')
					else:
						print response
			elif re.search('action="/download/get"', response.body):
				href = 'https://www.fshare.vn' + utils.xsearch('action="(/download/get)"', response.body)
				fs_csrf = utils.xsearch('value="(.+?)" name="fs_csrf"', response.body)
				linkcode = utils.xsearch('id="DownloadForm_linkcode" type="hidden" value="(.+?)"', response.body)
				data = {'fs_csrf': fs_csrf, 'DownloadForm[pwd]': '',
					'DownloadForm[linkcode]': linkcode,
					'ajax': 'download-form', 'undefined': 'undefined'}
				response = self.fetch(href, data)
				try:
					result = response.json.get('url')
				except Exception:
					pass
		return result
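Example #4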
    def create_input_vectors(self, normalize=True):
        output_fn_base = self.training_input_folder + self.params['abstract_input_fn_base']
        n_cells = len(self.my_units)
        dt = self.params['dt_rate'] # [ms] time step for the non-homogenous Poisson process 
        time = np.arange(0, self.params['t_sim'], dt)
        L_input = np.zeros((n_cells, time.shape[0]))
        for i_time, time_ in enumerate(time):
            if (i_time % 100 == 0):
                print "t:", time_
            L_input[:, i_time] = utils.get_input(self.tuning_prop[self.my_units, :], self.params, time_/self.params['t_stimulus'], motion_params=self.params['motion_params'])

        for i_, unit in enumerate(self.my_units):
            output_fn = output_fn_base + str(unit) + '.dat'
            np.savetxt(output_fn, L_input[i_, :])

        if self.pc_id == 0:
            full_stim_input = '%sANNActivity/input_%d.dat' % (self.params['folder_name'], self.iteration)
            print 'Saving input for stim %d to %s' % (self.iteration, full_stim_input)
            np.savetxt(full_stim_input, L_input)

        if self.comm is not None:
            self.comm.barrier()

        if normalize:
            self.normalize_input(output_fn_base)

        if self.comm is not None:
            self.comm.barrier()
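Example #5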
    def create_input_vectors_blanking(self, t_blank=(0.25, 0.75), normalize=True):
        """
        Stimulus is calculated only until
            t_stop * self.params['t_stimulus']
        """
        output_fn_base = self.training_input_folder + self.params['abstract_input_fn_base']
        n_cells = len(self.my_units)
        dt = self.params['dt_rate'] # [ms] time step for the non-homogenous Poisson process 
        time = np.arange(0, self.params['t_sim'], dt)
        L_input = np.zeros((n_cells, time.shape[0]))

        # indices of the time bins during which the stimulus is blanked
        blank_idx = np.arange(int(time.shape[0] * t_blank[0]), int(time.shape[0] * t_blank[1]))

        for i_time, time_ in enumerate(time):
            if (i_time % 100 == 0):
                print "t:", time_
            L_input[:, i_time] = utils.get_input(self.tuning_prop[self.my_units, :], self.params, time_/self.params['t_stimulus'], motion_params=self.params['motion_params'])
        for i in blank_idx:
            L_input[:, i] = 0.
        for i_, unit in enumerate(self.my_units):
            output_fn = output_fn_base + str(unit) + '.dat'
            np.savetxt(output_fn, L_input[i_, :])

        if self.comm is not None:
            self.comm.barrier()
        if normalize:
            self.normalize_input(output_fn_base)
        if self.comm is not None:
            self.comm.barrier()
Example #6
def upload_file(stackname, local_path, remote_path=None, overwrite=False, confirm=False, node=1):
    remote_path = remote_path or os.path.join("/tmp", os.path.basename(local_path))
    overwrite = str(overwrite).lower() == "true"
    confirm = str(confirm).lower() == "true"
    node = int(node)
    with stack_conn(stackname, node=node):
        print('stack:', stackname, 'node', node)
        print('local:', local_path)
        print('remote:', remote_path)
        print('overwrite:', overwrite)
        if not confirm:
            utils.get_input('continue?')
        if files.exists(remote_path) and not overwrite:
            print('remote file exists, not overwriting')
            exit(1)
        put(local_path, remote_path)
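Example #7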
def main():
    found_words = []
    words_value = 0

    search_range = 2
    fail_counter = 0
    MAX_FAILS = 20

    t, n, s, puzzles, possible_solutions, all_puzzle_keys = get_input()
    start_time = time.time()
    dic = HmapStructure()
    with open('z2/dict.txt') as f:
        dic.load(f)

    max_len = len(all_puzzle_keys)
    population = init_population(dic, puzzles, 100, all_puzzle_keys,
                                 search_range)

    while check_time(start_time, t):  # run until the time budget t is exhausted
        prev_value = words_value
        words_value += search_for_words(found_words, population, dic, puzzles,
                                        search_range)
        if prev_value == words_value and search_range < max_len:
            fail_counter += 1
            if fail_counter == MAX_FAILS * search_range // 2:
                search_range += 1
                population = init_population(dic, puzzles, 100,
                                             all_puzzle_keys, search_range)
                fail_counter = 0
        else:
            fail_counter = 0

        population.recombinate()
        population.mutate()
        print(words_value, int(time.time() - start_time), search_range)
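Example #8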
def main():
    t, _, _, puzzles, starter_words, all_puzzle_keys = get_input()
    dic = HmapStructure()
    with open("dict.txt") as f:
        dic.load(f)

    search(dic, puzzles, all_puzzle_keys, t, starter_words)
Example #9
def main():
    t, n, s, puzzles, possible_solutions, all_puzzle_keys = get_input()
    dic = HmapStructure()
    with open('z2/dict.txt') as f:
        dic.load(f)

    search(dic, puzzles, all_puzzle_keys, t)
Example #10
 def input_age(self,input_test):
     try:
         self.age = int(get_input("Please type your age in years (1 to 110): ", input_test))
         if self.age < 1 or self.age > 110:
             self.input_age(input_test)
     except ValueError:
         print("Invalid age.")
         self.input_age(input_test)
Example #11
def token_create():
    _warning_root_token()
    token = utils.get_input('token display name: ')
    if not token or not token.strip():
        print("a token display name is required")
        sys.exit(1)
    cmd = "VAULT_ADDR=%s vault token create -policy=%s -display-name=%s" % (vault_addr(), vault_policy(), token)
    local(cmd)
Example #12
def configure():
    """
    Update config
    """
    jira_url = utils.get_input(raw_input, "Jira url")
    username = utils.get_input(raw_input, "username")
    password = utils.get_input(getpass.getpass, "password")
    error_reporting = 'n' not in raw_input(
        "Would you like to automatically report errors to help improve the software? [y]/N: ").lower()
    configuration._save_config(jira_url, username, password, error_reporting)

    try:
        connection.jira_connection(configuration.load_config())
    except jira_exceptions.JIRAError as e:
        configuration._delete_config()
        logging.error("You have an error in your jira connection/configuration: {error}. Please fix the configuration before attempting to use jtime.\n We suggest trying your username without using the email address.".format(error=e))
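Example #13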
    def create_input(self, load_files=False, save_output=False):
        # Either load precomputed input spike trains from disk or generate
        # them as inhomogeneous Poisson processes driven by the stimulus.
        if load_files:
            if self.pc_id == 0:
                print "Loading input spiketrains..."
            for i_, tgt in enumerate(self.local_idx_exc):
                try:
                    fn = self.params['input_st_fn_base'] + str(tgt) + '.npy'
                    spike_times = np.load(fn)
                except IOError: # this cell does not get any input
                    print "Missing file: ", fn
                    spike_times = []
                self.spike_times_container[i_] = spike_times
        else:
            if self.pc_id == 0:
                print "Computing input spiketrains..."
            nprnd.seed(self.params['input_spikes_seed'])
            dt = self.params['dt_rate'] # [ms] time step for the non-homogenous Poisson process 
            time = np.arange(0, self.params['t_sim'], dt)
            blank_idx = np.arange(1./dt * self.params['t_before_blank'], 1. / dt * (self.params['t_before_blank'] + self.params['t_blank'])).astype(int)

            my_units = self.local_idx_exc
            n_cells = len(my_units)
            L_input = np.zeros((n_cells, time.shape[0]))
            # get the input signal
            for i_time, time_ in enumerate(time):
                if (i_time % 500 == 0):
                    print "t:", time_
                L_input[:, i_time] = utils.get_input(self.tuning_prop_exc[my_units, :], self.params, time_/1000.)
#                L_input[:, i_time] = utils.get_input(self.tuning_prop_exc[my_units, :], self.params, time_/self.params['t_stimulus'])
                L_input[:, i_time] *= self.params['f_max_stim']
            # blanking 
            for i_time in blank_idx:
                L_input[:, i_time] = 0.

            # create the spike trains
            for i_, unit in enumerate(my_units):
                rate_of_t = np.array(L_input[i_, :]) 
                # each cell will get its own spike train stored in the following file + cell gid
                n_steps = rate_of_t.size
                spike_times = []
                for i in xrange(n_steps):
                    r = nprnd.rand()
                    if (r <= ((rate_of_t[i]/1000.) * dt)): # rate is given in Hz -> 1/1000.
                        spike_times.append(i * dt) 
                self.spike_times_container[i_] = spike_times
                if save_output:
                    output_fn = self.params['input_rate_fn_base'] + str(unit) + '.npy'
                    np.save(output_fn, rate_of_t)
                    output_fn = self.params['input_st_fn_base'] + str(unit) + '.npy'
                    np.save(output_fn, np.array(spike_times))
        self.times['create_input'] = self.timer.diff()
        return self.spike_times_container
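
The inner loop above draws one Bernoulli sample per time bin: a bin of width dt yields a spike with probability rate/1000 * dt (rate in Hz, dt in ms), which approximates an inhomogeneous Poisson process for small dt. The same idea can be written in vectorised form; a self-contained sketch (the function name is illustrative, not from the project):

import numpy as np

def poisson_spike_times(rate_hz, dt_ms):
    # Bernoulli thinning: spike in bin i with probability rate_hz[i] / 1000. * dt_ms
    r = np.random.rand(rate_hz.size)
    return np.nonzero(r <= rate_hz / 1000. * dt_ms)[0] * dt_ms

# e.g. roughly 50 spikes expected from a flat 50 Hz rate over 1000 ms in 0.1 ms bins:
# spikes = poisson_spike_times(np.full(10000, 50.), 0.1)

Example #14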
    def _test(self, check):
        if self.no_test:
            return True

        result, msg, responses = check.test()

        self.logger.info(msg)
        self.logger.debug('Check Test Result:\n%s' % pprint.pformat(responses))
        if not result:
            if utils.get_input('Ignore this check?', options=['y', 'n'], default='y') == 'y':
                return False
        return True
Example #15
def destroy(stackname):
    "Delete a stack of resources."
    print('this is a BIG DEAL. you cannot recover from this.')
    print('type the name of the stack to continue or anything else to quit')
    uin = utils.get_input('> ')
    if not uin or uin.strip().lower() != stackname.lower():
        import difflib
        print('you needed to type "%s" to continue.' % stackname)
        print('got:')
        print('\n'.join(difflib.ndiff([stackname], [uin])))
        exit(1)
    return bootstrap.destroy(stackname)
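Example #16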
    def migrate(self):
        self.logger.info('\nChecks')
        self.logger.info('------\n')

        for migrated_entity in self.migrator.migrated_entities:

            self.logger.info('Migrating checks for node %s\n' % migrated_entity.ck_node)

            rs_checks = self.rs_api.list_checks(migrated_entity.rs_entity)
            for ck_check in self.ck_api.list_checks(migrated_entity.ck_node):

                self.logger.info('Migrating Check %s' % (ck_check))

                try:
                    check = MigratedCheck(migrated_entity, ck_check, monitoring_zones=self.monitoring_zones, rs_checks_cache=rs_checks)
                except UnsupportedCheckType as e:
                    self.logger.info(e)
                    self.logger.info('')
                    continue

                action, result = check.save(commit=False)
                if action == 'Created':
                    if self._test(check):
                        self.logger.info('Creating new check:\n%s' % (pprint.pformat(result)))
                        if self.auto or utils.get_input('Create this check?', options=['y', 'n'], default='y') == 'y':
                            check.save()
                            migrated_entity.migrated_checks.append(check)
                elif action == 'Updated':
                    if self._test(check):
                        self.logger.info('Updating check %s - changes:\n%s' % (check.rs_check.id, pprint.pformat(result)))
                        if self.auto or utils.get_input('Update this check?', options=['y', 'n'], default='y') == 'y':
                            check.save()
                            migrated_entity.migrated_checks.append(check)
                else:
                    self.logger.info('No changes needed for check %s' % (check.rs_check.id))
                    migrated_entity.migrated_checks.append(check)

                self.logger.info('')
            self.logger.info('')
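Example #17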
    def migrate(self):
        """
        adds or updates entities in rs from nodes in ck
        """
        self.logger.info('\nEntities')
        self.logger.info('------\n')

        for ck_node in self.ck_api.list_nodes():
            self.logger.info('Migrating Cloudkick Node - %s' % ck_node)

            # set up obj and see if there are any changes necessary
            entity = MigratedEntity(self.migrator, ck_node)
            action, result = entity.save(commit=False)

            # print action and prompt for commit
            if action == 'Created':
                self.logger.info('Creating new entity:\n%s' % (pprint.pformat(result)))
                if self.auto or utils.get_input('Create this entity?', options=['y', 'n'], default='y') == 'y':
                    try:
                        entity.save()
                    except Exception as e:
                        self.logger.error('Exception creating entity:\n%s' % e)
                    else:
                        self.migrator.migrated_entities.append(entity)
            elif action == 'Updated':
                self.logger.info('Updating entity %s - changes:\n%s' % (entity.rs_entity.id, pprint.pformat(result)))
                if self.auto or utils.get_input('Update this entity?', options=['y', 'n'], default='y') == 'y':
                    try:
                        entity.save()
                    except Exception as e:
                        self.logger.error('Exception updating entity:\n%s' % e)
                    else:
                        self.migrator.migrated_entities.append(entity)
            else:
                self.logger.info('No changes needed for entity %s' % (entity.rs_entity.id))
                self.migrator.migrated_entities.append(entity)

            self.logger.info('')
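Example #18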
    def migrate(self):
        self.logger.info('\nAlarms')
        self.logger.info('------\n')
        self.logger.info('NOTE: You must have at least one active notification endpoint applied to the')
        self.logger.info('Cloudkick monitor or alarms will not be created. (You can do this in Cloudkick')
        self.logger.info('and re-run the script)\n')

        for migrated_entity in self.migrator.migrated_entities:
            for migrated_check in migrated_entity.migrated_checks:

                alarm = MigratedAlarm.create_from_migrated_check(migrated_check)

                self.logger.info('Node: %s' % migrated_check.ck_node)
                self.logger.info('Check: %s' % migrated_check.ck_check)

                if not alarm:
                    self.logger.info('No alarm to create\n')
                    continue

                self.logger.info('Alarm: %s' % alarm)
                self.logger.debug('Alarm Criteria:\n%s' % alarm._alarm_cache['criteria'])
                action, result = alarm.save(commit=False)
                if action in ['Created', 'Updated']:
                    if not self.no_test:
                        valid, msg, results = alarm.test()
                        self.logger.info(msg)
                        self.logger.debug('%s' % (pprint.pformat(results)))
                        if not valid:
                            if utils.get_input('Ignore this alarm?', options=['y', 'n'], default='y') == 'y':
                                continue
                    if self.auto or utils.get_input('Save this alarm?', options=['y', 'n'], default='y') == 'y':
                        action, _ = alarm.save()
                        self.logger.info('%s alarm %s' % (action, alarm.rs_alarm.id))
                else:
                    self.logger.info('Found alarm %s' % alarm.rs_alarm.id)

                self.logger.info('')
Example #19
    def create_input(self):

        nprnd.seed(self.params['input_spikes_seed'])
        dt = self.params['dt_rate'] # [ms] time step for the non-homogenous Poisson process 
        self.time = np.arange(0, self.params['t_sim'], dt)
        blank_idx = np.arange(1./dt * self.params['t_stimulus'], 1. / dt * (self.params['t_stimulus'] + self.params['t_blank'])).astype(int)

        L_input = np.zeros((self.n_cells, self.time.shape[0]))
        for i_time, time_ in enumerate(self.time):
            if (i_time % 1000 == 0):
                print "t:", time_
            L_input[:, i_time] = utils.get_input(self.tuning_prop_exc[self.gids_to_plot, :], self.params, time_/self.params['t_stimulus'])
            L_input[:, i_time] *= self.params['f_max_stim']

        L_input_noblank = L_input.copy()
        for i_time in blank_idx:
            L_input[:, i_time] = 0.

        # create input with blank
        for i_, gid in enumerate(self.gids_to_plot):
            rate_of_t = L_input[i_, :]
            n_steps = rate_of_t.size
            spike_times= []
            for i in xrange(n_steps):
                r = nprnd.rand()
                if (r <= ((rate_of_t[i]/1000.) * dt)): # rate is given in Hz -> 1/1000.
                    spike_times.append(i * dt) 
            self.input_spike_trains[gid]['with_blank'] = spike_times
            self.input_rate[gid]['with_blank'] = L_input[i_, :]

        # create input without blank
        print 'GID\tTime of max stim'
        for i_, gid in enumerate(self.gids_to_plot):
            rate_of_t = L_input_noblank[i_, :]
            n_steps = rate_of_t.size
            spike_times= []
            for i in xrange(n_steps):
                r = nprnd.rand()
                if (r <= ((rate_of_t[i]/1000.) * dt)): # rate is given in Hz -> 1/1000.
                    spike_times.append(i * dt) 
            self.input_spike_trains[gid]['no_blank'] = spike_times
            self.input_rate[gid]['no_blank'] = L_input_noblank[i_, :]

            self.time_of_max_stim[gid] = L_input_noblank[i_, :].argmax() * dt
            print '%d\t%.1f' % (gid, L_input_noblank[i_, :].argmax() * dt)
Example #20
	def get_maxlink(self, url):
		response = self.fetch(url)
		result = pw = None
		if not response:
			print 'No response'
		elif response.status == 302:
			result = response.headers['location']
		elif response.status == 200:
			if re.search('<title>.*Lỗi 404.*</title>|"index-404"', response.body):
				# "The file you requested does not exist!"
				utils.mess(u'Tập tin quý khách yêu cầu không tồn tại!', 'Fshare.vn')
				result = 'fail'
			elif re.search('<i class="fa fa-star">', response.body):
				utils.mess('Your Fshare acc is FREE', 'Fshare.vn')

			if re.search('class="fa fa-lock"', response.body):
				# "Please enter: file password"
				pw = utils.get_input(u'Hãy nhập: Mật khẩu tập tin')
			if pw:
				try:
					data = {'fs_csrf': utils.xsearch('value="(.+?)" name="fs_csrf"', response.body),
						'DownloadForm[pwd]': pw, 'ajax': 'download-form',
						'DownloadForm[linkcode]': url.split('/')[4]}
					response = self.fetch('https://www.fshare.vn/download/get', data).json
				except Exception:
					response = {}

				if not response:
					utils.mess(u'Get maxspeed link fail!', 'Fshare.vn')
					result = 'fail'
				elif response.get('url'):
					result = response.get('url')
				elif response.get('DownloadForm_pwd'):
					# "Incorrect password!"
					utils.mess(u'Mật khẩu không chính xác!', 'Fshare.vn')
				else:
					print response
		return result
Example #21
try:
    from mpi4py import MPI
    USE_MPI = True
    comm = MPI.COMM_WORLD
    pc_id, n_proc = comm.rank, comm.size
    print "USE_MPI:", USE_MPI, 'pc_id, n_proc:', pc_id, n_proc
except ImportError:
    USE_MPI = False
    pc_id, n_proc, comm = 0, 1, None
    print "MPI not used"

#my_units = utils.distribute_n(params['n_exc'], n_proc, pc_id)

for j_, blur_x in enumerate(blur_x_range):
    L_input = np.zeros((n_cells, time.shape[0]))
    params['blur_X'], params['blur_V'] = blur_x, blur_v
    print 'Blur', params['blur_X'], params['blur_V']
    tuning_prop = utils.set_tuning_prop(params, mode='hexgrid', v_max=params['v_max'])        # set the tuning properties of exc cells: space (x, y) and velocity (u, v)
    for i_time, time_ in enumerate(time):
        if (i_time % 100 == 0):
            print "t:", time_
        L_input[:, i_time] = utils.get_input(tuning_prop, params, time_/params['t_sim'])
#        L_input[:, i_time] = utils.get_input(tuning_prop[my_units, :], params, time_/params['t_sim'])
#        L_input[:, i_time] *= params['f_max_stim']
    for cell in xrange(n_cells):
        L_net[j_, cell+2] = L_input[cell, :].sum()

L_net_output_fn = 'L_net.dat'
np.savetxt(L_net_output_fn, L_net)
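Example #22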

def handle_signal(signal_number, stack_frame):
    # Notification fired: read the queued message and print it.
    message, priority = mq.receive()

    print ("Ding! Message with priority %d received: %s" % (priority, message))

    # Re-register for notifications
    mq.request_notification(MY_SIGNAL)
    

# Create the message queue.
mq = posix_ipc.MessageQueue(utils.QUEUE_NAME, posix_ipc.O_CREX)

# Request notifications
mq.request_notification(MY_SIGNAL)

# Register my signal handler 
signal.signal(MY_SIGNAL, handle_signal)

# Get user input and send it to the queue.
msg = "42"
while msg:
    print ("\nEnter a message. A blank message will end the demo:")
    msg = utils.get_input()
    if msg:
        mq.send(msg)

print ("Destroying the message queue.")
mq.close()
# I could simply call mq.unlink() here, but in order to demonstrate
# unlinking at the module level I'll do it this way.
posix_ipc.unlink_message_queue(utils.QUEUE_NAME)
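Example #23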


def process_notification(mq):
    message, priority = mq.receive()
    
    print ("Ding! Message with priority %d received: %s" % (priority, message))
    


# Create the message queue.
mq = posix_ipc.MessageQueue(utils.QUEUE_NAME, posix_ipc.O_CREX)

# Request notifications
mq.request_notification( (process_notification, mq) )

# Get user input and send it to the queue.
print ("Enter a message:")
mq.send(utils.get_input())

# The callback happens almost instantly, but if I don't pause at least 
# briefly then the main thread may exit before the notification fires.
print ("Sleeping for one second to allow the notification to happen.")
time.sleep(1)

print ("Destroying the message queue.")
mq.close()
# I could simply call mq.unlink() here, but in order to demonstrate
# unlinking at the module level I'll do it this way.
posix_ipc.unlink_message_queue(utils.QUEUE_NAME)
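
The posix_ipc demos call utils.get_input() with no further arguments; here it acts as a Python 2/3 input shim. A plausible sketch (an assumption based on the call sites, not necessarily the demo's exact code):

def get_input(prompt=""):
    # raw_input() existed only on Python 2; fall back to input() on Python 3.
    try:
        return raw_input(prompt)
    except NameError:
        return input(prompt)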

Example #24
 def _sync_config(self):
     indices  = self._config.get('ES_INDICES',  [])
     mappings = self._config.get('ES_MAPPINGS', [])
     
     utils.log()
     utils.log("-" * 80)
     utils.log("[%s] SYNCING CONFIG:" % self)
     utils.log()
     utils.log("Desired indices:  %s" % pformat(indices))
     utils.log("Actual indices:   %s" % pformat(self.indices))
     utils.log()
     utils.log("Desired mappings: %s" % pformat(mappings))
     utils.log("Actual mappings:  %s" % pformat(self.mappings))
     utils.log("-" * 80)
     utils.log()
     
     # sync indices
     # ------------
     missing  = []
     remove   = []
     
     # find all extraneous indices
     for index in self.indices:
         found = False
         
         for index2 in indices:
             if index == index2['name']:
                 found = True
                 break
         
         if not found:
             remove.append(index)
     
     # find all missing indices
     for index in indices:
         name  = index['name']
         found = False
         
         for index2 in self.indices:
             if name == index2:
                 found = True
                 break
         
         if not found:
             missing.append(index)
     
     # remove all extraneous indices
     for index in remove:
         if not self._force:
             utils.log("[%s] remove invalid index '%s'?" % (self, index))
             response = utils.get_input()
             
             if response == 'n': # no
                 continue
             elif response == 'a': # abort
                 sys.exit(1)
         
         self._delete_index(index)
     
     # add all missing indices
     for index in missing:
         name = index['name']
         
         if not self._force:
             utils.log("[%s] add new index '%s'?" % (self, name))
             response = utils.get_input()
             
             if response == 'n': # no
                 continue
             elif response == 'a': # abort
                 sys.exit(1)
         
         self._create_index(name, index.get('settings', None))
     
     # sync mappings
     # -------------
     missing   = []
     remove    = []
     blacklist = frozenset(['boost', 'index'])
     
     def _prune(d):
         if isinstance(d, dict):
             return dict((k, _prune(v)) for k, v in d.iteritems() if k not in blacklist)
         else:
             return d
     
     # find all extraneous mappings
     for index, types in self.mappings.iteritems():
         for doc_type, properties in types.iteritems():
             prefix = 'remove invalid'
             found = False
             
             for mapping2 in mappings:
                 if index in mapping2['indices'] and doc_type == mapping2['type']:
                     if deep_comparison(_prune(mapping2['mapping']), _prune(properties)):
                         found = True
                         break
                     else:
                         prefix = 'remove stale'
             
             if not found:
                 remove.append((index, doc_type, prefix))
     
     # find all missing mappings
     for mapping2 in mappings:
         properties2 = _prune(mapping2['mapping'])
         type2 = mapping2['type']
         
         for index2 in mapping2['indices']:
             prefix = 'add new'
             found  = False
             
             for index, types in self.mappings.iteritems():
                 if index == index2:
                     for doc_type, properties in types.iteritems():
                         if doc_type == mapping2['type']:
                             if deep_comparison(properties2, _prune(properties)):
                                 found = True
                                 break
                             else:
                                 prefix = 'update'
                 
                 if found:
                     break
             
             if not found:
                 missing.append((mapping2, prefix))
     
     # remove all extraneous mappings
     for index, doc_type, prefix in remove:
         if not self._force:
             utils.log("[%s] %s mapping '%s:%s'?" % (self, prefix, index, doc_type))
             response = utils.get_input()
             
             if response == 'n': # no
                 continue
             elif response == 'a': # abort
                 sys.exit(1)
         
         self._delete_mapping(index, doc_type)
     
     # add all missing mappings
     for mapping, prefix in missing:
         doc_type = mapping['type']
         
         for index in mapping['indices']:
             if not self._force:
                 utils.log("[%s] %s mapping '%s:%s'?" % (self, prefix, index, doc_type))
                 response = utils.get_input()
                 
                 if response == 'n': # no
                     continue
                 elif response == 'a': # abort
                     sys.exit(1)
             
             properties = mapping['mapping']
             self._put_mapping(doc_type, properties, [ index ])
     
     self.update()
Example #25
 def input_name(self,input_test):
     self.name = get_input("What is your name: ", input_test)
Example #26
 def input_gender(self,input_test):
     self.gender = get_input("Please type your gender (M/F): ", input_test)
     if self.gender not in ("M", "F"):
         self.input_gender(input_test)
Example #27
File: 8.py Project: medecau/aoc
from utils import get_input
import re


# sample data from the problem statement; replaced by the real input below
lines = r'''""
"abc"
"aaa\"aaa"
"\x27"'''.splitlines()

lines = get_input('8.txt').split('\n')
for l in lines:
    print
    print l
    print l.decode('string-escape')
    print re.escape(l)
string_code = sum(len(l) for l in lines)
memory_characters = sum(len(l[1:-1].decode('string_escape')) for l in lines)
encoded_strings = sum(len(re.escape(l)) + 2 for l in lines)
print string_code, memory_characters, encoded_strings
print string_code - memory_characters
print encoded_strings - string_code
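
The Advent of Code scripts use yet another get_input variant, one that takes a filename and returns the puzzle text. A minimal sketch of what these call sites appear to assume (hypothetical):

def get_input(filename):
    # Read the puzzle input and drop the trailing newline.
    with open(filename) as f:
        return f.read().rstrip('\n')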
Example #28
File: 9.py Project: medecau/aoc
from utils import get_input
import itertools


lines = """London to Dublin = 464
London to Belfast = 518
Dublin to Belfast = 141""".splitlines()

lines = get_input('9.txt').splitlines()

pairs = [l.split(' = ') for l in lines]
pairs = [tuple([sorted(c.split(' to ')), d]) for c, d in pairs]

all_cities = [cities[0] for cities, distance in pairs]
all_cities += [cities[1] for cities, distance in pairs]
all_cities = set(all_cities)

permutations = itertools.permutations(all_cities, len(all_cities))

distances = list()
for p in permutations:
    current = 0
    for i in range(len(p) - 1):
        pair = sorted(p[i:i + 2])
        for e in pairs:
            if pair == e[0]:
                distance = e[1]
                break
        current += int(distance)
    distances.append(current)

print min(distances), max(distances)
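Example #29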
    def create_input(self, load_files=False, save_output=False):

        if load_files:
            if self.pc_id == 0:
                print "Loading input spiketrains..."
            for i_, tgt in enumerate(self.local_idx_exc):
                try:
                    fn = self.params["input_st_fn_base"] + str(tgt) + ".npy"
                    spike_times = np.load(fn)
                except IOError:  # this cell does not get any input
                    print "Missing file: ", fn
                    spike_times = []
                self.spike_times_container[i_] = spike_times
        else:
            if self.pc_id == 0:
                print "Computing input spiketrains..."
            nprnd.seed(self.params["input_spikes_seed"])
            dt = self.params["dt_rate"]  # [ms] time step for the non-homogenous Poisson process
            time = np.arange(0, self.params["t_sim"], dt)
            blank_idx = np.arange(
                1.0 / dt * self.params["t_before_blank"],
                1.0 / dt * (self.params["t_before_blank"] + self.params["t_blank"]),
            ).astype(int)
            before_stim_idx = np.arange(0, self.params["t_start"] * 1.0 / dt).astype(int)
            blank_idx = np.concatenate((blank_idx, before_stim_idx))

            my_units = self.local_idx_exc
            n_cells = len(my_units)
            L_input = np.zeros((n_cells, time.shape[0]))

            # get the input signal
            print "Calculating input signal"
            for i_time, time_ in enumerate(time):
                L_input[:, i_time] = utils.get_input(
                    self.tuning_prop_exc[my_units, :], self.params, time_ / self.params["t_stimulus"]
                )
                L_input[:, i_time] *= self.params["f_max_stim"]
                if i_time % 500 == 0:
                    print "t:", time_
            #                    print 'L_input[:, %d].max()', L_input[:, i_time].max()
            # blanking: scramble the input across cells instead of zeroing it
            for i_time in blank_idx:
                #                L_input[:, i_time] = 0.
                L_input[:, i_time] = np.random.permutation(L_input[:, i_time])

            # create the spike trains
            print "Creating input spiketrains for unit"
            for i_, unit in enumerate(my_units):
                print unit,
                rate_of_t = np.array(L_input[i_, :])
                # each cell will get its own spike train stored in the following file + cell gid
                n_steps = rate_of_t.size
                spike_times = []
                for i in xrange(n_steps):
                    r = nprnd.rand()
                    if r <= ((rate_of_t[i] / 1000.0) * dt):  # rate is given in Hz -> 1/1000.
                        spike_times.append(i * dt)
                self.spike_times_container[i_] = spike_times
                if save_output:
                    output_fn = self.params["input_rate_fn_base"] + str(unit) + ".npy"
                    np.save(output_fn, rate_of_t)
                    output_fn = self.params["input_st_fn_base"] + str(unit) + ".npy"
                    np.save(output_fn, np.array(spike_times))

        self.times["create_input"] = self.timer.diff()
        return self.spike_times_container
Example #30
File: 2.py Project: medecau/aoc
import itertools
from utils import get_input

dimensions_list = get_input('2.txt').split('\n')

total_area = 0
total_ribbon = 0
for dims in dimensions_list:
    dims = sorted(map(int, dims.split('x')))

    smallest_perimeter = (dims[0] + dims[1]) * 2
    volume = reduce(lambda x, y: x * y, dims)
    total_ribbon += volume + smallest_perimeter

    sides = list(itertools.combinations(dims, 2))

    areas = map(lambda side: side[0] * side[1], sides)
    areas += areas
    areas.append(areas[0])

    total_area += sum(areas)

print total_area, total_ribbon
Example #31
File: 3.py Project: medecau/aoc
from utils import get_input

moves = get_input('3.txt')

x, y = 0, 0
log = [tuple([x, y])]
for move in moves:
    if move == '^':
        y += 1
    elif move == 'v':
        y -= 1
    elif move == '<':
        x -= 1
    elif move == '>':
        x += 1
    log.append(tuple([x, y]))

houses = ['%dx%d' % h for h in log]
unique_houses = set(houses)
print len(unique_houses)


santa = dict(x=0, y=0, log=[tuple([0, 0])])
robot = dict(x=0, y=0, log=[tuple([0, 0])])

queue = [santa, robot]
for move in moves:
    current = queue.pop(0)
    queue.append(current)
    if move == '^':
        current['y'] += 1
    elif move == 'v':
        current['y'] -= 1
    elif move == '<':
        current['x'] -= 1
    elif move == '>':
        current['x'] += 1
    current['log'].append(tuple([current['x'], current['y']]))

print len(set(santa['log'] + robot['log']))
Example #32
def remaster_all(new_master_stackname):
    LOG.info('new master is: %s', new_master_stackname)
    ec2stacks = project.ec2_projects()
    ignore = [
        'master-server',
        'jats4r',
    ]
    ec2stacks = exsubdict(ec2stacks, ignore)

    def sortbypname(n):
        unknown = 9
        porder = {
            #'observer': 1,
            'elife-metrics': 2,
            'lax': 3,
            'basebox': 4,
            'containers': 5,
            'elife-dashboard': 6,
            'elife-ink': 7
        }
        return porder.get(n, unknown)

    # pname_list = sorted(ec2stacks.keys(), key=sortbypname)
    pname_list = sorted(ec2stacks.keys()) # let's do this alphabetically

    # only update ec2 instances in the same region as the new master
    region = utils.find_region(new_master_stackname)
    active_stacks = core.active_stack_names(region)
    stack_idx = mkidx(lambda v: core.parse_stackname(v)[0], active_stacks)

    def sortbyenv(n):
        adhoc = 0 # do these first
        order = {
            'continuumtest': 1,
            'ci': 2,
            'end2end': 3,
            'prod': 4, # update prod last
        }
        pname, iid = core.parse_stackname(n)
        return order.get(iid, adhoc)

    remastered_list = open('remastered.txt', 'r').read().splitlines() if os.path.exists('remastered.txt') else []

    for pname in pname_list:
        if pname not in stack_idx:
            continue
        project_stack_list = sorted(stack_idx[pname], key=sortbyenv)
        LOG.info("%r instances: %s" % (pname, ", ".join(project_stack_list)))
        try:
            for stackname in project_stack_list:
                try:
                    if stackname in remastered_list:
                        LOG.info("already updated, skipping stack: %s", stackname)
                        open('remastered.txt', 'a').write("%s\n" % stackname)
                        continue
                    LOG.info("*" * 80)
                    LOG.info("updating: %s" % stackname)
                    utils.get_input('continue? ctrl-c to quit')
                    if not remaster(stackname, new_master_stackname):
                        LOG.warn("failed to remaster %s, stopping further remasters to project %r", stackname, pname)
                        break
                    open('remastered.txt', 'a').write("%s\n" % stackname)
                except KeyboardInterrupt:
                    LOG.warn("ctrl-c, skipping stack: %s", stackname)
                    time.sleep(1)
                except BaseException:
                    LOG.exception("unhandled exception updating stack: %s", stackname)
        except KeyboardInterrupt:
            LOG.warn("quitting")
            break

    LOG.info("wrote 'remastered.txt'")
Example #33
File: 1.py Project: medecau/aoc
from utils import get_input


address = get_input("1.txt")
current_floor = 0
basement_char = None

for n, c in enumerate(address):
    if c == "(":
        current_floor += 1
    elif c == ")":
        current_floor -= 1

    if current_floor < 0 and basement_char is None:
        basement_char = n + 1

print current_floor, basement_char