def test_take_safer_path(self):
    """The snake should pick the route that avoids the opposing head."""
    data = default_data()
    me = {
        "id": "me",
        "head": (3, 3),
        "body": [(3, 3), (3, 2), (3, 1), (2, 1)],
        "health": 100,
    }
    opponent = {
        "id": "them",
        "head": (2, 4),
        "body": [(2, 4), (1, 4), (0, 4), (0, 3), (0, 2), (0, 1), (0, 0)],
        "health": 100,
    }
    merge_dict(data, {
        "board": {
            "width": 5,
            "height": 5,
            "snakes": [me, opponent],
        },
        "you": me,
    })
    snake = Snake(data)
    # Board layout (top row first):
    # |■|■|>| | |
    # |■| | |^| |
    # |■| | |ø| |
    # |■| |ø|ø| |
    # |■| | | | |
    self.assertEqual(snake.next_move(), "right")
def test_avoid_pathing_to_long_snake_head(self):
    """Never path toward the head of a snake long enough to win a clash."""
    data = default_data()
    me = {
        "id": "me",
        "head": (2, 3),
        "body": [(2, 3), (2, 4), (1, 4)],
        "health": 100,
    }
    opponent = {
        "id": "them",
        "head": (0, 3),
        "body": [(0, 3), (0, 2), (1, 2), (1, 1), (1, 0)],
        "health": 100,
    }
    merge_dict(data, {
        "board": {
            "width": 5,
            "height": 5,
            "snakes": [me, opponent],
        },
        "you": me,
    })
    snake = Snake(data)
    # Board layout (top row first):
    # | |ø|ø| | |
    # |^| |v| | |
    # | |■|■| | |
    # | | |■| | |
    # | | |■| | |
    self.assertNotEqual(snake.next_move(), "left")
def assign_code(values_list, code=''):
    """
    Method to assign the codes for the Balanced Tree Encoding

    NOTE(review): this function CONSUMES `values_list` — the caller's list is
    emptied by the weighted draws below. It also uses the values themselves as
    sampling weights, so values are presumably positive numbers — confirm.
    An empty input list recurses forever; callers must pass >= 1 value.

    :param values_list: list of values to encode
    :param code: the code for the current "node"
    :return: values_to_codes - dict with list of codes for every value,
             max_len - the length of the codes
    """
    values_to_codes = {}
    # Leaf case: one remaining value receives the accumulated code.
    if len(values_list) == 1:
        values_to_codes[values_list[0]] = [code]
        return values_to_codes, len(code)
    right_side = False
    right_list = []
    left_list = []
    # Draw values one at a time (weighted by their own magnitude) and deal
    # them alternately to the left and right subtrees, keeping them balanced.
    while len(values_list):
        value = random.choices(values_list, weights=values_list, k=1).pop()
        values_list.remove(value)
        if right_side:
            right_list.append(value)
            right_side = False
        else:
            left_list.append(value)
            right_side = True
    # Left subtree codes extend with '0', right subtree codes with '1'.
    values_to_codes_left, max_len_left = assign_code(left_list, code + '0')
    values_to_codes_right, max_len_right = assign_code(right_list, code + '1')
    max_len = max(max_len_right, max_len_left)
    # merge_dict here is a project helper taking the max code length as a
    # third argument — presumably to pad/align codes; verify against its def.
    values_to_codes = merge_dict(values_to_codes_left, values_to_codes_right, max_len)
    return values_to_codes, max_len
def get(klass, options=None):
    """Fetch userinfo via POST to the class path.

    A bare string argument is shorthand for {'access_token': <string>}.

    Fix: the former mutable default `options={}` was a single dict shared
    across every call; replaced with the None sentinel.
    """
    if options is None:
        options = {}
    if isinstance(options, str):
        options = {'access_token': options}
    options = util.merge_dict({'schema': 'openid'}, options)
    return klass.post(klass.path, options)
def logout_url(options=None, api=None):
    """Build the end-session (logout) URL for the given api."""
    if api is None:
        api = default_api()
    defaults = {'logout': 'true', 'redirect_uri': redirect_uri(api)}
    merged = util.merge_dict(defaults, options or {})
    return session_url(end_session_path, merged, api=api)
def authorize_url(options=None):
    """Build the OpenID Connect authorization URL.

    Fix: the mutable default `options={}` was shared across calls and could
    be mutated by callers; replaced with the None sentinel.
    """
    options = util.merge_dict({
        'response_type': 'code',
        'scope': 'openid',
        'client_id': client_id(),
        'redirect_uri': redirect_uri()
    }, options or {})
    return util.join_url_params(start_session_path, options)
def delete(self, action, headers=None):
    """Issue a DELETE request against `action` under the API endpoint."""
    merged_headers = util.merge_dict(self.headers(), headers or {})
    target = util.join_url(self.endpoint, action)
    return self.request(target, 'DELETE', headers=merged_headers or {})
def get(cls, options=None):
    """Fetch userinfo; a bare string argument is taken as the access token."""
    opts = {} if options is None else options
    if isinstance(opts, string_types):
        opts = {'access_token': opts}
    opts = util.merge_dict({'schema': 'openid'}, opts)
    return cls.post(cls.path, opts)
def get(cls, options=None, api=None):
    """Fetch userinfo through `api`; a bare string is the access token."""
    opts = {} if options is None else options
    if isinstance(opts, string_types):
        opts = {'access_token': opts}
    opts = util.merge_dict({'schema': 'openid'}, opts)
    if api is None:
        api = default_api()
    return cls.post(cls.path, opts, api=api)
def headers(self):
    """Default HTTP headers layered on top of the common signature."""
    extra = {
        "content-type": "application/x-www-form-urlencoded",
        "user-agent": self.user_agent,
    }
    return util.merge_dict(self.make_common_signature(), extra)
def post(klass, action, options=None, headers=None):
    """POST form-encoded `options` to `action` and wrap the reply in `klass`.

    :param action: path segment joined onto the endpoint
    :param options: form fields to send (default: empty)
    :param headers: extra HTTP headers merged over the defaults

    Fix: the mutable defaults `options={}` / `headers={}` were shared across
    every call; replaced with None sentinels.
    """
    url = util.join_url(endpoint(), action)
    body = util.urlencode(options or {})
    headers = util.merge_dict({
        'User-Agent': klass.user_agent,
        'Content-Type': 'application/x-www-form-urlencoded'}, headers or {})
    data = api.default().http_call(url, 'POST', body=body, headers=headers)
    return klass(data)
def post(cls, action, options=None, headers=None, api=None):
    """POST form-encoded `options` to `action` and wrap the reply in `cls`."""
    if api is None:
        api = default_api()
    base_headers = {
        'User-Agent': cls.user_agent,
        'Content-Type': 'application/x-www-form-urlencoded'}
    merged_headers = util.merge_dict(base_headers, headers or {})
    url = util.join_url(endpoint(api), action)
    payload = util.urlencode(options or {})
    data = api.http_call(url, 'POST', data=payload, headers=merged_headers)
    return cls(data, api=api)
def authorize_url(options=None, api=None):
    """Build the OpenID Connect authorization URL for the given api."""
    if api is None:
        api = default_api()
    defaults = {
        'response_type': 'code',
        'scope': 'openid',
        'client_id': client_id(api),
        'redirect_uri': redirect_uri(api),
    }
    merged = util.merge_dict(defaults, options or {})
    return session_url(start_session_path, merged, api=api)
def headers(self):
    """Combine the common signature with the default form-post headers."""
    form_headers = {
        "content-type": "application/x-www-form-urlencoded",
        "user-agent": self.user_agent,
    }
    signature = self.make_common_signature()
    return util.merge_dict(signature, form_headers)
def create(klass, options=None):
    """Exchange an authorization code for tokens.

    A bare string argument is shorthand for {'code': <string>}.

    Fix: mutable default `options={}` replaced with the None sentinel so a
    shared dict is never reused (and possibly mutated) across calls.
    """
    if options is None:
        options = {}
    if isinstance(options, str):
        options = {'code': options}
    options = util.merge_dict({
        'grant_type': 'authorization_code',
        'client_id': client_id(),
        'client_secret': client_secret()
    }, options)
    return klass.post(klass.path, options)
def create_with_refresh_token(klass, options=None):
    """Redeem a refresh token for a new access token.

    A bare string argument is shorthand for {'refresh_token': <string>}.

    Fix: mutable default `options={}` replaced with the None sentinel.
    """
    if options is None:
        options = {}
    if isinstance(options, str):
        options = {'refresh_token': options}
    options = util.merge_dict({
        'grant_type': 'refresh_token',
        'client_id': client_id(),
        'client_secret': client_secret()
    }, options)
    return klass.post(klass.path, options)
def get(self, action, headers=None):
    """Make GET request

    Usage::

        >>> api.get("v1/payments/payment?count=1")
        >>> api.get("v1/payments/payment/PAY-1234")
    """
    target = util.join_url(self.endpoint, action)
    merged_headers = util.merge_dict(self.headers(), headers or {})
    return self.request(target, 'GET', headers=merged_headers or {})
def post(self, action, params=None, headers=None):
    """Make POST request

    Usage::

        >>> api.post("v1/payments/payment", { 'indent': 'sale' })
        >>> api.post("v1/payments/payment/PAY-1234/execute", { 'payer_id': '1234' })
    """
    target = util.join_url(self.endpoint, action)
    merged_headers = util.merge_dict(self.headers(), headers or {})
    return self.request(target, 'POST',
                        body=params or {},
                        headers=merged_headers or {})
def create(cls, options=None):
    """Exchange an authorization code for tokens.

    A bare string argument is shorthand for {'code': <string>}.
    """
    opts = options or {}
    if isinstance(opts, string_types):
        opts = {'code': opts}
    payload = util.merge_dict({
        'grant_type': 'authorization_code',
        'client_id': client_id(),
        'client_secret': client_secret()
    }, opts)
    return cls.post(cls.path, payload)
def create_with_refresh_token(cls, options=None, api=None):
    """Obtain a fresh token pair using a refresh token via `api`.

    A bare string argument is shorthand for {'refresh_token': <string>}.
    """
    opts = options or {}
    if api is None:
        api = default_api()
    if isinstance(opts, string_types):
        opts = {'refresh_token': opts}
    payload = util.merge_dict({
        'grant_type': 'refresh_token',
        'client_id': client_id(api),
        'client_secret': client_secret(api)
    }, opts)
    return cls.post(cls.path, payload, api=api)
def create(cls, options=None, api=None):
    """Exchange an authorization code for tokens via `api`.

    A bare string argument is shorthand for {'code': <string>}.
    """
    opts = options or {}
    if api is None:
        api = default_api()
    if isinstance(opts, string_types):
        opts = {'code': opts}
    payload = util.merge_dict({
        'grant_type': 'authorization_code',
        'client_id': client_id(api),
        'client_secret': client_secret(api)
    }, opts)
    return cls.post(cls.path, payload, api=api)
def create_with_refresh_token(cls, options=None):
    """Redeem a refresh token for a new access token.

    A bare string argument is shorthand for {'refresh_token': <string>}.
    """
    opts = {} if options is None else options
    if isinstance(opts, string_types):
        opts = {'refresh_token': opts}
    payload = util.merge_dict({
        'grant_type': 'refresh_token',
        'client_id': client_id(),
        'client_secret': client_secret()
    }, opts)
    return cls.post(cls.path, payload)
def test_head_to_head_worse_than_hazards(self):
    """Risking a head-to-head collision is worse than entering hazards."""
    data = default_data()
    me = {
        "id": "me",
        "head": (4, 2),
        "body": [(4, 2), (3, 2), (3, 1), (2, 1), (2, 2)],
        "health": 100,
    }
    opponent = {
        "id": "them",
        "head": (4, 4),
        "body": [(4, 4), (3, 4), (3, 5), (4, 5), (4, 6), (3, 6)],
        "health": 100,
    }
    merge_dict(data, {
        "board": {
            "width": 7,
            "height": 7,
            "hazards": [(0, 0), (0, 1), (0, 2), (0, 3), (0, 4), (0, 5), (0, 6)],
            "snakes": [me, opponent],
        },
        "you": me,
    })
    snake = Snake(data)
    # Board layout (top row first):
    # |■| | |■|■| | |
    # |■| | |■|■| | |
    # |■| | |■|>| | |
    # |■| | | | | | |
    # |■| |ø|ø|>| | |
    # |■| |ø|ø| | | |
    # |■| | | | | | |
    self.assertEqual(snake.next_move(), "down")
def test_safe_tail(self):
    """A tail square counts as safe because it vacates next turn."""
    data = default_data()
    merge_dict(data, {
        "board": {
            "width": 2,
            "height": 2,
            "snakes": [{
                "id": "me",
                "health": 99,
                "body": [(1, 1), (1, 0), (0, 0), (0, 1)],
            }],
        },
    })
    board = Board(data)
    # |o|<|
    # |o|o|
    self.assertEqual([(0, 1)], board.safe_neighbors((1, 1)))
def test_avoid_long_snake_bodies(self):
    """Route toward food away from a longer snake's body."""
    data = default_data()
    me = {
        "id": "me",
        "health": 100,
        "head": (1, 0),
        "body": [(1, 0), (1, 1), (2, 1)],
    }
    opponent = {
        "id": "them",
        "head": (3, 2),
        "body": [(3, 2), (3, 1), (3, 0), (4, 0), (4, 1)],
        "health": 100,
    }
    merge_dict(data, {
        "board": {
            "width": 5,
            "height": 5,
            "food": [(0, 0)],
            "snakes": [me, opponent],
        },
        "you": me,
    })
    snake = Snake(data)
    # Board layout (top row first):
    # | | | | | |
    # | | | | | |
    # | | | |^| |
    # | |ø|ø|■|■|
    # |o|v| |■|■|
    self.assertEqual(snake.next_move(), "left")
def test_navigate_around_obstacle(self):
    """A* should route around a snake body blocking the direct path."""
    data = default_data()
    merge_dict(data, {
        "board": {
            "snakes": [{
                "id": "them",
                "health": 100,
                "body": [(1, 0), (1, 1)],
            }],
        },
    })
    board = Board(data)
    origin = (0, 0)
    target = (2, 0)
    # | | | |
    # | |█| |
    # |S|█|G|
    expected = [origin, (0, 1), (0, 2), (1, 2), (2, 2), (2, 1), target]
    self.assertEqual(a_star(board, origin, target), expected)
def __init__(self, options=None, **args):
    """Initialize the API object from `options` merged with keyword overrides.

    Keyword arguments win over the `options` dict. Recognized keys:
    mode, endpoint, token_endpoint, client_id, client_secret, ssl_options,
    token.

    Fix: mutable default `options={}` replaced with the None sentinel so the
    default dict is never shared (or mutated) across instances.
    """
    args = util.merge_dict(options or {}, args)
    self.mode = args.get("mode", "sandbox")
    self.endpoint = args.get("endpoint", self.default_endpoint())
    self.token_endpoint = args.get("token_endpoint", self.endpoint)
    self.client_id = args.get("client_id")
    self.client_secret = args.get("client_secret")
    self.ssl_options = args.get("ssl_options", {})
    # Token cache: filled lazily, or seeded below from an explicit "token".
    self.token_hash = None
    self.token_request_at = None
    if args.get("token"):
        self.token_hash = {
            "access_token": args.get("token"),
            "token_type": "Bearer"
        }
    self.options = args
def request(self, url, method, body=None, headers=None, refresh_token=None):
    """Make HTTP call, formats response and does error handling.
    Uses http_call method in API class.

    Usage::

        >>> api.request("https://api.sandbox.paypal.com/v1/payments/payment?count=10", "GET", {})
        >>> api.request("https://api.sandbox.paypal.com/v1/payments/payment", "POST", "{}", {} )
    """
    http_headers = util.merge_dict(
        self.headers(refresh_token=refresh_token, headers=headers or {}),
        headers or {})
    if http_headers.get('PayPal-Request-Id'):
        log.info('PayPal-Request-Id: %s' % (http_headers['PayPal-Request-Id']))
    self._check_openssl_version()
    try:
        return self.http_call(url, method, data=json.dumps(body), headers=http_headers)
    # Format Error message for bad request
    except exceptions.BadRequest as error:
        return {"error": json.loads(error.content)}
    # Handle Expired token: invalidate the cached token and retry once.
    except exceptions.UnauthorizedAccess as error:
        if (self.token_hash and self.client_id):
            self.token_hash = None
            # FIX: the retry previously dropped `refresh_token`, so a call
            # authenticated via a refresh token retried with the wrong auth.
            return self.request(url, method, body, headers,
                                refresh_token=refresh_token)
        else:
            raise error
def __init__(self, options=None, **kwargs):
    """Create API object

    Keyword arguments win over keys in `options`. `app_id`, `private_key`,
    `alipay_public_key`, `sign_type` and `charset` are mandatory and raise
    KeyError when absent.

    Usage::

        >>> api = paypalrestsdk.Api(mode="sandbox", client_id='CLIENT_ID', client_secret='CLIENT_SECRET', ssl_options={"cert": "/path/to/server.pem"})
    """
    kwargs = util.merge_dict(options or {}, kwargs)
    self.mode = kwargs.get("mode", "sandbox")
    # Only the two known environments are accepted.
    if self.mode != "live" and self.mode != "sandbox":
        raise exceptions.InvalidConfig("Configuration Mode Invalid", "Received: %s" % (self.mode), "Required: live or sandbox")
    self.endpoint = kwargs.get("endpoint", self.default_endpoint())
    self.token_endpoint = kwargs.get("token_endpoint", self.endpoint)
    # Mandatory parameter, so not using `dict.get`
    self.app_id = kwargs["app_id"]
    # Mandatory parameter, so not using `dict.get`
    # Keys are supplied bare; the PEM armor is wrapped around them here.
    self.private_key = '-----BEGIN PRIVATE KEY-----\n{0}\n-----END PRIVATE KEY-----'.format(
        kwargs["private_key"])
    self.alipay_public_key = u'-----BEGIN PUBLIC KEY-----\n{0}\n-----END PUBLIC KEY-----'.format(
        kwargs["alipay_public_key"])
    self.sign_type = kwargs["sign_type"]
    self.charset = kwargs["charset"]
    self.proxies = kwargs.get("proxies", None)
    # Token cache; populated lazily, or seeded from an explicit "token" below.
    self.token_hash = None
    self.token_request_at = None
    # setup SSL certificate verification if private certificate provided
    # NOTE(review): this mutates process-wide environment state, affecting
    # every `requests` call in the process — confirm that is intended.
    ssl_options = kwargs.get("ssl_options", {})
    if "cert" in ssl_options:
        os.environ["REQUESTS_CA_BUNDLE"] = ssl_options["cert"]
    if kwargs.get("token"):
        self.token_hash = {
            "access_token": kwargs["token"],
            "token_type": "Bearer"
        }
    self.options = kwargs
def request(self, url, method, body=None, headers=None):
    """Make an HTTP call, format the response, and handle errors.

    Fix: mutable default `headers={}` replaced with the None sentinel; a
    shared default dict could be mutated across calls.
    """
    http_headers = util.merge_dict(self.headers(), headers or {})
    if http_headers.get('PayPal-Request-Id'):
        logging.info('PayPal-Request-Id: %s' % (http_headers['PayPal-Request-Id']))
    try:
        return self.http_call(url, method, body=body, headers=http_headers)
    # Format Error message for bad request
    except BadRequest as error:
        return {"error": json.loads(error.content)}
    # Handle expired token: invalidate the cached token and retry once
    except UnauthorizedAccess as error:
        if (self.token_hash and self.client_id):
            self.token_hash = None
            return self.request(url, method, body, headers)
        else:
            raise error
def encode_classes(node, code, level, value_attr, unique_values):
    """
    Encode all the children nodes of the current node, builds two dictionaries
    that keep the encoding

    :param node: the node of the tree beginning with each to encode
    :param code: the code to be assigned to the node
    :param level: the level to be assigned to the node
    :param value_attr: the attribute of the node that designates the value
    :param unique_values: boolean value that indicates if the values are
        uniques for each node or not. If unique_values true, the value should
        be a container
    :return: two dictionaries:
        - between values and codes (if unique_values false -> value of the
          dictionary - list)
        - between codes and values
    """
    values_to_codes = {}
    codes_to_values = {}
    # FIX: initialize the per-child results so a non-leaf node with only one
    # child (or none) no longer raises NameError when merging below.
    values_to_codes_left, codes_to_values_left = {}, {}
    values_to_codes_right, codes_to_values_right = {}, {}
    initial_level = node.level
    node.change_level(level)
    node.assign_code(code)
    # Right subtree extends the code with '1', left subtree with '0'.
    if node.right_node:
        values_to_codes_right, codes_to_values_right = encode_classes(
            node.right_node, code + '1', level - 1, value_attr, unique_values)
    if node.left_node:
        values_to_codes_left, codes_to_values_left = encode_classes(
            node.left_node, code + '0', level - 1, value_attr, unique_values)
    # Leaf (original level 0): record this node's own value/code pair.
    if initial_level == 0:
        value = getattr(node, value_attr)
        if unique_values:
            # NOTE(review): .pop() mutates the node's value container —
            # confirm callers do not reuse it afterwards.
            value = value.pop()
            values_to_codes[value] = node.code
        else:
            values_to_codes[value] = [node.code]
        codes_to_values[node.code] = value
        return values_to_codes, codes_to_values
    if unique_values:
        values_to_codes = {**values_to_codes_right, **values_to_codes_left}
    else:
        values_to_codes = merge_dict(values_to_codes_left, values_to_codes_right)
    codes_to_values = {**codes_to_values_left, **codes_to_values_right}
    return values_to_codes, codes_to_values
def logout_url(self, options=None):
    """Logout URL that invalidates this token's id_token."""
    merged = util.merge_dict({'id_token': self.id_token}, options or {})
    return logout_url(merged)
def visualize(self, iter):
    """Render and optionally display/save a diagnostic image for iteration `iter`.

    The image is composed of an upper region (per-sample step-by-step draws,
    gradients and debug plots) stacked above a lower region (pool images,
    best generated outputs, and real data grids).

    NOTE(review): images are assumed to be float arrays in [0, 1] with RGB
    channel order (they are flipped to BGR and scaled by 255 for OpenCV) —
    confirm against the rest of the class.
    """
    progress = float(iter) / self.cfg.max_iter_step
    lower_regions = []
    pool_images, pool_states, pool_features = self.memory.records_to_images_states_features(
        self.memory.image_pool[:self.cfg.num_samples])
    # In supervised mode each pool record carries (image, ground_truth).
    if self.cfg.supervised:
        gt0 = [x[1] for x in pool_images]
        pool_images = [x[0] for x in pool_images]
    else:
        gt0 = None
    lower_regions.append(pool_images)
    # Generated data
    feed_dict = merge_dict(self.fixed_feed_dict_random, {
        self.is_train: self.cfg.test_random_walk,
        self.progress: progress
    })
    eval_images = []
    eval_states = []
    gt1 = self.fixed_feed_dict_random[self.ground_truth]
    # Roll the generator forward for test_steps, feeding each output back in.
    for i in range(self.cfg.test_steps):
        output_images, output_states = self.sess.run(
            [self.fake_output, self.new_states], feed_dict=feed_dict)
        feed_dict[self.fake_input] = output_images
        feed_dict[self.states] = output_states
        eval_images.append(output_images)
        eval_states.append(output_states)
    best_outputs = []
    best_indices = []
    # For each sample, pick the first step whose reward state went positive
    # (falling back to the last step).
    for i in range(self.cfg.num_samples):
        best_index = self.cfg.test_steps - 1
        for j in range(self.cfg.test_steps):
            if eval_states[j][i][STATE_REWARD_DIM] > 0:
                best_index = j
                break
        best_image = eval_images[best_index][i]
        best_indices.append(best_index + 1)
        best_outputs.append(best_image)
    lower_regions.append(best_outputs)
    # Real data
    lower_regions.append(self.fixed_feed_dict_random[self.real_data])
    if self.cfg.vis_draw_critic_scores:
        lower_regions[0] = self.draw_critic_scores(
            lower_regions[0], ground_truth=gt0)
        lower_regions[1] = self.draw_critic_scores(
            lower_regions[1], ground_truth=gt1)
        if not self.cfg.supervised:
            lower_regions[2] = self.draw_critic_scores(lower_regions[2])
    # Overlay the pool state vector and the chosen step index as text.
    for img, state in zip(lower_regions[0], pool_states):
        cv2.putText(img, str(state), (4, 33), cv2.FONT_HERSHEY_SIMPLEX,
                    0.25, (1.0, 0.0, 0.0))
    for img, ind in zip(lower_regions[1], best_indices):
        cv2.putText(img, str(ind), (23, 23), cv2.FONT_HERSHEY_SIMPLEX,
                    0.5, (1.0, 0.0, 0.0))
    lower_regions = list(map(make_image_grid, lower_regions))
    seperator = np.ones(
        (lower_regions[0].shape[0], 16, lower_regions[0].shape[2]),
        dtype=np.float32)
    lower_region = np.hstack([
        lower_regions[0], seperator, lower_regions[1], seperator,
        lower_regions[2]
    ])
    upper_region = np.ones_like(lower_region)
    per_row = lower_region.shape[1] // (self.generator_debugger.width + 4)
    # The upper part
    h, w = self.cfg.source_img_size, self.cfg.source_img_size
    images = []
    debug_plots = []
    gradients = []
    rows = lower_region.shape[0] // (h + 2) // 3
    groups_per_row = per_row // (self.cfg.test_steps + 1)
    per_row = (self.cfg.test_steps + 1) * groups_per_row
    gts = []
    for j in range(min(self.cfg.num_samples, rows * groups_per_row)):
        if self.cfg.supervised:
            img_gt = self.memory.get_next_RAW(1, test=self.cfg.vis_step_test)[0][0]
            img, gt = img_gt[0], img_gt[1]
        else:
            img = self.memory.get_next_RAW(1)[0][0]
            gt = None
        # z is useless at test time...
        images_, debug_plots_, gradients_ = self.draw_steps(
            img,
            ground_truth=gt,
            is_train=self.cfg.test_random_walk,
            progress=progress)
        images += images_
        if self.cfg.supervised:
            gts += [gt] * len(images_)
            # In supervised mode the "gradients" row shows ground truth.
            gradients_ = [gt] * len(images_)
        debug_plots += debug_plots_
        gradients += gradients_
    if not self.cfg.supervised:
        gradients = self.gradient_processor(np.stack(gradients, axis=0))
    pad = 0
    # Tile (image, gradient, debug plot) triplets row by row.
    for i in range(rows):
        for j in range(per_row):
            start_x, start_y = pad + 3 * i * (h + 2), pad + j * (w + 4)
            index = i * per_row + j
            if index < len(images):
                upper_region[start_x:start_x + h,
                             start_y:start_y + w] = images[index]
                upper_region[start_x + h + 1:start_x + h * 2 + 1,
                             start_y:start_y + w] = gradients[index]
                upper_region[start_x + 2 * (h + 1):start_x + h * 3 + 2,
                             start_y:start_y + w] = debug_plots[index]
    seperator = np.ones(
        (16, upper_region.shape[1], upper_region.shape[2]), dtype=np.float32)
    upper_region = np.vstack([seperator, upper_region, seperator])
    img = np.vstack([upper_region, lower_region])
    if self.cfg.realtime_vis:
        # OpenCV expects BGR; flip the channel axis for display.
        cv2.imshow('vis', img[:, :, ::-1])
        cv2.waitKey(20)
    if iter % self.cfg.write_image_interval == 0:
        fn = os.path.join(self.image_dir, '%06d.png' % iter)
        cv2.imwrite(fn, img[:, :, ::-1] * 255.0)
def http_headers(self):
    """Build per-request HTTP headers, adding a fresh PayPal request id."""
    request_id = {'PayPal-Request-Id': self.generate_request_id()}
    return util.merge_dict(self.header, self.headers, request_id)
def raw_node_dict_to_formatted_node_dict(dic: Dict):
    """Format a raw node dict by attaching an empty 'level' mapping."""
    empty_level = {'level': {}}
    return merge_dict(dic, empty_level, lambda grouped: next(grouped))
def merge_level(acc_level: Dict[str, Any], act_level: Dict[str, Any]) -> Dict[str, Any]:
    """Merge two level dicts, unioning the sets stored under 'level'."""
    combined = merge_dict(acc_level, act_level, lambda grouped: next(grouped))
    level_rules = {'level': lambda values: reduce(union_sets, values, {})}
    return modify_dict(combined, level_rules, lambda grouped: next(grouped))
def refresh(self, options=None):
    """Refresh this token in place using its refresh_token and return self.

    Fix: mutable default `options={}` replaced with the None sentinel so a
    shared dict is never reused across calls.
    """
    options = util.merge_dict({'refresh_token': self.refresh_token}, options or {})
    tokeninfo = self.__class__.create_with_refresh_token(options)
    self.merge(tokeninfo.to_dict())
    return self
def raw_edge_dict_to_formatted_edge_dict(dic: Dict[str, Any]):
    """Format a raw edge dict: drop 'level'/'type' and re-key the levels
    under the edge's type.

    Fix: `set(dict.get('level', []))` called the unbound `get` of the builtin
    `dict` class with a string receiver (TypeError at runtime); it must read
    from the `dic` argument.
    """
    return merge_dict(
        del_keys(dic, {'level', 'type'}),
        {'level': {
            dic.get('type', ''): set(dic.get('level', []))
        }})
def http_headers(self):
    """Merge the base header, instance headers, and a unique request id."""
    extra = {'PayPal-Request-Id': self.generate_request_id()}
    return util.merge_dict(self.header, self.headers, extra)
def logout_url(self, options=None):
    """Logout URL carrying this token's id_token.

    Fix: mutable default `options={}` replaced with the None sentinel.
    """
    options = util.merge_dict({'id_token': self.id_token}, options or {})
    return logout_url(options)
hashtag = textdict['text'].lower() if hashtag not in hashtags: hashtags[hashtag] = 1 else: hashtags[hashtag] += 1 if language_code not in language: language[language_code] = 1 else: language[language_code] += 1 all_hashtags = comm.gather(hashtags, root=0) all_language = comm.gather(language, root=0) print(f"allhash: {len(all_hashtags)} , alllang: {len(all_language)}") if comm_rank == 0: final_hashtags = util.merge_dict(all_hashtags) final_language = util.merge_dict(all_language) print("prepared to sort data") sorted_hashtags = sorted(final_hashtags, key=final_hashtags.get, reverse=True)[0:10] sorted_language = sorted(final_language, key=final_language.get, reverse=True)[0:10] # print out results print("Top hashtags:") for i in range(10): print( f'{i+1:2d}. #{sorted_hashtags[i]}, {final_hashtags[sorted_hashtags[i]]}'
def decls(smpls):
    """Return the merged `decls` dicts of all samples, memoized.

    The result is cached in the module-level `_decls`; subsequent calls
    ignore `smpls` and return the cached dict. NOTE(review): assumes the
    sample set is fixed for the process lifetime — confirm.

    :param smpls: iterable of sample objects exposing a `decls` attribute
    :return: dict merged from every sample's `decls`
    """
    global _decls
    if not _decls:
        declss = map(op.attrgetter("decls"), smpls)
        _decls = util.merge_dict(declss)
    return _decls
def logout_url(self, options=None, api=None):
    """Logout URL that invalidates this token's id_token via `api`."""
    merged = util.merge_dict({'id_token': self.id_token}, options or {})
    return logout_url(merged, api=api)
def objs(smpls):
    """Merge the `objs` dicts of every sample into a single dict."""
    return util.merge_dict(smpl.objs for smpl in smpls)
def userinfo(self, options=None):
    """Fetch the userinfo profile for this token's access_token.

    Fix: mutable default `options={}` replaced with the None sentinel.
    """
    options = util.merge_dict({'access_token': self.access_token}, options or {})
    return Userinfo.get(options)
def userinfo(self, options=None, api=None):
    """Fetch the userinfo profile for this token's access_token via `api`."""
    merged = util.merge_dict({'access_token': self.access_token}, options or {})
    return Userinfo.get(merged, api=api)
def execute(node, previous, experiment_folder):
    """ Execute a task defined by the given node in the experiment graph.

    Python 2 code (print statements). Dispatches on `node.tag` to the module
    implementing that experiment stage, accumulates its runtime into the
    module-global `execution_time`, and threads the stage outputs through
    `exp_param`.

    Parameters
    ----------
    node : Element
        The node to be executed.

    previous : dict (or list of dict)
        Dictionary of the experiment's running-time variables after the end
        of the parent node's execution. May be a list of dictionaries in the
        special case of a fusion node, which has more than one parent.

    experiment_folder : string
        String with the path to the experiment folder, where the files of the
        experiment will be saved.

    Returns
    -------
    exp_param : dict
        The updated dictionary of the experiment's running-time variables
        after the node's execution.
    """
    global execution_time
    global tex_path
    global tex_dict
    global openset_experiment
    exp_param = previous
    # Node parameters are stored as a literal (dict/list) in the XML attribute.
    parameters = ast.literal_eval(node.get("parameters"))
    node_id = node.attrib['id']
    #Get node name
    node_name = node.get('name')
    if node.tag == "collection":
        print "Collection", exp_param.keys()
        images, classes, extract_path, read_time = \
            read_collection.main(node_name, openset_experiment, parameters,
                                 node_id)
        execution_time += read_time
        exp_param['images'] = images
        exp_param['classes'] = classes
        exp_param['extract_path'] = extract_path
    elif node.tag == "train_test_method":
        print "train_test_method", exp_param.keys()
        images = exp_param['images']
        classes = exp_param['classes']
        images, classes, train_test_list, train_test_time = \
            train_test.main(images, classes, experiment_folder, node_name,
                            parameters, openset_experiment, node_id)
        execution_time += train_test_time
        exp_param['images'] = images
        exp_param['classes'] = classes
        exp_param['train_test_list'] = train_test_list
        exp_param['train_test_method'] = node_name
        exp_param['train_test_parameters'] = parameters
    elif node.tag == "descriptor":
        print "descriptor", exp_param.keys()
        images = exp_param['images']
        extract_path = exp_param['extract_path']
        classes_keys = exp_param['classes'].keys()
        # "bag" and "bovg" descriptors additionally need the train/test split.
        if node_name == "bag":
            train_test_list = exp_param['train_test_list']
            images, extract_time = extract_bag.main(
                images, train_test_list, extract_path, experiment_folder,
                parameters, node_id)
        elif node_name == "bovg":
            train_test_list = exp_param['train_test_list']
            images, extract_time = extract_bovg.main(
                images, train_test_list, extract_path, experiment_folder,
                parameters, node_id)
        else:
            images, extract_time = extract_features.main(
                images, classes_keys, extract_path, node_name, parameters,
                node_id)
        execution_time += extract_time
        exp_param['images'] = images
        exp_param['descriptor'] = node_name
    elif node.tag == "normalizer":
        # NOTE(review): bare except hides the real error; it only reports
        # "Missing Input" — confirm this is the intended failure mode.
        try:
            manager = Manager()
            images = manager.dict(exp_param['images'])
            train_test_list = exp_param['train_test_list']
        except:
            print "\n\tMissing Input. Exiting."
            sys.exit(1)
        norm_fv_paths, normalize_time = normalize_features.main(
            images, train_test_list, experiment_folder, node_name, parameters,
            node_id)
        execution_time += normalize_time
        # After normalization the features live on disk, not in memory.
        del exp_param['images']
        exp_param['fv_paths'] = norm_fv_paths
    elif node.tag == "classifier":
        try:
            classes = exp_param['classes']
            train_test_list = exp_param['train_test_list']
            descriptor = exp_param['descriptor']
            # Prefer pre-normalized feature files; otherwise dump the
            # in-memory features to disk first.
            try:
                fv_paths = exp_param['fv_paths']
                del exp_param['fv_paths']
            except:
                images = exp_param['images']
                fv_paths = util.save_file_extract(images, train_test_list,
                                                  experiment_folder)
        except:
            print "\n\tMissing Input. Exiting."
            sys.exit(1)
        images, classes_list, classify_time = classify.main(
            fv_paths, classes.keys(), train_test_list, experiment_folder,
            node_name, parameters, descriptor, node_id)
        execution_time += classify_time
        exp_param['images'] = images
        exp_param['classes_list'] = classes_list
    elif node.tag == "fusion_method":
        # Fusion nodes receive a LIST of parent exp_param dicts.
        len_exp_param = len(exp_param)
        #list with the images dictionaries, classes dictionaries, and train and
        # test set list
        list_images = []
        list_classes = []
        list_train_test = []
        extract_path = exp_param[INDEX_ZERO]['extract_path']
        for index in range(len_exp_param):
            try:
                list_images.append(exp_param[index]['images'])
            except:
                # Parent kept its features on disk: reload and merge them.
                images = {}
                for fv_path in exp_param[index]['fv_paths']:
                    print "fv_path:", fv_path
                    images_new = util.read_fv_file(fv_path)
                    images = util.merge_dict(images, images_new)
                list_images.append(images)
            list_classes.append(exp_param[index]['classes'])
            #In case that it performs the fusion of collections, there is no
            # train_test_list
            try:
                list_train_test.append(exp_param[index]['train_test_list'])
            except:
                list_train_test.append(None)
        #classes_list is present only after the classification module
        try:
            classes_list = exp_param[INDEX_ZERO]['classes_list']
        except:
            classes_list = None
        try:
            train_test_method = exp_param[INDEX_ZERO]['train_test_method']
            train_test_parameters = exp_param[INDEX_ZERO][
                'train_test_parameters']
        except:
            train_test_method = None
            train_test_parameters = None
        images, classes, train_test_list, fusion_time = \
            fusion.main(list_images, list_classes, list_train_test,
                        classes_list, experiment_folder, node_name,
                        parameters, node_id)
        execution_time += fusion_time
        # Rebuild exp_param from scratch with the fused results.
        exp_param = {}
        exp_param['images'] = images
        exp_param['classes'] = classes
        if train_test_list is not None:
            exp_param['train_test_list'] = train_test_list
        if classes_list is not None:
            exp_param['classes_list'] = classes_list
        if train_test_method is not None:
            exp_param['train_test_method'] = train_test_method
            exp_param['train_test_parameters'] = train_test_parameters
        exp_param['descriptor'] = None
        exp_param['extract_path'] = extract_path
    elif node.tag == "evaluation_measure":
        try:
            images = exp_param['images']
            train_test_list = exp_param['train_test_list']
            classes_list = exp_param['classes_list']
        except:
            print "\n\tMissing Input. Exiting."
            sys.exit(1)
        evaluation_time, evaluation_path = evaluation.main(
            images, train_test_list, classes_list, experiment_folder,
            node_name, parameters, node_id)
        execution_time += evaluation_time
        #Dictionaries to create the tex file
        train_test_method = exp_param['train_test_method']
        train_test_parameters = str(exp_param['train_test_parameters'])
        if train_test_method not in tex_dict:
            tex_dict[train_test_method] = {}
        train_test_dict = tex_dict[train_test_method]
        if train_test_parameters not in train_test_dict:
            train_test_dict[train_test_parameters] = {}
        output_dict = train_test_dict[train_test_parameters]
        if node_name not in output_dict:
            output_dict[node_name] = []
        list_output = [evaluation_path, classes_list[0], node_id]
        if list_output not in output_dict[node_name]:
            output_dict[node_name].append(list_output)
        train_test_dict[train_test_parameters] = output_dict
        tex_dict[train_test_method] = train_test_dict
    elif node.tag == "preprocessing":
        images = exp_param['images']
        classes = exp_param['classes']
        images, classes, preprocessing_time = preprocessing.main(
            images, classes, experiment_folder, node_name, parameters,
            node_id)
        execution_time += preprocessing_time
        exp_param['images'] = images
        exp_param['classes'] = classes
    else:
        print "Error. Unknown Tag."
        sys.exit(1)
    return exp_param
def refresh(self, options=None, api=None):
    """Refresh this token in place via its refresh_token; returns self."""
    request_opts = util.merge_dict(
        {'refresh_token': self.refresh_token}, options or {})
    refreshed = self.__class__.create_with_refresh_token(request_opts, api=api)
    self.merge(refreshed.to_dict())
    return self
def logout_url(options=None):
    """Build the end-session (logout) URL.

    Fix: mutable default `options={}` replaced with the None sentinel.
    """
    options = util.merge_dict({
        'logout': 'true',
        'redirect_uri': redirect_uri()
    }, options or {})
    return util.join_url_params(end_session_path, options)
def logout_url(options=None):
    """Build the end-session (logout) URL."""
    defaults = {'logout': 'true', 'redirect_uri': redirect_uri()}
    merged = util.merge_dict(defaults, options or {})
    return session_url(end_session_path, merged)
def userinfo(self, options=None):
    """Fetch the userinfo profile for this token's access_token."""
    merged = util.merge_dict({'access_token': self.access_token}, options or {})
    return Userinfo.get(merged)