def _parse(self, page: BeautifulSoup, url):
    """Collect transcript URLs from the wiki article, grouped by season.

    Walks the siblings of the article's first <h2>: each <h2> sets the
    current season name, and each <table> contributes its 'Transcript'
    links.  Links belonging to 'Equestria Girls' seasons are collected
    separately and appended under their own key at the end.
    """
    seasons = OrderedDict()
    eqg_links = OrderedSet()
    node = page.select_one('#WikiaArticle h2')
    current_season = node.text
    while node.next_sibling:
        node = node.next_sibling
        if node.name == 'h2':
            current_season = node.text
        elif node.name == 'table':
            for anchor in node.find_all('a', string='Transcript'):
                # "new" class marks red links (pages that do not exist yet).
                if anchor.has_attr('class') and 'new' in anchor['class']:
                    continue
                link, _fragment = urldefrag(anchor['href'])
                link = urljoin(url, link)
                if 'Equestria Girls' in current_season:
                    eqg_links.append(link)
                else:
                    seasons.setdefault(current_season, OrderedSet()).append(link)
    seasons['Equestria Girls'] = eqg_links
    return seasons
def _index_tile(self, tx, ty):
    """Collect the colors of tile (tx, ty) and record which palettes can render it.

    Returns False when this single tile uses more colors than fit into one
    palette; otherwise stores the candidate palette indices for the tile
    (creating a new palette when no existing palette contains all of the
    tile's colors) and returns True.
    """
    current_local_tile_palette = OrderedSet()
    logger.debug("[%s] Processing tile %d x %d", self._id, tx, ty)
    # Collect all colors and try to fit them in palettes
    for y in range(ty * self._tile_height, (ty + 1) * self._tile_height):
        for x in range(tx * self._tile_width, (tx + 1) * self._tile_width):
            cc_idx = self.img.getpixel((x, y))  # palette index of this pixel
            cc = self.colors[cc_idx]            # resolved color value
            if not self._pixels_to_ignore[y * self.img.width + x]:
                current_local_tile_palette.append(cc)
            if len(current_local_tile_palette) > self._colors_per_palette:
                # We don't even have to continue... this single tile already has too many colors.
                logger.info(
                    "[%s] Tile %d x %d contains to many colors, aborting...",
                    self._id, tx, ty)
                return False
    # Get possible palettes of the tile
    possible_palettes = self._get_suitable_palettes(current_local_tile_palette)
    if len(possible_palettes) < 1:
        # No palette contains all colors... we need to create a new one.
        possible_palettes = [len(self.palettes)]
        self.palettes.append(OrderedSet(current_local_tile_palette))
    logger.debug("[%s] Tile %d x %d can use palettes %s",
                 self._id, tx, ty, possible_palettes)
    self.palettes_for_tiles[self._tile_coord(tx, ty)] = possible_palettes
    return True
def send():
    """Post links of not-yet-sent YouTube videos to the Telegram channel.

    Diffs the list fetched from YouTube against the locally cached
    'urls.json', sends each new link, persists what was actually sent, and
    returns a short status string.
    """
    try:
        scope = ['https://www.googleapis.com/auth/youtube']
        fromInet = YoutubeClient(scope).fetch_links_to_all_videos()
        fromFile = load_data_from_file('urls.json')
        # Videos seen online that were not yet sent (insertion order kept).
        queue = OrderedSet(fromInet) - OrderedSet(fromFile)
        ready = OrderedSet()
        env = config.load('config.prod.yml')
        bot = Bot(env['TM_TOKEN'])
        channel = env['CHANNEL']
        for msg in queue:
            # Best effort: a message that fails is retried on the next run.
            with suppress(TelegramError):
                bot.sendMessage(channel, msg, disable_notification=True)
                ready.append(msg)
                sleep(randint(2, 5))  # presumably throttling for rate limits -- verify
        lost = len(queue) - len(ready)
        # Persist only what was actually delivered.
        save_as_json_to_file(fromFile + list(ready), 'urls.json')
        return f'Can\'t send {lost} of {len(queue)} videos' if lost else 'ok'
    except HttpError as e:
        print('An HTTP error %d occurred:\n%s' % (e.resp.status, e.content))
def _parse(self, page: BeautifulSoup, url):
    """Collect transcript URLs from the wiki article, grouped by season.

    Walks the siblings of the article's first <h2>: <h2> headers set the
    current season, <table> elements contribute 'Transcript' links.
    'Equestria Girls' links are accumulated separately and appended under
    their own key at the end.
    """
    seasons = OrderedDict()
    eqg = OrderedSet()
    child = page.select_one("#WikiaArticle h2")
    season = child.text
    while child.next_sibling:
        child = child.next_sibling
        if child.name == "table":
            for a in child.find_all("a", string="Transcript"):
                # Skip "new" (red, not-yet-created) wiki links.
                if not a.has_attr("class") or "new" not in a["class"]:
                    episode_url, fragment = urldefrag(a["href"])
                    episode_url = urljoin(url, episode_url)
                    if "Equestria Girls" not in season:
                        if season not in seasons:
                            seasons[season] = OrderedSet()
                        seasons[season].append(episode_url)
                    else:
                        eqg.append(episode_url)
            continue
        if child.name == "h2":
            season = child.text
            continue
    seasons["Equestria Girls"] = eqg
    return seasons
def upcoming_flow_questions(self):
    """Return this question and every flow question that follows it in its set."""
    remaining = OrderedSet()
    reached_self = False
    for question in self.qset.flow_questions:
        # Flip the flag once we hit this question, then keep everything after.
        reached_self = reached_self or question.pk == self.pk
        if reached_self:
            remaining.append(question)
    return remaining
def load_ordered_set(filename):
    """
    Load a set of words from a text file, and represent them in an
    OrderedSet object.

    :param filename: path of a UTF-8 text file with one word per line.
    :return: an OrderedSet of the lines, trailing newlines stripped.
    """
    oset = OrderedSet()
    # Context manager closes the file deterministically; the original left
    # the handle open until garbage collection.
    with open(filename, encoding='utf-8') as word_file:
        for line in word_file:
            oset.append(line.rstrip('\n'))
    return oset
def _survey_questions():
    """Collect the survey's inline questions plus their direct sub-questions.

    Bug fix: the original used a bare ``map(...)`` purely for its side
    effect; ``map`` is lazy on Python 3 and was never consumed, so the
    sub-questions were silently never added.  A plain loop restores the
    intended behavior.
    """
    inline_ques = self.questions_inline()
    survey_questions = OrderedSet()
    for ques in inline_ques:
        survey_questions.append(ques)
        # boldly assuming subquests dont go more than quest subquestion
        # deep for present implemnt
        for sub_question in ques.direct_sub_questions():
            survey_questions.add(sub_question)
    return survey_questions
def survey_questions(self):
    """Return all inline questions of the survey plus their sub-questions.

    Bug fix: the original used a bare ``map(...)`` for its side effect;
    ``map`` is lazy on Python 3 and was never consumed, so sub-questions
    were silently never added.  A plain loop restores the intended
    behavior.  The unused ``questions`` local was removed.
    """
    inline_ques = self.questions_inline()
    # Flows that are neither validations nor transitions within the inline
    # questions themselves.
    other_flows = QuestionFlow.objects.exclude(validation_test__isnull=True,
                                               question__pk__in=[q.pk for q in inline_ques]).exclude(
        next_question__pk__in=[q.pk for q in inline_ques]  # skip questions
    )
    survey_questions = OrderedSet()
    for ques in inline_ques:
        survey_questions.append(ques)
        for sub_q in sub_questions(ques, other_flows):
            survey_questions.add(sub_q)
    return survey_questions
def _calculate_topological_order(self, coloring_parent_gid: Block.GlobalID,
                                 leaves: AbstractSet[Block.GlobalID],
                                 coloring: AbstractSet[Block.GlobalID],
                                 unordered: AbstractSet[Block.GlobalID]) \
        -> Iterable[Block.GlobalID]:
    """
    :param coloring_parent_gid: the coloring parent of the sub-DAG to order.
    :param leaves: leaves of the sub-DAG to order.
    :param coloring: the coloring of the sub-DAG to order.
    :param unordered: all the unordered blocks in the sub-DAG to order.
    :return: an iterable sorted according to a topological order on the input leaves and their ancestors.
    """

    def sort_blocks(last_block_gid: Block.GlobalID,
                    later_blocks: AbstractSet[Block.GlobalID],
                    to_sort: AbstractSet,
                    unsorted: AbstractSet[Block.GlobalID]) -> \
            List[Block.GlobalID]:
        """
        :return: a reversely sorted list of the blocks in to_sort.
        """
        # Restrict to still-unsorted blocks, excluding the coloring parent
        # (it is re-appended last so it pops first).
        remaining_gids = (to_sort - {last_block_gid}) & unsorted
        # Sort the blue blocks
        blue_gids_set = remaining_gids & later_blocks
        blue_gids_list = sorted(blue_gids_set, reverse=True)
        # Sort the red blocks
        red_gids_list = sorted(remaining_gids - blue_gids_set, reverse=True)
        # last_block is the coloring parent
        if last_block_gid is not None:
            blue_gids_list.append(last_block_gid)
        return red_gids_list + blue_gids_list

    # Iterative DFS-style emission: pop a candidate; emit it only once all
    # of its unordered parents were already emitted, otherwise push it back
    # behind its (sorted) parents and retry later.
    to_order = list(
        sort_blocks(coloring_parent_gid, coloring, leaves, unordered))
    ordered = OrderedSet()
    while to_order:
        cur_gid = to_order.pop()
        if cur_gid in ordered:
            continue
        # NOTE(review): successors() here appears to yield parents in this
        # DAG orientation -- confirm against the graph construction.
        cur_parents = set(self._G.successors(cur_gid)) & unordered
        if cur_parents <= ordered:
            ordered.append(cur_gid)
        else:
            to_order.append(cur_gid)
            to_order.extend(
                sort_blocks(
                    self._G.node[cur_gid][self._COLORING_PARENT_KEY],
                    coloring, cur_parents, unordered))
    return ordered
def _flow_questions():
    """Collect the question set's inline questions plus their direct sub-questions.

    Bug fix: the original used a bare ``map(...)`` for its side effect;
    ``map`` is lazy on Python 3 and was never consumed, so sub-questions
    were silently never added.  A plain loop restores the intended
    behavior.  A discarded ``OrderedSet(inline_ques)`` expression was
    removed.
    """
    # next line is to normalize to question set. Otherwise it seems to be
    # causing some issues with flows since the flow is more native to Qset.
    # Additional attributes in subclasses are just extras
    qset = QuestionSet.get(id=self.id)
    inline_ques = qset.questions_inline()
    flow_questions = OrderedSet()
    for ques in inline_ques:
        flow_questions.append(ques)
        # boldly assuming subquests dont go more than quest subquestion
        # deep for present implemnt
        for sub_question in ques.direct_sub_questions():
            flow_questions.add(sub_question)
    return flow_questions
def __lift_ports(self):
    """Lift perimeter switch-box ports up to this circuit's own interface.

    For every tile on the grid's perimeter, the switch-box ports facing
    outward (WEST on the left edge, EAST on the right, NORTH on top, SOUTH
    on the bottom) are re-exposed as ports of this circuit and wired
    through; the new port names carry the tile coordinates to stay unique.
    """
    # we assume it's a rectangular grid
    # we only care about the perimeter
    x_range = {self.x_min, self.x_max}
    y_range = {self.y_min, self.y_max}
    coordinates = OrderedSet()
    for (x, y) in self.tile_circuits:
        if x in x_range or y in y_range:
            coordinates.append((x, y))
    for x, y in coordinates:
        tile = self.tile_circuits[(x, y)]
        # we only lift sb ports
        sbs = tile.sbs
        for bit_width, switchbox in sbs.items():
            all_sbs = switchbox.switchbox.get_all_sbs()
            working_set = []
            if x == self.x_min:
                # we lift west/left ports
                for sb_node in all_sbs:
                    if sb_node.side != SwitchBoxSide.WEST:
                        continue
                    working_set.append(sb_node)
            elif x == self.x_max:
                # we lift east/right ports
                for sb_node in all_sbs:
                    if sb_node.side != SwitchBoxSide.EAST:
                        continue
                    working_set.append(sb_node)
            # note: a corner tile lifts two sides (x branch above, y branch below)
            if y == self.y_min:
                # we lift north/top ports
                for sb_node in all_sbs:
                    if sb_node.side != SwitchBoxSide.NORTH:
                        continue
                    working_set.append(sb_node)
            elif y == self.y_max:
                # we lift south/bottom ports
                for sb_node in all_sbs:
                    if sb_node.side != SwitchBoxSide.SOUTH:
                        continue
                    working_set.append(sb_node)
            for sb_node in working_set:
                sb_name = create_name(str(sb_node))
                sb_port = tile.ports[sb_name]
                # because the lifted port will conflict with each other
                # we need to add x and y to the sb_name to avoid conflict
                new_sb_name = sb_name + f"_X{sb_node.x}_Y{sb_node.y}"
                self.add_port(new_sb_name, sb_port.base_type())
                self.__interface[new_sb_name] = sb_node
                self.wire(self.ports[new_sb_name], sb_port)
def load_dir(cls, dirname):
    """
    Load an AssocSpace from a directory on disk.

    The returned object will be an instance of the class that you called
    .load_dir on.

    :param dirname: directory containing u.npy, sigma.npy and labels.txt.
    """
    dirname = dirname.rstrip('/')
    # u may be large; memory-map it instead of reading it fully into RAM.
    u = np.load(dirname + '/u.npy', mmap_mode='r')
    sigma = np.load(dirname + '/sigma.npy')
    labels = OrderedSet()
    # Context manager closes the file deterministically; the original left
    # the handle open until garbage collection.
    with codecs.open(dirname + '/labels.txt', 'r', encoding='utf-8') as label_file:
        for label in label_file:
            labels.append(label.rstrip('\n'))
    return cls(u, sigma, labels)
def send():
    """Post the three thumbnail candidates of each new video to the dev chat.

    Each thumbnail set gets an inline keyboard (buttons 1-3) attached to the
    last image so a human can pick one.  Only fully delivered sets are
    persisted to 'thumbnails.json'.
    """
    try:
        scope = ['https://www.googleapis.com/auth/youtube']
        fromInet = YoutubeClient(scope).fetch_thumbnails_of_new_videos()
        fromFile = load_data_from_file('thumbnails.json')
        # Videos seen online that were not yet processed (order preserved).
        queue = OrderedSet(fromInet) - OrderedSet(fromFile)
        ready = OrderedSet()
        env = config.load('config.prod.yml')
        bot = Bot(env['TM_TOKEN'])
        channel = env['DEV_CHAT']
        for video_id in queue:
            try:
                markup = InlineKeyboardMarkup([[
                    Btn(" 1️⃣ ", callback_data=f"{video_id}||1"),
                    Btn(" 2️⃣ ", callback_data=f"{video_id}||2"),
                    Btn(" 3️⃣ ", callback_data=f"{video_id}||3"),
                ]])
                success = True
                for i in range(1, 4):
                    photo_url = f'https://i.ytimg.com/vi_webp/{video_id}/maxres{i}.webp'
                    # Keyboard is attached only to the last of the 3 images.
                    err = not bot.sendSticker(
                        channel, photo_url, timeout=20,
                        disable_notification=True,
                        reply_markup=markup if i == 3 else None)
                    print(f'image {i}: ' + ('err' if err else 'ok'))
                    success &= not err
                    sleep(2)
                if success:
                    ready.append(video_id)
                print(f'{video_id}: {"ok" if success else "err"}')
                sleep(randint(2, 5))
            except TelegramError as e:
                # NOTE(review): photo_url may be unbound here if the error
                # occurred before the inner loop ran -- confirm.
                bot.sendMessage(channel, e.message + "\n" + photo_url)
        lost = len(queue) - len(ready)
        # Persist only the fully delivered video ids.
        save_as_json_to_file(fromFile + list(ready), 'thumbnails.json')
        if lost:
            return f'Can\'t send {lost} of {len(queue)} videos'
        if not queue:
            return 'No new videos'
        return 'ok'
    except HttpError as e:
        print('An HTTP error %d occurred:\n%s' % (e.resp.status, e.content))
def load_word_file(word_file):
    """Read a word list into an OrderedSet of cleaned, unique words.

    Exits with status 127 when the file does not exist or holds fewer than
    MIN_WORDS unique words.

    :param word_file: path to the word list; '~' is expanded.
    :return: OrderedSet of cleaned words.
    """
    path = os.path.expanduser(word_file)
    try:
        w = open(path)
    except FileNotFoundError:
        print("%s does not exist. You need a wordlist." % (word_file))
        sys.exit(127)
    # OrderedSet only accepts unique keys, ensures no duplicate words
    wrds = OrderedSet()
    # Context manager closes the file even if cleanup() raises; the
    # original never closed the handle.
    with w:
        for l in w:
            wrds.append(cleanup(l))
    wrds_len = len(wrds)
    if wrds_len < MIN_WORDS:
        print("Word list too small, need at least %u, got %u." %
              (MIN_WORDS, wrds_len))
        sys.exit(127)
    return wrds
def _find_empty_folders(self, root_path: Path, recursive: bool) -> [str]:
    """
    Finds empty folders within the given root_path
    :param root_path: folder to search in
    :param recursive: when False, only the first (deepest) directory visited
        by the bottom-up walk is inspected.
    :return: OrderedSet of absolute folder paths that are (or will become)
        empty once the deduplication result is applied.
    """
    result = OrderedSet()
    # Hoisted out of the walk: the removed/moved file collection does not
    # change during the scan; the original re-queried it once per file.
    removed_or_moved = self._deduplication_result.get_removed_or_moved_files()
    # traverse bottom-up to remove folders that are empty due to file removal
    for root, directories, files in os.walk(str(root_path), topdown=False):
        # get absolute paths of all files and folders in the current root directory
        abs_file_paths = [os.path.abspath(os.path.join(root, f)) for f in files]
        abs_folder_paths = [os.path.abspath(os.path.join(root, d)) for d in directories]
        # find out which of those files were deleted by the deduplication process
        files_deleted = [p for p in abs_file_paths if p in removed_or_moved]
        # find out which of them were not
        filtered_files = [p for p in abs_file_paths if p not in files_deleted]
        folders_deleted = [p for p in abs_folder_paths if p in result]
        filtered_directories = [p for p in abs_folder_paths if p not in folders_deleted]
        # A folder counts as empty when nothing remains in it AND at least
        # one entry was (or will be) deleted.
        if (len(filtered_files) == 0 and (len(filtered_directories) == 0)
                and (len(folders_deleted) > 0 or len(files_deleted) > 0)):
            # check if a parent directory is already added
            # TODO: when running this bottom-up this check should not be necessary
            if len([
                    directory for directory in filtered_directories
                    if directory.startswith(root)
            ]) == 0:
                result.append(root)
        if not recursive:
            break
    return result
def _do_set_n_point_crossover(representation, mom, dad, points, random, max_size):
    """Perform an n-point set crossover between the mom and dad sets.

    The representation is cut into chunks at the given points; chunks are
    alternately inherited straight or swapped between the two offspring.
    Offspring larger than max_size are randomly down-sampled.
    """
    # Slice the representation into segments at the crossover points.
    segments = []
    start = 0
    for cut in points:
        segments.append(representation[start:cut])
        start = cut
    segments.append(representation[start:])

    bro = OrderedSet()
    sis = OrderedSet()
    straight = True  # alternates per segment: straight copy vs. swap
    for segment in segments:
        for gene in segment:
            if gene in mom:
                if straight:
                    bro.append(gene)
                else:
                    sis.append(gene)
            if gene in dad:
                if straight:
                    sis.append(gene)
                else:
                    bro.append(gene)
        straight = not straight

    if len(bro) > max_size:
        bro = random.sample(bro, max_size)
    if len(sis) > max_size:
        sis = random.sample(sis, max_size)
    return bro, sis
def _find_empty_folders(self, root_path: Path, recursive: bool, dry_run: bool) -> [str]:
    """
    Finds empty folders within the given root_path
    :param root_path: folder to search in
    :param recursive: when False, only the first (deepest) directory visited
        by the bottom-up walk is inspected.
    :param dry_run: when True, deletions have not actually happened yet, so
        a folder counts as empty only if ALL of its entries are scheduled
        for removal.
    """
    result = OrderedSet()
    # traverse bottom-up to remove folders that are empty due to file removal
    for root, directories, files in os.walk(str(root_path), topdown=False):
        # get absolute paths of all files and folders in the current root directory
        abs_file_paths = list(map(lambda x: os.path.abspath(os.path.join(root, x)), files))
        abs_folder_paths = list(map(lambda x: os.path.abspath(os.path.join(root, x)), directories))
        # find out which of those files were deleted by the deduplication process
        files_deleted = list(
            map(lambda x: Path(x),
                filter(
                    lambda x: Path(x) in self._deduplication_result.get_removed_or_moved_files(),
                    abs_file_paths)))
        # also include removed/moved entries recorded directly under this root
        # (deduplicated via set; NOTE(review): this loses ordering -- confirm
        # callers don't rely on it)
        files_deleted = list(set(files_deleted + list(
            filter(lambda x: x.parent == Path(root),
                   self._deduplication_result.get_removed_or_moved_files()))))
        folders_deleted = list(filter(lambda x: x in result, abs_folder_paths))
        filtered_directories = list(filter(lambda x: x not in folders_deleted, abs_folder_paths))
        if dry_run:
            # Nothing is deleted yet: root is empty only if every file and
            # every subfolder is scheduled for removal.
            if len(files_deleted) > 0 and len(files_deleted) == len(files) and len(folders_deleted) == len(
                    directories):
                result.append(root)
        else:
            # Deletions already happened: root is empty when it has no
            # remaining entries but we did remove something from it.
            if len(files_deleted) > 0 and len(files) <= 0 and len(directories) <= 0:
                result.append(root)
        if not recursive:
            break
    return result
class Gemet(VocabularyBase):
    """GEMET thesaurus lookup via its public HTTP API.

    On construction, fills the related/broader/narrower term sets for every
    search term (supported languages: 'de' and 'en').
    """

    cl = ConfigLoader()
    # API endpoint fragments assembled from configuration.
    uri = cl.getGemetAPIString('URI')
    getConceptByID = cl.getGemetAPIString('GET_CONCEPT_BY_ID')
    getRelatives = cl.getGemetAPIString('GET_RELATIVES')
    conceptSuffix = cl.getGemetAPIString('CONCEPT_SUFFIX')
    getConceptByKeyword = cl.getGemetAPIString('GET_CONCEPT_BY_KEYWORD')
    thesaaurusSuffix = cl.getGemetAPIString('THESAURUS_SUFFIX')
    searchModeSuffix = cl.getGemetAPIString('SEARCH_MODE_SUFFIX')
    langSuffix = cl.getGemetAPIString('LANGUAGE_SUFFIX')
    searchMode = 4  # from 0 to 4, 4 is automode
    # NOTE(review): these class-level sets are shadowed by the instance
    # attributes assigned in __init__ and appear to serve only as defaults.
    relatedSet = OrderedSet()
    broaderSet = OrderedSet()
    narrowerSet = OrderedSet()

    def __init__(self, searchTerm, language):
        VocabularyBase.__init__(self, searchTerm, language)
        self.relatedSet = OrderedSet()
        self.broaderSet = OrderedSet()
        self.narrowerSet = OrderedSet()
        self.supportedLang.append('de')
        self.supportedLang.append('en')
        if language in self.supportedLang:
            for word in self.searchTerms:
                self.runAPICall(word, language)

    @retry(Exception, tries=3)
    def apiCall(self, word, apiLang):
        """Query concepts matching word; retried up to 3 times on any error."""
        return requests.get(self.getConceptByKeyword + word + self.langSuffix +
                            apiLang + self.searchModeSuffix +
                            str(self.searchMode) + self.thesaaurusSuffix +
                            self.uri)

    def runAPICall(self, word, apiLang):
        """Fetch concepts for word and fill the related/narrower/broader sets."""
        searchResult = self.apiCall(word, apiLang)
        if searchResult.status_code < 400:
            searchJson = searchResult.json()
            for category in searchJson:
                conceptUri = category['uri']
                thisPrefLabel = self.getConceptForUri(conceptUri, apiLang)
                if thisPrefLabel is not None:
                    self.relatedSet.append(utils.eszettToSS(thisPrefLabel))
                relativesResult = requests.get(self.getRelatives +
                                               self.conceptSuffix + conceptUri)
                if relativesResult.status_code < 400:
                    relativesJson = relativesResult.json()
                    for relation in relativesJson:
                        if 'related' in relation['relation']:
                            thisAltLabel = self.getConceptForUri(
                                relation['target'], apiLang)
                            if thisAltLabel is not None:
                                self.relatedSet.append(
                                    utils.eszettToSS(thisAltLabel))
                        if 'narrower' in relation['relation']:
                            thisNarrowerLabel = self.getConceptForUri(
                                relation['target'], apiLang)
                            if thisNarrowerLabel is not None:
                                self.narrowerSet.append(
                                    utils.eszettToSS(thisNarrowerLabel))
                        if 'broader' in relation['relation']:
                            thisBroaderLabel = self.getConceptForUri(
                                relation['target'], apiLang)
                            if thisBroaderLabel is not None:
                                self.broaderSet.append(
                                    utils.eszettToSS(thisBroaderLabel))

    def getConceptForUri(self, gemetConceptUri, apiLang):
        """Return the preferred label of a concept URI, or None on any failure."""
        try:
            conceptResult = requests.get(self.getConceptByID +
                                         self.conceptSuffix + gemetConceptUri +
                                         self.langSuffix + apiLang)
            if conceptResult.status_code < 400:
                conceptJson = conceptResult.json()
                if 'preferredLabel' in conceptJson:
                    return (conceptJson['preferredLabel'])['string']
        except:  # NOTE(review): bare except hides all errors; consider narrowing
            return None

    def getRelated(self):
        return self.relatedSet

    def getNarrower(self):
        return self.narrowerSet

    def getBroader(self):
        return self.broaderSet

    def checkConnection(self):
        """True when the GEMET API answers a test query successfully."""
        response = self.apiCall('test', 'en')
        if response is not None and response.status_code < 400:
            return True
        return False
class Alphabet(object):
    """Bidirectional mapping between string instances and integer indices.

    Optionally reserves <PAD> and <UNK> entries.  Also tracks instance
    frequencies so out-of-vocabulary lookups can fall back to the most
    frequent known instance when no <UNK> sign is used.
    """

    def __init__(self, name, if_use_pad, if_use_unk):
        self.__name = name
        self.__if_use_pad = if_use_pad
        self.__if_use_unk = if_use_unk
        # index -> instance (OrderedSet keeps stable insertion order)
        self.__index2instance = OrderedSet()
        # instance -> index
        self.__instance2index = OrderedDict()
        # frequency of every instance ever added
        self.__counter = Counter()
        if if_use_pad:
            self.__sign_pad = "<PAD>"
            self.add_instance(self.__sign_pad)
        if if_use_unk:
            self.__sign_unk = "<UNK>"
            self.add_instance(self.__sign_unk)

    @property
    def name(self):
        return self.__name

    def add_instance(self, instance):
        """Add a str instance (or, recursively, a list/tuple of them)."""
        if isinstance(instance, (list, tuple)):
            for element in instance:
                self.add_instance(element)
            return
        # We only support elements of str type.
        assert isinstance(instance, str)
        # count the frequency of instances.
        self.__counter[instance] += 1
        if instance not in self.__index2instance:
            self.__instance2index[instance] = len(self.__index2instance)
            self.__index2instance.append(instance)

    def get_index(self, instance):
        """Return the index of instance (or a list of indices for a list).

        Unknown instances map to <UNK> when enabled, otherwise to the index
        of the most frequent known instance.
        """
        if isinstance(instance, (list, tuple)):
            return [self.get_index(elem) for elem in instance]
        assert isinstance(instance, str)
        try:
            return self.__instance2index[instance]
        except KeyError:
            if self.__if_use_unk:
                return self.__instance2index[self.__sign_unk]
            else:
                max_freq_item = self.__counter.most_common(1)[0][0]
                return self.__instance2index[max_freq_item]

    def get_instance(self, index):
        """Return the instance at index (or a list of instances for a list)."""
        if isinstance(index, list):
            return [self.get_instance(elem) for elem in index]
        return self.__index2instance[index]

    def save_content(self, dir_path):
        """Write the frequency list and the index dictionary into dir_path."""
        # Check if dir_path exists.
        if not os.path.exists(dir_path):
            os.mkdir(dir_path)
        list_path = os.path.join(dir_path, self.__name + "_list.txt")
        with open(list_path, 'w') as fw:
            for element, frequency in self.__counter.most_common():
                fw.write(element + '\t' + str(frequency) + '\n')
        dict_path = os.path.join(dir_path, self.__name + "_dict.txt")
        with open(dict_path, 'w') as fw:
            for index, element in enumerate(self.__index2instance):
                fw.write(element + '\t' + str(index) + '\n')

    def __len__(self):
        return len(self.__index2instance)

    def __str__(self):
        return 'Alphabet {} contains about {} words: \n\t{}'.format(self.name, len(self), self.__index2instance)
class Block:
    """A Block object that carries blockchain transactions and is exchanged
    with peers.  (Docstring translated from Korean.)
    """

    def __init__(self, channel_name, made_block_count=0, is_divided_block=False):
        # Block head
        self.version = "0.1a"
        self.prev_block_hash = ""
        # Value for the SiEver implementation: replaces the
        # AnnounceConfirmedBlock message by carrying the vote result for the
        # previous block inside the next block.  (Translated from Korean.)
        self.prev_block_confirm = False
        self.merkle_tree_root_hash = ""
        self.merkle_tree = []
        self.time_stamp = 0
        self.__channel_name = channel_name
        # List of validated transactions.  (Translated from Korean.)
        self.confirmed_transaction_list = OrderedSet()
        self.block_hash = ""
        self.height = -1
        self.block_status = BlockStatus.unconfirmed
        self.__block_type = BlockType.general
        self.peer_id = ""
        self.__made_block_count = made_block_count
        self.__is_divided_block = is_divided_block
        self.__next_leader_peer_id = ""
        self.__peer_manager = None
        self.__signature = b''
        self.__json_data = {}
        self.__commit_state = {}

    @property
    def confirmed_tx_hash_list(self):
        return [tx.tx_hash for tx in self.confirmed_transaction_list]

    @property
    def confirmed_tx_len(self):
        return len(self.confirmed_transaction_list)

    def get_confirmed_tx_hash_by_index(self, index):
        return self.confirmed_transaction_list[index].tx_hash

    def get_confirmed_tx_by_index(self, index):
        return self.confirmed_transaction_list[index]

    @property
    def commit_state(self):
        return self.__commit_state

    @commit_state.setter
    def commit_state(self, commit_state: dict):
        self.__commit_state = commit_state

    @property
    def json_data(self):
        return self.__json_data

    def get_json_data(self) -> str:
        """Serialize the block header and its confirmed transactions as JSON."""
        self.__json_data = {
            "version": self.version,
            "prev_block_hash": self.prev_block_hash,
            "merkle_tree_root_hash": self.merkle_tree_root_hash,
            "time_stamp": self.time_stamp,
            "confirmed_transaction_list":
                [tx.icx_origin_data for tx in self.confirmed_transaction_list],
            "block_hash": self.block_hash,
            "height": self.height,
            "peer_id": self.peer_id,
            "signature": base64.b64encode(self.signature).decode(),
            "commit_state": self.__commit_state
        }
        return json.dumps(self.__json_data)

    def get_json_data_for_genesis(self) -> str:
        """Like get_json_data, but uses each tx's genesis origin data."""
        self.__json_data = {
            "version": self.version,
            "prev_block_hash": self.prev_block_hash,
            "merkle_tree_root_hash": self.merkle_tree_root_hash,
            "time_stamp": self.time_stamp,
            "confirmed_transaction_list":
                [tx.genesis_origin_data for tx in self.confirmed_transaction_list],
            "block_hash": self.block_hash,
            "height": self.height,
            "peer_id": self.peer_id,
            "signature": base64.b64encode(self.signature).decode(),
            "commit_state": self.__commit_state
        }
        return json.dumps(self.__json_data)

    @property
    def channel_name(self):
        return self.__channel_name

    @property
    def block_type(self):
        return self.__block_type

    @block_type.setter
    def block_type(self, block_type):
        # A non-general block does not count toward this peer's made-block count.
        if block_type is not BlockType.general:
            self.__made_block_count -= 1
        self.__block_type = block_type

    @property
    def made_block_count(self):
        return self.__made_block_count

    @property
    def is_divided_block(self):
        return self.__is_divided_block

    @is_divided_block.setter
    def is_divided_block(self, value):
        self.__is_divided_block = value

    @property
    def signature(self):
        return self.__signature

    @property
    def next_leader_peer(self):
        return self.__next_leader_peer_id

    @next_leader_peer.setter
    def next_leader_peer(self, peer_id):
        self.__next_leader_peer_id = peer_id

    @property
    def peer_manager(self):
        return self.__peer_manager

    @peer_manager.setter
    def peer_manager(self, peer_manager):
        self.__peer_manager = peer_manager

    def put_transaction(self, tx, do_validate=True):
        """It's only available on leader.

        :param tx: transaction (a transaction or list)
        :param do_validate: set False while making test block
        :return: True: If success.
        """
        if type(tx) is list:
            result = True
            for t in tx:
                result &= self.put_transaction(t)
            return result
        elif not isinstance(tx, Transaction):
            logging.error(
                f"Not a type of Transaction, its type is: {type(tx)}")
            return False
        tx_validator = get_tx_validator(self.channel_name)
        if do_validate and not tx_validator.validate(tx):
            return False
        tx.status = TransactionStatus.confirmed
        self.confirmed_transaction_list.append(tx)
        return True

    def put_genesis_transaction(self, tx):
        """Only used by the block generator.  tx may be one transaction or
        several.  (Translated from Korean.)

        :param tx: transaction (a list holding transactions is also handled)
        :return: True when stored successfully.
        """
        if type(tx) is list:
            result = True
            for t in tx:
                result &= self.put_genesis_transaction(t)
            return result
        elif not isinstance(tx, Transaction):
            logging.error(
                f"Not a type of Transaction, its type is: {type(tx)}")
            return False
        if tx.status == TransactionStatus.unconfirmed:
            # Validate the transaction.  (Translated from Korean.)
            # logging.debug("Transaction Hash %s", tx.tx_hash)
            genesis_validator = get_genesis_tx_validator(self.channel_name)
            if not genesis_validator.validate(tx):
                return False
        tx.status = TransactionStatus.confirmed
        self.confirmed_transaction_list.append(tx)
        return True

    @staticmethod
    def __calculate_merkle_tree_root_hash(block):
        """Build a hash tree from the transactions currently in the block and
        compute the merkle tree root hash.  (Translated from Korean.)

        :return: the computed root hash
        """
        # Building the merkle tree (translated from Korean):
        # - if the block holds an odd number of transactions, duplicate the
        #   hash of the last one;
        # - hash each adjacent pair HASH(n) + HASH(n+1);
        # - repeat until a single hash remains;
        # - that final hash is the merkle_tree_root_hash.
        block.merkle_tree_root_hash = ''
        mt_list = block.confirmed_tx_hash_list
        # block.merkle_tree.extend(mt_list)
        while True:
            tree_length = len(mt_list)
            tmp_mt_list = []
            if tree_length <= 1:
                # 0 or 1 entries ends the loop.
                break
            elif tree_length % 2 == 1:
                mt_list.append(mt_list[tree_length - 1])
                tree_length += 1
            # Generate the merkle hashes for this level.
            for row in range(int(tree_length / 2)):
                idx = row * 2
                mk_sum = b''.join([
                    mt_list[idx].encode(encoding='UTF-8'),
                    mt_list[idx + 1].encode(encoding='UTF-8')
                ])
                mk_hash = hashlib.sha256(mk_sum).hexdigest()
                tmp_mt_list.append(mk_hash)
            mt_list = tmp_mt_list
            # block.merkle_tree.extend(mt_list)
        if len(mt_list) == 1:
            block.merkle_tree_root_hash = mt_list[0]
        return block.merkle_tree_root_hash

    def serialize_block(self) -> bytes:
        """Serialize this block: JSON for icx channels, pickle otherwise.
        (Translated from Korean.)

        :return: the serialized bytes
        """
        if conf.CHANNEL_OPTION[
                self.channel_name]["send_tx_type"] == conf.SendTxType.icx:
            if self.height == 0:
                json_data = self.get_json_data_for_genesis()
            else:
                json_data = self.get_json_data()
            return json_data.encode('utf-8')
        else:
            return pickle.dumps(self, pickle.DEFAULT_PROTOCOL)

    def deserialize_block(self, block_dumps):
        """Deserialize block_dumps into this instance.  (Translated from
        Korean.)

        :param block_dumps: block dump data to deserialize
        """
        if conf.CHANNEL_OPTION[
                self.channel_name]["send_tx_type"] == conf.SendTxType.icx:
            dump_obj = json.loads(block_dumps)
            self.version = dump_obj['version']
            self.prev_block_hash = dump_obj['prev_block_hash']
            self.merkle_tree_root_hash = dump_obj['merkle_tree_root_hash']
            self.time_stamp = dump_obj['time_stamp']
            self.height = dump_obj['height']
            if self.height == 0:
                validator = get_genesis_tx_validator(self.channel_name)
                self.confirmed_transaction_list = []
            else:
                validator = get_tx_validator(self.channel_name)
            for tx_json in dump_obj['confirmed_transaction_list']:
                tx = validator.restore(json.dumps(tx_json))
                self.confirmed_transaction_list.append(tx)
            self.block_hash = dump_obj['block_hash']
            self.peer_id = dump_obj['peer_id']
            self.__signature = base64.b64decode(
                dump_obj['signature'].encode('UTF-8'))
            self.__commit_state = dump_obj[
                'commit_state'] if 'commit_state' in dump_obj else self.__commit_state
            self.block_status = BlockStatus.confirmed
        else:
            # NOTE(review): pickle on data received from peers is unsafe if
            # the peer is untrusted -- confirm trust model.
            dump_obj = pickle.loads(block_dumps)
            if type(dump_obj) == Block:
                self.__dict__ = dump_obj.__dict__

    def find_transaction_index(self, transaction_hash):
        for idx, tx in enumerate(self.confirmed_transaction_list):
            if tx.tx_hash == transaction_hash:
                return idx
        return -1

    def find_tx_by_hash(self, tx_hash):
        for tx in self.confirmed_transaction_list:
            if tx.tx_hash == tx_hash:
                return tx
        return None
        # index = self.confirmed_tx_hash_list.index(tx_hash)
        # return self.confirmed_transaction_list[index]

    @staticmethod
    def validate(block) -> bool:
        """validate block and all transactions in block

        :param: block
        :param: tx_queue
        :return validate success return true
        """
        channel_service = ObjectManager().channel_service
        mk_hash_old = block.merkle_tree_root_hash
        mk_hash = Block.__calculate_merkle_tree_root_hash(block)
        if block.height == 0 and block.confirmed_tx_len == 0:
            # The genesis block is not validated.  (Translated from Korean.)
            return True
        if block.confirmed_tx_len > 0:
            # Merkle tree is only checked when transactions exist.
            if mk_hash != mk_hash_old:
                raise BlockInValidError('Merkle Tree Root hash is not same')
        if block.block_hash != Block.__generate_hash(block):
            raise BlockInValidError('block Hash is not same generate hash')
        leader = channel_service.peer_manager.get_leader_object()
        if not leader.cert_verifier.verify_hash(block.block_hash, block.signature):
            raise BlockInValidError('block signature invalid')
        if block.time_stamp == 0:
            raise BlockError('block time stamp is 0')
        if len(block.prev_block_hash) == 0:
            raise BlockError('Prev Block Hash not Exist')
        # Transaction Validate
        confirmed_tx_list = []
        tx_validator = get_tx_validator(block.channel_name)
        for tx in block.confirmed_transaction_list:
            if tx_validator.validate(tx):
                confirmed_tx_list.append(tx.tx_hash)
            else:
                raise BlockInValidError(
                    f"block ({block.block_hash}) validate fails \n"
                    f"tx {tx.tx_hash} is invalid")
        if not block.tx_validate_hash_unique(confirmed_tx_list):
            raise BlockInValidError('There is duplicated tx_hash')
        return True

    def tx_validate_hash_unique(self, confirmed_tx_list):
        """False when any of the hashes is already known to the block manager."""
        block_manager = ObjectManager().channel_service.block_manager
        for confirmed_tx_hash in confirmed_tx_list:
            tx = block_manager.get_tx(confirmed_tx_hash)
            if tx is not None:
                logging.warning(
                    f"block:tx_validate_hash_unique There is duplicated tx_hash({confirmed_tx_hash})"
                )
                return False
        return True

    def verify_through_score_invoke(self, is_leader: bool = False):
        """Run score invoke over each tx in the block and verify the result.
        (Translated from Korean.)
        """
        is_verified = True
        invoke_results = {}
        if ObjectManager().channel_service is None:
            # all results to success
            success_result = dict(code=int(message_code.Response.success))
            invoke_results = util.create_invoke_result_specific_case(
                self.confirmed_transaction_list, success_result)
        else:
            try:
                origin_commit_state = copy.deepcopy(self.commit_state)
                invoke_results = ObjectManager().channel_service.score_invoke(
                    self)
                if is_leader:
                    # set commit state as a leader while do nothing, block commit_state set by score_invoke
                    util.logger.spam(
                        f"verify_through_score_invoke commit_state({self.commit_state})"
                    )
                else:
                    # verify commit state with leader's(origin_commit_state)
                    # this block must have leader's commit state
                    if origin_commit_state != self.commit_state:
                        logging.warning(
                            f"block:verify_through_score_invoke fail commit state integrity!!"
                        )
                        is_verified = False
                    else:
                        util.logger.spam(
                            f"verify_through_score_invoke commit state verified."
                        )
                    # peer have to restore origin_commit_state.
                    # And when receive block confirm message check again origin and peer's commit state.
                    self.commit_state = copy.deepcopy(origin_commit_state)
            except Exception as e:
                # When Grpc Connection Raise Exception
                # save all result{'code': ScoreResponse.SCORE_CONTAINER_EXCEPTION, 'message': str(e)}
                logging.error(
                    f'This error occurred while Score_invoke has failed in verify block : {e}'
                )
                invoke_results = {}
        # util.logger.spam(f'Block::verify_through_score_invoke >>>>> invoke_results :: {invoke_results}')
        need_rebuild = False
        if not conf.USE_EXTERNAL_SCORE:
            fail_list = [
                tx_hash for tx_hash, invoke_result in invoke_results.items()
                if invoke_result["code"] != message_code.Response.success
            ]
            need_rebuild = len(fail_list) > 0
        if is_leader:
            if need_rebuild:
                # Drop the failed transactions and rebuild.
                for tx_hash in fail_list:
                    tx = self.find_tx_by_hash(tx_hash)
                    self.confirmed_transaction_list.discard(tx)
                is_verified = self.confirmed_tx_len > 0
            elif conf.ALLOW_MAKE_EMPTY_BLOCK and not need_rebuild:
                is_verified = True
        else:
            is_verified = not need_rebuild
        return is_verified, need_rebuild, invoke_results

    def generate_block(self, prev_block=None):
        """Generate this block.  (Translated from Korean.)

        Without a previous block the result is a genesis block; with one it
        becomes a linked block.  Sets the height and previous block hash,
        and computes this block's hash and merkle tree.

        :param prev_block: the previous block
        :returns: the generated block hash
        """
        try:
            util.logger.spam(
                f"ENGINE-303 generate_block prev_block: {prev_block.height} {prev_block.block_hash}"
            )
        except Exception:
            pass
        if prev_block is None:
            # Genesis Block Data
            self.prev_block_hash = ""
            self.height = 0
            self.time_stamp = 0
        elif self.time_stamp == 0:
            if self.prev_block_hash == "":
                self.prev_block_hash = prev_block.block_hash
                self.height = prev_block.height + 1
            self.time_stamp = util.get_time_stamp()
        # Build the merkle tree when there are transactions.
        if self.confirmed_tx_len > 0:
            Block.__calculate_merkle_tree_root_hash(self)
        self.block_hash = Block.__generate_hash(self)
        return self.block_hash

    @staticmethod
    def __generate_hash(block):
        """Generate the block hash.  (Translated from Korean.)

        Hash data: 1. transaction merkle tree root  2. timestamp
        3. previous block hash.

        :return: the block hash value
        """
        # The block's own hash is created right before the block is attached,
        # after generation: transaction(s), time_stamp, prev_block_hash.
        block_hash_data = b''.join([
            block.prev_block_hash.encode(encoding='UTF-8'),
            block.merkle_tree_root_hash.encode(encoding='UTF-8'),
            struct.pack('Q', block.time_stamp)
        ])
        if conf.CHANNEL_OPTION[
                block.channel_name]["send_tx_type"] == conf.SendTxType.icx:
            block_hash = hashlib.sha3_256(block_hash_data).hexdigest()
        else:
            block_hash = hashlib.sha256(block_hash_data).hexdigest()
        return block_hash

    def mk_merkle_proof(self, index):
        """Return the sibling hashes needed to rebuild the merkle root for
        the index-th transaction of this block (adapted from BITCOIN's
        merkle proof).  (Translated from Korean.)

        :param index: index of the transaction within the merkle tree.
        :return: proof data:
            * transaction: hash of the index-th transaction in the block
            * siblings: hashes of the nodes needed for verification
            * block: self (there is no separate block-header type).
        """
        nodes = [
            tx.tx_hash.encode(encoding='UTF-8')
            for tx in self.confirmed_transaction_list
        ]
        if len(nodes) % 2 and len(nodes) > 2:
            nodes.append(nodes[-1])
        layers = [nodes]
        while len(nodes) > 1:
            new_nodes = []
            for i in range(0, len(nodes) - 1, 2):
                new_nodes.append(
                    hashlib.sha256(b''.join(
                        [nodes[i], nodes[i + 1]])).hexdigest().encode(encoding='UTF-8'))
            if len(new_nodes) % 2 and len(new_nodes) > 2:
                new_nodes.append(new_nodes[-1])
            nodes = new_nodes
            layers.append(nodes)
        # Sanity check, make sure merkle root is valid
        # assert nodes[0][::-1] == self.merkle_tree_root_hash
        merkle_siblings = [
            layers[i][(index >> i) ^ 1] for i in range(len(layers) - 1)
        ]
        return {
            "transaction": self.get_confirmed_tx_hash_by_index(index),
            "siblings": [x.decode('utf-8') for x in merkle_siblings],
            "block": self
        }

    @staticmethod
    def merkle_path(block, index):
        """Verify the index-th transaction of the given block against its
        merkle tree.  (Translated from Korean.)

        :param block: block holding the transaction to verify.
        :param index: index of the transaction within the block.
        :return: True when verification succeeds.
        """
        header = {}
        proof = block.mk_merkle_proof(index)
        header['merkle_root'] = block.merkle_tree_root_hash
        siblings = proof['siblings']
        logging.debug("SLBLINGS : %s", siblings)
        target_tx = block.get_confirmed_tx_hash_by_index(index)
        # siblings = map( lambda x: x.decode('hex'), siblings)
        siblings = [x.encode(encoding='UTF-8') for x in siblings]
        resulthash = target_tx.encode(encoding='UTF-8')
        for i in range(len(siblings)):
            _proof = siblings[i]
            # 0 means sibling is on the right; 1 means left
            if index % 2 == 1:
                left = _proof
                right = resulthash
            else:
                left = resulthash
                right = _proof
            resulthash = hashlib.sha256(b''.join(
                [left, right])).hexdigest().encode(encoding='UTF-8')
            # logging.debug("%i st, %s %s => %s ", index, left, right, resulthash)
            index = int(index / 2)
        logging.debug('PROOF RESULT: %s , MK ROOT: %s', resulthash,
                      block.merkle_tree_root_hash)
        return resulthash == block.merkle_tree_root_hash.encode(
            encoding='UTF-8')

    def sign(self, peer_auth):
        """Sign this block's hash with the given peer auth object."""
        self.__signature = peer_auth.sign_data(self.block_hash, is_hash=True)
class CategorizedListbox(Frame): #AKA CLB def __init__(self, parent, listview): Frame.__init__(self, parent) self.root = parent #creates the list that will house all controlled ModlistListboxes self.modlists = [] self.selected_modlists = OrderedSet([]) self.listview = listview self.current_index = 0 self.grid_columnconfigure(0, weight=1) #checks for whether a pre-made CLB was input if len(self.modlists) <= 0: #Creates the default category 'Mods' when a new CLB is created self.insert(0, 'Mods') else: #Populates CLB with modlists given self.modlists = modlists def split(self, name, modlist_index, first, Last=None): '''Creates a new mod listbox with the mods of its original mod listbox based on the given indices, or an empty mod listbox''' modlist = self.modlists[modlist_index] if Last is None: pass def load(self, named_modlists): for i in range(len(self.modlists)): self.delete(0, force_delete=True) for named_modlist in named_modlists: self.insert(END, named_modlist[0], named_modlist[1]) def get_mod_count(self, colors): n = 0 for modlist in self.modlists: for mod in modlist.modlabel_list: if mod.color not in colors: n += 1 ## n += len(modlist.modlabel_list) return 'Total Modcount: ' + str(n) def insert(self, index, name, modlist_info=None, is_collapsed=False): '''Create and insert a new modlist at the given modlist index''' modlist = ModlistListbox(self, self.listview, name) #if modlist info given, populate modlistbox with mods if modlist_info is not None and modlist_info != []: for i in range(len(modlist_info)): modlist.insert(i, modlist_info[i]) #collapse mods if necessary if is_collapsed: modlist.force_collapse() #check last index values and set index accordingly if index == END or index >= len(self.modlists): index = len(self.modlists) #insert modlist into modlists list self.modlists.insert(index, modlist) #move modlists down if after inserted modlist if len(self.modlists) > index: for x in range(index, len(self.modlists)): self.modlists[x].grid(row=x, column=0, 
sticky='nsew') else: modlist.grid(row=len(self.mod_list), column=0, sticky='nsew') #Set modlist name label size self.update() def insert_input(self, index): if index == END: index = len(self.modlists) name = askstring('New Category at Index ' + str(index + 1), 'Name of new category:') if name is not None and name != '': self.insert(index, name) def merge_up(self, index): '''Merge the modlist at the given index with the modlist above it''' msgBox = messagebox.askquestion('Merge Categories Confirmation', 'Merge "' + self.modlists[index].name + '" into "' + self.modlists[index - 1].name + '"?', icon='warning') if msgBox == 'yes': #populate list of mod info lists to add to and from, then get name l1 = self.modlists[index - 1].get_all_info() l2 = self.modlists[index].get_all_info() l_name = self.modlists[index - 1].get_name() #insert new merged modlist self.insert(index - 1, l_name, l1 + l2) #delete both previous modlists for x in range(2): self.delete(index) def merge_down(self, index): '''Merge the modlist at the given index with the modlist below it''' msgBox = messagebox.askquestion('Merge Categories Confirmation', 'Merge "' + self.modlists[index].name + '" into "' + self.modlists[index + 1].name + '"?', icon='warning') if msgBox == 'yes': #populate list of mod info lists to add to and from, then get name l1 = self.modlists[index + 1].get_all_info() l2 = self.modlists[index].get_all_info() l_name = self.modlists[index + 1].get_name() #insert new merged modlist self.insert(index, l_name, l1 + l2) #delete both previous modlists for x in range(2): self.delete(index + 1) def delete_selected(self): '''delete all selected modlists''' if len(self.selected_modlists) == len(self.modlists): messagebox.showinfo('Selection Size Too Large', 'You cannot delete all the categories in the ' 'list. 
There must always be at least one.', icon='warning') elif len(self.selected_modlists) > 0: msgBox = messagebox.askquestion('Removing Selected Categories', 'Remove all selected categories ' 'and their contents?', icon='warning') if msgBox == 'yes': for x in range(len(self.selected_modlists)): self.delete(self.modlists.index(self.selected_modlists[0])) def delete_confirm(self, index): '''Add a confirmation to delete commands''' msgBox = messagebox.askquestion('Removing Category', 'Remove the "' + self.modlists[index].name + '" Category and its contents?', icon='warning') if msgBox == 'yes': self.delete(index) def delete(self, index, force_delete=False): '''Delete a modlist at the given index''' if not force_delete and len(self.modlists) == 1: messagebox.showinfo( 'Prohibited Action', 'You must always have at least one category in the list.') else: if index == END: index = len(self.mod_list) if index < len(self.modlists) - 1: for x in range(index, len(self.modlists) - 1): self.modlists[x + 1].grid(row=x, column=0, sticky='nsew') if self.modlists[index] in self.selected_modlists: self.selected_modlists.remove(self.modlists[index]) self.modlists[index].grid_forget() self.modlists[index].destroy() del self.modlists[index] def delete_mod(self, modlist_index, mod_index): '''Delete a mod at the given indices''' mod = self.modlists[modlist_index].modlabel_list[mod_index] msgBox = messagebox.askquestion('Removing Mod', 'Remove "{}"?'.format( mod.get_info()[1]), icon='warning') if msgBox == 'yes': self.modlists[modlist_index].delete(mod_index) def delete_selected_mod(self, event=None): selection_exists = False for modlist in self.modlists: if len(modlist.selected_modlabel_list) > 0: selection_exists = True if selection_exists: msgBox = messagebox.askquestion( 'Removing Selected', 'Remove selected mods from the list?', icon='warning') if msgBox == 'yes': for modlist in self.modlists: modlist.delete_selected() def delete_all_mods(self): msgBox = messagebox.askquestion('Removing 
All', 'Remove all mods from the list?', icon='warning') if msgBox == 'yes': for modlist in self.modlists: modlist.delete_all() def delete_all_cat(self, modlist_index): '''Delete all mods in a category at the given index''' modlist = self.modlists[modlist_index] msgBox = messagebox.askquestion('Removing All', 'Remove all mods from the "' + modlist.name + '" Category?', icon='warning') if msgBox == 'yes': modlist.delete_all() def collapse_all(self): '''Collapses all mod listboxes''' for mod in self.modlists: if not mod.is_collapsed: mod.force_collapse() def expand_all(self): for mod in self.modlists: if mod.is_collapsed: mod.force_expand() def get_info(self): '''Gets a list of lists of mods throughout ALL modlists''' list = [] for modlist in self.modlists: list.append(modlist.get_all_info()) return list def get_all_info(self): '''Gets a list of lists of all modlists''' return self.modlists def rename(self, index): '''Rename a category at the given index by remaking the category''' name = askstring('Rename Category at Index ' + str(index + 1), 'New name of category:') if name is not None and name != '': data = self.modlists[index].get_all_info() is_collapsed = self.modlists[index].is_collapsed self.delete(index, force_delete=True) self.insert(index, name, data) self.modlists[index].forceSelectTop() if is_collapsed: self.modlists[index].force_collapse() #====passed or modified modlist functions==== def onShiftClickEvent(self, event): if len(self.selected_modlists) > 0: #set original index to start multi-selection from origin = self.modlists.index(self.selected_modlists[-1]) for x in range(len(self.modlists)): #checks every modlist for a valid multi-selection activation if self.modlists[x].is_top_entered: #checks whether the index of the target modlists is above #or below origin, then multi-selects accordingly if (x - origin) > 0: for y in range(origin, x + 1): self.selected_modlists.append(self.modlists[y]) self.modlists[y].forceSelectTop() elif (x - origin) < 0: for y 
in range(x, origin): self.selected_modlists.append(self.modlists[y]) self.modlists[y].forceSelectTop() else: for modlist in self.modlists: modlist.onShiftClickEvent(event) def dragSelection(self, event): '''Moves selected mods depending on mouse movement, and moves mods into and out of categories they are moved into and out of''' for modlist in self.modlists: modlist.dragSelection(event) def moveInto(self, direction, modlist): '''Depending on the direction, move the selected mods from the modlist into the modlist below or above it''' modlist_index = self.modlists.index(modlist) if direction == -1 and modlist_index != 0: #Move up for mod in sorted(modlist.selected_modlabel_list, key=lambda x: x.get_index()): self.modlists[modlist_index - 1].insert(END, mod.get_info()) #messy code to make the mod in the new category selected new_upper_mod = self.modlists[modlist_index - 1].modlabel_list[-1] new_upper_mod.force_select() self.modlists[modlist_index - 1].selected_modlabel_list.append(new_upper_mod) selected_list_len = len(modlist.selected_modlabel_list) for i in range(selected_list_len): modlist.delete(0) modlist.selected_modlabel_list.clear() self.modlists[modlist_index - 1].force_expand() elif direction == 1 and modlist_index != len(self.modlists) - 1: #Move down for mod in sorted(modlist.selected_modlabel_list, key=lambda x: x.get_index(), reverse=True): self.modlists[modlist_index + 1].insert(0, mod.get_info()) #messy code to make the mod in the new category selected new_lower_mod = self.modlists[modlist_index + 1].modlabel_list[0] new_lower_mod.force_select() self.modlists[modlist_index + 1].selected_modlabel_list.append(new_lower_mod) selected_list_len = len(modlist.selected_modlabel_list) for i in range(selected_list_len): modlist.delete(END) modlist.selected_modlabel_list.clear() self.modlists[modlist_index + 1].force_expand() def moveSelectionUp(self, event=None): focused_widget = self.master.master.focus_get() if event is not None and type(focused_widget) in [ 
Entry, Text ] and focused_widget.cget('state') == 'normal': return else: top_selected = False if len(self.selected_modlists) > 0: for modlist in self.modlists: if modlist.is_top_selected: top_selected = True if top_selected: sorted_selected_modlists = sorted( self.selected_modlists, key=lambda x: self.modlists.index(x)) if sorted_selected_modlists[-1] == self.modlists[0]: return for modlist in sorted_selected_modlists: modlist_index = self.modlists.index(modlist) list_to_move = self.modlists[modlist_index - 1].get_all_info() list_to_move_name = self.modlists[modlist_index - 1].get_name() list_to_move_is_collapsed = self.modlists[modlist_index - 1].is_collapsed self.delete(modlist_index - 1) self.insert(modlist_index, list_to_move_name, list_to_move, list_to_move_is_collapsed) #Collapse the category moved if it was collapsed if list_to_move_is_collapsed: self.modlists[modlist_index].force_collapse() else: for modlist in self.modlists: n = 0 n = modlist.moveSelectionUp() if n == -1: self.moveInto(n, modlist) def moveSelectionDown(self, event=None): focused_widget = self.master.master.focus_get() if event is not None and type(focused_widget) in [ Entry, Text ] and focused_widget.cget('state') == 'normal': return else: top_selected = False if len(self.selected_modlists) > 0: for modlist in self.modlists: if modlist.is_top_selected: top_selected = True if top_selected: sorted_selected_modlists = sorted( self.selected_modlists, key=lambda x: self.modlists.index(x)) if sorted_selected_modlists[-1] == self.modlists[-1]: return for modlist in sorted_selected_modlists: modlist_index = self.modlists.index(modlist) list_to_move = self.modlists[modlist_index + 1].get_all_info() list_to_move_name = self.modlists[modlist_index + 1].get_name() list_to_move_is_collapsed = self.modlists[modlist_index + 1].is_collapsed self.delete(self.modlists.index(modlist) + 1) self.insert(modlist_index, list_to_move_name, list_to_move, list_to_move_is_collapsed) #Collapse the category moved if it 
was collapsed if list_to_move_is_collapsed: self.modlists[modlist_index].force_collapse() else: for modlist in self.modlists: n = 0 n = modlist.moveSelectionDown() if n == 1: self.moveInto(n, modlist) return def onClickEvent(self, event): '''When the player clicks, control whether categories should be selected''' deselect_others = True #if clicked mod is already part of selection, prevents the deselection of other mods for x in range(len(self.modlists)): if self.modlists[x].is_top_entered and self.modlists[ x].is_top_selected: deselect_others = False if deselect_others: for x in range(len(self.modlists)): #Controls the selection of category names modlist = self.modlists[x] modlist.selectTop() if modlist.is_top_selected and modlist not in self.selected_modlists: self.current_index = x self.selected_modlists.append(modlist) elif not modlist.is_top_selected and modlist in self.selected_modlists: self.selected_modlists.remove(modlist) for modlist in self.modlists: modlist.onClickEvent(event) def selectAll(self): for modlist in self.modlists: modlist.forceDeselectTop() modlist.selectAll() self.selected_modlists.clear() def insert_mod(self, modlist_index, mod_index): self.modlists[modlist_index].insertInput(mod_index) def insert_custom_mod(self, modlist_index, mod_index): self.modlists[modlist_index].insertCustomInput(mod_index) def batch_insert_mod(self, modlist_index, mod_index): l = [] LinkGrabber(self, l, nexus=True) if len(l) == 1 and l[0] == False: messagebox.showinfo( 'No Valid Data Found', 'Either none of the ' 'links provided were valid Nexus mod links, ' 'or the Nexus web server is currently unava' 'ilable.') else: for info in reversed(l): self.modlists[modlist_index].insert(mod_index, info) def move_mod_to(self, modlist_index, target_modlist): modlist = self.modlists[modlist_index] for mod in sorted(modlist.selected_modlabel_list, key=lambda x: x.get_index()): target_modlist.insert(END, mod.get_info()) modlist.delete_selected() def rightClickMenu(self, event, 
rc_menu): ## #Select proper categories ## for i in self.modlists: ## i.selectTop() self.onClickEvent(event) #Initialize submenus colors_menu = Menu(self.master.master, tearoff=0) remove_menu = Menu(self.master.master, tearoff=0) merge_menu = Menu(self.master.master, tearoff=0) select_menu = Menu(self.master.master, tearoff=0) links_menu = Menu(self.master.master, tearoff=0) move_menu = Menu(self.master.master, tearoff=0) #Get clicked indices and modlist modlist_index = self.dynamic_nearest() mod_index = self._get_clicked_mod_index(modlist_index) modlist = self.modlists[modlist_index] #General modlist commands rc_menu.add_command( label='Insert Nexus Mod Here...', command=lambda: self.insert_mod(modlist_index, mod_index)) rc_menu.add_command( label='Insert Multiple Nexus Mods Here...', command=lambda: self.batch_insert_mod(modlist_index, mod_index)) rc_menu.add_command( label='Insert Non-Nexus Mod Here...', command=lambda: self.insert_custom_mod(modlist_index, mod_index)) y = self._get_clicked_cat_index(modlist_index) rc_menu.add_command(label='Insert Category Here...', command=lambda: self.insert_input(y)) rc_menu.add_command(label='Insert Category At End...', command=lambda: self.insert_input(END)) #Move options rc_menu.add_separator() rc_menu.add_cascade(label='Move Selected Mods To...', menu=move_menu) if len(modlist.modlabel_list) > 0 and \ len(modlist.selected_modlabel_list) > 0: for ml in self.modlists: move_menu.add_command(label=ml.name, command=lambda ml=ml: self.move_mod_to( \ modlist_index,ml)) if ml == modlist: move_menu.entryconfig(ml.name, state='disabled') #Color options if len(modlist.modlabel_list) > 0: rc_menu.add_separator() rc_menu.add_cascade(label="Change Selected Mods' Color To...", menu=colors_menu) colors_menu.add_command(label='Default', command=lambda: \ self.update_selected_colors('#383838')) colors_menu.add_command(label='Red', command=lambda: \ self.update_selected_colors('red')) colors_menu.add_command(label='Blue', command=lambda: \ 
self.update_selected_colors('blue')) colors_menu.add_command(label='Green', command=lambda: \ self.update_selected_colors('green')) colors_menu.add_command(label='Yellow', command=lambda: \ self.update_selected_colors('yellow')) rc_menu.add_separator() #incompatibilities commands rc_menu.add_command(label='Manage Incompatibilities...', command=lambda: \ self.manage_incomp(modlist_index, mod_index)) if len(self.modlists[modlist_index].modlabel_list[mod_index]. conflicts) > 0: rc_menu.add_command(label='View Conflicts', command=lambda: self.view_conflicts( \ modlist_index, mod_index)) rc_menu.add_separator() rc_menu.add_command(label='Rename Category', command=lambda: self.rename(y)) #Link options rc_menu.add_separator() rc_menu.add_command( label='Copy Mod Link', command=lambda: self.copyURL(modlist_index, mod_index)) rc_menu.add_cascade(label='Open Links...', menu=links_menu) links_menu.add_command(label='Open Selected Mod Links', command=self.open_selected) links_menu.add_command(label='Open All Mod Links in Category Here', command=lambda x=modlist_index: self.openAll(x)) #Selection options rc_menu.add_separator() rc_menu.add_cascade(label='Select...', menu=select_menu) select_menu.add_command( label='Select Here', command=lambda: modlist.rightClickSelect(mod_index)) select_menu.add_command(label='Select All Mods in Category Here', command=modlist.selectAll) select_menu.add_command(label='Select All Mods', command=self.selectAll) #Merge options rc_menu.add_separator() rc_menu.add_cascade(label='Merge Category...', menu=merge_menu) merge_menu.add_command(label='Merge Category Here Into Upper', command=lambda: self.merge_up(modlist_index)) merge_menu.add_command(label='Merge Category Here Into Lower', command=lambda: self.merge_down(modlist_index)) if modlist_index == 0: merge_menu.entryconfig('Merge Category Here Into Upper', state='disabled') if modlist_index == len(self.modlists) - 1: merge_menu.entryconfig('Merge Category Here Into Lower', state='disabled') 
#Removal options rc_menu.add_separator() rc_menu.add_cascade(label='Remove...', menu=remove_menu) remove_menu.add_command( label='Remove Mod Here', command=lambda: self.delete_mod(modlist_index, mod_index)) remove_menu.add_command(label='Remove Selected Mods', command=self.delete_selected_mod) remove_menu.add_command( label='Remove All In Category', command=lambda: self.delete_all_cat(modlist_index)) remove_menu.add_command( label='Remove Category Here', command=lambda: self.delete_confirm(modlist_index)) remove_menu.add_command(label='Remove Selected Categories', command=lambda: self.delete_selected()) remove_menu.add_command(label='Remove All Mods', command=self.delete_all_mods) #Disables the appropriate menu options if len(modlist.modlabel_list) == 0: remove_menu.entryconfig('Remove Mod Here', state='disabled') remove_menu.entryconfig('Remove All In Category', state='disabled') select_menu.entryconfig('Select Here', state='disabled') select_menu.entryconfig('Select All Mods in Category Here', state='disabled') links_menu.entryconfig('Open All Mod Links in Category Here', state='disabled') if len(self.selected_modlists) == 0: remove_menu.entryconfig('Remove Selected Categories', state='disabled') #Selects and deselects appropriate mods and categories i = 0 for modlist in self.modlists: i += len(modlist.selected_modlabel_list) ## modlist.onClickEvent(event) modlist.rightClickMenu(event, rc_menu) if i == 0: links_menu.entryconfig('Open Selected Mod Links', state='disabled') def view_conflicts(self, modlist_index, mod_index): conflicts = self.modlists[modlist_index].modlabel_list[ mod_index].conflicts ConflictListbox(self, conflicts) def copyURL(self, modlist_index, mod_index): self.master.master.clipboard_clear() self.master.master.clipboard_append( self.modlists[modlist_index].modlabel_list[mod_index].get_info() [0]) def openAll(self, modlist_index): msgBox = messagebox.askquestion('Opening All Mod Links', 'Open all mod links in the "' + 
self.modlists[modlist_index].name + '" category in your default browser?', icon='warning') if msgBox == 'yes': self.modlists[modlist_index].open_all_links() def open_selected(self): for modlist in self.modlists: modlist.open_selected_links() def update_color(self, modlist_index, mod_index, color, state='normal'): '''Update a single mod's label color''' self.modlists[modlist_index].modlabel_list[ \ mod_index].update_color(color,state) def update_selected_colors(self, color, state='normal'): for modlist in self.modlists: for mod in modlist.selected_modlabel_list: mod.update_color(color, state) def manage_incomp(self, modlist_index, mod_index): l = self.modlists[modlist_index].modlabel_list[ mod_index].incompatibilities IncompatibilityManager(self, l) def _get_clicked_mod_index(self, modlist_index): '''return updated index if mouse is below the last mod in a given list''' modlist = self.modlists[modlist_index] mod_index = modlist.nearest() if modlist.listview: height = modlist.listview_height else: height = modlist.defaultview_height mouse_y = modlist.mlb_frame.winfo_pointery( ) - modlist.mlb_frame.winfo_rooty() if len(modlist.modlabel_list) > 1 and (height * mod_index + height) < mouse_y: return mod_index + 1 else: return mod_index def _get_clicked_cat_index(self, modlist_index): '''return updated index if mouse is below the last category''' modlist = self.modlists[modlist_index] height = modlist.winfo_height() mouse_y = self.winfo_pointery() - self.winfo_rooty() if (modlist.winfo_y() + modlist.winfo_height()) < mouse_y: return modlist_index + 1 else: return modlist_index def onDoubleClickEvent(self, event): for modlist in self.modlists: modlist.onDoubleClickEvent(event) def toggle_view(self): for modlist in self.modlists: modlist.toggle_view() def dynamic_nearest(self): '''get index of mod listbox nearest to the mouse y position. 
designed to work with widgets of variable sizes''' index = 0 current_nearest_index = 0 #get the absolute position of the mouse in relation to the ModlistListbox position mouse_y = self.winfo_pointery() - self.winfo_rooty() if len(self.modlists) > 1: #initialize y_maps, a list of 2-lengthed lists that store the #start and end y values of each modlist y_maps = [] for i in range(len(self.modlists)): #populate heights modlist = self.modlists[i] #Set y-extending values if i == 0: base = 0 else: base = y_maps[i - 1][1] if modlist.listview: mod_height = modlist.listview_height * len( modlist.modlabel_list) else: mod_height = modlist.defaultview_height * len( modlist.modlabel_list) #set start and end values if modlist.is_collapsed: y_maps.append([base, base + modlist.name_height]) else: y_maps.append( [base, base + modlist.name_height + mod_height]) for i in range(len(y_maps)): #find the index within the y mappings if y_maps[i][0] <= mouse_y < y_maps[i][1]: index = i return index
class Taurus(_SuperBike):
    """Antenna wrapper running in SERVER mode.

    Caches the last packet received of each type in ``_memoize`` and
    handles sending packets toward the CLIENT (the bike).

    code    --> identifier used inside packets
    address --> address of the client antenna
    server  --> server antenna instance
    """

    def __init__(self, code, address, xbee_port=PORT, server=None, secret_key=None):
        if not server:
            server = Server(port=xbee_port)
        if secret_key:
            Packet.secret_key = secret_key
        super().__init__(code, address, server)

        # Register this instance among the listeners of the server antenna.
        self.transmitter.listener = self

        # Packets already pushed to the frontend, kept so the page can be
        # repopulated seamlessly after a reload.
        self._history = OrderedSet()

        # Last packet received, keyed by packet type.
        self._memoize = dict()

    def __str__(self):
        return f'{self.code} -- {self.address}'

    @property
    def history(self):
        """Every DATA payload recorded so far, as a plain list."""
        return list(self._history)

    @property
    def data(self):
        """Latest DATA payload (reading it also records it into history)."""
        pkt = self._memoize.get(Packet.Type.DATA)
        if not pkt:
            return {}
        self._history.append(pkt.jsonify)
        return pkt.jsonify

    @property
    def state(self):
        """Latest STATE payload, or an empty dict."""
        pkt = self._memoize.get(Packet.Type.STATE)
        return pkt.jsonify if pkt else {}

    @property
    def setting(self):
        """Latest SETTING payload, or an empty dict."""
        pkt = self._memoize.get(Packet.Type.SETTING)
        return pkt.jsonify if pkt else {}

    @property
    def notice(self):
        """Latest NOTICE payload, or an empty dict."""
        pkt = self._memoize.get(Packet.Type.NOTICE)
        return pkt.jsonify if pkt else {}

    # direction: bike --> server
    def receive(self, packet):
        """Store an incoming packet, replacing any previous one of its type."""
        if not isinstance(packet, Packet):
            raise PacketInstanceException
        self._memoize[packet.tipo] = packet
class Bike(_SuperBike):
    """Antenna wrapper running in CLIENT mode.

    Collects every packet received in ``_memoize`` and handles sending
    packets toward the SERVER (marta).

    code    --> identifier used inside packets
    address --> address of the server antenna
    client  --> client antenna instance
    """

    def __init__(self, code, address, client=None, sensors=None, secret_key=None):
        if not client:
            client = Client()
        if secret_key:
            Packet.secret_key = secret_key
        super().__init__(code, address, client)

        # Instances holding the useful sensor values.
        self._sensors = sensors

        # Register this instance as the client of the antenna.
        self.transmitter.bike = self

        # Every packet received so far.
        self._memoize = OrderedSet()

    def __len__(self):
        return len(self._memoize)

    def __str__(self):
        return f'{self.code} -- {self.transmitter.address}'

    @property
    def packets(self):
        return self._memoize

    @property
    def sensors(self):
        return self._sensors

    # direction: bike -> server
    def blind_send(self, packet):
        """Forward an already-built Packet as-is."""
        if not isinstance(packet, Packet):
            raise PacketInstanceException
        self.send(packet)

    def send_data(self, d):
        """Send *d* (a dict) as a DATA packet."""
        self._send_typed(Packet.Type.DATA, d)

    # NOTE: probably to be deprecated
    def send_state(self, s):
        """Send *s* (a dict) as a STATE packet."""
        self._send_typed(Packet.Type.STATE, s)

    def send_setting(self, s):
        """Send *s* (a dict) as a SETTING packet."""
        self._send_typed(Packet.Type.SETTING, s)

    # TODO: add NOTICE packet

    def _send_typed(self, packet_type, payload):
        """Validate *payload* and ship it tagged with *packet_type*."""
        if not isinstance(payload, dict):
            raise InvalidInstanceException
        message = {'dest': self.code, 'type': packet_type}
        message.update(payload)
        self.send(message)

    # direction: server --> bike
    def receive(self, packet):
        """Record an incoming packet."""
        if not isinstance(packet, Packet):
            raise PacketInstanceException
        self._memoize.append(packet)
class SeqIndexerBase(object):
    """Bidirectional mapping between string elements and integer indices."""

    def __init__(self, name, if_use_pad, if_use_unk):
        self.__name = name
        self.__if_use_pad = if_use_pad
        self.__if_use_unk = if_use_unk

        self.__index2instance = OrderedSet()
        self.__instance2index = OrderedDict()

        # Frequency of every element ever added; used as a fallback when
        # the "<UNK>" sign is disabled.
        self.__counter = Counter()

        if if_use_pad:
            self.__sign_pad = "<PAD>"
            self.add_instance(self.__sign_pad)
        if if_use_unk:
            self.__sign_unk = "<UNK>"
            self.add_instance(self.__sign_unk)

    @property
    def name(self):
        return self.__name

    def add_instance(self, instance):
        """Add a str instance (or any list/tuple nesting of them) to the alphabet.

        Every addition is counted; the counts influence how unknown
        instances are serialized later.
        """
        if isinstance(instance, (list, tuple)):
            for item in instance:
                self.add_instance(item)
            return

        # Only elements of str type are supported.
        assert isinstance(instance, str)

        self.__counter[instance] += 1

        if instance not in self.__index2instance:
            self.__instance2index[instance] = len(self.__index2instance)
            self.__index2instance.append(instance)

    def get_index(self, instance):
        """Serialize *instance* (str, or list/tuple of them) to indices.

        Unknown words map to "<UNK>" when that sign is enabled, otherwise
        to the element with the highest frequency in the training data.
        """
        if isinstance(instance, (list, tuple)):
            return [self.get_index(item) for item in instance]

        assert isinstance(instance, str)

        try:
            return self.__instance2index[instance]
        except KeyError:
            if self.__if_use_unk:
                return self.__instance2index[self.__sign_unk]
            most_frequent = self.__counter.most_common(1)[0][0]
            return self.__instance2index[most_frequent]

    def get_instance(self, index):
        """Inverse of get_index; raises for an invalid index."""
        if isinstance(index, list):
            return [self.get_instance(item) for item in index]

        return self.__index2instance[index]

    def save_content(self, dir_path):
        """Dump the alphabet into *dir_path* as two files:

        1. "<name>_list.txt" -- elements sorted by occurrence frequency;
        2. "<name>_dict.txt" -- elements sorted by serialized index.
        """
        if not os.path.exists(dir_path):
            os.mkdir(dir_path)

        freq_file = os.path.join(dir_path, self.__name + "_list.txt")
        with open(freq_file, 'w', encoding='utf-8') as out:
            for element, frequency in self.__counter.most_common():
                out.write(element + '\t' + str(frequency) + '\n')

        index_file = os.path.join(dir_path, self.__name + "_dict.txt")
        with open(index_file, 'w', encoding='utf-8') as out:
            for position, element in enumerate(self.__index2instance):
                out.write(element + '\t' + str(position) + '\n')

    def __len__(self):
        return len(self.__index2instance)

    def __str__(self):
        return 'Alphabet {} contains about {} words: \n\t{}'.format(
            self.name, len(self), self.__index2instance)
# continue # else: # # no action can be taken this turn # continue step = steps[0] step = step.split(" ") prereq_step = step[1] next_step = step[7] # start the tree if len(tree) == 0: prereq = prereq_lookup.get(prereq_step, None) if meets_requirements(prereq, tree): tree.append(prereq_step) available.append(next_step) # Correct route - use the alphabet first, check against prerequisites import string alphabet = list(string.ascii_uppercase) for letter in alphabet: if letter not in tree: available.append(letter) available = OrderedSet(sorted(available)) while len(available) != 0: for available_step in available: prereq = prereq_lookup.get(available_step, None) if meets_requirements(prereq, tree) and available_step not in tree: tree.append(available_step)
class SeqIndexerBase(object):
    """Storage and serialization of a set of string elements (an alphabet).

    Keeps a bidirectional instance<->index mapping plus occurrence counts,
    and provides helpers that pack (lists of) instances into padded
    ``torch.LongTensor`` batches.
    """

    def __init__(self, name, if_use_pad, if_use_unk):
        """
        :param name: alphabet name; used as a file-name prefix by save_content.
        :param if_use_pad: if True, reserve a "<PAD>" entry (added first).
        :param if_use_unk: if True, reserve a "<UNK>" entry for unknown words.
        """
        self.__name = name
        self.__if_use_pad = if_use_pad
        self.__if_use_unk = if_use_unk

        self.__index2instance = OrderedSet()
        self.__instance2index = OrderedDict()

        # Counter object recording the frequency of each element in raw text.
        self.__counter = Counter()

        if if_use_pad:
            self.__sign_pad = "<PAD>"
            self.add_instance(self.__sign_pad)
        if if_use_unk:
            self.__sign_unk = "<UNK>"
            self.add_instance(self.__sign_unk)

    @property
    def name(self):
        return self.__name

    def add_instance(self, instance):
        """Add instances to the alphabet.

        1, Any list/tuple nesting of str elements is supported.
        2, Added instances are counted; the counts influence the
           serialization of unknown instances.

        :param instance: a str instance, or a list/tuple of them.
        """
        if isinstance(instance, (list, tuple)):
            for element in instance:
                self.add_instance(element)
            return

        # We only support elements of str type.
        assert isinstance(instance, str)

        # Count the frequency of instances.
        self.__counter[instance] += 1

        if instance not in self.__index2instance:
            self.__instance2index[instance] = len(self.__index2instance)
            self.__index2instance.append(instance)

    def get_index(self, instance):
        """Serialize the given instance(s) to index/indices.

        For unknown words the result depends on ``if_use_unk``:
        1, if True, return the index of "<UNK>";
        2, if False, return the index of the element with the highest
           frequency in the training data.

        :param instance: a str instance, or a list/tuple of them.
        :return: the serialization of the query instance(s).
        """
        if isinstance(instance, (list, tuple)):
            return [self.get_index(elem) for elem in instance]

        assert isinstance(instance, str)

        try:
            return self.__instance2index[instance]
        except KeyError:
            if self.__if_use_unk:
                return self.__instance2index[self.__sign_unk]
            else:
                max_freq_item = self.__counter.most_common(1)[0][0]
                return self.__instance2index[max_freq_item]

    def get_instance(self, index):
        """Get the instance corresponding to a query index.

        Throws for an invalid index.

        :param index: query index, possibly a (nested) list.
        :return: the corresponding instance(s).
        """
        if isinstance(index, list):
            return [self.get_instance(elem) for elem in index]

        return self.__index2instance[index]

    def save_content(self, dir_path):
        """Save the content of the alphabet to files.

        1, "<name>_list.txt" -- elements sorted by occurrence frequency;
        2, "<name>_dict.txt" -- elements sorted by serialized index.

        :param dir_path: directory path to save into (created if missing).
        """
        if not os.path.exists(dir_path):
            os.mkdir(dir_path)

        list_path = os.path.join(dir_path, self.__name + "_list.txt")
        # Encoding pinned to utf-8 (matching the sibling implementation) so
        # non-ASCII vocabulary items round-trip regardless of OS locale.
        with open(list_path, 'w', encoding='utf-8') as fw:
            for element, frequency in self.__counter.most_common():
                fw.write(element + '\t' + str(frequency) + '\n')

        dict_path = os.path.join(dir_path, self.__name + "_dict.txt")
        with open(dict_path, 'w', encoding='utf-8') as fw:
            for index, element in enumerate(self.__index2instance):
                fw.write(element + '\t' + str(index) + '\n')

    def add_padding_tensor(self, texts, digital=False, gpu=-1):
        """Right-pad *texts* with 0 to a common length and pack as tensors.

        :param texts: list of sequences; str instances unless ``digital``.
        :param digital: if True, *texts* are already index lists.
        :param gpu: CUDA device id; tensors stay on CPU when negative.
        :return: (padded LongTensor, lengths LongTensor, mask LongTensor).
        """
        len_list = [len(text) for text in texts]
        # NOTE(review): raises ValueError on an empty batch -- presumably
        # callers never pass one; confirm before hardening.
        max_len = max(len_list)
        if not digital:
            texts = self.get_index(texts)

        trans_texts = []
        mask = []
        for index in range(len(len_list)):
            # deepcopy so padding does not mutate the caller's lists.
            trans_texts.append(deepcopy(texts[index]))
            mask.append([1] * len_list[index])
            mask[-1].extend([0] * (max_len - len_list[index]))
            trans_texts[-1].extend([0] * (max_len - len_list[index]))

        trans_texts = torch.LongTensor(trans_texts)
        mask = torch.LongTensor(mask)
        len_list = torch.LongTensor(len_list)
        if gpu >= 0:
            trans_texts = trans_texts.cuda(device=gpu)
            mask = mask.cuda(device=gpu)
            len_list = len_list.cuda(device=gpu)
        return trans_texts, len_list, mask

    def idx2tensor(self, indexes, gpu):
        """Wrap a list of indices in a LongTensor (moved to *gpu* if >= 0)."""
        indexes = torch.LongTensor(indexes)
        if gpu >= 0:
            indexes = indexes.cuda(device=gpu)
        return indexes

    def instance2tensor(self, words, gpu):
        """Serialize *words* and wrap the indices in a LongTensor."""
        words = self.get_index(words)
        return self.idx2tensor(words, gpu)

    def tensor2idx(self, tensor, len_list: torch.Tensor):
        """Convert a padded batch tensor back to per-row index lists,
        truncating each row to its true length from *len_list*."""
        len_list = len_list.tolist()
        tensor = tensor.tolist()
        for i, x in enumerate(len_list):
            tensor[i] = tensor[i][:x]
        return tensor

    def __len__(self):
        return len(self.__index2instance)

    def __str__(self):
        return 'Alphabet {} contains about {} words: \n\t{}'.format(
            self.name, len(self), self.__index2instance)
def main():
    """Command-line entry point for the cloudmesh installer.

    Parses docopt arguments, discovers git repositories in the current
    directory, then dispatches to the requested subcommand: listing,
    cloning/pulling/installing bundles, releasing, ssh-key upload, cleaning
    venvs or egg directories, and creating a fresh venv.
    """
    arguments = docopt(__doc__)
    bundle = arguments["BUNDLE"]
    benchmark = arguments["--benchmark"]
    # Expand ~ and environment variables in user-supplied paths.
    arguments["DIR"] = \
        os.path.expandvars(os.path.expanduser(arguments.get("DIR") or '.'))
    arguments["LOCATION"] = \
        os.path.expandvars(os.path.expanduser(
            arguments.get("LOCATION") or '~/.ssh/id_rsa.pub'))
    colorama.init(autoreset=True)
    if debug:
        banner("BEGIN ARGUMENTS")
        pprint(arguments)
        banner("END ARGUMENTS")
    WARNING = "WARNING WARNING WARNING WARNING WARNING"
    #
    # FIND ALL GIT REPOS IN cwd
    #
    global repos
    repos["all"] = get_all_repos()
    #
    # FIND ALL GIT REPOS that start with cloudmesh
    #
    bundles = arguments["BUNDLES"]

    def _get_bundles():
        # Resolve every bundle name given on the command line to the flat
        # list of repositories it contains.
        repositories = []
        bundles = arguments["BUNDLES"]
        for bundle in bundles:
            check_for_bundle(bundle)
            repositories += repos[bundle]
        return repositories

    repos["cloudmesh"] = []
    for repo in repos["all"]:
        if repo.startswith("cloudmesh-"):
            repos["cloudmesh"].append(repo)

    if arguments["version"]:
        # NOTE(review): 'insatller_version' is misspelled; presumably the
        # module-level constant carries the same typo — verify before renaming.
        print(insatller_version)

    elif arguments["list"] and not arguments["BUNDLE"] and not arguments["--git"]:
        # Plain "list": print every known bundle (long or short form).
        if not arguments["--short"]:
            banner("Cloudmesh Bundles")
            block = ""
            for bundle in repos:
                block = block + bundle_elements(bundle)
            print(block)
        else:
            print(bundle_list(repos))

    elif arguments["list"] and arguments["--git"]:
        # "list --git": print the clone URLs of one bundle.
        check_for_bundle(bundle)
        print(bundle)
        banner(f" {bundle}")
        for entry in repos[bundle]:
            location = Git.url(entry)
            print(f"{location}.git")

    elif arguments["list"] and arguments["BUNDLE"]:
        # "list BUNDLE": print the contents of a single named bundle.
        bundle = arguments["BUNDLE"]
        if bundle in repos:
            print(bundle_elements(bundle))
        else:
            print(f"ERROR: could not find bundle {bundle}")
            print("Available bundles: ")
            print(" ".join(repos.keys()))
        return ""

    elif arguments["info"]:
        # "info": compare git / pypi / locally-installed versions per package.
        verbose = arguments["--verbose"]
        # hasattr(sys, 'real_prefix') is the classic (virtualenv-era) venv
        # detection; it is False inside modern `python -m venv` environments.
        native = hasattr(sys, 'real_prefix')
        executable = sys.executable
        if native:
            banner(WARNING, c=Fore.RED)
            print()
            RED("You are likely not running in a venv. "
                "Please remember that for "
                "development purposes we recommend you run in a venv. "
                "Please consult with our handbook on how to set one up")
            print()
            print("We found python in:")
            print(executable)
        print()
        print(70 * '-')
        print()
        # print("info")
        # packages = ["cloudmesh-common", "cloudmesh-cmd5", "cloudmesh-cloud"]
        bundle = arguments["BUNDLE"] or "cms"
        data = [["Package", "Git", "Pypi", "Installed"]]
        packages = repos[bundle]
        for package in packages:
            undefined = Fore.RED + "not found" + Style.RESET_ALL
            entry = [
                package,
                undefined,  # "git":
                undefined,  # "PYPI"
                undefined,  # "installed"
            ]
            if verbose:
                print(f"\nVersion -> {package}")
            #
            # GIT
            #
            try:
                v = requests.get(
                    "https://raw.githubusercontent.com/cloudmesh"
                    "/{package}/master/VERSION".format(package=package)).text
                entry[1] = v
            except:
                v = "ERROR: can not find git version"
            finally:
                # GitHub returns a "404: Not Found" body for missing files.
                if '404' in v:
                    v = "ERROR: can not find git version"
            if verbose:
                print("... Github Version ->", v)
            #
            # PYPI
            #
            try:
                v = requests.get("https://pypi.org/project/{package}/".format(
                    package=package)).text
                # Scrape the version out of the pypi project page header.
                pat_str = '(.*)<h1 class="package-header__name">(.+?)</h1>(.*)'
                pattern = re.compile(pat_str, re.M | re.I | re.S)
                groups = re.match(pattern, v)
                # print (groups)
                v = (groups.group(2)).strip().split(package)[1].strip()
                entry[2] = v
            except:
                v = "ERROR: can not find pypi version"
            data.append(entry)
            if verbose:
                print("... Pypi Version ->", v)
            #
            # INSTALLED
            #
            try:
                installed = run(f"pip freeze | grep {package}",
                                verbose=False).strip()
                entry[3] = installed
            except:
                installed = "ERROR: can not find installed version"
            if verbose:
                print("... Installed Version ->", installed)
            if verbose:
                print(70 * "-")
                print()
        print(tabulate(data, headers="firstrow"))
        print()

    elif arguments["status"] and arguments["git"]:
        repositories = _get_bundles()
        Git.status(repositories)

    elif arguments["clone"] and arguments["git"]:
        repositories = _get_bundles()
        result = Git.clone(repositories)

    elif arguments["pull"] and arguments["git"]:
        repositories = _get_bundles()
        Git.pull(repositories)

    elif arguments["key"] and arguments["git"]:
        # Show the user's public key and offer to open github's key pages.
        try:
            location = arguments["LOCATION"]
            print("Key location:", location)
            if not location.endswith(".pub"):
                ERROR("make sure you specify a public key")
                sys.exit(1)
            key_contents = open(location).read()
            print()
            print("Make sure you copy the content between the lines to github")
            print(70 * "-")
            print(key_contents.strip())
            print(70 * "-")
            print(
                "Please copy the content now, so you can use it in the browser."
            )
            print()
            if yn_question(
                    "would you like to open a web page to github to upload the key (yes/n)? "
            ):
                webbrowser.open_new("https://github.com/settings/keys")
                if yn_question("Is the key missing (yes/n)? "):
                    print("Paste the key in the next window and submit.")
                    webbrowser.open_new("https://github.com/settings/ssh/new")
        except:
            print(" you must have a key and upload it to github.")
            print("To generate the key use ssh-keygen")
            print("To avoid typing in the password all the time, use ssh-add")

    elif arguments["get"] or arguments["update"]:
        repositories = _get_bundles()
        Git.get(repositories)
        # if benchmark:
        #     StopWatch.benchmark(sysinfo=True)

    elif arguments["install"]:
        banner(f"Installing bundles: {bundles}")
        # OrderedSet removes duplicates while keeping bundle order stable.
        repositories = OrderedSet(_get_bundles())
        print('\n'.join(repositories))
        print()
        if arguments["--venv"]:
            result = Git.install(repositories)
        else:
            result = Git.install(repositories, dev=True)
        StopWatch.benchmark(sysinfo=True)

    elif arguments["release"]:
        # Temporarily clear TESTING so the release tooling runs for real.
        testing = "TESTING" in os.environ
        if testing:
            del os.environ["TESTING"]
        os.system("pip install twine")
        # NOTE(review): this rebinds the *global* dict 'repos' to a plain list
        # of names — any later use of repos as a dict would break. Verify that
        # this branch is always terminal.
        repos = arguments["REPOS"]
        repositories = []
        for repository in repos:
            repositories.append(f"cloudmesh-{repository}")
        banner(f"Releasing repositories: {repositories}")
        print('\n'.join(repositories))
        print()
        result = Git._command(repositories, "make patch")
        result = Git._command(repositories, "make release")
        StopWatch.status("make patch", "released")
        StopWatch.status("make release", "released")
        StopWatch.benchmark(sysinfo=True)
        Git.version(repositories)
        if testing:
            os.environ["TESTING"] = "1"

    elif arguments["--venv"] and arguments["clean"]:
        environment = arguments["--venv"]
        print()
        banner(WARNING, c=Fore.RED)
        RED(
            textwrap.dedent("""
            Please notice that executing this command can do harm to
            your installation. Make sure that you also check your
            .bashrc, .bash_profile or .zprofile files as appropriately
            to remove aliasses or path variables pointing to your venv."""))
        print()
        print(70 * '-')
        banner(f"Removing {environment}")
        print(70 * '-')
        print()
        # NOTE(review): the quoted "~" is NOT expanded by the shell inside
        # double quotes, so this removes a literal ./~/... path — verify.
        commands = [f'rm -rf "~/{environment}"']
        print("\n".join(commands))
        print()
        if arguments["--force"] and \
            yn_question("Would you like us to execute them (yes/n)? "):
            print(70 * '-')
            for command in commands:
                print("Executing:", command)
                print()
                os.system(command)
                print(70 * '-')
                print()

    elif arguments["clean"] and arguments["--dir"]:
        # Remove cloudmesh egg directories below --dir (dry run unless --force).
        dryrun = not arguments['--force']
        directory = arguments["--dir"]
        eggs = list(Path(directory).glob("**/cloudmesh*egg*"))
        if dryrun:
            banner("Dryrun directory clean")
            for egg in eggs:
                print(f" found -> {egg}")
        else:
            print()
            banner(WARNING, c=Fore.RED)
            RED(
                textwrap.dedent("""
                Please notice that executing this command can do harm to
                your installation. If you delete files with this command
                it is on your own risk. The deletion may have bad effects
                on your python environment. So please only use it if you
                know what it effects.
                """))
            print()
            for egg in eggs:
                print(f" found -> {egg}")
            print()
            if not yn_question(
                Fore.RED +
                f"WARNING: Removing listed files. Do you really want to continue. yes/n)? "
            ):
                sys.exit(1)
            for egg in eggs:
                remove(egg)

    elif arguments["new"]:
        # NOTE(review): os.environ is a mapping, not callable — this raises
        # TypeError whenever VENV is not given; likely meant
        # os.environ.get("VIRTUAL_ENV"). Verify and fix upstream.
        venv = arguments["VENV"] or os.path.basename(
            os.environ("VIRTUAL_ENV")) or "~/ENV3"
        if os.path.basename(venv).startswith("ENV") and yn_question(
                f"Would you like reinstall the venv {venv} (yes/n)? "):
            script = textwrap.dedent(f"""
            rm -rf {venv}
            python3.8 -m venv {venv}
            source {venv}/bin/activate
            pip install pip -U
            which python
            which pip
            python --version
            pip --version
            pip install cloudmesh-installer
            """).strip()
            # Run the whole recipe as one shell line so `source` takes effect
            # for the following commands.
            script = "; ".join(script.splitlines())
            os.system(script)
            if bundles:
                print()
                print("Installing Bundles")
                print()
                repositories = _get_bundles()
                Git.get(repositories)
            print()
            # NOTE(review): the `or ".zprofile"` is dead code — `X or Y` with a
            # non-empty string X always yields X; ".zprofile" is never printed.
            print("You can add the following to your .bashrc or .bash_profile"
                  or ".zprofile")
            print()
            print(" source ~/ENV3/bin/activate")
            print()
class Altervista(VocabularyBase):
    """Thesaurus lookup backed by the Altervista Thesaurus REST API.

    Query results are split into related, broader ('Oberbegriff') and
    narrower ('Unterbegriff') term sets based on the annotation suffixes
    Altervista embeds in its synonym strings. Supports 'de' and 'en'.
    """

    # Keys are rotated when one is rate-limited (HTTP 403).
    apiKeys = [
        'WLNmjWebV5RMaqVjDk8b', 'QGqbbwnP9lRMW35ZwJcV', 'RTRtk7E5eB3WYg9NWwEE'
    ]  # 5000 queries a day per key
    splitChar = '|'
    cl = ConfigLoader()
    apiStr = cl.getAltervistaAPIString('API_CALL')
    keySuffix = cl.getAltervistaAPIString('KEY_SUFFIX')
    langSuffix = cl.getAltervistaAPIString('LANGUAGE_SUFFIX')
    # Class-level defaults; every instance shadows these in __init__.
    relatedSet = OrderedSet()
    broaderSet = OrderedSet()
    narrowerSet = OrderedSet()

    def __init__(self, searchTerm, language):
        """Run API queries for all search terms when ``language`` is supported."""
        VocabularyBase.__init__(self, searchTerm, language)
        self.relatedSet = OrderedSet()
        self.broaderSet = OrderedSet()
        self.narrowerSet = OrderedSet()
        self.supportedLang.append('de')
        self.supportedLang.append('en')
        if language in self.supportedLang:
            apiLang = None
            if language == 'en':
                apiLang = 'en_US'
            elif language == 'de':
                apiLang = 'de_DE'
            for word in self.searchTerms:
                self.runAPICall(word, apiLang)

    @retry(Exception, tries=3)
    def apiCall(self, word, apiLang):
        """Query the API for ``word``, rotating API keys on HTTP 403.

        :param word: term to look up.
        :param apiLang: Altervista locale string, e.g. 'de_DE'.
        :return: the last ``requests`` response (may still be an error
            response if every key was rejected).
        """
        key = 0
        result = requests.get(self.apiStr + word + self.keySuffix +
                              self.apiKeys[key] + self.langSuffix + apiLang)
        # Forbidden 403: no permission, or key over rate limit -> try the
        # next key. BUGFIX: guard with ``key + 1 < len(...)`` so the
        # increment can never index past the end of apiKeys (previously an
        # IndexError when every key was rate-limited); instead the last 403
        # response is returned to the caller.
        while (result.status_code == 403 and key + 1 < len(self.apiKeys)):
            key = key + 1
            result = requests.get(self.apiStr + word + self.keySuffix +
                                  self.apiKeys[key] + self.langSuffix + apiLang)
        return result

    def runAPICall(self, word, apiLang):
        """Fetch synonyms for ``word`` and distribute them into the sets."""
        result = self.apiCall(word, apiLang)
        if result.status_code < 400:
            # found some terms
            resultJson = result.json()
            response = resultJson['response']
            for responseList in response:
                lists = responseList['list']
                categoryString = lists['synonyms']
                splitArray = categoryString.split(self.splitChar)
                self.fillToSets(splitArray)

    def fillToSets(self, splitArray):
        """Classify raw synonym strings into related/broader/narrower sets.

        Strips Altervista's annotation suffixes, skips antonyms entirely,
        and files '(Oberbegriff)' terms as broader and '(Unterbegriff)'
        terms as narrower; everything else is related.
        """
        for term in splitArray:
            term = term.replace(' (similar term)', '')
            term = term.replace(' (related term)', '')
            term = term.replace(' (umgangssprachlich)', '')
            term = term.replace(' (derb)', '')
            term = term.replace(' (fachsprachlich)', '')
            if not '(antonym)' in term and not '(Antonym)' in term:
                if ' (Oberbegriff)' in term:
                    term = term.replace(' (Oberbegriff)', '')
                    self.broaderSet.append(utils.eszettToSS(term))
                elif ' (Unterbegriff)' in term:
                    term = term.replace(' (Unterbegriff)', '')
                    self.narrowerSet.append(utils.eszettToSS(term))
                else:
                    self.relatedSet.append(utils.eszettToSS(term))

    def getRelated(self):
        return self.relatedSet

    def getNarrower(self):
        return self.narrowerSet

    def getBroader(self):
        return self.broaderSet

    def checkConnection(self):
        """Return True when a probe request succeeds (status < 400)."""
        response = self.apiCall('Test', 'de_DE')
        if response is not None and response.status_code < 400:
            return True
        return False
class BaseDeployment(object):
    """Base class for fleet deployments: loads unit state from the cluster,
    plans spawn/redeploy/destroy steps in chunks, and runs the plans."""

    name = 'Base Deployment'

    def __init__(self, fleet_client, service_name, tag, unit_file=None):
        """
        :param fleet_client: client used for all fleet API calls.
        :param service_name: base name of the service (without tag).
        :param tag: deployment tag appended to the service name.
        :param unit_file: optional open file with a unit template; when None
            the template is fetched from fleet instead.
        """
        self.fleet = fleet_client
        self.service_name = service_name
        self.tag = tag
        self.plans = list()
        self.units = OrderedSet()
        self.chunking_count = 1  # default
        self.desired_units = 0
        if unit_file is None:
            # load service template from fleet
            # NOTE(review): "%[email protected]" looks like an email-obfuscation
            # artifact of the original template name (something like
            # "%s@....service") — verify against the repository.
            self.unit_template = self.fleet.get_unit("%[email protected]" % self.service_name)
        else:
            self.unit_template = unit_file.read()

    def __str__(self):
        return "<Base Deployment Object: (%s plans) (%s units)>" % (len(self.plans), len(self.units))

    @property
    def current_unit_count(self):
        # Number of units currently known (cluster units + planned spawns).
        return len(self.units)

    @property
    def unit_count_difference(self):
        # Positive: units to create; negative: units to destroy.
        return self.desired_units - self.current_unit_count

    @property
    def full_service_name(self):
        return "%s-%s" % (self.service_name, self.tag)

    def load(self, instances):
        """ Run logic and API calls to setup Units """
        if instances is None:
            # assume desired is current state
            self.desired_units = self.current_unit_count
        else:
            self.desired_units = instances
        # Load unit state from cluster, set desired instances.
        # find relevant units that exist in the cluster
        for u in self.fleet.list_units():
            if u['name'].startswith(self.service_name + '-'):
                # NOTE(review): same "%[email protected]" obfuscation artifact as in
                # __init__ — verify the real template name.
                if u['name'] != "%[email protected]" % self.service_name:
                    # Exclude service templates
                    unit = Unit(u['name'], u['currentState'])
                    self.units.append(unit)
        # mark excess units for destruction
        # NOTE(review): with i starting at 0 the first iteration marks
        # units[0], and later iterations mark from the tail (units[-1],
        # units[-2], ...). Marking index 0 first looks unintended (one would
        # expect only tail units); confirm the intended destruction order.
        i = 0
        while i > self.unit_count_difference:
            self.units[i].required_action = 'destroy'
            i -= 1
        # define the creation of new units here
        i = 0
        spawn = list()
        while i < self.unit_count_difference:
            i += 1
            spawn.append(Unit(self.get_unit_name(self.current_unit_count+i), 'uncreated', 'spawn'))
        for s in spawn:
            self.units.append(s)
        if self.current_unit_count == 0:
            raise Exception('No units found')

    def get_unit_name(self, idx):
        """Build the fleet unit name for instance index ``idx``."""
        return "%s-%s@%s.service" % (self.service_name, self.tag, idx)

    def update_chunking(self, chunking, chunking_percent):
        # update chunking_count based on our parameters and the number of units
        if chunking_percent is not None:
            if self.unit_count_difference < 0:
                # we are destroying some, exclude from the chunking calculation
                units_to_deploy = self.current_unit_count + self.unit_count_difference
            else:
                units_to_deploy = self.current_unit_count
            # Round the percentage up so at least one unit per chunk.
            self.chunking_count = int(math.ceil((float(units_to_deploy)*(float(chunking_percent)/float(100)))))
        else:
            if chunking is not None:
                self.chunking_count = chunking
                if chunking > self.current_unit_count:
                    raise click.UsageError('--chunking cannot be greater than --instances.')

    def create_plans(self):
        """Slice the unit list into chunks and create one Plan per chunk.

        Within a chunk: spawns and stops are queued first, then starts and
        destroys, so redeployed units are stopped before being restarted.
        """
        i = 0
        while i < self.current_unit_count:
            plan = Plan(self.fleet, self.service_name, self.full_service_name, self.unit_template)
            from_idx = i
            to_idx = i + self.chunking_count
            if to_idx > self.current_unit_count:
                to_idx = self.current_unit_count
            for unit in self.units[from_idx:to_idx]:
                if unit.required_action == 'spawn':
                    plan.steps.append(Step(unit.name, 'spawn'))
                if unit.required_action == 'redeploy':
                    plan.steps.append(Step(unit.name, 'stop'))
            for unit in self.units[from_idx:to_idx]:
                if unit.required_action == 'redeploy':
                    plan.steps.append(Step(unit.name, 'start'))
                if unit.required_action == 'destroy':
                    plan.steps.append(Step(unit.name, 'destroy'))
            i = to_idx
            self.plans.append(plan)

    def describe_plans(self):
        """Return a human-readable description of all planned stages/steps."""
        # # let us know what will be done, if anything
        # if self.unit_count_difference > 0:
        #     click.echo("Insufficient units found. %s will be spawned." % self.unit_count_difference)
        # if self.unit_count_difference < 0:
        #     click.echo("Excess units found. %s will be destroyed." % abs(self.unit_count_difference))
        output = list()
        output.append("*** %s Deployment Plan ***" % self.name)
        output.append("==> Details")
        for u in self.units:
            output.append("Unit: %s (%s)." % (u.name, u.state))
        output.append("Chunking: %s units" % self.chunking_count)
        output.append("==> Deployment Plan")
        stage_idx = 1
        step_idx = 1
        for plan in self.plans:
            output.append("==> Stage %s" % stage_idx)
            stage_idx += 1
            for step in plan.steps:
                output.append("Step %s: %s" % (step_idx, step))
                step_idx += 1
        return output

    def run_plans(self):
        """Execute every plan in order."""
        for plan in self.plans:
            plan.run()
        click.echo("Finished.")
class ModlistListbox(Frame): def __init__(self, parent, listview=False, name='Mods'): Frame.__init__(self, parent, bg='#2d2d2d') self.mod_list = [] self.modlabel_list = [] self.selected_modlabel_list = OrderedSet([]) self.listview = listview self.listview_height = 29 self.defaultview_height = 68 self.name_height = 35 self.current_index = None #category selection variables self.is_top_entered = False self.is_top_selected = False #collapse variable self.is_collapsed = False #enter variable self.is_entered_all = False #create mod listbox frame self.mlb_frame = Frame(self) #naming initalization self.name = name self.name_label = TagLabel(self, bordercolor='#666666', size=12, font=('roboto', 16, 'bold'), text=self.name, bg='#444444', fg='#f0f0f0', borderwidth=4, relief='flat') #limit the minimum size of name label if self.name_label.label.winfo_reqwidth() < 205: self.name_label.label.configure(width=15) #enter and leave events self.name_label.bind('<Enter>', self.on_enter_top) self.name_label.bind('<Leave>', self.on_leave_top) self.bind('<Enter>', self.on_enter_all) self.bind('<Leave>', self.on_leave_all) #lay out the name frame and mod listbox frame self.name_label.pack(side='top', fill='y', anchor='sw', padx=10) self.mlb_frame.pack(side='bottom', fill='both', expand=True) self.mlb_frame.grid_columnconfigure(0, weight=1) self.update_idletasks() def update_name(self, name): self.name = name ## self.name_label.update_text(name) #resizing a pre-existing TagLabel isn't working when changing text #so just make a new one 4Head new_name = TagLabel(self, bordercolor='#666666', size=12, font=('roboto', 16, 'bold'), text=name, bg='#444444', fg='#f0f0f0', borderwidth=4, relief='flat') self.name_label.pack_forget() self.name_label.destroy() self.name_label = new_name self.name_label.pack(side='top', fill='y', anchor='sw', padx=10) if self.name_label.label.winfo_reqwidth() < 205: self.name_label.label.configure(width=15) def get_name(self): return self.name def 
toggle_collapse(self): '''Collapses or expands the mod listbox''' if len(self.modlabel_list) > 0: if self.is_collapsed: self.force_expand() else: self.force_collapse() self.update_idletasks() def force_collapse(self): if len(self.modlabel_list) > 0: for mod in self.modlabel_list: mod.grid_remove() self.mlb_frame.configure(height=1) self.is_collapsed = True self.name_label.configure(bg='white') def force_expand(self): if len(self.modlabel_list) > 0: for mod in self.modlabel_list: mod.grid() self.is_collapsed = False self.name_label.configure(bg='#666666') def insert(self, index, info): '''inserts a mod at the given index, and with the given info''' if index == END or index > len(self.mod_list): index = len(self.mod_list) mod_label = ModLabel(self.mlb_frame, info=info, index=index, listview=self.listview) #Try to update the new indices. Fails if from an older mod try: mod_label.update_color(info[6]) mod_label.incompatibilities = info[7] except IndexError: pass self.mod_list.insert(index, info) self.modlabel_list.insert(index, mod_label) if len(self.mod_list) > index: for x in range(index, len(self.mod_list)): self.modlabel_list[x].update_index(x) self.modlabel_list[x].grid(row=x, column=0, sticky='nsew') else: mod_label.grid(row=len(self.mod_list), column=0, sticky='nsew') if self.is_collapsed: self.force_expand() def delete(self, index): '''Delete a mod at the given index''' if index == END: index = len(self.mod_list) - 1 if index < len(self.mod_list) - 1: for x in range(index, len(self.mod_list) - 1): self.modlabel_list[x + 1].grid(row=x, column=0, sticky='nsew') self.modlabel_list[x + 1].update_index(x) if self.modlabel_list[index] in self.selected_modlabel_list: self.selected_modlabel_list.remove(self.modlabel_list[index]) self.modlabel_list[index].grid_forget() self.modlabel_list[index].destroy() del self.mod_list[index] del self.modlabel_list[index] if len(self.modlabel_list) == 0: self.mlb_frame.configure(height=1) if self.is_collapsed: self.force_expand() 
#Quick fix for a category not properly expanding if last mod deleted self.is_collapsed = False self.name_label.configure(bg='#666666') def delete_selected(self): for x in range(len(self.selected_modlabel_list)): self.delete(self.selected_modlabel_list[0].get_index()) def delete_all_confirm(self): msgBox = messagebox.askquestion('Removing All', 'Remove all mods from the ' + self.name + ' Category?', icon='warning') if msgBox == 'yes': self.delete_all() def delete_all(self): '''deletes all mods from the list''' for x in range(len(self.modlabel_list)): self.delete(0) #Quick fix for a category not properly expanding if last mod deleted self.is_collapsed = False self.name_label.configure(bg='#666666') def open_link(self, modlabel): webbrowser.open_new(modlabel.get_info()[0]) def open_selected_links(self): for mod in sorted(self.selected_modlabel_list, key=lambda x: x.get_index()): self.open_link(mod) def open_all_links(self): for mod in self.modlabel_list: self.open_link(mod) def get_size(self): return len(self.modlabel_list) def get_all_info(self): '''return a list of lists of all mod info''' l = [] for mod in self.modlabel_list: l.append(mod.get_info()) return l def get_info(self, index): '''return a list of the mod info at the given index''' return self.modlabel_list[index].get_info() def get_height(self): return len(self.modlabel_list) def toggle_view(self): for mod in self.modlabel_list: mod.toggle_view() self.listview = not self.listview def force_listview(self): for mod in self.modlabel_list: mod.display_listview() self.listview = True def force_defaultview(self): for mod in self.modlabel_list: mod.display_default() self.listview = False def on_enter_top(self, event): '''event to determine if the list's category title is focused''' self.is_top_entered = True def on_leave_top(self, event): self.is_top_entered = False def on_enter_all(self, event): self.is_entered_all = True def on_leave_all(self, event): self.is_entered_all = False #====Right Click Menu 
Functionality def rightClickMenu(self, event, rc_menu): for mod in self.modlabel_list: if mod.is_focused: #If the mod clicked is not selected, select it if mod not in self.selected_modlabel_list: self.onClickEvent(event) ## rc_menu.add_command(label='Select', ## command=lambda mod=mod: self.rightClickSelect(mod)) ## rc_menu.add_command(label='Select All Mods in "'+self.name+'" Category', ## command=self.selectAll) ## rc_menu.add_separator() ## rc_menu.add_separator() ## rc_menu.add_command(label='Remove', ## command=lambda mod=mod: self.delete(mod.get_index())) ## if len(self.selected_modlabel_list) > 0: ## rc_menu.add_command(label='Remove Selected', ## command=self.delete_selected) ## if len(self.modlabel_list) > 0: ## rc_menu.add_command(label='Remove All Mods In "'+self.name+'" Category', ## command=self.delete_all_confirm) def insertInput(self, index): url = askstring('New Mod at Index ' + str(index + 1), 'Input Nexus URL') info = None if (url is not None): info = ParseURL.parse_nexus_url(url) if info is not None: self.insert(index, info) def insertCustomInput(self, index): info = [] CM = CustomModMessageBox(self, 'New Mod at Index ' + str(index + 1), info) if info != []: self.insert(index, info) #====Selection Functionality==== def onClickEvent(self, event): deselect_others = True self._check_descs() if len(self.modlabel_list) > 0: #if clicked mod is already part of selection, prevents the deselection of other mods for x in range(len(self.modlabel_list)): if self.modlabel_list[ x].is_index_focused and self.modlabel_list[ x].is_selected: deselect_others = False self.current_index = x if deselect_others: #checks every modlabel to see if any are selected for x in range(len(self.modlabel_list)): self.modlabel_list[x].select() #code for multi-selection capabilities if self.modlabel_list[x].is_selected and self.modlabel_list[ x] not in self.selected_modlabel_list: #adds selected modlabels to the selected modlabels list self.current_index = x 
self.selected_modlabel_list.append( self.modlabel_list[x]) elif not self.modlabel_list[ x].is_selected and self.modlabel_list[ x] in self.selected_modlabel_list: #removes deselected modlabels from the selected modlabels list self.selected_modlabel_list.remove( self.modlabel_list[x]) def _check_descs(self): '''checks whether mouse is over a mod description or not''' #Get widget type under mouse x, y = self.winfo_pointerxy() widget = self.winfo_containing(x, y) for mod in self.modlabel_list: if widget is not mod.description: mod.disable_desc_edit() else: mod.enable_desc_edit() def onDoubleClickEvent(self, event): if self.is_top_entered and len(self.modlabel_list) > 0: self.toggle_collapse() def onShiftClickEvent(self, event): #code for multi-selection if len(self.selected_modlabel_list) > 0: #set original index to start multi-selection from origin = self.selected_modlabel_list[-1].get_index() for x in range(len(self.modlabel_list)): # checks every modlabel for a valid multi-selection activation if self.modlabel_list[x].is_index_focused: #checks whether the index of the target modlabel is above #or below origin, then multi-selects accordingly if (x - origin) > 0: for y in range(origin, x + 1): self.selected_modlabel_list.append( self.modlabel_list[y]) self.modlabel_list[y].force_select() elif (x - origin) < 0: for y in range(x, origin): self.selected_modlabel_list.append( self.modlabel_list[y]) self.modlabel_list[y].force_select() def rightClickSelect(self, mod_index): mod = self.modlabel_list[mod_index] self.deselectAll() mod.force_select() self.selected_modlabel_list.append(mod) def selectAll(self): '''Selects all the mods''' if len(self.modlabel_list) > 0: for mod in self.modlabel_list: self.selected_modlabel_list.append(mod) mod.force_select() def deselectAll(self): if len(self.selected_modlabel_list) > 0: for mod in self.selected_modlabel_list: mod.force_deselect() self.selected_modlabel_list.clear() def selectTop(self): if self.is_top_entered: 
self.forceSelectTop() else: self.forceDeselectTop() def forceSelectTop(self): self.is_top_selected = True self.name_label.label.configure(bg='#f0f0f0', fg='#444444') def forceDeselectTop(self): self.is_top_selected = False self.name_label.label.configure(bg='#444444', fg='#f0f0f0') def is_top_selected(self): return self.is_top_selected #====Drag and Drop Functionality==== def moveSelectionUp(self): '''Goes through the selected mods and moves them up by 1''' if len(self.selected_modlabel_list) > 0: for mod in self.selected_modlabel_list: #if mod at upper limit, don't move anything if mod.get_index() == 0: return -1 #sorts selected mods using the index as the key, then iterates for mod in sorted(self.selected_modlabel_list, key=lambda x: x.get_index()): modtomove_data = self.modlabel_list[mod.get_index() - 1].get_info() modtomove_index = mod.get_index() - 1 self.delete(modtomove_index) self.insert(modtomove_index + 1, modtomove_data) def moveSelectionDown(self): '''Goes through the selected mods and moves them down by 1''' if len(self.selected_modlabel_list) > 0: for mod in self.selected_modlabel_list: #if mod at lower limit, don't move anything if mod.get_index() == len(self.modlabel_list) - 1: return 1 #sorts selected mods using the index as the key, then iterates for mod in sorted(self.selected_modlabel_list, key=lambda x: x.get_index(), reverse=True): modtomove_data = self.modlabel_list[mod.get_index() + 1].get_info() modtomove_index = mod.get_index() + 1 self.delete(modtomove_index) self.insert(modtomove_index - 1, modtomove_data) def dragSelection(self, event): '''Moves all selected ModLabels up or down depending on mouse movement while the mouse is held''' i = self.nearest() if self.current_index is not None: if i < self.current_index and i != -1: self.moveSelectionUp() self.current_index = i elif i > self.current_index and i != len(self.modlabel_list): self.moveSelectionDown() self.current_index = i return i def nearest(self): '''get index of ModLabel item 
nearest to the mouse y position using hardcoded ModLabel heights''' index = 0 current_nearest_index = 0 #get the correct height of the ModLabels if self.listview: height = self.listview_height else: height = self.defaultview_height #get the absolute position of the mouse in relation to the ModlistListbox position mouse_y = self.mlb_frame.winfo_pointery() - self.mlb_frame.winfo_rooty( ) if len(self.modlabel_list) > 0: current_index = 0 distance_from_current = abs((height / 2) - mouse_y) for i in range(len(self.modlabel_list)): distance_from_next = abs((i * height + (height / 2)) - mouse_y) if distance_from_current > distance_from_next: distance_from_current = distance_from_next current_index = i #if going beyond the list, return an index beyond the list to signify it moving into a different category ## if(current_index = index = current_index return index #====Code that doesn't f****n' work==== '''NOTES: This nearest() is based off of each widget's actual position on the board. It bugs out when a mod is
class WordnikApi(VocabularyBase):
    """Vocabulary lookup backed by the Wordnik 'related words' API.

    Synonym-like relationships are collected as related terms, hypernyms
    as broader terms, and hyponyms as narrower terms. English only.
    """

    cl = ConfigLoader()
    apiUrl = cl.getWordnikAPIString('API_URL')
    apiKey = 'f56e0ac9bcfd011dcf15f07382b057da68ece00811dcb38a9'  # 15000 queries per hour
    # Class-level defaults; every instance shadows these in __init__.
    relatedSet = OrderedSet()
    broaderSet = OrderedSet()
    narrowerSet = OrderedSet()

    # dont retry here: @retry(Exception, tries=3)
    @timeout(5)
    def apiCall(self, word, apiLang):
        """Fetch the related-word groups for ``word`` from Wordnik."""
        api_client = swagger.ApiClient(self.apiKey, self.apiUrl)
        word_api = WordApi.WordApi(api_client)
        return word_api.getRelatedWords(word)

    def __init__(self, searchTerm, language):
        """Query Wordnik for every search term when ``language`` is supported."""
        VocabularyBase.__init__(self, searchTerm, language)
        self.relatedSet = OrderedSet()
        self.broaderSet = OrderedSet()
        self.narrowerSet = OrderedSet()
        self.supportedLang.append('en')
        if language not in self.supportedLang:
            return
        for term in self.searchTerms:
            try:
                groups = self.apiCall(term, language)
            except:
                groups = None
            if groups is None:
                continue
            for group in groups:
                rel_type = group.relationshipType
                # Synonym-like relationships feed the related set.
                if ('equivalent' in rel_type or 'synonym' in rel_type
                        or 'verb-form' in rel_type or 'form' in rel_type):
                    for entry in group.words:
                        self.relatedSet.append(utils.eszettToSS(entry))
                if 'hypernym' in rel_type:
                    for entry in group.words:
                        self.broaderSet.append(utils.eszettToSS(entry))
                if 'hyponym' in rel_type:
                    for entry in group.words:
                        self.narrowerSet.append(utils.eszettToSS(entry))

    def getRelated(self):
        return self.relatedSet

    def getNarrower(self):
        return self.narrowerSet

    def getBroader(self):
        return self.broaderSet

    def checkConnection(self):
        """Return True when a probe lookup against Wordnik succeeds."""
        try:
            api_client = swagger.ApiClient(self.apiKey, self.apiUrl)
            WordApi.WordApi(api_client).getRelatedWords('test')
            return True
        except:
            return False
class OpenThesaurus(VocabularyBase):
    """Vocabulary lookup backed by the OpenThesaurus XML API (German only).

    Synset terms become related terms, supersynsets broader terms, and
    subsynsets narrower terms.
    """

    cl = ConfigLoader()
    apiPrefix = cl.getOpenThesaurusAPIString('API_URL')
    apiSuffix = cl.getOpenThesaurusAPIString('API_URL_SUFFIX')
    # Class-level defaults; every instance shadows these in __init__.
    relatedSet = OrderedSet()
    broaderSet = OrderedSet()
    narrowerSet = OrderedSet()

    def __init__(self, searchTerm, language):
        """Query OpenThesaurus for the search terms when ``language`` is 'de'.

        Stops after the first term that yields any related results.
        """
        VocabularyBase.__init__(self, searchTerm, language)
        self.relatedSet = OrderedSet()
        self.broaderSet = OrderedSet()
        self.narrowerSet = OrderedSet()
        self.supportedLang.append('de')
        if language in self.supportedLang:
            for word in self.searchTerms:
                result = self.apiCall(word, language)
                if result.status_code < 400:
                    xmlString = result.text
                    self.parseXML(xmlString)
                    if len(self.relatedSet) > 0:
                        break

    @retry(Exception, tries=3)
    def apiCall(self, word, apiLang):
        # NOTE(review): apiLang is accepted but unused — the URL is built from
        # prefix + word + suffix only; presumably fine since only 'de' is
        # supported, but verify.
        return requests.get(self.apiPrefix + word + self.apiSuffix)

    def parseXML(self, xmlString):
        """Walk the OpenThesaurus XML response and fill the three term sets.

        Top-level <synset> children contribute <term> entries to the related
        set; their <supersynsets>/<subsynsets> children contribute to the
        broader/narrower sets respectively.
        """
        root = ET.fromstring(xmlString)
        for levelOne in root:
            if levelOne.tag == 'synset':
                synsetOne = levelOne
                for levelTwo in synsetOne:
                    if levelTwo.tag == 'term':
                        synonym = levelTwo.attrib['term']
                        self.relatedSet.append(utils.eszettToSS(synonym))
                    elif levelTwo.tag == 'supersynsets':
                        # supersynsets contain nested synsets of broader terms
                        for levelThree in levelTwo:
                            if levelThree.tag == 'synset':
                                for levelFour in levelThree:
                                    if levelFour.tag == 'term':
                                        broader = levelFour.attrib['term']
                                        self.broaderSet.append(
                                            utils.eszettToSS(broader))
                    elif levelTwo.tag == 'subsynsets':
                        # subsynsets contain nested synsets of narrower terms
                        for levelThree in levelTwo:
                            if levelThree.tag == 'synset':
                                for levelFour in levelThree:
                                    if levelFour.tag == 'term':
                                        narrower = levelFour.attrib['term']
                                        self.narrowerSet.append(
                                            utils.eszettToSS(narrower))

    def getRelated(self):
        return self.relatedSet

    def getNarrower(self):
        return self.narrowerSet

    def getBroader(self):
        return self.broaderSet

    def checkConnection(self):
        """Return True when a probe request succeeds (status < 400)."""
        response = self.apiCall('test', 'de')
        if response is not None and response.status_code < 400:
            return True
        return False