def process_syntax(self):
    """Perform one step of LL(1) predictive parsing.

    Compares the top of the expansion stack (self.x) with the current
    input token (self.a): a terminal must match the input token, a
    non-terminal is replaced by its production from the parsing table.
    Token 51 marks '$' (end of stack / end of derivation).

    NOTE(review): structure re-indented from a collapsed line — the
    level of the trailing self.load_variables() call should be
    confirmed against the original formatting.

    Raises:
        Exception: on a terminal mismatch or a missing parsing-table entry.
    """
    self.load_variables()
    if self.x != 51:
        # Terminal on top of the stack: it must match the current input token.
        if self.x in terminals.keys() or self.x == 51:  # 51 = $ / Fim da pilha
            if self.x == self.a:
                self.expansions.pop(0)
                self.input.pop(0)
                pass
            else:
                raise Exception(
                    "Erro de Sintaxe: expansao terminal {} nao encontrado no topo da pilha de tokens"
                    .format(self.x))
        else:
            # Non-terminal: look up (non-terminal, token) in the parsing table.
            if (self.x, self.a) in productions.keys():
                self.expansions.pop(0)
                if productions[(self.x, self.a)][0] != 0:  # 0 = NULL / No productions
                    # Push the production body on top of the expansion stack.
                    self.expansions = productions[(self.x, self.a)] + self.expansions
                self.current_derivation = "({0}, {1}) deriva em: {2}".format(
                    self.x, self.a, productions[(self.x, self.a)])
                self.trigger_actions()
            else:
                raise Exception(
                    "Derivacao para ({}, {}) nao foi encontrado na tabela de parsing"
                    .format(self.x, self.a))
        # Refresh self.x / self.a after the step.
        self.load_variables()
    else:
        print("End of derivation")
        self.trigger_actions()
def str_split_get_pop_elem(self, str_in=None, delim_in=None, which_elem=None):
    """Split *str_in* on *delim_in* and return one element of the result.

    Args:
        str_in: the string to split.
        delim_in: the delimiter to split on.
        which_elem: 'FIRST', 'LAST', or an integer index into the split list.

    Returns:
        str: the selected element.

    Raises:
        Exception: if a parameter is missing, or *which_elem* is neither
            'FIRST', 'LAST' nor an integer.
    """
    # BUG FIX: the original test `(str_in or delim_in) == None` only raised
    # when str_in was falsy; a missing delimiter with a non-empty string
    # slipped through. Check each parameter explicitly.
    if str_in is None or delim_in is None:
        raise Exception("Missing some params")
    if str(which_elem) != "FIRST" and str(which_elem) != "LAST" and not isinstance(which_elem, int):
        # BUG FIX: the original concatenated the raw which_elem onto the
        # message, which itself raised TypeError for non-strings.
        raise Exception(
            "INDEX PASSED is NOT a VALID 'FIRST or 'LAST' string OR NOT a NUMBER"
            + str(which_elem))
    res_arr = str_in.split(delim_in)
    if which_elem == 'LAST':
        return res_arr.pop()
    if which_elem == 'FIRST':
        return res_arr.pop(0)
    # Any other accepted value is an integer index.
    return res_arr.pop(which_elem)
def save(self, obj):
    """Add *obj* to the repository, keyed by its id.

    Input:
        obj - the object to store; must use this repository's validator.
    Output:
        None
    Raises:
        TypeError  - if the object's validator differs from the repository's
        Exception  - if validation fails, or the id is already stored
    """
    # Reject objects that belong to a different repository type.
    if not obj.validator == self.__validator:
        raise TypeError("Object not matching the repository")
    # Validate before storing; surface validation failures uniformly.
    try:
        self.__validator.validate(obj)
    except Exception as ex:
        raise Exception("Not a valid object" + str(ex))
    # Ids are unique within the repository.
    if obj.id in self.__objects:
        raise Exception("Duplicated ID")
    self.__objects[obj.id] = obj
def is_existing_station(cls, from_station, end_station):
    """Check that both the departure and the arrival station are known.

    Raises Exception (with a Chinese user-facing message) for the first
    unknown station; returns True when both exist.
    """
    station = Station()
    checks = (
        (from_station, "不存在此出发车站,请重新配置!"),
        (end_station, "不存在此到达车站,请重新配置!"),
    )
    for name, message in checks:
        if not station.has_this_station(name):
            raise Exception(message)
    return True
def __init__(self, description, errorCode):
    """Create a blpapi exception

    Args:
        description (str): Description of the error
        errorCode (int): Code corresponding to the error
    """
    # Delegate straight to the shared base so all blpapi exceptions
    # carry the same (description, errorCode) state.
    _StandardException.__init__(self, description, errorCode)
def fabric_run_commands(self):
    """Run the configured SSH commands on the remote host, executing the
    dedicated test script after each command and aborting on failure.

    TODO (kept from original): the dedicated test script should become
    optional — it may only be needed in some cases, should be uploaded to
    the target machine in the same archive as config files / start scripts,
    and popped from the command dict only when its key is configured.

    Raises:
        Exception: if no dedicated test key is configured, or if a remote
            command (or its dedicated test) fails.
    """
    if self.ssh_ded_test_key is None:
        # BUG FIX: the original raised and then had an unreachable `return`
        # after it; the dead statement is removed.
        raise Exception("TESTING SCRIPT IS NOT PRESENT")
    ded_test_command = self.ssh_commands_to_exec.pop(self.ssh_ded_test_key)
    for key, command in self.ssh_commands_to_exec.items():
        if key == self.ssh_ded_test_key:
            continue  # the dedicated test command was already popped above
        self.logger.debug("<== THIS COMMAND is Going To be Executed REMOTELY ==>")
        self.logger.debug(str(command))
        self.logger.debug("<===================================================>")
        result = self.fabric_connection.run(command)
        if ded_test_command is not None:
            result_dedicated_test = self.fabric_connection.run(ded_test_command)
        else:
            result_dedicated_test = None
        self.logger.debug("<===================================================>")
        self.logger.debug("<== WITH THE FOLLOWING RESULT ==>")
        # BUG FIX: the original logged result.ok twice and dereferenced
        # result_dedicated_test without a None guard.
        self.logger.debug(str(result.ok) + ' ' + str(result.return_code) + ' ' + str(
            None if result_dedicated_test is None else result_dedicated_test.return_code))
        self.logger.debug("<===================================================>")
        # BUG FIX: the original condition `result_dedicated_test != None or
        # result_dedicated_test.return_code == 0` short-circuited to True
        # whenever the dedicated test ran (its return code was never
        # checked) and raised AttributeError when it was None.
        if not (result.ok and result.return_code == 0 and (
                result_dedicated_test is None
                or result_dedicated_test.return_code == 0)):
            raise Exception('Last command did not go thru. Execution interrupted')
    return
def enhance_contrast(self, img, method='HE', level=256, window_size=32, affect_size=16, blocks=8, threshold=10.0):
    """Equalize the histogram of *img* with the selected algorithm.

    :param img: Image type
    :param method: histogram equalization method (HE/AHE/CLAHE/standard/bright)
    :param level: color or gray scale levels
    :param window_size: in AHE, the window used to compute the histogram CDF
    :param affect_size: in AHE, the affected pixels size
    :param blocks: in CLAHE, how many splits per row and column
    :param threshold: in CLAHE, clip when a bin exceeds threshold * mean
    :return: equalized result as an Image
    :raises Exception: for an unknown method or an unsupported image shape
    """
    # choose algorithm
    if method in ['HE', 'FHE', 'he', 'fhe']:
        he_func = self.histogram_equalization  # HE
    elif method in ['AHE', 'ahe']:
        he_func = self.adaptive_histogram_equalization  # AHE
    elif method in ['CLAHE', 'clahe', 'CLANE', 'clane']:
        # GENERALIZATION: also accept the correct 'CLAHE' spelling; the
        # original only matched the 'CLANE' typo (kept for compatibility).
        he_func = self.contrast_limited_adaptive_histogram_equalization  # CLAHE
    elif method in ['standard', 'STANDARD', 'Standard']:
        he_func = self.standard_histogram_equalization  # ImageOps HE
    elif method in ['Bright', 'bright', 'bright_level']:
        he_func = self.bright_wise_histogram_equalization  # Local Region Stretch
    else:
        # BUG FIX: the original assigned a fallback function here and then
        # unconditionally raised, leaving the assignment as dead code.
        raise Exception("unknown method: ", method)

    # process gray and color images
    img_arr = np.array(img)
    if len(img_arr.shape) == 2:
        channel_num = 1
    elif len(img_arr.shape) == 3:
        channel_num = img_arr.shape[2]
    else:
        # BUG FIX: dead `channel_num = 1` assignment before the raise removed.
        raise Exception("image shape wrong")

    if channel_num == 1:
        # gray image
        arr = he_func(img_arr, level=level, window_size=window_size,
                      affect_size=affect_size, blocks=blocks, threshold=threshold)
        img_res = Image.fromarray(arr)
    elif channel_num == 3 or channel_num == 4:
        # RGB image or RGBA image (such as png); alpha channel is dropped.
        rgb_arr = [None] * 3
        rgb_img = [None] * 3
        # process each channel separately
        for k in range(3):
            rgb_arr[k] = he_func(img_arr[:, :, k], level=level, window_size=window_size,
                                 affect_size=affect_size, blocks=blocks, threshold=threshold)
            rgb_img[k] = Image.fromarray(rgb_arr[k])
        img_res = Image.merge("RGB", tuple(rgb_img))
    else:
        # BUG FIX: dead `img_res = img` assignment before the raise removed.
        raise Exception("The channel_num must be 1, 3 or 4")
    return img_res
def _check_valid_add_user(cls, username, password):
    """Validate a prospective new user's credentials.

    Checks run in order and short-circuit: uniqueness of the username,
    then username validity, then password validity.

    Raises:
        Exception: with the matching message constant for the first
            failed check.
    Returns:
        True when every check passes.
    """
    if cls.check_username_exists(username):
        raise Exception(USERNAME_EXISTS_MSG)
    if not cls.check_valid_username(username):
        raise Exception(INVALID_USERNAME_MSG)
    if not cls.check_valid_password(password):
        raise Exception(INVALID_PASSWORD_MSG)
    return True
def ensure_broker_server_dblink_exists():
    """Ensure all required database extensions exist and the broker database
    is registered as a foreign data server.

    Runs the SQL script files below against the USAspending database using
    connection strings from ``settings.DATABASES``. The connection strings
    for BOTH the USAspending and the Broker databases are needed, because
    the postgres ``SERVER`` setup references tokens from both of them.

    NOTE: the same setup can be performed manually with ``psql`` by
    exporting the referenced environment variables, materializing
    ``broker_server.sql`` with ``eval``, and running ``extensions.sql``
    and ``broker_server.sql`` with ``--set ON_ERROR_STOP=on``.
    """
    # Both connection strings must be configured before any tokens can be built.
    if DEFAULT_DB_ALIAS not in settings.DATABASES:
        raise Exception(
            "'{}' database not configured in django settings.DATABASES".format(DEFAULT_DB_ALIAS))
    if "data_broker" not in settings.DATABASES:
        raise Exception(
            "'data_broker' database not configured in django settings.DATABASES")

    # Prefix each connection setting so the two databases' tokens can share
    # a single substitution namespace.
    tokens = {}
    for prefix, alias in (("USASPENDING_DB_", DEFAULT_DB_ALIAS), ("BROKER_DB_", "data_broker")):
        for key, value in settings.DATABASES[alias].items():
            tokens[prefix + key] = value

    extensions_script_path = str(
        settings.APP_DIR / "database_scripts" / "extensions" / "extensions.sql")
    broker_server_script_path = str(
        settings.APP_DIR / "database_scripts" / "servers" / "broker_server.sql")
    with open(extensions_script_path) as f1, open(broker_server_script_path) as f2:
        extensions_script = f1.read()
        broker_server_script = f2.read()

    with connection.cursor() as cursor:
        # Ensure required extensions added to USAspending db
        cursor.execute(extensions_script)
        # Ensure foreign server setup to point to the broker DB for dblink to work
        cursor.execute(Template(broker_server_script).substitute(**tokens))
def add_initial_tile(self, tile, pos):
    """Place the very first tile at *pos*.

    The target cell must be empty, every neighbouring cell must be empty,
    and the position must lie on the board edge.

    Returns:
        True on success.
    Raises:
        Exception: when a neighbour is occupied or the placement is invalid.
    """
    col, row = pos.get_x(), pos.get_y()
    if self.get_tile(col, row).is_empty():
        # An initial tile may not touch any occupied neighbour.
        for neighbour in self.get_surrounding_tiles(col, row):
            if not neighbour.is_empty():
                raise Exception(
                    "Initial Tile cannot be placed near existing Tile")
        if self.is_on_edge(pos):
            self.add_tile(tile, col, row)
            return True
    raise Exception("Invalid initial tile placement")
def update_a_book(_name, _price, _isbn):
    """Update a book in the module-level ``list_of_books``.

    Rules (as in the original):
      * ISBN + name + price  -> update all three fields on the ISBN match.
      * ISBN + price only    -> update only the price on the ISBN match.
      * name + price (no ISBN) -> update by name, unless the name is duplicated.
      * fewer than two usable parameters -> raise Exception.

    Raises:
        Exception: when too few parameters are given, or the name matches
            more than one book.
    """
    for each_book in list_of_books:
        # ISBN + name + price: update everything for the book with this ISBN.
        # BUG FIX: the original compared `_isbn != none` against the
        # undefined name `none`, raising NameError whenever this branch
        # was evaluated with all three parameters set.
        if (_isbn in each_book.values() and _name is not None
                and _price is not None and _isbn is not None):
            each_book.update({'name': _name, 'price': _price, 'ISBN': _isbn})
            print("I've updated the book successfully")
        # ISBN + price only: update just the price for the book with this ISBN.
        if (_isbn in each_book.values() and _name is None
                and _price is not None and _isbn is not None):
            each_book.update({
                'name': each_book['name'],
                'price': _price,
                'ISBN': _isbn
            })
            print("I've updated the book successfully elif")
        # Only price given: not enough information to identify a book.
        if _isbn is None and _name is None and _price is not None:
            raise Exception(
                "You need at least two paramenters input to perform any operation not only PRICE"
            )
        # Only name given: not enough information to identify a book.
        if _isbn is None and _name is not None and _price is None:
            raise Exception(
                "You need at least two paramenters input to perform any operation no only NAME"
            )
        # All parameters missing.
        if _isbn is None and _name is None and _price is None:
            raise Exception(
                "Please enter at least two parameters to perform any operations"
            )
        # name + price: update by name, but refuse ambiguous (duplicate) names.
        if _isbn is None and _name is not None and _price is not None:
            if each_book['name'] == _name:
                # BUG FIX: the original counted with
                # `for each_book['name'] in each_book`, which overwrote the
                # book's name with the dict's keys and counted keys instead
                # of duplicates. Count books sharing this name instead.
                count = sum(1 for book in list_of_books if book.get('name') == _name)
                if count > 1:
                    raise Exception("Name is duplicate")
                print(each_book)
                each_book.update({
                    'name': _name,
                    'price': _price,
                    'ISBN': each_book['ISBN']
                })
def max_min_blur(image, ksize=3, mode=1):
    """Apply a max- or min-filter over a square window (dilate/erode-style blur).

    :param image: source image; a 3-channel image is converted to grayscale first
    :param ksize: square kernel (window) size
    :param mode: 1 selects the window maximum, 0 the window minimum
    :return: filtered single-channel image
    :raises Exception: for any other *mode* value
    """
    img = image.copy()
    rows, cols, channels = img.shape
    if channels == 3:
        img = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
    # Pad with white so border windows are full-sized.
    pad = (ksize - 1) // 2
    padded = cv2.copyMakeBorder(img, pad, pad, pad, pad,
                                cv2.BORDER_CONSTANT, value=255)
    for r in range(rows):
        for c in range(cols):
            window = padded[r:r + ksize, c:c + ksize].copy()
            low, high, _, _ = cv2.minMaxLoc(window)
            if mode == 1:
                img[r, c] = high
            elif mode == 0:
                img[r, c] = low
            else:
                raise Exception('please Select a mode: max(1) or min(0)')
    return img
def main():
    """Repeatedly look up Australian company names on abr.business.gov.au
    and print ABN / matched-name rows until the user enters the exit char."""
    should_continue = True
    while should_continue:
        company_name = input("Enter the company name: ")
        response = requests.get("https://abr.business.gov.au/Search/ResultsActive?SearchText=" + company_name + "&AllNames=False&EntityName=False&BusinessName=False&TradingName=False&NarrowSearch=False&SearchType=ActiveAbns&AllStates=True&ACT=False&NSW=False&NT=False&QLD=False&TAS=False&SA=False&VIC=False&WA=False&PostcodeDisplayName=Postcode%20(required%20for%20DGR%20%26%20Charity%20search%20options%20if%20Name%20is%20blank)%3A&HideBreadcrumbs=False&HideSearchBox=False&HideLeftSideBar=False&ShowHelp=False&IsHomePage=False&NoIndex=False&ShowVersionNumber=False")
        status = response.status_code
        expected_status = 200
        if status != expected_status:
            # BUG FIX: the original concatenated the int status codes onto
            # a str, raising TypeError instead of the intended message.
            raise Exception("Expected status code to be {}, but it was {}".format(
                expected_status, status))
        tree = html.fromstring(response.content)
        # Result table layout: 4 cells per row — ABN, then three name columns.
        col = 0
        row = 0
        bsb = ""
        company_ary = []
        exit_char = 'e'
        for cell in tree.xpath('//*[@id="content-matching"]/div/div/table/tbody/tr/td'):
            if col == 0:
                bsb = cell.getchildren()[0].text_content().strip()
            elif col == 1 or col == 2 or col == 3:
                # Collapse internal whitespace in the name columns.
                company_ary.append(re.sub("\\s+", " ", cell.text_content().strip()))
            if col == 3:
                # Last cell of the row: emit the assembled record.
                print(bsb + " | " + ", ".join(str(x) for x in company_ary))
                bsb = ""
                company_ary = []
            col = (col + 1) % 4
            row += 1
        exit_input = input("Enter '" + exit_char + "' to exit, or any other key to continue: ")
        should_continue = exit_input != exit_char
def getCaemlDict_Recursive(self) -> dict:
    """Serialize this object (and nested caeml objects) into a plain dict.

    Deep-copies every eligible attribute (skipping ``_``-prefixed and
    ``parent*`` names, but keeping ``_id``), records the full type MRO
    under the reserved ``caemlType`` key, and recurses into values that
    are caeml objects — including objects nested one level inside dicts
    and lists.

    Returns:
        dict: a deep-copied, recursively serialized representation.

    Raises:
        Exception: if the object already has a ``caemlType`` attribute,
            since that key is reserved for the serializer.
    """
    # Deep-copy so later mutation of the returned dict cannot touch the
    # live object's state.
    aCaemlDict = dict([(x, copy.deepcopy(y)) for x, y in self.__dict__.items() if (
            not x.startswith('_') and not x.startswith('parent')) or x.startswith('_id')])
    if 'caemlType' in aCaemlDict:
        raise Exception('caemlType is reserved and cannot be used in a caeml serializable obj')
    else:
        # Record the full inheritance chain so the concrete type can be
        # reconstructed when deserializing.
        aCaemlDict['caemlType'] = [t.__module__ + '.' + t.__name__ for t in type(self).mro()]
    for k, v in aCaemlDict.items():
        if type(v) in [dict]:
            # Recurse one level into dict values that are caeml objects.
            for k2, v2 in v.items():
                if isinstance(v2, caemlBaseObj):
                    aCaemlDict[k][k2] = v2.getCaemlDict_Recursive()
        if type(v) in [list]:
            # Recurse one level into list elements that are caeml objects.
            for i, v2 in enumerate(v):
                if isinstance(v2, caemlBaseObj):
                    aCaemlDict[k][i] = v2.getCaemlDict_Recursive()
        elif isinstance(v, caemlBaseObj):
            # Direct caeml-object attribute.
            aCaemlDict[k] = v.getCaemlDict_Recursive()
        else:
            pass  # plain value: keep the deep-copied entry as-is
    return aCaemlDict
def pass_a_freq(self, time):
    """Advance this position by one price frequency and update running stats.

    Recomputes the current price, signed return and mark-to-market value,
    and maintains the running peak return, trough return, and maximum
    drawdown from the previous high.

    Raises:
        Exception: if the position is already closed.
    """
    if self.closed:
        raise Exception("should not be here")
    self.__curr_time = time
    self.curr_freq += 1
    self.curr_price = perc_to_decimal(self.raw_data.prices[self.curr_freq])
    # Return relative to entry, oriented by trade direction
    # (presumably +1 long / -1 short — confirm against hyper_parameter).
    self.__curr_return = (self.curr_price / self.start_price - 1) * self.raw_data.hyper_parameter.direction
    self.curr_value = self.initial_capital + (
        self.curr_price - self.start_price
    ) * self.raw_data.hyper_parameter.direction * self.shares
    # Track the best (peak) return seen so far, with its cash value and freq.
    if self.__curr_return >= self.max_return:
        self.max_return = self.__curr_return
        self.max_return_cash_value = self.curr_value - self.initial_capital
        assert self.max_return >= 0
        assert self.max_return_cash_value >= 0
        self.max_return_freq = self.curr_freq
    # Track the worst (trough) return seen so far.
    if self.__curr_return <= self.max_draw:
        self.max_draw = self.__curr_return
        self.max_draw_cash_value = self.curr_value - self.initial_capital
        assert self.max_draw <= 0
        assert self.max_draw_cash_value <= 0
        self.max_draw_freq = self.curr_freq
    # Maximum drawdown measured from the running peak (always <= 0).
    curr_draw_from_previous_high = self.__curr_return - self.max_return
    if curr_draw_from_previous_high <= self.max_draw_from_previous_high:
        self.max_draw_from_previous_high = curr_draw_from_previous_high
        assert self.max_draw_from_previous_high <= 0
        self.max_draw_from_previous_high_freq = self.curr_freq
def get_item_names(screenshot):
    """Convert a screenshot to Warframe item names and their ducat values.

    @param screenshot: the screenshot as a PIL-Image or numpy-array
    @return: tuple (item_names, ducat_values); both are empty lists on error
    @raise Exception: if warframe_ocr.init() was never called
    """
    if _executor is None:
        raise Exception("warframe_ocr.init() was never called")
    try:
        screenshot = np.asarray(screenshot)
        tess_images = []
        for text_image in _get_text_images(screenshot):
            # embed the text image in a larger one with white background
            # (tesseract needs a bit of space around the characters)
            tess_image = 255 * np.ones((text_image.shape[0] + 12, text_image.shape[1] + 12))
            tess_image[6:-6, 6:-6] = text_image
            tess_images.append(tess_image)
        # OCR all text images in parallel against the known item names.
        image_to_string = functools.partial(_image_to_string,
                                            name_list=list(_ocr_item_to_ducats.keys()))
        item_names = list(_executor.map(image_to_string, tess_images))
        return item_names, [_ocr_item_to_ducats.get(name) for name in item_names]
    except Exception:
        # BUG FIX: the original bare `except:` also swallowed SystemExit and
        # KeyboardInterrupt; only ordinary errors are treated as OCR failure.
        print("[WF OCR] Error:")
        traceback.print_exc()
        return [], []
def __init__(self):
    """Load the last four months of EURUSD 1-minute history into self.data."""
    # Number of bars the agent can observe.
    self.visible_bar = 100
    # CSV file paths (at least 4 months of history).
    self.csv_file_paths = []
    now = datetime.datetime.now()
    for _ in range(4):
        now = now - relativedelta.relativedelta(months=1)
        filename = 'DAT_MT_EURUSD_M1_{}.csv'.format(now.strftime('%Y%m'))
        if not os.path.exists(filename):
            # History file is missing: tell the user where to download it.
            print('ヒストリーファイルが存在していません。下記からダウンロードしてください。', filename)
            print('http://www.histdata.com/download-free-forex-historical-data/?/metatrader/1-minute-bar-quotes/EURUSD/')
            raise Exception('ヒストリーファイルが存在していません。')
        else:
            self.csv_file_paths.append(filename)
    frames = []
    last_len = 0
    for path in self.csv_file_paths:
        # BUG FIX: `np.long` was removed in NumPy 1.24; the parsed
        # 'datetime' column also cannot take an int dtype here, so it is
        # dropped from the dtype map (it is removed from the frame below).
        csv = pandas.read_csv(path,
                              names=['date', 'time', 'open', 'high', 'low', 'close', 'v'],
                              parse_dates={'datetime': ['date', 'time']},
                              dtype={'open': np.float32,
                                     'high': np.float32,
                                     'low': np.float32,
                                     'close': np.float32}
                              )
        csv.index = csv['datetime']
        # BUG FIX: the original cast the 'datetime' column to np.long and
        # then immediately dropped it — dead work removed.
        csv = csv.drop('datetime', axis=1)
        # needed when building ohlc:
        # csv = csv.drop('open', axis=1)
        # histdata.com volume data is always 0, so drop it
        csv = csv.drop('v', axis=1)
        frames.append(csv)
        last_len = len(csv)
    # BUG FIX: DataFrame.append was removed in pandas 2.0; concatenate once.
    self.data = pandas.concat(frames) if frames else pandas.DataFrame()
    # Start reading at the beginning of the last CSV that was loaded.
    self.read_index = len(self.data) - last_len
def pass_a_freq(self, trades, liquidate):
    """Advance the portfolio by one frequency.

    Marks all open positions to market, closes (or force-liquidates) them,
    enters new candidate trades best-edge-first, pays borrow costs, and
    records a daily status tuple.

    Args:
        trades: candidate trades for this frequency.
        liquidate: when True, force-close everything instead of the
            normal closing logic.
    """
    # Mark every open position to market and total their current value.
    self.current_holding_cash = 0
    for name, positions in self.holdings.items():
        for position in positions:
            position.pass_a_freq(self.current_freq)
            self.current_holding_cash += position.curr_value
    if liquidate:
        self.liquidate_positions()
    else:
        self.close_positions()
    # Enter new positions in descending order of edge.
    clean_trade = sorted(trades, key=lambda td: self.edge_no_limit(td), reverse=True)
    for trade in clean_trade:
        self.add_position(trade)
    self.pay_borrow_cost()
    # Tally capital committed and number of open positions after entries.
    capital_used = 0
    trade_hold = 0
    for name, positions in self.holdings.items():
        for position in positions:
            capital_used += position.initial_capital
            trade_hold += 1
    # Sanity check: after a full liquidation no holding cash should remain
    # (presumably the liquidation path zeroes it out — confirm).
    if liquidate and self.current_holding_cash != 0:
        raise Exception("should not be here")
    # (freq, total equity, free capital, capital committed, open positions)
    self.daily_status.append((self.current_freq, self.capital + self.current_holding_cash,
                              self.capital, capital_used, trade_hold))
def _add_contents(self, **options):
    """
    Get all of the transactions presented in the view and stuff them into the Elasticsearch index.
    The view is only needed to load the transactions into Elasticsearch so it is dropped after each use.
    """
    # Pick the ETL view and the Elasticsearch document id field per index type.
    if self.index_type == "award":
        view_sql_file = f"{settings.ES_AWARDS_ETL_VIEW_NAME}.sql"
        view_name = settings.ES_AWARDS_ETL_VIEW_NAME
        es_id = f"{self.index_type}_id"
    elif self.index_type == "covid19_faba":
        view_sql_file = f"{settings.ES_COVID19_FABA_ETL_VIEW_NAME}.sql"
        view_name = settings.ES_COVID19_FABA_ETL_VIEW_NAME
        es_id = "financial_account_distinct_award_key"
    elif self.index_type == "transaction":
        view_sql_file = f"{settings.ES_TRANSACTIONS_ETL_VIEW_NAME}.sql"
        view_name = settings.ES_TRANSACTIONS_ETL_VIEW_NAME
        es_id = f"{self.index_type}_id"
    else:
        raise Exception("Invalid index type")
    # BUG FIX: the original used `open(...).read()` without ever closing the
    # file handle; use a context manager so the handle is always released.
    with open(str(settings.APP_DIR / "database_scripts" / "etl" / view_sql_file), "r") as f:
        view_sql = f.read()
    with connection.cursor() as cursor:
        # Create the view, pull every row from it, then drop it again.
        cursor.execute(view_sql)
        cursor.execute(f"SELECT * FROM {view_name};")
        records = ordered_dictionary_fetcher(cursor)
        cursor.execute(f"DROP VIEW {view_name};")
    if self.index_type == "covid19_faba":
        records = transform_covid19_faba_data(
            TaskSpec(
                name="worker",
                index=self.index_name,
                sql=view_sql_file,
                view=view_name,
                base_table="financial_accounts_by_awards",
                base_table_id="financial_accounts_by_awards_id",
                field_for_es_id="financial_account_distinct_award_key",
                primary_key="award_id",
                partition_number=1,
                is_incremental=False,
            ),
            records,
        )
    # The routing field is the same for every record — hoisted out of the loop.
    routing_key = options.get("routing", settings.ES_ROUTING_FIELD)
    for record in records:
        routing_value = record.get(routing_key)
        es_id_value = record.get(es_id)
        # Special cases where we convert array of JSON to an array of strings to avoid nested types
        if self.index_type == "transaction":
            record["federal_accounts"] = self.convert_json_arrays_to_list(record["federal_accounts"])
        if self.index_type == "covid19_faba":
            es_id_value = record.pop("_id")
        self.client.index(
            index=self.index_name,
            body=json.dumps(record, cls=DjangoJSONEncoder),
            id=es_id_value,
            routing=routing_value,
        )
    # Force newly added documents to become searchable.
    self.client.indices.refresh(self.index_name)
def __init__(self, message: str = None, cause: Exception = None, hints=None, exitcode: int = 2):
    """Create the error.

    Args:
        message: human-readable description of the failure.
        cause: optional underlying exception.
        hints: a single hint string, or an iterable of hint strings.
        exitcode: process exit code associated with this error.
    """
    Exception.__init__(self, message)
    self.__message = message
    self.__cause = cause
    self.__exitcode = exitcode
    # Normalize hints: a lone string becomes a one-element list, any other
    # non-None value is treated as an iterable of hints.
    if isinstance(hints, str):
        self.__hints = [hints]
    elif hints is not None:
        self.__hints = list(hints)
    else:
        self.__hints = []
def mms_request(request):
    """Return MMS data for the course code given in the 'query' GET parameter.

    Raises:
        Exception: when the 'query' parameter is missing from the request.
    """
    try:
        code = request.GET['query']
    except KeyError as ex:
        # BUG FIX: the original bare `except:` swallowed every error —
        # including failures inside get_mms_data — and mislabeled them all
        # as a missing query field. Only the missing-parameter case is
        # that error; other failures now propagate with their real cause.
        raise Exception(
            "Malformed JSON as input. Expects a field called query.") from ex
    print(code)
    return mms.get_mms_data(code)
def remove(self, objId):
    """Remove the object with id *objId* from the repository and return it.

    Raises:
        Exception: if the repository does not contain this object.
    """
    # BUG FIX: the docstring always promised the removed object was
    # returned, but the original returned None; pop and return it.
    # (Callers that ignored the return value are unaffected.)
    try:
        return self.__objects.pop(objId)
    except KeyError:
        raise Exception("Object not in repository")
def simple_report(self, valid):
    """Report the net total return of the run.

    Returns Decimal('-inf') for an invalid run; otherwise sanity-checks
    that ending capital equals initial capital plus the net return
    (compared to two decimal places) before returning it.
    """
    if not valid:
        return Decimal("-inf")
    net_return = self.total_cash_return - self.total_borrow_cost - self.total_tran_cost
    expected_capital = self.config.portfolio.initial_capital + net_return
    # Ending capital must reconcile with the computed return.
    if round(self.capital, 2) != round(expected_capital, 2):
        raise Exception("should not be here")
    return net_return
def degree_reqs(request):
    """Return the requirements of the degree named in the 'query' GET
    parameter as a JSON HttpResponse.

    Raises:
        Exception: when the parameter is missing or the lookup fails; the
            original failure is chained as the cause.
    """
    try:
        code = request.GET['query']
        response = degree_plan_helper.get_degree_requirements(code)
        return HttpResponse(response, content_type="application/json")
    except Exception as ex:
        # Chain the underlying failure so the real cause is not lost
        # behind the generic message.
        raise Exception(
            "Requirements of the requested degree could not be found. ") from ex
def process(self):
    """Import buy/sell trades from the first sheet of the Excel workbook at
    ``self.tempfile`` and save them as Trade records.

    The sheet is laid out as repeated 5-column sessions: buy quantity,
    buy price, ticker, sell quantity, sell price. Sell trades are stored
    with a negative quantity. In dry mode (``self.dry``) nothing is saved.

    NOTE(review): structure re-indented from a collapsed line — whether
    ``count`` increments only on actual saves or also in dry runs should
    be confirmed against the original formatting.

    Returns:
        int: the number of trades processed.
    Raises:
        Exception: when a ticker does not exist in the Stock table.
    """
    workbook = xlrd.open_workbook(self.tempfile)
    worksheet = workbook.sheet_by_index(0)
    # Fixed sheet region holding the trade data.
    start_row = 3
    end_row = 26
    start_col = 1
    end_col = 25
    curr_row = start_row
    curr_col = start_col
    count = 0
    last_stock = None  # carries the most recent ticker down merged rows
    while curr_col < end_col:
        while curr_row < end_row:
            row = worksheet.row(curr_row)
            # Cell Types: 0=Empty, 1=Text, 2=Number, 3=Date, 4=Boolean, 5=Error, 6=Blank
            buy_quant = worksheet.cell_value(curr_row, curr_col)
            buy_price = worksheet.cell_value(curr_row, curr_col + 1)
            sell_quant = worksheet.cell_value(curr_row, curr_col + 3)
            sell_price = worksheet.cell_value(curr_row, curr_col + 4)
            #cell_type = worksheet.cell_type(curr_row, curr_col + curr_cell)
            #cell_value = worksheet.cell_value(curr_row, curr_col + curr_cell)
            if worksheet.cell_value(curr_row, curr_col + 2)!='':
                ticker = int(worksheet.cell_value(curr_row, curr_col + 2))
                # quick fix to get Shenzhen stock ticker(add missing zeroes as prefix)
                if ticker<100000:
                    ticker = "000000"[(int(log10(ticker))+1):]+str(ticker)
                try:
                    last_stock = Stock.objects.get(ticker = str(ticker))
                except:
                    logger.error("{} does not exist".format(ticker))
                    raise Exception("{} does not exist".format(ticker))
            if buy_price!='' and buy_quant!='' :
                trade = Trade(price=buy_price,quantity=buy_quant,stock=last_stock,trader=self.trader,account=self.account,time=self.date)
                if self.dry != True:
                    #print("{} {}".format(buy_price,buy_quant))
                    trade.save()
                count += 1
            if sell_price!='' and sell_quant!='' :
                # mark quantity to negative as a sign of sell
                trade = Trade(price=sell_price,quantity=0-sell_quant,stock=last_stock,trader=self.trader,account=self.account,time=self.date)
                if self.dry != True:
                    trade.save()
                count += 1
            curr_row += 1
        # reset start row and move to next column session
        curr_row = start_row
        curr_col += 5
    logger.info("{} trades saved.".format(count))
    return count
def preProcess(self, input, n):
    """Normalize an arithmetic expression string for evaluation.

    Strips spaces and commas, lower-cases it, inserts explicit '*' at
    implied-multiplication boundaries (e.g. '2(3)', '3n'), substitutes
    the numeric value *n* for the variable 'n', and validates that only
    arithmetic characters remain.

    Args:
        input: expression string (must be under 128 characters).
        n: value substituted for the variable 'n'.

    Returns:
        str: the normalized expression.

    Raises:
        Exception: if the input is too long or contains invalid characters.
    """
    if len(input) >= 128:
        raise Exception('input too large, must be under 128 characters')
    cleaned = input.replace(" ", "").replace(',', '')
    # Insert '*' at implied-multiplication boundaries; re.X makes the
    # whitespace inside the pattern insignificant.
    output = re.sub(
        r'(?<=\w|\))(?=\() | (?<=\))(?=\w) | (?<=\d)(?=n) | (?<=n)(?=\w)',
        '*', cleaned.lower(), flags=re.X)
    output = output.replace('n', str(n))
    # Reject anything that is not an arithmetic character.
    matches = re.search(r'([^\^0-9.*/()+-])', output)
    if matches is not None:  # idiom fix: `is not None` instead of `!= None`
        raise Exception("Invalid syntax '{}'".format(matches.group(1)))
    return output
def liquidate_positions(self):
    """Force-close every open position and empty all holdings."""
    for security in self.holdings.keys():
        open_positions = self.holdings[security]
        for position in open_positions:
            self.close_position(position, "force")
        # Every position must actually have been closed by close_position.
        if any(not position.closed for position in open_positions):
            raise Exception("should not be here")
        open_positions.clear()
    self.holdings.clear()
def set(self, item):
    '''Select *item* in the combo box programmatically.

    Raises:
        Exception: if *item* is not one of the combo box's items.
    '''
    # Idiom fixes: `item not in` instead of `not item in`, and a plain
    # raise instead of a parenthesized `raise (Exception(...))`.
    if item not in self._item_map.values():
        raise Exception("Item %s is not in items for combo box" % item)
    self._selection.set("%s" % item)
def test_lamoda_delivery():
    """UI test: set the delivery region on lamoda.ru to Novosibirsk, switch
    it to Moscow on the delivery-terms page, and check the number of
    delivery options shown."""
    driver = WebDriver(executable_path='C://python//chromedriver.exe')
    driver.implicitly_wait(10)
    driver.get('https://lamoda.ru')
    with allure.step('Нажимаем на кнопку выбора региона доставки'):
        search_button = driver.find_element_by_xpath('//span[@class="header__top-description"]').click()
    with allure.step('Выбираем окно ввода и набираем г Новосибирск'):
        search_input = driver.find_element_by_xpath('//input[@class="text-field text-field_large"]').send_keys(
            "г Новосибирск")
    city = 'г Новосибирск'
    with allure.step('Выбираем г Новосибирск из списка саджестов'):
        suggests = driver.find_elements_by_xpath('//ul[@class="geo__suggest"]//li[@class="suggest__item"]')
        for suggest in suggests:
            if suggest.find_element_by_xpath('//span[@class="suggest__item-query"]').text == city:
                suggest.click()
                break
        else:
            # BUG FIX: the original referenced the undefined name
            # `city_name`, raising NameError instead of this message.
            raise Exception(f'Среди подсказок отсутствует {city}')
    with allure.step('Нажимаем подтвержить выбор'):
        search_button = driver.find_element_by_xpath('//button[@class="button button_blue geo__button-save"]').click()
    with allure.step('Переходим на страницу Условия Доставки'):
        search_button = driver.find_element_by_xpath(
            '//a[@class="link footer__link"][contains( text(), "Условия доставки")]').click()
    with allure.step('Меняем город доставки на Москву'):
        search_input = driver.find_element_by_xpath('//input[@class="text-field js-suggested-input"]').clear()
        search_input = driver.find_element_by_xpath(
            '//input[@class="text-field js-suggested-input"]').send_keys('г Москва')
    with allure.step('Из списка саджестов выбираем Москву и подтверждаем выбор'):
        suggest = driver.find_element_by_xpath('//span[@class="suggest__item-query"]').click()
    with allure.step('Проверяем кол-во вариантов доставки, сроки и стоимость'):
        delivery_types = driver.find_elements_by_xpath('//div[@class="delivery_item-name_type"]')
        for delivery_type in delivery_types:
            # Idiom fix: len(...) instead of calling .__len__() directly.
            if len(driver.find_elements_by_xpath('//div[@class="delivery_item-name_type"]')) == 5:
                break
        else:
            # Idiom fix: the f-string had no placeholders.
            raise Exception('Вариантов доставки больше чем 5')
def separate_trades(self, trades):
    """Group *trades* by their date into self.trades.

    Trades are expected to arrive ordered by date (MM/DD/YYYY strings);
    each date may only be filled once.

    Raises:
        Exception: on an empty input or a date collision in self.trades.
    """
    current_group = []
    last_date = None
    for trade in trades:
        trade_date = datetime.datetime.strptime(trade.date, "%m/%d/%Y").date()
        if last_date is not None and last_date != trade_date:
            # Date changed: flush the completed group.
            if last_date in self.trades:
                raise Exception("should not be here")
            self.trades[last_date] = current_group
            current_group = []
        current_group.append(trade)
        last_date = trade_date
    if len(current_group) == 0:
        raise Exception("should not be here")
    # Store the final (possibly only) group.
    if last_date in self.trades:
        raise Exception("should not be here")
    self.trades[last_date] = current_group
def __init__(self, description, errorCode):
    """Create the exception.

    Args:
        description (str): Description of the error
        errorCode (int): Code corresponding to the error
    """
    # Delegate to the shared base exception.
    _StandardException.__init__(self, description, errorCode)