def validate_limits(limit, skip) -> Tuple[int, int]:
    """
    Sanitise pagination arguments into safe integers.

    :param limit: Requested maximum number of results (may be None / empty string).
    :param skip:  Requested result offset (may be None / empty string).
    :return: ``(limit, skip)`` with limit clamped to ``1..base.MAX_API_LIMIT``
             and skip clamped to ``>= 0``.
    """
    limit = int(empty_if(limit, base.DEFAULT_API_LIMIT))
    skip = int(empty_if(skip, 0))
    # Clamp using max/min instead of the previous chain of conditional expressions.
    limit = max(1, min(limit, base.MAX_API_LIMIT))
    skip = max(0, skip)
    return limit, skip
def _tpl_add_hosts(host: str = None, v4_host: str = None, v6_host: str = None, **kwargs) -> dict:
    """
    Build the template context dict of v4/v6/main hostnames for the current request.

    :param host: Override the hostname (defaults to the requesting ``request.host``).
    :param v4_host: Explicit IPv4-subdomain host - computed from the host when empty.
    :param v6_host: Explicit IPv6-subdomain host - computed from the host when empty.

    Kwargs (each defaulting to its matching ``settings`` value): ``filter_hosts``,
    ``set_trusted_hosts``, ``v4_subdomain``, ``v6_subdomain``, ``main_host``,
    ``force_main_host``.

    :return: ``dict(v4_host=..., v6_host=..., host=..., main_host=...)``
    """
    filter_hosts = kwargs.pop('filter_hosts', settings.FILTER_HOSTS)
    set_trusted_hosts = kwargs.pop('set_trusted_hosts', True)
    v4_sub = kwargs.pop('v4_subdomain', settings.V4_SUBDOMAIN)
    v6_sub = kwargs.pop('v6_subdomain', settings.V6_SUBDOMAIN)
    main_host = kwargs.pop('main_host', settings.MAIN_HOST)
    force_main_host = kwargs.pop('force_main_host', settings.FORCE_MAIN_HOST)
    # Optionally restrict which Host headers this request will trust.
    if set_trusted_hosts:
        request.trusted_hosts = settings.ALLOWED_HOSTS if filter_hosts else None
    hst = request.host if empty(host) else host
    if not any([empty(v4_host), empty(v6_host)]):
        pass  # both v4 and v6_host are set, so we don't want to override them.
    elif force_main_host:
        # If force_main_host is true, then we shouldn't use the current requested host from ``hst``,
        # instead we use the pre-set V4_HOST and V6_HOST from settings (unless v4/v6_host are overridden)
        v4_host = empty_if(v4_host, settings.V4_HOST)
        v6_host = empty_if(v6_host, settings.V6_HOST)
    else:
        # If the current host is on the v4/v6 subdomain, then we need to trim the subdomain to avoid prepending a second subdomain
        if hst.startswith(f'{v4_sub}.'):
            hst = hst.replace(f'{v4_sub}.', '', 1)
        if hst.startswith(f'{v6_sub}.'):
            hst = hst.replace(f'{v6_sub}.', '', 1)
        # Only empty values are replaced - any caller-supplied v4/v6 host wins.
        v4_host, v6_host = empty_if(v4_host, f"{v4_sub}.{hst}"), empty_if(v6_host, f"{v6_sub}.{hst}")
    return dict(v4_host=v4_host, v6_host=v6_host, host=hst, main_host=main_host)
def parse_db_args(ins, db=None, memory_persist=False, connection_kwargs=None, default_kwargs=None):
    """
    Resolve the sqlite3 database path and connection kwargs for an adapter instance.

    :param ins: Adapter instance - supplies ``isolation_level``, ``db_timeout``,
                ``DEFAULT_DB`` and ``DEFAULT_DB_FOLDER`` defaults.
    :param db: Database path/URI. Relative paths are prefixed with ``ins.DEFAULT_DB_FOLDER``.
    :param memory_persist: Use a shared in-memory database instead of a file.
    :param connection_kwargs: Extra kwargs merged over the defaults (caller's values win).
    :param default_kwargs: Replace the default connection kwargs entirely.
    :return: ``(db, conn_kwargs)`` - the resolved DB path/URI and sqlite3.connect kwargs.
    """
    connection_kwargs = empty_if(connection_kwargs, {})
    default_conn_kwargs = empty_if(
        default_kwargs, dict(isolation_level=ins.isolation_level, timeout=ins.db_timeout))
    db = 'file::memory:?cache=shared' if memory_persist else empty_if(db, ins.DEFAULT_DB)
    if ':memory:' not in db:
        if not isabs(db):
            log.debug("Passed 'db' argument isn't absolute: %s", db)
            db = join(ins.DEFAULT_DB_FOLDER, db)
            log.debug("Prepended DEFAULT_DB_FOLDER to 'db' argument: %s", db)
        # Compute the folder just once, after any relative -> absolute rewrite above
        # (the original computed it twice).
        db_folder = dirname(db)
        if not os.path.exists(db_folder):
            log.debug(
                "Database folder '%s' doesn't exist. Creating it + any missing parent folders",
                db_folder)
            # exist_ok=True guards against a race where another process creates the
            # folder between the exists() check and makedirs().
            os.makedirs(db_folder, exist_ok=True)
    else:
        log.debug(
            "Passed 'db' argument is %s - using in-memory sqlite3 database.", db)
    # 'file:' URIs (e.g. the shared in-memory DB) require uri=True for sqlite3.connect
    if 'file:' in db:
        default_conn_kwargs['uri'] = True
    return db, {**default_conn_kwargs, **connection_kwargs}
def _neighbor_row(neighbor: Neighbor):
    """Build a NeighborTableRow from a Neighbor, colourising the 'connected' flag."""
    # Green YES / red NO depending on connection state.
    connected = f"{Fore.GREEN}YES{Fore.RESET}" if neighbor.connected else f"{Fore.RED}NO{Fore.RESET}"
    row = DictObject(
        address=neighbor.address,
        domain=neighbor.domain,
        alias=empty_if(neighbor.alias, 'N/A'),
        connectionType=neighbor.connectionType,
        autopeeringId=empty_if(neighbor.autopeeringId, 'N/A'),
        numberOfAllTransactions=str(neighbor.numberOfAllTransactions),
        numberOfReceivedMilestoneReq=str(neighbor.numberOfReceivedMilestoneReq),
        numberOfSentHeartbeats=str(neighbor.numberOfSentHeartbeats),
        connected=connected,
    )
    return NeighborTableRow(**row)
def __init__(self, error_code="UNKNOWN_ERROR", message: str = None, status: int = None, template: str = None, extra: dict = None):
    """
    Build an API error, filling in the message and HTTP status from the registered
    error-code definition whenever they aren't explicitly supplied.
    """
    from .core import _get_error
    _err = _get_error(error_code)
    # Exception text falls back to the registered message for this error code.
    super().__init__(error_code + ' ' + empty_if(message, _err.message))
    self.error_code = error_code
    self.message = message
    self.template = template
    # zero=True: a status of 0 counts as empty and is replaced by the registered status.
    self.status = empty_if(status, _err.status, zero=True)
    self.extra = extra if extra is not None else {}
def view_lookup(ip_addr=None, dtype=None, bformat=None):
    """
    Flask view: GeoIP lookup for a single IP or a batch of IPs.

    Reads the target address(es) from the merged request data - single IP via
    'ip'/'address'/'addr'/'ip_address' (falling back to ``ip_addr``, then the
    requesting client's own IP), batch via 'ips'/'addresses'/'addrs'/'ip_addresses'
    (list or comma-separated string). Output format (text / yaml / json) is chosen
    from ``dtype``, ``bformat`` and the request's wanted type.
    """
    frm = merge_frm(req=request)
    # Both the single-address and batch-address parameters accept several aliases.
    ip = frm.get('ip', frm.get('address', frm.get('addr', frm.get('ip_address', ip_addr))))
    iplist = frm.get('ips', frm.get('addresses', frm.get('addrs', frm.get('ip_addresses', []))))
    ua = request.headers.get('User-Agent', 'N/A')
    # An explicit 'format' request param takes priority over the bformat argument.
    wanted = wants_type() if 'format' in frm else wants_type(fmt=bformat)
    # A comma-separated string of IPs becomes a list.
    if not empty(iplist, itr=True) and isinstance(iplist, str):
        iplist = iplist.split(',')
    if not empty(iplist, itr=True) and isinstance(iplist, (list, tuple)):
        # --- Batch lookup path ---
        if len(iplist) > settings.MAX_ADDRESSES:
            ecode, emsg = "TOO_MANY_ADDRS", f"Too many addresses. You can only lookup {settings.MAX_ADDRESSES} addresses at a time."
            # NOTE(review): 402 is "Payment Required" - 400 or 413 looks more appropriate; confirm intent.
            if not empty(dtype) or wanted == 'text':
                return Response(f"ERROR: {emsg} (code: {ecode})", status=402, content_type='text/plain')
            edict = dict(error=True, code=ecode, message=emsg)
            if wanted == 'yaml':
                return Response(dump_yaml(edict), status=402, content_type='text/yaml')
            return jsonify(edict), 402
        if not empty(dtype) or wanted == 'text':
            # Flat-text output: one get_flat() section per IP, separated by a divider line.
            dtype = empty_if(dtype, frm.get('type', frm.get('dtype', 'all')))
            _ln = "\n==========================================================\n"
            res_txt = _ln.lstrip('\n')
            for xip in iplist:
                res_txt += get_flat(xip, ua=ua, dtype=dtype) + "\n" + _ln
            return Response(res_txt, status=200, content_type='text/plain')
        # Structured output: map each IP to its sanitised geo-data dict.
        res_list = {xip: geo_view(xip, ua=ua) for xip in iplist}
        rdct = {k: _safe_dict(v) for k, v in res_list.items()}
        if wanted == 'yaml':
            return Response(dump_yaml(dict(addresses=rdct)), status=200, content_type='text/yaml')
        return jsonify(rdct)
    # --- Single lookup path --- fall back to the requesting client's own IP.
    ip = get_ip() if empty(ip) else ip
    # ua = h.get('User-Agent', 'Empty User Agent')
    data = dict(geo_view(ip, ua=ua))
    # wanted = wants_type()
    if not empty(dtype) or wanted == 'text':
        dtype = empty_if(dtype, frm.get('type', frm.get('dtype', 'all')))
        fres = get_flat(ip, ua=ua, dtype=dtype) + "\n"
        return Response(fres, status=200, content_type='text/plain')
    if wanted == 'yaml':
        return Response(dump_yaml(data), status=200, content_type='text/yaml')
    return jsonify(data)
def get_iota(host=None, new_instance=False, **kwargs) -> "PrivexIota":
    """
    Return the shared PrivexIota client from STORAGE, building a fresh one when
    forced via ``new_instance`` or when no (non-empty) cached instance exists.
    """
    host = empty_if(host, settings.IOTA_HOST)
    needs_rebuild = new_instance or empty(STORAGE.get('iota'))
    if needs_rebuild:
        # Drop any stale cached instance before constructing a replacement.
        if 'iota' in STORAGE:
            del STORAGE['iota']
        STORAGE.iota = PrivexIota(adapter=host, **kwargs)
    return STORAGE.iota
async def test_get_followers(self, host=None, *args, **kwargs) -> Tuple[Union[list, dict], float, int]:
    """Test a node for functioning full node get_followers"""
    host = empty_if(host, self.host)
    count = 10
    # Request `count` followers of the test account, ordered by 'blog' follow type.
    res, tt, tr = await rpc(
        host=host, method='condenser_api.get_followers',
        params=[self.test_acc, None, "blog", count],
    )
    log.debug(f'Length check if result from {host} has at least {count} results')
    follow_len = len(res)
    if follow_len < count:
        raise ValidationError(
            f"Too little followers. Only {follow_len} follower results (<{count}) for {host}"
        )
    log.debug(f'get_followers check if result from {host} has valid follower items')
    for follower in res:
        self._check_follower(follower)
    return res, tt, tr
async def get_pair(self, from_coin: str, to_coin: str = None) -> PriceData:
    """Fetch price data for a pair, defaulting to the base token and normalising the symbol."""
    to_coin = empty_if(to_coin, self.base_token)
    from_coin = from_coin.upper()
    # Avoid imposter tokens by correcting "standard" symbols into their known official symbol on the exchange.
    from_coin = self.linked_symbols.get(from_coin, from_coin)
    return await super().get_pair(from_coin=from_coin, to_coin=to_coin)
async def _try_unknown_method(node: str, method: str, params: Union[list, dict] = None, auto_exit=True):
    """
    Call an arbitrary JSON-RPC ``method`` against ``node`` and report whether it works.

    :param node: The RPC node to test.
    :param method: The RPC method name to call.
    :param params: Positional (list) or named (dict) params for the call.
    :param auto_exit: When truthy, failures terminate the process via sys.exit;
                      otherwise failures return ``False``.
    :return: The ``rpc()`` result tuple on success, ``False`` on failure (if not exiting).
    """
    params = empty_if(params, [])
    try:
        # loop = asyncio.get_event_loop()
        data = await rpc(host=node, method=method, params=params)
        # data[0] holds the RPC result payload - an empty payload means the node
        # accepted the call but returned nothing useful.
        if empty(data[0]):
            log.warning(
                "Response for method '%s' from '%s' was empty. Marking as broken!",
                method, node)
            return sys.exit(settings.BAD_RETURN_CODE) if auto_exit else False
        return data
    except RPCError as e:
        log.error(
            "Got RPC error in _try_unknown_method() while testing method %s against %s - Ex: %s %s",
            method, node, type(e), str(e))
        return sys.exit(settings.BAD_RETURN_CODE) if auto_exit else False
    except ServerDead as e:
        log.error(
            "Got ServerDead error in _try_unknown_method() while testing method %s against %s - Ex: %s %s",
            method, node, type(e), str(e))
        # ServerDead may wrap an underlying RPCError - log that too when present.
        oe = e.orig_ex
        if not empty(oe):
            if isinstance(oe, RPCError):
                log.error(
                    "ServerDead contained RPCError while testing method %s against %s - Ex: %s %s",
                    method, node, type(oe), str(oe))
        return sys.exit(settings.BAD_RETURN_CODE) if auto_exit else False
    except Exception as e:
        # Catch-all so a single broken node/method can't crash a wider scan when auto_exit=False.
        log.error(
            "Fatal exception in _try_unknown_method() while testing method %s against %s - Ex: %s %s",
            method, node, type(e), str(e))
        return sys.exit(1) if auto_exit else False
async def test_bridge_trending_topics(self, host=None, *args, **kwargs) -> Tuple[Union[list, dict], float, int]:
    """Test a node for functioning bridge.get_trending_topics"""
    host = empty_if(host, self.host)
    count = 10
    res, tt, tr = await rpc(
        host=host, method='bridge.get_trending_topics', params={"limit": count}
    )
    log.debug(f'bridge.get_trending_topics check if result from {host} has valid trending topics')
    # Each topic should be a (community_id, title) pair with a 'hive-' community ID.
    for a in res:
        if len(a) != 2:
            raise ValidationError(
                f"Community result contained {len(a)} items (expected 2) in bridge.get_trending_topics response from {host}"
            )
        if 'hive-' not in a[0]:
            raise ValidationError(
                f"Invalid community '{a[0]}' in bridge.get_trending_topics response from {host}"
            )
    return res, tt, tr
def convert_tables(opts):
    """
    CLI handler: convert one or more tables (and optionally their columns) to the
    requested charset/collation.

    ``opts`` is an argparse namespace providing: db, tables, all_tables, conv_columns,
    outer_tx, skip_indexed, charset, collation. Exits with status 1 when no tables
    were selected.
    """
    db = empty_if(opts.db, settings.DB_NAME, itr=True)
    tables = empty_if(opts.tables, [], itr=True)
    all_tables = is_true(opts.all_tables)
    conv_columns = is_true(opts.conv_columns)
    outer_tx = is_true(opts.outer_tx)
    skip_indexed = is_true(opts.skip_indexed)
    if not empty(db):
        core.reconnect(database=db)
    charset, collation = empty_if(opts.charset, 'utf8mb4', itr=True), empty_if(opts.collation, 'utf8mb4_unicode_ci', itr=True)
    # table = empty_if(opts.table, None, itr=True)
    # Must either name at least one table, or request all of them.
    if empty(tables, itr=True) and not all_tables:
        parser.error(f"\n{RED}ERROR: You must specify a table to 'convert_tables' or pass --all-tables / -a{RESET}\n")
        return sys.exit(1)
    if settings.QUIET:
        core.set_logging_level()
    else:
        core.set_logging_level(env('LOG_LEVEL', 'INFO'))
    if all_tables:
        tables = core.get_tables(database=db)
        tnames = [t.table for t in tables]
        print(YELLOW)
        print(f" >>> --all-tables was specified. Converting {len(tables)} tables! The tables are: {', '.join(tnames)}")
        print(RESET)
    else:
        # Resolve each requested table name into its TableResult metadata object.
        tables = [core.get_tables(database=db, table=t)[0] for t in tables]
        tnames = [t.table for t in tables]
    for t in tables:
        print(f"\n{YELLOW} [...] Converting table {t.table} to charset {charset} and collation {collation}{RESET}\n")
        core.convert_table(t.table, charset=charset, collation=collation)
        print(f"\n{GREEN} [+++] Successfully converted table {t.table}{RESET}\n")
    # Optionally also convert the individual columns within each converted table.
    if conv_columns:
        print(f"\n{BLUE} >>> Converting COLUMNS to charset {charset} and collation {collation} for tables: {', '.join(tnames)}{RESET}\n")
        _convert_columns(
            tables, charset=charset, collation=collation, outer_tx=outer_tx, skip_indexed=skip_indexed
        )
        print(f"\n{GREEN} [+++] Successfully converted COLUMNS inside of tables: {', '.join(tnames)}{RESET}\n")
    print(f"\n{GREEN} ++++++ Successfully converted {len(tables)} tables ++++++ {RESET}\n")
def __init__(self, table: str, connection_args: list = None, connection_kwargs: dict = None, **kwargs):
    """
    Initialise empty query-builder state for ``table``.

    :param table: The database table this builder targets.
    :param connection_args: Positional args for the DB connection (default: []).
    :param connection_kwargs: Keyword args for the DB connection (default: {}).
    """
    self.table = table
    self.connection_args = empty_if(connection_args, [])
    self.connection_kwargs = empty_if(connection_kwargs, {})
    self.query = ""
    # Accumulators for the pieces of the query being built.
    self.select_cols, self.group_cols = [], []
    self.where_clauses, self.where_clauses_values = [], []
    self.order_cols, self.order_dir = [], ''
    self.limit_num, self.limit_offset = None, None
    # Execution state.
    self._cursor, self._is_executed = None, False
    setup_nest_async()  # Load nest_asyncio if it wasn't already loaded
def convert_columns(opts):
    """
    CLI handler: convert specific columns (or all columns) of one table - or of every
    table with ``-a/--all-tables`` - to the requested charset/collation.

    ``opts`` is an argparse namespace providing: db, table, charset, collation, columns,
    outer_tx, skip_indexed, all_tables, all_columns. Exits with status 1 on invalid
    argument combinations or conversion failure.
    """
    db = empty_if(opts.db, settings.DB_NAME, itr=True)
    table = empty_if(opts.table, None, itr=True)
    charset, collation = empty_if(opts.charset, 'utf8mb4', itr=True), empty_if(opts.collation, 'utf8mb4_unicode_ci', itr=True)
    columns = empty_if(opts.columns, [], itr=True)
    outer_tx = is_true(opts.outer_tx)
    skip_indexed = is_true(opts.skip_indexed)
    all_tables = is_true(opts.all_tables)
    all_cols = is_true(opts.all_columns)
    if not empty(db):
        core.reconnect(database=db)
    if empty(table) and not all_tables:
        parser.error(f"\n{RED}ERROR: You must specify a table to 'convert_columns' without -a / --all-tables{RESET}\n")
        return sys.exit(1)
    # BUG FIX: this previously used ``empty(columns, itr=True) or not all_cols``, which
    # rejected the valid case where columns WERE given via '-c' but '-k' wasn't passed.
    # Per the error message ("either ... or"), only reject when NEITHER was provided.
    if all_tables and (empty(columns, itr=True) and not all_cols):
        parser.error(f"\n{RED}ERROR: You must either specify columns using '-c' or pass --all-columns / -k when using -a / --all-tables{RESET}\n")
        return sys.exit(1)
    if settings.QUIET:
        core.set_logging_level()
    else:
        core.set_logging_level(env('LOG_LEVEL', 'INFO'))
    if all_tables:
        # Convert the selected columns in every table of the database.
        tables = core.get_tables(db)
        _convert_columns(
            tables, all_cols, charset=charset, collation=collation, db=db, columns=columns,
            outer_tx=outer_tx, skip_indexed=skip_indexed
        )
        return
    print(f"\n >>> Converting columns in table {table} to charset {charset} and collation {collation}\n")
    try:
        core.convert_columns(
            table, *columns, conv_all=all_cols, charset=charset, collation=collation,
            use_tx=outer_tx, skip_indexed=skip_indexed, database=db
        )
        print(f"\n [+++] Finished converting {table}.\n")
    except Exception as e:
        log.exception("Error while converting columns in table %s - %s - %s", table, type(e), str(e))
        return sys.exit(1)
def get_flat(ip: str, ua: str = None, dtype: str = None, geodata: Union[GeoIPResult, DictObject] = None) -> str:
    """
    Return a plain-text representation of GeoIP lookup data for ``ip``.

    :param ip: The IP address to describe.
    :param ua: The requesting client's user agent ('N/A' when empty).
    :param dtype: Which field to output (each field accepts many aliases, e.g.
                  'country', 'asn', 'latlon', 'all'); empty/unknown values fall
                  back to just the IP address.
    :param geodata: Optional pre-fetched geo data; looked up via get_geodata() when empty.
    """
    ua = empty_if(ua, 'N/A')
    dtype = empty_if(dtype, '')
    dt = dtype.lower()  # hoisted: dtype.lower() was previously re-evaluated in every branch
    if dt in ['', 'none', 'ip', 'address', 'addr', 'ipaddr', 'ipaddress', 'ip_address']:
        return str(ip)
    if dt in ['ua', 'agent', 'useragent', 'user-agent', 'user_agent']:
        return str(ua)
    data = get_geodata(ip) if empty(geodata, itr=True) else geodata
    hostname = get_rdns(ip)
    ip_type = 'ipv4' if ip_is_v4(ip) else 'ipv6'
    if dt in ['version', 'type', 'ipv', 'ipver', 'ipversion', 'ip_version', 'ip-version']:
        return str(ip_type)
    if dt in ['dns', 'rdns', 'reverse', 'reversedns', 'host', 'hostname', 'arpa', 'rev']:
        return str(hostname)
    if dt in ['country', 'region']:
        return str(data.country)
    if dt in ['country_code', 'region_code', 'country-code', 'region-code', 'code']:
        return str(data.country_code)
    if dt in ['city', 'area']:
        return str(data.city)
    # BUG FIX: the original list was missing a comma between 'full_asn' and 'isp_full',
    # which implicitly concatenated them into the single alias 'full_asnisp_full' -
    # so neither 'full_asn' nor 'isp_full' actually matched.
    if dt in ['asfull', 'fullas', 'asnfull', 'fullasn', 'ispfull', 'fullisp', 'as_full', 'full_as', 'full_asn',
              'isp_full', 'full_isp', 'asinfo', 'asninfo', 'as_info', 'asn_info', 'isp_info', 'ispinfo']:
        return f"{data.as_name}\nAS{data.as_number}"
    if dt in ['as', 'asn', 'asnum', 'asnumber', 'as_number', 'isp_num', 'isp_number', 'isp_asn']:
        return str(data.as_number)
    if dt in ['asname', 'ispname', 'isp', 'as_name', 'isp_name']:
        return str(data.as_name)
    if dt in ['post', 'postal', 'postcode', 'post_code', 'zip', 'zipcode', 'zip_code']:
        return str(data.postcode)
    if dt in ['loc', 'locate', 'location', 'countrycity', 'citycountry', 'country_city', 'city_country']:
        # "City, Postcode, Country" with empty components omitted.
        res = ""
        if not empty(data.city):
            res += f"{data.city!s}, "
        if not empty(data.postcode):
            res += f"{data.postcode!s}, "
        if not empty(data.country):
            res += f"{data.country!s}"
        return res.strip(', ')
    if dt in ['all', 'full', 'info', 'information']:
        return f"IP: {ip}\nVersion: {ip_type}\nHostname: {hostname}\nUserAgent: {ua}\nCountry: {data.country}\n" \
               f"CountryCode: {data.country_code}\nCity: {data.city}\nPostcode: {data.postcode}\nLat: {data.lat}\n" \
               f"Long: {data.long}\nASNum: {data.as_number}\nASName: {data.as_name}\nNetwork: {data.network}\n"
    if dt in ['lat', 'latitude']:
        return str(data.lat)
    if dt in ['lon', 'long', 'longitude']:
        return str(data.long)
    if dt in ['latlon', 'latlong', 'latitudelongitude', 'pos', 'position', 'cord', 'coord', 'coords',
              'coordinate', 'coordinates', 'co-ordinates']:
        return f"{data.lat:.4f}, {data.long:.4f}"
    return str(ip)
def list_cols(opts):
    """CLI handler: print a column-detail table for a single table in the chosen database."""
    db = empty_if(opts.db, settings.DB_NAME, itr=True)
    table = empty_if(opts.table, None, itr=True)
    col_size = 30
    print("\nColumn list for database:", db, "\n")
    print("\nTable:", table, "\n")
    columns = core.get_columns(db, table)
    headers = ['DB', 'Table', 'ColName', 'Default', 'Null', 'Type', 'Key', 'Extra', 'Char Set', 'Collation']
    # Divider sized to match the padded header columns.
    divider = spaceize(len(headers), col_size + 1)
    print(divider)
    print(columnize(*headers, size=col_size))
    print(divider)
    for col in columns:
        print(columnize(
            col.schema, col.table, col.column, col.default, col.nullable,
            col.column_type, col.column_key, col.extra, col.character_set, col.collation,
            size=col_size,
        ))
    print(divider)
async def test_condenser_history(self, host=None, *args, **kwargs) -> Tuple[Union[list, dict], float, int]:
    """Test a node for functioning condenser_api account history"""
    host = empty_if(host, self.host)
    # Fetch the most recent 100 history entries for the test account.
    res, tt, tr = await rpc(
        host=host, method='condenser_api.get_account_history',
        params=[self.test_acc, -100, 100],
    )
    self._check_hist(res)
    return res, tt, tr
def test_emptyif_only_value(self):
    """empty_if(value) alone: empty inputs -> None, non-empty inputs returned unchanged."""
    # Empty inputs collapse to None; 0 only counts as empty when zero=True.
    self.assertIsNone(helpers.empty_if(""))
    self.assertIsNone(helpers.empty_if(None))
    self.assertIsNone(helpers.empty_if(0, zero=True))
    # Non-empty inputs pass straight through.
    for val in ("hello", 1234):
        self.assertEqual(helpers.empty_if(val), val)
    self.assertListEqual(helpers.empty_if([1, 2, 3]), [1, 2, 3])
def test_emptyif_only_empty(self):
    """empty_if(value, is_empty): empty inputs -> is_empty, non-empty inputs unchanged."""
    for empty_input in ("", None):
        self.assertEqual(helpers.empty_if(empty_input, "empty"), "empty")
    # 0 only counts as empty when zero=True.
    self.assertEqual(helpers.empty_if(0, "empty", zero=True), "empty")
    for val in ("hello", 1234):
        self.assertEqual(helpers.empty_if(val, "empty"), val)
    self.assertListEqual(helpers.empty_if([1, 2, 3], "empty"), [1, 2, 3])
def _convert_columns(tables: List[core.TableResult], all_cols=True, charset="utf8mb4", collation="utf8mb4_unicode_ci", **kwargs):
    """
    Convert the columns of every table in ``tables`` to ``charset``/``collation``.

    :param tables: TableResult metadata objects for the tables to process.
    :param all_cols: Convert every column (True) or only those named in kwargs['columns'].

    Kwargs: ``db`` (database name), ``columns`` (explicit column names), ``outer_tx``
    (wrap each table's conversion in a transaction), ``skip_indexed`` (skip indexed
    columns). Exits the process with status 1 on the first failed table.
    """
    db = empty_if(kwargs.get('db'), settings.DB_NAME, itr=True)
    columns = empty_if(kwargs.get('columns'), [], itr=True)
    outer_tx = is_true(kwargs.get('outer_tx', True))
    skip_indexed = is_true(kwargs.get('skip_indexed', True))
    # all_cols = is_true(opts.all_columns)
    tnames = [t.table for t in tables]
    print(f"{YELLOW} >>> Converting columns in {len(tables)} tables. Tables are: {', '.join(tnames)}{RESET}\n")
    for t in tables:
        print(f"{CYAN} [-] Converting columns in table {t.table} to charset {charset} and collation {collation}{RESET}")
        try:
            core.convert_columns(
                t.table, *columns, conv_all=all_cols, charset=charset, collation=collation,
                use_tx=outer_tx, skip_indexed=skip_indexed, database=db
            )
            print(f"{GREEN} [+] Finished converting columns in table {t.table}{RESET}\n")
        except Exception as e:
            # Abort the whole run on the first failed table.
            log.exception("Error while converting columns in table %s - %s - %s", t.table, type(e), str(e))
            return sys.exit(1)
    print(f"\n{GREEN} [+++] Finished converting {len(tables)} tables. Tables were: {', '.join(tnames)}{RESET}\n")
async def base_get_accounts(steem: SteemAsync, network: str = None):
    """Shared test body: fetch the test accounts and sanity-check their balances."""
    network = empty_if(network, steem.network)
    accounts = await steem.get_accounts(*TEST_ACCOUNT_LST)
    assert len(accounts.keys()) > 0
    # Expected balance symbols depend on which network we're talking to.
    if network.upper() == 'STEEM':
        curr_steem, curr_sbd = 'STEEM', 'SBD'
    else:
        curr_steem, curr_sbd = 'HIVE', 'HBD'
    for n, a in accounts.items():
        assert isinstance(a, Account)
        assert n == a.name
        assert curr_steem in a.balances
        assert curr_sbd in a.balances
        assert a.balances[curr_steem].amount > 0
def load_node_info(host: Optional[str] = None) -> NodeInfo:
    """
    Connect to an IOTA node's HTTP API and return its node info.

    :param host: IOTA API host (defaults to ``settings.IOTA_HOST``)
    :raises ConnectionRefusedError: When the node refuses the connection.
    :raises requests.exceptions.ConnectionError: When the HTTP connection fails.
    :return: The node's ``get_node_info`` response.
    """
    host = empty_if(host, settings.IOTA_HOST)
    err(f"\n >>> {Fore.GREEN}Connecting to IOTA HTTP API...{Fore.RESET} {host}\n")
    iota_api = get_iota(host=host)
    err(f" >>> {Fore.GREEN}Calling get_node_info / Processing node response...{Fore.RESET}\n")
    return iota_api.get_node_info()
def _render_neighbor_row(row: NeighborTableRow = None, columns: list = None) -> str:
    """Render one neighbor table line - column titles when ``row`` is None, row data otherwise."""
    columns = empty_if(columns, NEIGHBOR_COLS)
    cells = []
    for c in columns:
        col = table_columns[c]
        if row:
            data = NEIGHBOR_ROW_COLOUR + getattr(row, c) + Fore.RESET
            padding = col.content_padding
        else:
            data = NEIGHBOR_TITLE_COLOUR + col.title + Fore.RESET
            padding = col.title_padding
        # Left-align each cell within its configured padding width.
        cells.append(f"{str(data):<{padding}}")
    return ''.join(cells)
def _safe_dict(d: Union[T, Dict[str, Any], list], safe_types: tuple = None, **kwargs) -> Union[T, list, dict, DictObject]:
    """
    Recursively sanitise ``d`` into serialisation-friendly structures.

    Dicts are rebuilt as DictObject with each value passed through _safe_obj; iterable
    containers (dict/list/tuple/set by default) are rebuilt with their original type;
    anything else is converted via _safe_obj and the result re-checked by this function.

    :param safe_types: Types passed through untouched (default: str, int, float).
    Kwargs: ``iter_types`` (container types to recurse into), ``unsafe_cast``
    (converter for unknown objects, default ``_try_dict``).
    """
    safe_types = tuple(empty_if(safe_types, (str, int, float), itr=True, zero=True))
    iter_types = tuple(empty_if(kwargs.pop('iter_types', None), (dict, list, tuple, set), itr=True, zero=True))
    unsafe_cast = kwargs.get('unsafe_cast', _try_dict)

    # Recursive helpers that carry the resolved options down each level.
    def _call_self(xdx):
        return _safe_dict(xdx, safe_types=safe_types, **kwargs)

    def _so(xdx):
        return _safe_obj(xdx, safe_types=safe_types, iter_types=iter_types, unsafe_cast=unsafe_cast, **kwargs)

    if isinstance(d, dict):
        log.debug(f"(_safe_dict) Object {d!r} appears to be a dict(-like) object - iterating over it, and recursively calling"
                  f" _safe_obj.")
        safex = {k: _so(v) for k, v in d.items()}
        # Values that only BECAME dicts during conversion need a second sanitising pass.
        for k, v in safex.items():
            if isinstance(v, dict) and not isinstance(d[k], dict):
                log.debug(f"(_safe_dict) Found new dict - converting it's contents to be safe by calling self (_safe_dict). "
                          f"This new dict is (key: {k!r}): {v!r}")
                safex[k] = _call_self(v)
        # return DictObject({**safex, **unsafex, **objsafe})
        return DictObject(safex)
    if isinstance(d, iter_types):
        # Preserve the original container type (list/tuple/set/...) when rebuilding.
        itr_cast = type(d)
        log.debug(f"(_safe_dict) Object {d!r} appears to be a iterable object - iterating over it, and recursively calling"
                  f" _safe_obj. iter_types: {iter_types!r}")
        return itr_cast([_so(v) for v in d])
    log.debug(f"(_safe_dict) Object {d!r} doesn't match known types. Passing it to _safe_obj then calling self (_safe_dict)"
              f"with it's result")
    # NOTE(review): _so(d) is evaluated twice below (res_so is only used for logging) -
    # if _so is side-effect free this is redundant work; confirm before simplifying.
    res_so = _so(d)
    log.debug(f"Result from _safe_obj: {res_so} (original object: {d!r})")
    res_self = _call_self(_so(d))
    log.debug(f"Result from _safe_dict(_safe_obj(d)): {res_so} (original object: {d!r})")
    return res_self
def _render_node_row(self, row: Optional[NodeTableRow] = None, columns: list = None) -> str:
    """Render one node table line - column titles when ``row`` is None, row data otherwise."""
    columns = empty_if(columns, self.table_columns.keys(), itr=True)
    parts = []
    for c in columns:
        col_obj = self.table_columns[c]
        if row:
            data, padding = getattr(row, c), col_obj.content_padding
        else:
            data, padding = col_obj.title, col_obj.title_padding
        # Left-align each cell within its configured padding width.
        parts.append(("{:<" + str(padding) + "}").format(str(data)))
    return ''.join(parts)
async def test_get_blog(self, host=None, *args, **kwargs) -> Tuple[Union[list, dict], float, int]:
    """Test a node for functioning full node get_blog"""
    host = empty_if(host, self.host)
    # Fetch the 10 most recent blog entries for the test account.
    res, tt, tr = await rpc(
        host=host, method='condenser_api.get_blog',
        params=[self.test_acc, -1, 10],
    )
    log.debug(f'get_blog check if result from {host} has blog, entry_id, comment, and comment.body')
    self._check_blog(res)
    return res, tt, tr
def insert(self, url, network=None, enabled=1, fail_count=0, last_fail=None, **kwargs):
    """
    Insert a node row into the 'nodes' table using the arguments given.

    :param str url: The URL for the RPC node
    :param str network: Defaults to this instance's network when not specified
    :param int enabled: Either 1 (enabled) or 0 (disabled)
    :param int fail_count: The amount of times this node has responded with a non-200 status.
    :param str last_fail: The date/time when this node last failed
    :param kwargs: Any additional columns to insert.
    """
    row = dict(
        url=url,
        network=empty_if(network, self.network),
        enabled=convert_bool_int(enabled),
        fail_count=int(fail_count),
        last_fail=convert_datetime(last_fail),
        updated_at=datetime.utcnow(),
    )
    return self.adapter.insert('nodes', **row, **kwargs)
def list_tables(opts):
    """CLI handler: print name/charset/collation for every table in the chosen database."""
    db = empty_if(opts.db, settings.DB_NAME, itr=True)
    print("\nTable list for database:", db, "\n")
    tables = core.get_tables(db)
    divider = spaceize(3, size=41)
    print(divider)
    print(columnize('Name', 'Char Set', 'Collation'))
    print(divider)
    for t in tables:
        print(columnize(t.table, t.character_set, t.collation))
    print(divider)
async def test_get_content(self, host=None, *args, **kwargs) -> Tuple[Union[list, dict], float, int]:
    """Test a node for functioning full node get_content"""
    host = empty_if(host, self.host)
    # Fetch the known test post authored by the test account.
    res, tt, tr = await rpc(
        host=host, method='condenser_api.get_content',
        params=[self.test_acc, self.test_post],
    )
    log.debug(f'get_content check if result from {host} has title, author and body')
    self._check_blog_item(res)
    return res, tt, tr
def test_emptyif_with_is_not_empty(self):
    """empty_if(value, is_empty, is_not_empty): is_empty for empty inputs, else is_not_empty."""
    for empty_input in ("", None):
        self.assertEqual(helpers.empty_if(empty_input, "empty", "not empty"), "empty")
    # 0 only counts as empty when zero=True.
    self.assertEqual(helpers.empty_if(0, "empty", "not empty", zero=True), "empty")
    for non_empty in ("hello", 1234, [1, 2, 3]):
        self.assertEqual(helpers.empty_if(non_empty, "empty", "not empty"), "not empty")