def _get_opening_row_items(self):
    """Return the opening line(s) of the reST csv-table directive.

    The decoded table name is appended when one is set.
    """
    directive = ".. csv-table:: "

    if typepy.is_null_string(self.table_name):
        return [directive]

    decoded_name = MultiByteStrDecoder(self.table_name).unicode_str

    return [directive + decoded_name]
def sanitize(self, replacement_text=""): sanitized_var_name = self._invalid_var_name_re.sub(replacement_text, self._str) # delete invalid char(s) in the beginning of the variable name is_require_remove_head = any( [ is_null_string(replacement_text), self._invalid_var_name_head_re.search(replacement_text) is not None, ] ) if is_require_remove_head: sanitized_var_name = self._invalid_var_name_head_re.sub("", sanitized_var_name) else: match = self._invalid_var_name_head_re.search(sanitized_var_name) if match is not None: sanitized_var_name = match.end() * replacement_text + self._invalid_var_name_head_re.sub( "", sanitized_var_name ) try: self._validate(sanitized_var_name) except ReservedNameError as e: if e.reusable_name is False: sanitized_var_name += "_" except NullNameError: pass return sanitized_var_name
def make_update(cls, table, set_query, where=None):
    """
    Make UPDATE query.

    :param str table: Table name of executing the query.
    :param str set_query: SET part of the UPDATE query.
    :param str where:
        Add a WHERE clause to execute query, if the value is not |None|.
    :return: Query of SQLite.
    :rtype: str
    :raises ValueError: If ``set_query`` is empty string.
    :raises simplesqlite.InvalidTableNameError:
        |raises_validate_table_name|
    """

    validate_table_name(table)

    if typepy.is_null_string(set_query):
        raise ValueError("SET query is null")

    query_items = [
        "UPDATE {:s}".format(cls.to_table_str(table)),
        "SET {:s}".format(set_query),
    ]
    if typepy.is_not_null_string(where):
        query_items.append("WHERE {:s}".format(where))

    return " ".join(query_items)
def to_attr_str_list(cls, name_list, operation_query=""):
    """
    :param list/tuple name_list: List of attribute names.
    :param str operation_query:
        Used as a SQLite function if the value is not empty.
    :return:
        List of strings that suitable for attribute names of a SQLite query.
    :rtype: list/itertools.imap

    :Examples:
        >>> from simplesqlite.sqlquery import SqlQuery
        >>> list(SqlQuery.to_attr_str_list(["key", "a+b"]))
        ['key', '[a+b]']
        >>> SqlQuery.to_attr_str_list(["key", "a+b"], operation_query="AVG")
        ['AVG(key)', 'AVG([a+b])']

    .. seealso:: :py:meth:`.to_attr_str`
    """

    if typepy.is_not_null_string(operation_query):
        # wrap each converted attribute in the SQLite function call
        return [
            "{:s}({:s})".format(operation_query, cls.to_attr_str(name))
            for name in name_list
        ]

    return list(map(cls.to_attr_str, name_list))
def _preprocess_header(self, col_idx, header):
    """Return a header usable as an attribute name for the given column."""
    # empty header: fall back to a generated default name
    if typepy.is_null_string(header):
        return self.__get_default_header(col_idx)

    # multi-byte headers are kept untouched (sanitizing would mangle them)
    return header if dataproperty.is_multibyte_str(header) else Attr.sanitize(header)
def parse(self, ping_message):
    """Parse macOS/BSD-style ``ping`` statistics output into PingStats.

    :param ping_message: Lines of ping command output.
    :return: PingStats with packet counts and, when an RTT summary line
        is present, round-trip time statistics.
    """
    icmp_replies = self._parse_icmp_reply(ping_message)
    stats_headline, packet_info_line, body_line_list = self._preprocess_parse_stats(
        lines=ping_message
    )
    # e.g. "5 packets transmitted, 5 packets received, ..."
    packet_pattern = (
        pp.Word(pp.nums)
        + pp.Literal("packets transmitted,")
        + pp.Word(pp.nums)
        + pp.Literal("packets received,")
    )

    destination = self._parse_destination(stats_headline)
    duplicates = self._parse_duplicate(packet_info_line)

    parse_list = packet_pattern.parseString(_to_unicode(packet_info_line))
    packet_transmit = int(parse_list[0])
    packet_receive = int(parse_list[2])

    is_valid_data = True
    try:
        rtt_line = body_line_list[1]
    except IndexError:
        is_valid_data = False

    # no RTT line (e.g. every packet lost): return packet counts only.
    # NOTE: short-circuit ordering matters — rtt_line is unbound when
    # is_valid_data is False.
    if not is_valid_data or typepy.is_null_string(rtt_line):
        return PingStats(
            destination=destination,
            packet_transmit=packet_transmit,
            packet_receive=packet_receive,
            duplicates=duplicates,
            icmp_replies=icmp_replies,
        )

    # e.g. "round-trip min/avg/max/stddev = 1.2/3.4/5.6/0.7 ms"
    rtt_pattern = (
        pp.Literal("round-trip min/avg/max/stddev =")
        + pp.Word(pp.nums + ".")
        + "/"
        + pp.Word(pp.nums + ".")
        + "/"
        + pp.Word(pp.nums + ".")
        + "/"
        + pp.Word(pp.nums + ".")
        + pp.Word(pp.nums + "ms")
    )
    parse_list = rtt_pattern.parseString(_to_unicode(rtt_line))

    return PingStats(
        destination=destination,
        packet_transmit=packet_transmit,
        packet_receive=packet_receive,
        duplicates=duplicates,
        rtt_min=float(parse_list[1]),
        rtt_avg=float(parse_list[3]),
        rtt_max=float(parse_list[5]),
        rtt_mdev=float(parse_list[7]),
        icmp_replies=icmp_replies,
    )
def __validate_interface(self):
    """Require an interface when pinging an IPv6 link-local address."""
    if not self.__is_ipv6():
        return

    dest_network = ipaddress.ip_network(six.text_type(self.destination_host))
    if not dest_network.is_link_local:
        return

    if typepy.is_null_string(self.interface):
        raise ValueError("interface required to ping to IPv6 link local address")
def __write_chapter(self):
    """Write the table name as a Markdown chapter heading, if one is set."""
    if typepy.is_null_string(self.table_name):
        return

    heading_marks = "#" * (self._indent_level + 1)
    title = MultiByteStrDecoder(self.table_name).unicode_str
    self._write_line("{:s} {:s}".format(heading_marks, title))
def parse(self, ping_message):
    """Parse Windows-style ``ping`` statistics output into PingStats.

    :param ping_message: Lines of ping command output.
    :return: PingStats with packet counts and, when an RTT summary line
        is present, min/avg/max round-trip times.
    """
    icmp_replies = self._parse_icmp_reply(ping_message)
    stats_headline, packet_info_line, body_line_list = self._preprocess_parse_stats(
        lines=ping_message
    )
    # e.g. "Packets: Sent = 4, Received = 4, ..."
    packet_pattern = (
        pp.Literal("Packets: Sent = ")
        + pp.Word(pp.nums)
        + pp.Literal(", Received = ")
        + pp.Word(pp.nums)
    )

    destination = self._parse_destination(stats_headline)
    duplicates = self._parse_duplicate(packet_info_line)

    parse_list = packet_pattern.parseString(_to_unicode(packet_info_line))
    packet_transmit = int(parse_list[1])
    packet_receive = int(parse_list[3])

    is_valid_data = True
    try:
        rtt_line = body_line_list[2].strip()
    except IndexError:
        is_valid_data = False

    # no RTT line (e.g. every packet lost): return packet counts only.
    # NOTE: short-circuit ordering matters — rtt_line is unbound when
    # is_valid_data is False.
    if not is_valid_data or typepy.is_null_string(rtt_line):
        return PingStats(
            destination=destination,
            packet_transmit=packet_transmit,
            packet_receive=packet_receive,
            duplicates=duplicates,
            icmp_replies=icmp_replies,
        )

    # "Minimum = Xms, Maximum = Yms, Average = Zms"
    rtt_pattern = (
        pp.Literal("Minimum = ")
        + pp.Word(pp.nums)
        + pp.Literal("ms, Maximum = ")
        + pp.Word(pp.nums)
        + pp.Literal("ms, Average = ")
        + pp.Word(pp.nums)
    )
    parse_list = rtt_pattern.parseString(_to_unicode(rtt_line))

    return PingStats(
        destination=destination,
        packet_transmit=packet_transmit,
        packet_receive=packet_receive,
        duplicates=duplicates,
        # index order follows the line above: [1]=Minimum, [3]=Maximum,
        # [5]=Average — NOT min/avg/max order
        rtt_min=float(parse_list[1]),
        rtt_avg=float(parse_list[5]),
        rtt_max=float(parse_list[3]),
        icmp_replies=icmp_replies,
    )
def execute_query(self, query, caller=None):
    """
    Send arbitrary SQLite query to the database.

    :param str query: Query to executed.
    :param tuple caller:
        Caller information.
        Expects the return value of :py:meth:`logging.Logger.findCaller`.
    :return: The result of the query execution.
    :rtype: sqlite3.Cursor
    :raises simplesqlite.NullDatabaseConnectionError:
        |raises_check_connection|
    :raises simplesqlite.OperationalError:
        |raises_operational_error|

    .. warning::

        This method can execute an arbitrary query.
        i.e. No access permissions check by |attr_mode|.
    """

    import time

    self.check_connection()
    if typepy.is_null_string(query):
        return None

    if self.__is_profile:
        exec_start_time = time.time()

    try:
        result = self.connection.execute(query)
    except sqlite3.OperationalError as e:
        if caller is None:
            caller = logging.getLogger().findCaller()
        file_path, line_no, func_name = caller[:3]
        # BUGFIX: removed a stray '%' that was left over from %-style
        # formatting ("at %{:s}(...)" rendered a literal '%' in the message)
        message_list = [
            "failed to execute query at {:s}({:d}) {:s}".format(
                file_path, line_no, func_name),
            " - query: {}".format(query),
            " - msg: {}".format(str(e)),
            " - db: {}".format(self.database_path),
        ]
        raise OperationalError(os.linesep.join(message_list))

    if self.__is_profile:
        # accumulate per-query call counts and total execution time
        self.__dict_query_count[query] = (
            self.__dict_query_count.get(query, 0) + 1)

        elapse_time = time.time() - exec_start_time
        self.__dict_query_totalexectime[query] = (
            self.__dict_query_totalexectime.get(query, 0) + elapse_time)

    return result
def __validate_db_path(database_path):
    """Validate a database file path; the in-memory DB name is accepted as-is.

    :raises ValueError: If the path is null/empty.
    :raises TypeError: If the path is not a string.
    """
    if typepy.is_null_string(database_path):
        raise ValueError("null path")

    if database_path == MEMORY_DB_NAME:
        return

    try:
        pathvalidate.validate_filename(os.path.basename(database_path))
    except AttributeError:
        # os.path.basename raises AttributeError for non-string input
        err_msg = "database path must be a string: actual={}".format(
            type(database_path))
        raise TypeError(err_msg)
def parse(self, ping_message):
    """
    Parse ping command output.

    Args:
        ping_message (str or :py:class:`~pingparsing.PingResult`):
            ``ping`` command output.

    Returns:
        :py:class:`~pingparsing.PingStats`: Parsed result.
    """
    try:
        # accept PingResult instance as an input
        if typepy.is_not_null_string(ping_message.stdout):
            ping_message = ping_message.stdout
    except AttributeError:
        # plain string input: no .stdout attribute
        pass

    logger.debug("parsing ping result: {}".format(ping_message))

    self.__parser = NullPingParser()

    if typepy.is_null_string(ping_message):
        logger.debug("ping_message is empty")
        self.__stats = PingStats()

        return self.__stats

    ping_lines = _to_unicode(ping_message).splitlines()
    parser_class_list = (
        LinuxPingParser,
        WindowsPingParser,
        MacOsPingParser,
        AlpineLinuxPingParser,
    )

    # try each OS-specific parser in order until one succeeds
    for parser_class in parser_class_list:
        self.__parser = parser_class()
        try:
            self.__stats = self.__parser.parse(ping_lines)
            return self.__stats
        except ParseError as e:
            # a missing header just means "wrong OS parser": try the next one
            if e.reason != ParseErrorReason.HEADER_NOT_FOUND:
                raise e
        except pp.ParseException:
            pass

    # no parser matched: fall back to the null parser / empty stats
    self.__parser = NullPingParser()

    return self.__stats
def url(ctx, url, format_name, encoding, proxy):
    """
    Scrape tabular data from a URL and convert data to a SQLite database file.
    """
    if typepy.is_empty_sequence(url):
        sys.exit(ExitCode.NO_INPUT)

    initialize_log_handler(ctx.obj[Context.LOG_LEVEL])
    logger = make_logger("{:s} url".format(PROGRAM_NAME), ctx.obj[Context.LOG_LEVEL])

    try:
        app_configs = app_config_mgr.load()
    except ValueError as e:
        # unreadable/invalid app config: continue with empty defaults
        logger.debug(msgfy.to_debug_message(e))
        app_configs = {}

    # fall back to configured defaults when CLI options are not supplied
    if typepy.is_empty_sequence(encoding):
        encoding = app_configs.get(ConfigKey.DEFAULT_ENCODING)
        logger.debug("use default encoding: {}".format(encoding))

    if typepy.is_null_string(proxy):
        proxy = app_configs.get(ConfigKey.PROXY_SERVER)

    convert_configs = load_convert_config(
        logger, ctx.obj[Context.CONVERT_CONFIG], subcommand="url")

    con, is_create_db = create_database(
        ctx.obj[Context.OUTPUT_PATH], ctx.obj[Context.DUP_DATABASE])
    converter = UrlConverter(
        logger=logger,
        con=con,
        symbol_replace_value=ctx.obj[Context.SYMBOL_REPLACE_VALUE],
        add_pri_key_name=ctx.obj[Context.ADD_PRIMARY_KEY_NAME],
        convert_configs=convert_configs,
        index_list=ctx.obj.get(Context.INDEX_LIST),
        is_type_inference=ctx.obj[Context.TYPE_INFERENCE],
        is_type_hint_header=ctx.obj[Context.TYPE_HINT_HEADER],
        verbosity_level=ctx.obj.get(Context.VERBOSITY_LEVEL),
        format_name=format_name,
        encoding=encoding,
        proxy=proxy,
    )

    converter.convert(url)

    # exit code reflects conversion success/failure
    sys.exit(finalize(con, converter, is_create_db))
def __init__(self, tabledata, dup_col_handler="error", is_type_inference=True):
    """Initialize the sanitizer and cache upper-cased header names.

    :raises NameValidationError: If the table data has no table name.
    """
    super(SQLiteTableDataSanitizer, self).__init__(tabledata)

    if typepy.is_null_string(tabledata.table_name):
        raise NameValidationError("table_name is empty")

    self.__upper_headers = []
    for header in self._tabledata.headers:
        if not header:
            continue

        try:
            upper_header = header.upper()
        except AttributeError:
            # non-string header (e.g. an int): coerce to text first
            upper_header = six.text_type(header).upper()

        self.__upper_headers.append(upper_header)

    self.__dup_col_handler = dup_col_handler
    self.__is_type_inference = is_type_inference
def make_select(cls, select, table, where=None, extra=None):
    """
    Make SELECT query.

    :param str select: Attribute for SELECT query.
    :param str table: Table name of executing the query.
    :param str where:
        Add a WHERE clause to execute query, if the value is not |None|.
    :param extra extra:
        Add additional clause to execute query, if the value is not |None|.
    :return: Query of SQLite.
    :rtype: str
    :raises ValueError: ``select`` is empty string.
    :raises simplesqlite.InvalidTableNameError:
        |raises_validate_table_name|

    :Examples:
        >>> from simplesqlite.sqlquery import SqlQuery
        >>> SqlQuery.make_select(select="value", table="example")
        'SELECT value FROM example'
        >>> SqlQuery.make_select(select="value", table="example",
        ...                      where=SqlQuery.make_where("key", 1))
        'SELECT value FROM example WHERE key = 1'
        >>> SqlQuery.make_select(select="value", table="example",
        ...                      where=SqlQuery.make_where("key", 1),
        ...                      extra="ORDER BY value")
        'SELECT value FROM example WHERE key = 1 ORDER BY value'
    """

    validate_table_name(table)

    if typepy.is_null_string(select):
        raise ValueError("SELECT query is null")

    query_items = [
        "SELECT {:s}".format(select),
        "FROM {:s}".format(cls.to_table_str(table)),
    ]
    if typepy.is_not_null_string(where):
        query_items.append("WHERE {:s}".format(where))
    if typepy.is_not_null_string(extra):
        query_items.append(extra)

    return " ".join(query_items)
def has_attr(self, table_name, attr_name):
    """
    :param str table_name: Table name that the attribute exists.
    :param str attr_name: Attribute name to be tested.
    :return: |True| if the table has the attribute.
    :rtype: bool
    :raises simplesqlite.TableNotFoundError:
        |raises_verify_table_existence|

    :Sample Code:
        .. code:: python

            import simplesqlite

            table_name = "sample_table"
            con = simplesqlite.SimpleSQLite("sample.sqlite", "w")
            con.create_table_from_data_matrix(
                table_name=table_name,
                attr_name_list=["attr_a", "attr_b"],
                data_matrix=[[1, "a"], [2, "b"]])

            print(con.has_attr(table_name, "attr_a"))
            print(con.has_attr(table_name, "not_existing"))
            try:
                print(con.has_attr("not_existing", "attr_a"))
            except simplesqlite.TableNotFoundError as e:
                print(e)
    :Output:
        .. parsed-literal::

            True
            False
            'not_existing' table not found in /tmp/sample.sqlite
    """

    self.verify_table_existence(table_name)

    # a null/empty attribute name can never exist
    if typepy.is_null_string(attr_name):
        return False

    return attr_name in self.get_attr_name_list(table_name)
def validate_access_permission(self, valid_permission_list):
    """
    :param valid_permission_list:
        List of permissions that access is allowed.
    :type valid_permission_list: |list|/|tuple|
    :raises ValueError: If the |attr_mode| is invalid.
    :raises IOError:
        If the |attr_mode| not in the ``valid_permission_list``.
    :raises simplesqlite.NullDatabaseConnectionError:
        |raises_check_connection|
    """

    self.check_connection()

    if typepy.is_null_string(self.mode):
        raise ValueError("mode is not set")

    if self.mode in valid_permission_list:
        return

    raise IOError(
        "invalid access: expected-mode='{}', current-mode='{}'".format(
            "' or '".join(valid_permission_list), self.mode))
def __setup_ifb(self):
    """Create and wire up an IFB device to shape incoming traffic.

    Returns 0 when no setup is needed, -1 when the IFB device name is
    unset, otherwise the OR-ed return codes of the executed commands.
    """
    if self.direction != TrafficDirection.INCOMING:
        return 0

    if typepy.is_null_string(self.ifb_device):
        return -1

    result = 0

    result |= spr.SubprocessRunner("modprobe ifb").run()

    # creating the link / ingress qdisc is idempotent: "already exists"
    # output is tolerated via run_command_helper
    result |= run_command_helper(
        "ip link add {:s} type ifb".format(self.ifb_device),
        self.REGEXP_FILE_EXISTS,
        self.EXISTS_MSG_TEMPLATE.format(
            "failed to add ip link: ip link already exists."))

    result |= spr.SubprocessRunner(
        "ip link set dev {:s} up".format(self.ifb_device)).run()

    base_command = "tc qdisc add"
    result |= run_command_helper(
        "{:s} dev {:s} ingress".format(base_command, self.__device),
        self.REGEXP_FILE_EXISTS,
        self.EXISTS_MSG_TEMPLATE.format(
            "failed to '{:s}': ingress qdisc already exists.".format(
                base_command)))

    # redirect all ingress traffic of the real device to the IFB device
    result |= spr.SubprocessRunner(" ".join([
        "tc filter add",
        "dev {:s}".format(self.__device),
        "parent ffff: protocol {:s} u32 match u32 0 0".format(
            self.protocol),
        "flowid {:x}:".format(self.__get_device_qdisc_major_id()),
        "action mirred egress redirect",
        "dev {:s}".format(self.ifb_device),
    ])).run()

    return result
def test_dst_net_exclude_dst_network(self, device_option, dst_host_option, transmitter, pingparser):
    """Integration test: --exclude-dst-network should remove the added
    latency for the destination's /24 network (requires real device/host).
    """
    if device_option is None:
        pytest.skip("device option is null")
    if typepy.is_null_string(dst_host_option):
        pytest.skip("destination host is null")

    delay = 100  # [ms] added by tcset

    for tc_target in [device_option]:
        execute_tcdel(tc_target)
        transmitter.destination = dst_host_option

        # w/ latency tc ---
        runner_helper([
            Tc.Command.TCSET, tc_target, "--delay", "{:d}ms".format(delay)
        ])

        ping_result = transmitter.ping()
        assert ping_result.returncode == 0
        with_tc_rtt_avg = pingparser.parse(ping_result).rtt_avg

        # exclude certain network ---
        # builds the destination's /24 network, e.g. 192.168.1.0/24
        runner_helper(" ".join([
            Tc.Command.TCSET, tc_target,
            "--exclude-dst-network {:s}/24".format(
                ".".join(dst_host_option.split(".")[:3] + ["0"])),
            "--delay {:d}ms".format(delay),
            "--overwrite",
        ]))
        without_tc_rtt_avg = pingparser.parse(transmitter.ping()).rtt_avg

        # assertion ---
        rtt_diff = with_tc_rtt_avg - without_tc_rtt_avg
        assert rtt_diff > (delay * ASSERT_MARGIN)

        # finalize ---
        execute_tcdel(tc_target)
def __set_pre_network_filter(self):
    """Add a catch-all u32 filter that routes otherwise-unmatched traffic."""
    if self._is_use_iptables():
        return 0

    has_network = not typepy.is_null_string(self._tc_obj.network)
    has_port = typepy.type.Integer(self._tc_obj.port).is_type()

    if has_network or has_port:
        # a specific rule exists: default traffic goes to minor-id 2
        flowid = "{:s}:2".format(self._tc_obj.qdisc_major_id_str)
    else:
        flowid = "{:s}:{:d}".format(
            self._tc_obj.qdisc_major_id_str, self.get_qdisc_minor_id())

    return SubprocessRunner(" ".join([
        "tc filter add",
        self.dev,
        "protocol ip",
        "parent {:s}:".format(self._tc_obj.qdisc_major_id_str),
        "prio 2 u32 match ip {:s} {:s}".format(
            self._get_network_direction_str(), ANYWHERE_NETWORK),
        "flowid {:s}".format(flowid),
    ])).run()
def test_dst_net_latency_distro(self, device_option, dst_host_option, transmitter, pingparser, delay, delay_distro):
    """Integration test: tcset --delay/--delay-distro should raise both
    the average RTT and its deviation by roughly the configured amounts
    (requires a real device and destination host; skipped otherwise).
    """
    if typepy.is_null_string(dst_host_option):
        pytest.skip("destination host is null")

    execute_tcdel(device_option)
    transmitter.destination_host = dst_host_option

    # w/o latency tc ---
    ping_result = transmitter.ping()
    assert ping_result.returncode == 0
    pingparser.parse(ping_result.stdout)
    without_tc_rtt_avg = pingparser.rtt_avg
    without_tc_rtt_mdev = pingparser.rtt_mdev

    # w/ latency tc ---
    assert SubprocessRunner([
        Tc.Command.TCSET,
        "--device {:s}".format(device_option),
        "--delay {:d}ms".format(delay),
        "--delay-distro {:d}ms".format(delay_distro),
    ]).run() == 0

    ping_result = transmitter.ping()
    assert ping_result.returncode == 0
    pingparser.parse(ping_result.stdout)
    with_tc_rtt_avg = pingparser.rtt_avg
    with_tc_rtt_mdev = pingparser.rtt_mdev

    # assertion ---
    # half the configured value as a tolerance against network jitter
    rtt_diff = with_tc_rtt_avg - without_tc_rtt_avg
    assert rtt_diff > (delay / 2.0)

    rtt_diff = with_tc_rtt_mdev - without_tc_rtt_mdev
    assert rtt_diff > (delay_distro / 2.0)

    # finalize ---
    execute_tcdel(device_option)
def add_filter(self):
    """Build and run a ``tc filter add`` command for the current rule."""
    command_items = [
        "tc filter add",
        self.dev,
        "protocol {:s}".format(self._tc_obj.protocol),
        "parent {:s}:".format(self._tc_obj.qdisc_major_id_str),
        "prio 1",
    ]

    if self._is_use_iptables():
        # iptables mode: match on the fwmark instead of u32 selectors
        command_items.append("handle {:d} fw".format(
            self._get_unique_mangle_mark_id()))
    else:
        if typepy.is_null_string(self._tc_obj.network):
            network = get_anywhere_network(self._tc_obj.ip_version)
        else:
            network = self._tc_obj.network

        command_items.extend([
            "u32",
            "match {:s} {:s} {:s}".format(
                self._tc_obj.protocol_match,
                self._get_network_direction_str(),
                network),
        ])

        if self._tc_obj.src_port:
            command_items.append("match {:s} sport {:d} 0xffff".format(
                self._tc_obj.protocol_match, self._tc_obj.src_port))

        if self._tc_obj.dst_port:
            command_items.append("match {:s} dport {:d} 0xffff".format(
                self._tc_obj.protocol_match, self._tc_obj.dst_port))

    command_items.append("flowid {:s}:{:d}".format(
        self._tc_obj.qdisc_major_id_str, self.get_qdisc_minor_id()))

    return subprocrunner.SubprocessRunner(" ".join(command_items)).run()
def to_table_data(self):
    """Yield |TableData| built from the loaded source.

    When the loader does not provide a header list, the first row of
    the source data is used as the header; in that case every item of
    that row must be a non-empty string.

    :raises DataError:
        If the header row contains empty items or there are no data rows.
    """
    if typepy.is_empty_sequence(self._loader.header_list):
        header_list = self._source_data[0]

        if any([typepy.is_null_string(header) for header in header_list]):
            # BUGFIX: added separating spaces — the sentences were
            # previously concatenated ("item.all of ... name.actual=")
            raise DataError(
                "the first line includes empty string item. "
                "all of the items should contain header name. "
                "actual={}".format(header_list))

        data_matrix = self._source_data[1:]
    else:
        header_list = self._loader.header_list
        data_matrix = self._source_data

    if not data_matrix:
        raise DataError("data row must be greater or equal than one")

    self._loader.inc_table_count()

    yield TableData(
        self._loader.make_table_name(), header_list, data_matrix,
        dp_extractor=self._loader.dp_extractor)
def __set_pre_network_filter(self):
    """Add a default (match-anywhere) filter for otherwise-unmatched traffic."""
    if self._is_use_iptables():
        return 0

    is_unrestricted = all([
        typepy.is_null_string(self._tc_obj.dst_network),
        not typepy.type.Integer(self._tc_obj.dst_port).is_type(),
    ])

    if is_unrestricted:
        flowid = "{:s}:{:d}".format(
            self._tc_obj.qdisc_major_id_str, self._get_qdisc_minor_id())
    else:
        # a specific dst network/port rule exists: default goes to minor-id 2
        flowid = "{:s}:2".format(self._tc_obj.qdisc_major_id_str)

    return SubprocessRunner(" ".join([
        self._tc_obj.get_tc_command(Tc.Subcommand.FILTER),
        self._dev,
        "protocol {:s}".format(self._tc_obj.protocol),
        "parent {:s}:".format(self._tc_obj.qdisc_major_id_str),
        "prio 2 u32 match {:s} {:s} {:s}".format(
            self._tc_obj.protocol,
            self._get_network_direction_str(),
            get_anywhere_network(self._tc_obj.ip_version)),
        "flowid {:s}".format(flowid),
    ])).run()
def validate_bandwidth_rate(self):
    """Validate the configured bandwidth-rate string; no-op when unset.

    :raises InvalidParameterError:
        If the rate is not a positive number or exceeds the device limit.
    """
    if typepy.is_null_string(self.__bandwidth_rate):
        return

    # convert bandwidth string [K/M/G bit per second] to a number
    rate_kbits = Humanreadable(
        self.__bandwidth_rate, kilo_size=KILO_SIZE).to_kilo_bit()

    if not RealNumber(rate_kbits).is_type():
        raise InvalidParameterError(
            "bandwidth_rate must be number: actual={}".format(rate_kbits))

    if rate_kbits <= 0:
        raise InvalidParameterError(
            "bandwidth_rate must be greater than zero: actual={}".format(
                rate_kbits))

    limit_kbits = get_no_limit_kbits(self.get_tc_device())
    if rate_kbits > limit_kbits:
        raise InvalidParameterError(
            "bandwidth_rate must be less than {}: actual={}".format(
                limit_kbits, rate_kbits))
def __add_mangle_mark(self, mark_id):
    """Register an iptables mangle-mark entry for the current direction.

    :param int mark_id: Mark id assigned to the new entry.
    :raises ValueError: If the traffic direction is neither OUTGOING
        nor INCOMING.
    """
    dst_network = None
    src_network = None

    if self._tc_obj.direction == TrafficDirection.OUTGOING:
        dst_network = self._tc_obj.dst_network
        if typepy.is_null_string(self._tc_obj.src_network):
            chain = "OUTPUT"
        else:
            # marking traffic from a specific source requires PREROUTING
            src_network = self._tc_obj.src_network
            chain = "PREROUTING"
    elif self._tc_obj.direction == TrafficDirection.INCOMING:
        src_network = self._tc_obj.dst_network
        chain = "INPUT"
    else:
        # BUGFIX: previously fell through and raised NameError on ``chain``
        raise ValueError(
            "unknown traffic direction: {}".format(self._tc_obj.direction))

    self._tc_obj.iptables_ctrl.add(
        IptablesMangleMarkEntry(
            ip_version=self._tc_obj.ip_version,
            mark_id=mark_id,
            source=src_network,
            destination=dst_network,
            chain=chain,
        ))
def validate_bandwidth_rate(self):
    """Validate the configured bandwidth-rate string; no-op when unset.

    :raises InvalidParameterError:
        If the rate is not a positive number or exceeds the device limit.
    """
    if typepy.is_null_string(self.__bandwidth_rate):
        return

    # convert bandwidth string [K/M/G bit per second] to a number
    rate_kbits = Humanreadable(
        self.__bandwidth_rate, kilo_size=KILO_SIZE).to_kilo_bit()

    if not RealNumber(rate_kbits).is_type():
        raise InvalidParameterError(
            "bandwidth_rate must be a number", value=rate_kbits)

    if rate_kbits <= 0:
        raise InvalidParameterError(
            "bandwidth_rate must be greater than zero", value=rate_kbits)

    limit_kbits = get_no_limit_kbits(self.get_tc_device())
    if rate_kbits > limit_kbits:
        raise InvalidParameterError(
            "exceed bandwidth rate limit",
            value="{} kbps".format(rate_kbits),
            expected="less than {} kbps".format(limit_kbits))
def _fetch_source(self, loader_class):
    """Fetch the remote resource and store it as text or as a temp file.

    :raises ValueError: If the loader's source type is unsupported.
    :raises HTTPError: If the HTTP request fails.
    """
    loader_source_type = loader_class("").source_type
    if loader_source_type not in [SourceType.TEXT, SourceType.FILE]:
        raise ValueError("unknown loader source: type={}".format(loader_source_type))

    r = requests.get(self.__url, proxies=self.__proxies)
    try:
        r.raise_for_status()
    except requests.HTTPError as e:
        raise HTTPError(e)

    if typepy.is_null_string(self._encoding):
        # fall back to the encoding detected by requests
        self._encoding = r.encoding

    logger.debug(
        "\n".join(
            [
                "_fetch_source: ",
                " source-type={}".format(loader_source_type),
                " content-type={}".format(r.headers["Content-Type"]),
                " encoding={}".format(self._encoding),
                " status-code={}".format(r.status_code),
            ]
        )
    )

    if loader_source_type == SourceType.TEXT:
        self._source = r.text
    elif loader_source_type == SourceType.FILE:
        # binary payload: write it to a temporary .xlsx file
        self.__temp_dir_path = tempfile.mkdtemp()
        self._source = "{:s}.xlsx".format(
            make_temp_file_path_from_url(self.__temp_dir_path, self.__url)
        )
        with open(self._source, "wb") as f:
            f.write(r.content)
def check_connection(self):
    """
    :raises simplesqlite.NullDatabaseConnectionError:
        |raises_check_connection|

    :Examples:
        .. code:: python

            import simplesqlite

            con = simplesqlite.SimpleSQLite("sample.sqlite", "w")

            print("---- connected to a database ----")
            con.check_connection()

            print("---- disconnected from a database ----")
            con.close()
            try:
                con.check_connection()
            except simplesqlite.NullDatabaseConnectionError as e:
                print(e)

        .. parsed-literal::

            ---- connected to a database ----
            ---- disconnected from a database ----
            null database connection
    """

    if self.connection is None:
        raise NullDatabaseConnectionError("null database connection")

    if typepy.is_null_string(self.database_path):
        raise NullDatabaseConnectionError("null database file path")
def check_connection(self):
    """
    :raises simplesqlite.NullDatabaseConnectionError:
        |raises_check_connection|

    :Sample Code:
        .. code:: python

            import simplesqlite

            con = simplesqlite.SimpleSQLite("sample.sqlite", "w")

            print("---- connected to a database ----")
            con.check_connection()

            print("---- disconnected from a database ----")
            con.close()
            try:
                con.check_connection()
            except simplesqlite.NullDatabaseConnectionError as e:
                print(e)
    :Output:
        .. code-block:: none

            ---- connected to a database ----
            ---- disconnected from a database ----
            null database connection
    """

    if self.connection is None:
        raise NullDatabaseConnectionError("null database connection")

    if typepy.is_null_string(self.database_path):
        raise NullDatabaseConnectionError("null database file path")
def test_dst_net_uniform_latency(self, device_option, dst_host_option, transmitter, pingparser, shaping_algo, delay):
    """Integration test: tcset --delay with the given shaping algorithm
    should raise the average RTT by roughly ``delay`` (requires real
    device and destination host; skipped otherwise).
    """
    if device_option is None:
        pytest.skip("device option is null")
    if typepy.is_null_string(dst_host_option):
        pytest.skip("destination host is null")

    execute_tcdel(device_option)
    transmitter.destination_host = dst_host_option

    # w/o latency tc ---
    ping_result = transmitter.ping()
    assert ping_result.returncode == 0
    pingparser.parse(ping_result)
    without_tc_rtt_avg = pingparser.rtt_avg

    # w/ latency tc ---
    assert SubprocessRunner([
        Tc.Command.TCSET,
        "--device {:s}".format(device_option),
        "--delay {}ms".format(delay),
        "--shaping-algo {:s}".format(shaping_algo),
    ]).run() == 0

    ping_result = transmitter.ping()
    assert ping_result.returncode == 0
    pingparser.parse(ping_result)
    with_tc_rtt_avg = pingparser.rtt_avg

    # assertion ---
    rtt_diff = with_tc_rtt_avg - without_tc_rtt_avg
    assert rtt_diff > (delay * ASSERT_MARGIN)

    # finalize ---
    execute_tcdel(device_option)
def sanitize_network(network):
    """
    :return: Network string
    :rtype: str
    :raises ValueError: if the network string is invalid.
    """
    import ipaddress

    if typepy.is_null_string(network):
        return ""

    if network.lower() == "anywhere":
        return ANYWHERE_NETWORK

    try:
        # a bare IPv4 address is normalized to a /32 network
        ipaddress.IPv4Address(six.text_type(network))
    except ipaddress.AddressValueError:
        # not a bare address: validate it as a network string instead
        ipaddress.IPv4Network(six.text_type(network))
        return network

    return network + "/32"
def _verify_table_name(self):
    """Raise when a table name is required but none is set."""
    if not self._is_require_table_name:
        return

    if typepy.is_null_string(self.table_name):
        raise EmptyTableNameError(
            "table_name must be a string, with at least one or more character."
        )
def __del__(self):
    """Remove the temporary directory created for downloaded files."""
    if typepy.is_null_string(self.__temp_dir_path):
        return

    import shutil

    # BUGFIX: os.removedirs() fails on a non-empty directory, and the
    # temp dir created by mkdtemp() holds the downloaded file — remove
    # the whole tree instead. Errors are ignored: never raise in __del__.
    shutil.rmtree(self.__temp_dir_path, ignore_errors=True)
    self.__temp_dir_path = None
def _validate_title(self):
    """Raise ValueError when no spreadsheet title has been set."""
    if typepy.is_null_string(self.title):
        raise ValueError("spreadsheet title is empty")
def _validate_table_name(self):
    """Validate that the table name is a non-empty string.

    :raises ValueError: If the table name is empty.
    :raises TypeError: If the table name is not string-like.
    """
    try:
        if typepy.is_null_string(self.table_name):
            raise ValueError("table name is empty")
    except (TypeError, AttributeError):
        # non-string values fail inside the null-string check
        raise TypeError("table_name must be a string")
def _sanitize_table_name(table_name):
    """Strip surrounding underscores from a template-expanded table name.

    :raises InvalidTableNameError: If the name is empty after replacement.
    """
    if typepy.is_null_string(table_name):
        raise InvalidTableNameError(
            "table name is empty after the template replacement")

    return table_name.strip("_")
def _get_table_directive(self):
    """Return the reST ``.. table::`` directive line for this writer."""
    if typepy.is_null_string(self.table_name):
        return ".. table:: \n"

    title = MultiByteStrDecoder(self.table_name).unicode_str

    return ".. table:: {}\n".format(title)
def is_invalid_param(rate, delay, loss, corrupt):
    """Return True when every shaping parameter is empty/unset."""
    return all(
        typepy.is_null_string(param)
        for param in (rate, delay, loss, corrupt)
    )
def is_valid_combination(row):
    """Return False only when every parameter in the row is empty."""
    return not all(typepy.is_null_string(param) for param in row)
def _verify_property(self) -> None:
    """Verify writer properties; the table name must be non-empty."""
    super()._verify_property()

    if typepy.is_null_string(self.table_name):
        raise EmptyTableNameError(
            "table_name must be a string of one or more characters")
def create_from_file_extension(cls, file_extension):
    """
    Create a table writer class instance from a file extension.
    Supported file extensions are as follows:

        ==================  ===================================
        Extension           Writer Class
        ==================  ===================================
        ``".csv"``          :py:class:`~.CsvTableWriter`
        ``".htm"``          :py:class:`~.HtmlTableWriter`
        ``".html"``         :py:class:`~.HtmlTableWriter`
        ``".js"``           :py:class:`~.JavaScriptTableWriter`
        ``".json"``         :py:class:`~.JsonTableWriter`
        ``".jsonl"``        :py:class:`~.JsonLinesTableWriter`
        ``".ltsv"``         :py:class:`~.LtsvTableWriter`
        ``".ldjson"``       :py:class:`~.JsonLinesTableWriter`
        ``".md"``           :py:class:`~.MarkdownTableWriter`
        ``".ndjson"``       :py:class:`~.JsonLinesTableWriter`
        ``".py"``           :py:class:`~.PythonCodeTableWriter`
        ``".rst"``          :py:class:`~.RstGridTableWriter`
        ``".tsv"``          :py:class:`~.TsvTableWriter`
        ``".xls"``          :py:class:`~.ExcelXlsTableWriter`
        ``".xlsx"``         :py:class:`~.ExcelXlsxTableWriter`
        ``".sqlite"``       :py:class:`~.SqliteTableWriter`
        ``".sqlite3"``      :py:class:`~.SqliteTableWriter`
        ``".tsv"``          :py:class:`~.TsvTableWriter`
        ``".toml"``         :py:class:`~.TomlTableWriter`
        ==================  ===================================

    :param str file_extension: File extension string (case insensitive).
    :return:
        Writer instance that coincides with the ``file_extension``.
    :rtype:
        :py:class:`~pytablewriter.writer._table_writer.TableWriterInterface`
    :raises pytablewriter.WriterNotFoundError:
        |WriterNotFoundError_desc| the file extension.
    """

    # accept both a bare extension (".md"/"md") and a full file path
    ext = os.path.splitext(file_extension)[1]
    if typepy.is_not_null_string(ext):
        file_extension = ext

    file_extension = file_extension.lstrip(".").lower()

    for table_format in TableFormat:
        if file_extension not in table_format.file_extensions:
            continue

        # skip formats that only claim the extension as a secondary one
        if table_format.format_attribute & FormatAttr.SECONDARY_EXT:
            continue

        return table_format.writer_class()

    raise WriterNotFoundError(
        "\n".join(
            [
                "{:s} (unknown file extension).".format(file_extension),
                "",
                "acceptable file extensions are: {}.".format(", ".join(cls.get_extensions())),
            ]
        )
    )
def _verify_property(self):
    """Verify writer properties; the table name must be non-empty."""
    super(PandasDataFrameWriter, self)._verify_property()

    if typepy.is_null_string(self.table_name):
        raise EmptyTableNameError(
            "table_name must be a string of one or more characters")
def url(ctx, url, format_name, output_path, encoding, proxy):
    """
    Scrape tabular data from a URL and convert data to a SQLite database file.
    """
    if typepy.is_empty_sequence(url):
        sys.exit(ExitCode.NO_INPUT)

    con = create_database(ctx, output_path)
    verbosity_level = ctx.obj.get(Context.VERBOSITY_LEVEL)
    schema_extractor = get_schema_extractor(con, verbosity_level)
    result_counter = ResultCounter()
    logger = make_logger("{:s} url".format(PROGRAM_NAME), ctx.obj[Context.LOG_LEVEL])

    # fall back to the configured proxy server when none is supplied
    if typepy.is_null_string(proxy):
        proxy = app_config_manager.load().get(ConfigKey.PROXY_SERVER)

    proxies = {
        "http": proxy,
        "https": proxy,
    }

    try:
        loader = create_url_loader(logger, url, format_name, encoding, proxies)
    except ptr.LoaderNotFoundError as e:
        try:
            # retry with the generic html loader as a fallback
            loader = create_url_loader(logger, url, "html", encoding, proxies)
        except ptr.LoaderNotFoundError as e:
            logger.error(e)
            sys.exit(ExitCode.FAILED_LOADER_NOT_FOUND)

    table_creator = TableCreator(logger=logger, dst_con=con)

    try:
        for tabledata in loader.load():
            logger.debug(u"loaded tabledata: {}".format(
                six.text_type(tabledata)))

            sqlite_tabledata = ptr.SQLiteTableDataSanitizer(
                tabledata).sanitize()

            try:
                table_creator.create(sqlite_tabledata, ctx.obj.get(Context.INDEX_LIST))
                result_counter.inc_success()
            except (ValueError) as e:
                # a single failed table does not abort the whole run
                logger.debug(u"url={}, message={}".format(url, str(e)))
                result_counter.inc_fail()
                continue

            logger.info(
                get_success_message(
                    verbosity_level, url,
                    schema_extractor.get_table_schema_text(
                        sqlite_tabledata.table_name).strip()))
    except ptr.InvalidDataError as e:
        logger.error(u"invalid data: url={}, message={}".format(url, str(e)))
        result_counter.inc_fail()

    write_completion_message(logger, output_path, result_counter)

    # process exit code reflects accumulated success/failure counts
    sys.exit(result_counter.get_return_code())
def test_normal(self, value, expected):
    # parametrized: is_null_string() should report the expected result
    assert is_null_string(value) == expected
def _get_table_directive(self):
    """Return the reST ``.. table::`` directive line, with the decoded
    table name appended when one is set."""
    if typepy.is_null_string(self.table_name):
        return ".. table:: \n"

    decoded_name = MultiByteStrDecoder(self.table_name).unicode_str

    return ".. table:: {}\n".format(decoded_name)
def __get_shaping_rule(self, device):
    """Collect traffic-shaping rules configured for *device*.

    Joins filter rows against qdisc and class rows stored in the
    internal database and returns a 2-tuple:
    ``(mapping of filter-key -> rule dict, list of rule dicts with keys)``.
    Returns ``({}, [])`` when *device* is empty.
    """
    if typepy.is_null_string(device):
        return ({}, [])

    self.__parse_device(device)
    where_dev_query = Where(Tc.Param.DEVICE, device)

    # each lookup tolerates a missing table (nothing configured yet)
    try:
        class_params = self.__con.select_as_dict(
            table_name=TcSubCommand.CLASS.value, where=where_dev_query)
    except TableNotFoundError:
        class_params = []

    try:
        filter_params = Filter.select(where=where_dev_query)
    except TableNotFoundError:
        filter_params = []

    shaping_rule_mapping = {}
    shaping_rules = []

    for filter_param in filter_params:
        filter_param = filter_param.as_dict()
        self.__logger.debug("{:s} param: {}".format(
            TcSubCommand.FILTER, filter_param))
        shaping_rule = {}

        filter_key, rule_with_keys = self.__get_filter_key(filter_param)
        if typepy.is_null_string(filter_key):
            self.__logger.debug(
                "empty filter key: {}".format(filter_param))
            continue

        # the qdisc parent id comes from the filter's flow id,
        # falling back to its class id
        qdisc_id = filter_param.get(Tc.Param.FLOW_ID)
        if qdisc_id is None:
            qdisc_id = filter_param.get(Tc.Param.CLASS_ID)

        try:
            qdisc_params = Qdisc.select(where=And(
                [where_dev_query, Where(Tc.Param.PARENT, qdisc_id)]))
        except TableNotFoundError:
            qdisc_params = []

        # merge matching qdisc parameters into the rule,
        # dropping bookkeeping columns
        for qdisc_param in qdisc_params:
            qdisc_param = qdisc_param.as_dict()
            self.__logger.debug("{:s} param: {}".format(
                TcSubCommand.QDISC, qdisc_param))

            if self.is_parse_filter_id:
                shaping_rule[Tc.Param.FILTER_ID] = filter_param.get(
                    Tc.Param.FILTER_ID)
            # shaping_rule[Tc.Param.PRIORITY] = filter_param.get(
            #     Tc.Param.PRIORITY)
            shaping_rule.update(
                self.__strip_param(
                    qdisc_param,
                    [
                        Tc.Param.DEVICE, Tc.Param.PARENT, Tc.Param.HANDLE,
                        "direct_qlen"
                    ],
                ))

        # merge class parameters whose class id matches this filter
        for class_param in class_params:
            self.__logger.debug("{:s} param: {}".format(
                TcSubCommand.CLASS, class_param))

            if class_param.get(Tc.Param.CLASS_ID) not in (
                    filter_param.get(Tc.Param.FLOW_ID),
                    filter_param.get(Tc.Param.CLASS_ID),
            ):
                continue

            if self.is_parse_filter_id:
                shaping_rule[Tc.Param.FILTER_ID] = filter_param.get(
                    Tc.Param.FILTER_ID)
            # shaping_rule[Tc.Param.PRIORITY] = filter_param.get(
            #     Tc.Param.PRIORITY)
            shaping_rule.update(
                self.__strip_param(class_param,
                                   [Tc.Param.DEVICE, Tc.Param.CLASS_ID]))

        if not shaping_rule:
            self.__logger.debug(
                "shaping rule not found for '{}'".format(filter_param))
            continue

        self.__logger.debug("shaping rule found: {} {}".format(
            filter_key, shaping_rule))

        rule_with_keys.update(shaping_rule)
        shaping_rules.append(rule_with_keys)
        shaping_rule_mapping[filter_key] = shaping_rule

    return (shaping_rule_mapping, shaping_rules)
def __get_shaping_rule(self, device):
    """Build a mapping of filter-key to shaping-rule dict for *device*.

    Reads FILTER/QDISC/CLASS rows from the internal database and joins
    qdisc/class parameters onto each filter. Returns ``{}`` when
    *device* is empty.
    """
    from simplesqlite.query import Where

    if typepy.is_null_string(device):
        return {}

    self.__parse_device(device)
    where_query = Where(Tc.Param.DEVICE, device)

    # each lookup tolerates a missing table (nothing configured yet)
    try:
        class_param_list = self.__con.select_as_dict(
            table_name=TcSubCommand.CLASS.value, where=where_query)
    except simplesqlite.TableNotFoundError:
        class_param_list = []

    try:
        filter_param_list = self.__con.select_as_dict(
            table_name=TcSubCommand.FILTER.value, where=where_query)
    except simplesqlite.TableNotFoundError:
        filter_param_list = []

    try:
        qdisc_param_list = self.__con.select_as_dict(
            table_name=TcSubCommand.QDISC.value, where=where_query)
    except simplesqlite.TableNotFoundError:
        qdisc_param_list = []

    shaping_rule_mapping = {}

    for filter_param in filter_param_list:
        self.__logger.debug("{:s} param: {}".format(
            TcSubCommand.FILTER, filter_param))
        shaping_rule = {}

        filter_key = self.__get_filter_key(filter_param)
        if typepy.is_null_string(filter_key):
            self.__logger.debug(
                "empty filter key: {}".format(filter_param))
            continue

        # merge qdisc parameters whose parent matches this filter's
        # flow/class id, dropping bookkeeping columns
        for qdisc_param in qdisc_param_list:
            self.__logger.debug("{:s} param: {}".format(
                TcSubCommand.QDISC, qdisc_param))

            if qdisc_param.get(Tc.Param.PARENT) not in (
                    filter_param.get(Tc.Param.FLOW_ID),
                    filter_param.get(Tc.Param.CLASS_ID),
            ):
                continue

            shaping_rule[Tc.Param.FILTER_ID] = filter_param.get(
                Tc.Param.FILTER_ID)
            # shaping_rule[Tc.Param.PRIORITY] = filter_param.get(
            #     Tc.Param.PRIORITY)
            shaping_rule.update(
                self.__strip_param(
                    qdisc_param,
                    [Tc.Param.DEVICE, Tc.Param.PARENT, Tc.Param.HANDLE]))

        # merge class parameters whose class id matches this filter
        for class_param in class_param_list:
            self.__logger.debug("{:s} param: {}".format(
                TcSubCommand.CLASS, class_param))

            if class_param.get(Tc.Param.CLASS_ID) not in (
                    filter_param.get(Tc.Param.FLOW_ID),
                    filter_param.get(Tc.Param.CLASS_ID),
            ):
                continue

            shaping_rule[Tc.Param.FILTER_ID] = filter_param.get(
                Tc.Param.FILTER_ID)
            # shaping_rule[Tc.Param.PRIORITY] = filter_param.get(
            #     Tc.Param.PRIORITY)
            shaping_rule.update(
                self.__strip_param(class_param,
                                   [Tc.Param.DEVICE, Tc.Param.CLASS_ID]))

        if not shaping_rule:
            self.__logger.debug(
                "shaping rule not found for '{}'".format(filter_param))
            continue

        self.__logger.debug("shaping rule found: {} {}".format(
            filter_key, shaping_rule))

        shaping_rule_mapping[filter_key] = shaping_rule

    return shaping_rule_mapping
def parse(self, device, text):
    """Parse ``tc filter`` command output for *device*.

    Walks *text* line by line, extracting mangle marks, flow ids, and
    IPv4/IPv6 filter parameters into a list of dicts. Parsed rows are
    also written to the internal database table. Returns the list of
    parsed filter dicts (empty when *text* is empty).
    """
    self._clear()

    if typepy.is_null_string(text):
        return []

    filter_data_matrix = []
    self.__buffer = self._to_unicode(text).splitlines()
    self.__parse_idx = 0

    while self.__parse_idx < len(self.__buffer):
        line = self._to_unicode(self.__buffer[self.__parse_idx].strip())
        self.__parse_idx += 1

        if typepy.is_null_string(line):
            continue

        self.__device = device

        # a mangle-mark line produces a standalone row and resets state
        try:
            self.__parse_mangle_mark(line)
        except pp.ParseException:
            logger.debug("failed to parse mangle: {}".format(line))
        else:
            filter_data_matrix.append({
                Tc.Param.DEVICE: self.__device,
                Tc.Param.CLASS_ID: self.__classid,
                Tc.Param.HANDLE: self.__handle,
            })
            self._clear()
            continue

        # snapshot of the filter accumulated so far; if this line starts
        # a new flow id, the snapshot is flushed before re-parsing
        tc_filter = self.__get_filter()

        try:
            self.__parse_flow_id(line)
            self.__parse_protocol(line)
            self.__parse_priority(line)
            self.__parse_filter_id(line)

            if tc_filter.get(Tc.Param.FLOW_ID):
                logger.debug("store filter: {}".format(tc_filter))
                filter_data_matrix.append(tc_filter)
                self._clear()
                # NOTE(review): re-parse the same line after the reset so
                # the fresh state carries this line's values — presumably
                # intentional; confirm before simplifying
                self.__parse_flow_id(line)
                self.__parse_protocol(line)
                self.__parse_priority(line)
                self.__parse_filter_id(line)

            continue
        except pp.ParseException:
            logger.debug("failed to parse flow id: {}".format(line))

        # fall back to parsing address/port match parameters
        try:
            if self.__ip_version == 4:
                self.__parse_filter_ipv4(line)
            elif self.__ip_version == 6:
                self.__parse_filter_ipv6(line)
            else:
                raise ValueError("unknown ip version: {}".format(
                    self.__ip_version))
        except pp.ParseException:
            logger.debug("failed to parse filter: {}".format(line))

    # flush the trailing filter that was never followed by a new flow id
    if self.__flow_id:
        filter_data_matrix.append(self.__get_filter())

    if filter_data_matrix:
        self.__con.create_table_from_data_matrix(
            table_name=self._tc_subcommand,
            attr_name_list=list(self.__get_filter()),
            data_matrix=filter_data_matrix)

    logger.debug("tc {:s} parse result: {}".format(
        self._tc_subcommand, json.dumps(filter_data_matrix, indent=4)))

    return filter_data_matrix
def create_from_file_extension(cls, file_extension):
    """
    Create a table writer class instance from a file extension.
    Supported file extensions are as follows:

        ==================  ===================================
        Extension           Writer Class
        ==================  ===================================
        ``".csv"``          :py:class:`~.CsvTableWriter`
        ``".htm"``          :py:class:`~.HtmlTableWriter`
        ``".html"``         :py:class:`~.HtmlTableWriter`
        ``".js"``           :py:class:`~.JavaScriptTableWriter`
        ``".json"``         :py:class:`~.JsonTableWriter`
        ``".jsonl"``        :py:class:`~.JsonLinesTableWriter`
        ``".ltsv"``         :py:class:`~.LtsvTableWriter`
        ``".ldjson"``       :py:class:`~.JsonLinesTableWriter`
        ``".md"``           :py:class:`~.MarkdownTableWriter`
        ``".ndjson"``       :py:class:`~.JsonLinesTableWriter`
        ``".py"``           :py:class:`~.PythonCodeTableWriter`
        ``".rst"``          :py:class:`~.RstGridTableWriter`
        ``".tsv"``          :py:class:`~.TsvTableWriter`
        ``".xls"``          :py:class:`~.ExcelXlsTableWriter`
        ``".xlsx"``         :py:class:`~.ExcelXlsxTableWriter`
        ``".sqlite"``       :py:class:`~.SqliteTableWriter`
        ``".sqlite3"``      :py:class:`~.SqliteTableWriter`
        ``".toml"``         :py:class:`~.TomlTableWriter`
        ==================  ===================================

    :param str file_extension:
        File extension string (case insensitive).
    :return:
        Writer instance that coincides with the ``file_extension``.
    :rtype:
        :py:class:`~pytablewriter.writer._table_writer.TableWriterInterface`
    :raises pytablewriter.WriterNotFoundError:
        |WriterNotFoundError_desc| the file extension.
    """

    # accept both a bare extension (".csv") and a path/filename
    # ("foo.csv"): when splitext yields a suffix, use it; otherwise the
    # whole argument is already the extension
    # (replaces a dead no-op ``file_extension = file_extension`` branch)
    ext = os.path.splitext(file_extension)[1]
    if typepy.is_not_null_string(ext):
        file_extension = ext

    file_extension = file_extension.lstrip(".").lower()

    for table_format in TableFormat:
        if file_extension not in table_format.file_extension_list:
            continue

        # skip formats that only claim this extension as a secondary one
        if table_format.format_attribute & FormatAttr.SECONDARY_EXT:
            continue

        return table_format.writer_class()

    raise WriterNotFoundError(
        "\n".join(
            [
                "{:s} (unknown file extension).".format(file_extension),
                "",
                "acceptable file extensions are: {}.".format(
                    ", ".join(cls.get_extension_list())
                ),
            ]
        )
    )
def __validate_ping_param(self):
    """Validate the parameters required before executing ping.

    Raises:
        ValueError: If the destination host is not set.
    """
    host = self.destination_host
    if typepy.is_null_string(host):
        raise ValueError("required destination_host")

    self.__validate_count()
    self.__validate_interface()