def make_select(cls, select, table, where=None, extra=None):
    """
    Make a SELECT query.

    :param str select: Attribute for the SELECT query.
    :param str table: Table name of the query to execute.
    :param str where: WHERE clause appended to the query if not ``None``.
    :param extra extra: Additional clause appended to the query if not ``None``.
    :return: Query of SQLite.
    :rtype: str
    :raises ValueError: ``select`` is empty string.

    .. seealso::
        :py:func:`validate_table_name() <simplesqlite.validate_table_name>`
    """

    validate_table_name(table)
    if dataproperty.is_empty_string(select):
        raise ValueError("SELECT query is null")

    query_item_list = [
        "SELECT " + select,
        "FROM " + cls.to_table_str(table),
    ]

    if dataproperty.is_not_empty_string(where):
        query_item_list.append("WHERE " + where)
    if dataproperty.is_not_empty_string(extra):
        query_item_list.append(extra)

    return " ".join(query_item_list)
def __get_filter_key(self, filter_param):
    # Build a human-readable key (e.g. "network=..., port=...") that
    # identifies a parsed tc filter entry.
    network_format = "network={:s}"
    port_format = "port={:d}"
    key_item_list = []

    if "handle" in filter_param:
        # fw filter: the handle is an iptables MARK id; look up the
        # matching mangle rule to recover network/source/protocol.
        handle = filter_param.get("handle")
        IntegerType(handle).validate()
        handle = int(handle)

        for mangle in IptablesMangleController.parse():
            if mangle.mark_id != handle:
                continue

            key_item_list.append(network_format.format(mangle.destination))
            if dataproperty.is_not_empty_string(mangle.source):
                key_item_list.append("source={:s}".format(mangle.source))
            key_item_list.append("protocol={}".format(mangle.protocol))

            break
        else:
            # NOTE(review): if parse() yields nothing, `mangle` is unbound
            # here and this raise would fail with NameError — confirm.
            raise ValueError("mangle mark not found: {}".format(mangle))
    else:
        # u32 filter: matching criteria are present directly in the params.
        if dataproperty.is_not_empty_string(filter_param.get("network")):
            key_item_list.append(
                network_format.format(filter_param.get("network")))
        if IntegerType(filter_param.get("port")).is_type():
            key_item_list.append(
                port_format.format(filter_param.get("port")))

    return ", ".join(key_item_list)
def create_table_from_json(self, json_source, table_name=""):
    """
    Create a table from a JSON file/text.

    :param str json_source: Path to the JSON file or JSON text.
    :param str table_name: Table name to create.

    .. seealso::
        :py:meth:`.loader.JsonTableFileLoader.load`
        :py:meth:`.loader.JsonTableTextLoader.load`
    """

    from .loader import JsonTableFileLoader
    from .loader import JsonTableTextLoader

    # First treat the source as a file path; on I/O failure fall back
    # to interpreting it as raw JSON text.
    file_loader = JsonTableFileLoader(json_source)
    if dataproperty.is_not_empty_string(table_name):
        file_loader.table_name = table_name

    try:
        for tabledata in file_loader.load():
            self.create_table_from_tabledata(tabledata)
        return
    except IOError:
        pass

    text_loader = JsonTableTextLoader(json_source)
    if dataproperty.is_not_empty_string(table_name):
        text_loader.table_name = table_name

    for tabledata in text_loader.load():
        self.create_table_from_tabledata(tabledata)
def __get_filter_key(self, filter_param):
    # Compose a key string that uniquely describes a tc filter entry.
    network_format = "network={:s}"
    port_format = "port={:d}"
    key_item_list = []

    if "handle" in filter_param:
        # The filter references an iptables mark (fw classifier):
        # resolve the mark id to the underlying mangle rule.
        handle = filter_param.get("handle")
        IntegerType(handle).validate()
        handle = int(handle)

        for mangle in IptablesMangleController.parse():
            if mangle.mark_id != handle:
                continue

            key_item_list.append(network_format.format(mangle.destination))
            if dataproperty.is_not_empty_string(mangle.source):
                key_item_list.append("source={:s}".format(mangle.source))
            key_item_list.append("protocol={}".format(mangle.protocol))

            break
        else:
            # NOTE(review): `mangle` may be unbound when parse() is empty.
            raise ValueError("mangle mark not found: {}".format(mangle))
    else:
        # Plain u32 filter: read criteria straight from the parsed dict.
        if dataproperty.is_not_empty_string(filter_param.get("network")):
            key_item_list.append(network_format.format(filter_param.get("network")))
        if IntegerType(filter_param.get("port")).is_type():
            key_item_list.append(port_format.format(filter_param.get("port")))

    return ", ".join(key_item_list)
def create_table_from_csv(
        self, csv_source, table_name="", attribute_name_list=(),
        delimiter=",", quotechar='"', encoding="utf-8"):
    """
    Create a table from a CSV file/text.

    :param str csv_source: Path to the CSV file or CSV text.
    :param str table_name:
        Table name to create.
        Use csv file basename as the table name if the value is empty.
    :param list attribute_name_list:
        Attribute names of the table.
        Use the first line of the csv file as attribute list
        if attribute_name_list is empty.
    :param str delimiter: A one-character string used to separate fields.
    :param str quotechar:
        A one-character string used to quote fields containing special
        characters, such as the delimiter or quotechar,
        or which contain new-line characters.
    :param str encoding: csv file encoding.
    :raises ValueError: If the csv data is invalid.

    .. seealso::
        :py:meth:`.create_table_with_data`
        :py:func:`csv.reader`
        :py:meth:`.loader.CsvTableFileLoader.load`
        :py:meth:`.loader.CsvTableTextLoader.load`
    """

    from .loader import CsvTableFileLoader
    from .loader import CsvTableTextLoader

    def setup_loader(loader):
        # Both loader types share the same configuration surface;
        # factored out to avoid the duplicated setup blocks.
        if dataproperty.is_not_empty_string(table_name):
            loader.table_name = table_name
        loader.header_list = attribute_name_list
        loader.delimiter = delimiter
        loader.quotechar = quotechar
        loader.encoding = encoding

    # Try the source as a file path first ...
    loader = CsvTableFileLoader(csv_source)
    setup_loader(loader)
    try:
        for tabledata in loader.load():
            self.create_table_from_tabledata(tabledata)
        return
    except IOError:
        pass

    # ... then fall back to treating it as raw CSV text.
    loader = CsvTableTextLoader(csv_source)
    setup_loader(loader)
    for tabledata in loader.load():
        self.create_table_from_tabledata(tabledata)
def write_table(self):
    """
    |write_table| with Pandas DataFrame variable definition format.
    """
    import pprint

    self._verify_property()
    self._preprocess()

    # Emit "name = pandas.DataFrame(" (or anonymous form) as the header.
    if dataproperty.is_not_empty_string(self.table_name):
        self._write_line(self.table_name + u" = pandas.DataFrame(")
    else:
        self._write_line(u"pandas.DataFrame(")

    self.inc_indent_level()
    # Pretty-print the value matrix, indenting every line and closing
    # the DataFrame call with ")".
    data_frame_text = (
        u"\n".join([
            self._get_indent_string() + line
            for line in
            pprint.pformat(self._value_matrix, indent=1).splitlines()
        ]) + u")"
    )
    # NOTE(review): dec/dec before writing then inc afterwards looks
    # asymmetric (the body is written one level below the starting
    # indent) — confirm this ordering is intended.
    self.dec_indent_level()
    self.dec_indent_level()
    self._write_line(data_frame_text)
    self.inc_indent_level()
def create_database(database_path):
    """
    Open *database_path* for writing, creating any missing parent
    directories first.

    :return: Writable SQLite connection.
    """

    database_file = path.Path(database_path)

    parent_dir = database_file.dirname()
    if dataproperty.is_not_empty_string(parent_dir):
        parent_dir.makedirs_p()

    return simplesqlite.SimpleSQLite(database_file, "w")
def write_table(self):
    """
    |write_table| with Pandas DataFrame variable definition format.
    """
    import pprint

    self._verify_property()
    self._preprocess()

    # NOTE(review): the guard tests ``table_name`` but the written name is
    # ``variable_name`` — presumably variable_name is derived from
    # table_name; confirm against the class definition.
    if dataproperty.is_not_empty_string(self.table_name):
        self._write_line(self.variable_name + u" = pandas.DataFrame(")
    else:
        self._write_line(u"pandas.DataFrame(")

    self.inc_indent_level()
    # Pretty-print the value matrix with per-line indentation and close
    # the DataFrame constructor call.
    data_frame_text = (u"\n".join([
        self._get_indent_string() + line
        for line in pprint.pformat(
            self._value_matrix, indent=1).splitlines()
    ]) + u")")
    # NOTE(review): double-decrement then increment mirrors the sibling
    # implementation; looks intentional but verify the resulting indent.
    self.dec_indent_level()
    self.dec_indent_level()
    self._write_line(data_frame_text)
    self.inc_indent_level()
def __get_filter(self, device):
    # Collect the current tc configuration of *device* as a mapping from
    # a filter-description key to the qdisc parameters shaping it.
    qdisc_parser = tcconfig.parser.TcQdiscParser()
    filter_parser = tcconfig.parser.TcFilterParser()

    # parse qdisc ---
    command = "tc qdisc show dev %s" % (device)
    proc = self.__subproc_wrapper.popen_command(command)
    qdisc_stdout, _stderr = proc.communicate()
    qdisc_param = qdisc_parser.parse(qdisc_stdout)

    # parse filter ---
    command = "tc filter show dev %s" % (device)
    proc = self.__subproc_wrapper.popen_command(command)
    filter_stdout, _stderr = proc.communicate()

    filter_table = {}
    for filter_param in filter_parser.parse_filter(filter_stdout):
        key_item_list = []

        if dataproperty.is_not_empty_string(filter_param.get("network")):
            key_item_list.append("network=" + filter_param.get("network"))
        if dataproperty.is_integer(filter_param.get("port")):
            key_item_list.append("port=%d" % (filter_param.get("port")))

        filter_key = ", ".join(key_item_list)
        filter_table[filter_key] = {}
        # Attach the qdisc parameters to the filter that feeds it
        # (matched via flowid == parent); "parent" itself is dropped.
        if filter_param.get("flowid") == qdisc_param.get("parent"):
            work_qdisc_param = dict(qdisc_param)
            del work_qdisc_param["parent"]
            filter_table[filter_key] = work_qdisc_param

    return filter_table
def run_command_helper(command, error_regexp, message, exception=None):
    """
    Run *command* and post-process a failure.

    Runner logging is muted unless debug logging is active.  On failure,
    stderr is matched against *error_regexp*: an unexpected error is
    logged verbatim; an expected one logs *message* (if any) and raises
    ``exception(command)`` when an exception class is supplied.

    :return: 0 on success, otherwise the command's return code.
    """

    if logger.level != logbook.DEBUG:
        spr.set_logger(is_enable=False)

    runner = spr.SubprocessRunner(command)
    runner.run()

    if logger.level != logbook.DEBUG:
        spr.set_logger(is_enable=True)

    if runner.returncode == 0:
        return 0

    if error_regexp.search(runner.stderr) is None:
        # Unexpected failure: surface the raw stderr.
        logger.error(runner.stderr)
        return runner.returncode

    if dataproperty.is_not_empty_string(message):
        logger.notice(message)

    if exception is not None:
        raise exception(command)

    return runner.returncode
def make_worksheet(self, sheet_name):
    """
    Make a worksheet in the current workbook.

    :param str sheet_name:
        Name of the worksheet to create. When empty, the name is chosen
        automatically (e.g. ``"Sheet1"``).
    """

    if dataproperty.is_not_empty_string(sheet_name):
        if sheet_name in self.__sheet_table:
            # Reuse the sheet that was already created under this name.
            self.stream = self.__sheet_table.get(sheet_name)
            return

        new_sheet_name = sheet_name
    else:
        new_sheet_name = None  # let the workbook pick a name

    worksheet = self.workbook.workbook.add_worksheet(new_sheet_name)
    self.__sheet_table[worksheet.name] = worksheet
    self.stream = worksheet
def to_attr_str(cls, name, operation_query=""):
    """
    :param str name: Attribute name.
    :param str operation_query:
        Used as a SQLite function if the value is not empty.
    :return: String that suitable for attribute name of a SQLite query.
    :rtype: str

    :Examples:

        >>> from simplesqlite.sqlquery import SqlQuery
        >>> SqlQuery.to_attr_str("key")
        'key'
        >>> SqlQuery.to_attr_str("a+b")
        '[a+b]'
        >>> SqlQuery.to_attr_str("key", operation_query="SUM")
        'SUM(key)'
    """

    name = cls.sanitize_attr(name)

    if cls.__RE_TO_ATTR_QUOTE.search(name):
        sql_name = '"%s"' % (name)
    elif cls.__RE_TO_ATTR_BRACKET.search(name) or name == "join":
        # Bracket-quote names with special characters and the reserved
        # word "join".
        sql_name = "[%s]" % (name)
    else:
        sql_name = name

    if dataproperty.is_not_empty_string(operation_query):
        sql_name = "%s(%s)" % (operation_query, sql_name)

    return sql_name
def make_update(cls, table, set_query, where=None):
    """
    Make UPDATE query.

    :param str table: Table name of execute query.
    :param str set_query: SET part of UPDATE query.
    :param str where: Add WHERE clause to execute query if not ``None``.
    :return: Query of SQLite.
    :rtype: str
    :raises ValueError: If ``set_query`` is empty string.

    .. seealso::
        :py:func:`validate_table_name() <simplesqlite.validate_table_name>`
    """
    validate_table_name(table)
    if dataproperty.is_empty_string(set_query):
        raise ValueError("SET query is null")

    query_list = [
        "UPDATE " + cls.to_table_str(table),
        "SET " + set_query,
    ]
    if dataproperty.is_not_empty_string(where):
        query_list.append("WHERE " + where)

    return " ".join(query_list)
def make_update(cls, table, set_query, where=None):
    """
    Make UPDATE query.

    :param str table: Table name of executing the query.
    :param str set_query: SET part of the UPDATE query.
    :param str where:
        Add a WHERE clause to execute query, if the value is not |None|.
    :return: Query of SQLite.
    :rtype: str
    :raises ValueError: If ``set_query`` is empty string.
    :raises ValueError: |raises_validate_table_name|
    """

    validate_table_name(table)
    if dataproperty.is_empty_string(set_query):
        raise ValueError("SET query is null")

    clause_list = ["UPDATE " + cls.to_table_str(table), "SET " + set_query]
    if dataproperty.is_not_empty_string(where):
        clause_list.append("WHERE " + where)

    return " ".join(clause_list)
def __set_network_filter(self, qdisc_major_id):
    # Install the tc filter that routes matching traffic into the
    # shaping qdisc identified by *qdisc_major_id*.
    command_list = [
        "tc filter add",
        "dev " + self.__get_tc_device(),
        "protocol ip",
        "parent {:x}:".format(qdisc_major_id),
        "prio 1",
    ]

    if self.__is_use_iptables():
        # fw classifier: mark packets via an iptables mangle rule and
        # match on the mark id.
        mark_id = (
            IptablesMangleController.get_unique_mark_id() +
            self.__FILTER_IPTABLES_MARK_ID_OFFSET)
        command_list.append("handle {:d} fw".format(mark_id))
        self.__add_mangle_mark(mark_id)
    else:
        # u32 classifier: nothing to match on -> nothing to do.
        if all([
            dataproperty.is_empty_string(self.network),
            self.port is None,
        ]):
            return 0

        command_list.append("u32")
        if dataproperty.is_not_empty_string(self.network):
            command_list.append("match ip {:s} {:s}".format(
                self.__get_network_direction_str(), self.network))
        if self.port is not None:
            command_list.append(
                "match ip dport {:d} 0xffff".format(self.port))

    command_list.append("flowid {:x}:{:d}".format(
        qdisc_major_id, self.__get_qdisc_minor_id()))

    return SubprocessRunner(" ".join(command_list)).run()
def removeMatchFileRecursively(cls, search_dir_path, re_remove_list):
    """
    Recursively remove files under *search_dir_path* whose basename
    matches any regular expression in *re_remove_list*.

    :return: Mapping from removal result to the list of affected paths.
    """

    logger.debug("remove matched file: search-root=%s, re=%s" % (
        search_dir_path, str(re_remove_list)))

    # Compile patterns once up front; empty patterns are skipped.
    compiled_pattern_list = [
        re.compile(pattern)
        for pattern in re_remove_list
        if dataproperty.is_not_empty_string(pattern)
    ]

    dict_result_pathlist = {}
    for dir_path, _dir_name_list, filename_list in os.walk(search_dir_path):
        for filename in filename_list:
            if not any(
                    pattern.search(filename)
                    for pattern in compiled_pattern_list):
                continue

            remove_path = os.path.join(dir_path, filename)
            result = cls.remove_object(remove_path)
            dict_result_pathlist.setdefault(result, []).append(remove_path)

    return dict_result_pathlist
def __set_network_filter(self, qdisc_major_id):
    # Build and execute "tc filter add" so that matched traffic is
    # classified into the qdisc *qdisc_major_id*.
    command_list = [
        "tc filter add",
        "dev " + self.__get_tc_device(),
        "protocol ip",
        "parent {:x}:".format(qdisc_major_id),
        "prio 1",
    ]

    if self.__is_use_iptables():
        # Classify by iptables fwmark; the mark is created here too.
        mark_id = (IptablesMangleController.get_unique_mark_id() +
                   self.__FILTER_IPTABLES_MARK_ID_OFFSET)
        command_list.append("handle {:d} fw".format(mark_id))
        self.__add_mangle_mark(mark_id)
    else:
        # Classify by u32 match; skip entirely when there is neither a
        # network nor a port to match.
        if all([
            dataproperty.is_empty_string(self.network),
            self.port is None,
        ]):
            return 0

        command_list.append("u32")
        if dataproperty.is_not_empty_string(self.network):
            command_list.append("match ip {:s} {:s}".format(
                self.__get_network_direction_str(), self.network))
        if self.port is not None:
            command_list.append("match ip dport {:d} 0xffff".format(
                self.port))

    command_list.append("flowid {:x}:{:d}".format(
        qdisc_major_id, self.__get_qdisc_minor_id()))

    return SubprocessRunner(" ".join(command_list)).run()
def main():
    # CLI entry point: either replay commands from a config file or
    # apply the traffic-control settings given on the command line.
    options = parse_option()

    if dataproperty.is_not_empty_string(options.config_file):
        # Config-file mode: run every stored command, OR-ing return
        # codes so any failure is reflected in the exit status.
        return_code = 0
        loader = TcConfigLoader()
        loader.is_overwrite = options.overwrite
        loader.load_pynetcm(options.config_file)
        for pynetcm_command in loader.get_pynetcm_command_list():
            return_code |= subprocrunner.SubprocessRunner(
                pynetcm_command).run()

        return return_code

    tc = TrafficControl(options.device)
    tc.direction = options.direction
    tc.bandwidth_rate = options.bandwidth_rate
    tc.latency_ms = options.network_latency
    tc.latency_distro_ms = options.latency_distro_ms
    tc.packet_loss_rate = options.packet_loss_rate
    tc.corruption_rate = options.corruption_rate
    tc.network = options.network
    tc.port = options.port

    tc.validate()

    if options.overwrite:
        # Clear existing rules first so the new ones replace them.
        tc.delete_tc()

    tc.set_tc()

    return 0
def write_table(self):
    """
    |write_table| with Markdown table format.
    """

    # When a table name is set, emit it as a chapter heading first.
    if dataproperty.is_not_empty_string(self.table_name):
        self.__write_chapter(self.table_name)

    super(MarkdownTableWriter, self).write_table()
def __parse_netem_param(self, line, parse_param_name, word_pattern):
    # Extract the token that follows ``parse_param_name`` in *line* and
    # record it; lines without the parameter are silently ignored.
    parser = (
        pp.SkipTo(parse_param_name, include=True) + pp.Word(word_pattern))

    try:
        parsed_value = parser.parseString(_to_unicode(line))[-1]
    except pp.ParseException:
        return

    if dataproperty.is_not_empty_string(parsed_value):
        self.__parsed_param[parse_param_name] = parsed_value
def make_select(cls, select, table, where=None, extra=None):
    """
    Make a SELECT query.

    :param str select: Attribute for SELECT query.
    :param str table: Table name of executing the query.
    :param str where:
        Add a WHERE clause to execute query, if the value is not |None|.
    :param extra extra:
        Add additional clause to execute query, if the value is not |None|.
    :return: Query of SQLite.
    :rtype: str
    :raises ValueError: ``select`` is empty string.
    :raises ValueError: |raises_validate_table_name|

    :Examples:

        >>> from simplesqlite.sqlquery import SqlQuery
        >>> SqlQuery.make_select(select="value", table="example")
        'SELECT value FROM example'
        >>> SqlQuery.make_select(
        ...     select="value", table="example",
        ...     where=SqlQuery.make_where("key", 1))
        'SELECT value FROM example WHERE key = 1'
        >>> SqlQuery.make_select(
        ...     select="value", table="example",
        ...     where=SqlQuery.make_where("key", 1), extra="ORDER BY value")
        'SELECT value FROM example WHERE key = 1 ORDER BY value'
    """
    validate_table_name(table)
    if dataproperty.is_empty_string(select):
        raise ValueError("SELECT query is null")

    query_list = [
        "SELECT " + select,
        "FROM " + cls.to_table_str(table),
    ]
    if dataproperty.is_not_empty_string(where):
        query_list.append("WHERE " + where)
    if dataproperty.is_not_empty_string(extra):
        query_list.append(extra)

    return " ".join(query_list)
def __parse_netem_param(self, line, parse_param_name, word_pattern):
    # Grab the word following ``parse_param_name`` from a "tc qdisc"
    # output line and store it in the parsed-parameter dict.
    pattern = (
        pp.SkipTo(parse_param_name, include=True) + pp.Word(word_pattern))

    try:
        result = pattern.parseString(_to_unicode(line))[-1]
        if dataproperty.is_not_empty_string(result):
            self.__parsed_param[parse_param_name] = result
    except pp.ParseException:
        # Parameter not present on this line; nothing to record.
        pass
def __parse_tbf_rate(self, line):
    # Pull the value following "rate" (e.g. "100Kbit") out of a tbf
    # qdisc line and store it without the trailing "bit" suffix.
    parse_param_name = "rate"
    parser = (
        pp.SkipTo(parse_param_name, include=True) +
        pp.Word(pp.alphanums + "." + ":"))

    try:
        token = parser.parseString(line)[-1]
    except pp.ParseException:
        return

    if dataproperty.is_not_empty_string(token):
        self.__parsed_param[parse_param_name] = token.rstrip("bit")
def _get_start_row_idx(self):
    """
    Return the absolute index of the first row in which every cell is a
    non-empty string, offset by ``self.start_row``.
    """

    row_offset = 0
    for row_value_list in self.__all_values:
        if all(
                dataproperty.is_not_empty_string(value)
                for value in row_value_list):
            # Found the first fully-populated row.
            break
        row_offset += 1

    return self.start_row + row_offset
def create_database(ctx, database_path):
    """
    Open the SQLite database at *database_path*, creating missing parent
    directories first.  Opens in append mode when the context requests
    appending tables, otherwise in overwrite mode.
    """

    is_append_table = ctx.obj.get(Context.IS_APPEND_TABLE)

    database_file = path.Path(database_path)
    parent_dir = database_file.dirname()
    if dataproperty.is_not_empty_string(parent_dir):
        parent_dir.makedirs_p()

    if is_append_table:
        return simplesqlite.SimpleSQLite(database_file, "a")

    return simplesqlite.SimpleSQLite(database_file, "w")
def __parse_tbf_rate(self, line):
    # Parse the "rate" value (e.g. "1Mbit") from a tbf qdisc line; the
    # "bit" suffix is stripped before storing.
    parse_param_name = "rate"
    pattern = (
        pp.SkipTo(parse_param_name, include=True) +
        pp.Word(pp.alphanums + "." + ":"))

    try:
        result = pattern.parseString(line)[-1]
        if dataproperty.is_not_empty_string(result):
            result = result.rstrip("bit")
            self.__parsed_param[parse_param_name] = result
    except pp.ParseException:
        # "rate" not present on this line.
        pass
def command_to_filename(command, suffix=""):
    """
    Convert a command-line string into a string safe to use as a
    filename: spaces become underscores, dashes are dropped, path
    separators become hyphens, and the result is sanitized.

    :param str command: Command line to convert.
    :param str suffix: Appended as ``_<suffix>`` when non-empty.
    :return: Filename-safe representation of *command*.
    :rtype: str
    """

    sep_chars = "/\\"

    filename = command.strip().replace(" ", "_").replace("-", "")
    # strip() already removes separators from BOTH ends, so the
    # previous extra lstrip(sep_chars) call was redundant and is gone.
    filename = filename.strip(sep_chars)
    # Interior path separators become hyphens.
    filename = re.sub("[%s]" % re.escape(sep_chars), "-", filename)
    filename = pathvalidate.sanitize_filename(filename)

    if dataproperty.is_not_empty_string(suffix):
        filename += "_" + suffix

    return filename
def main():
    # "tcset" entry point: configure logging, verify prerequisites, then
    # apply traffic control from a config file or from CLI options.
    options = parse_option()

    logger = logbook.Logger("tcset")
    logger.level = options.log_level
    subprocrunner.logger.level = options.log_level

    if options.quiet:
        subprocrunner.logger.disable()
    else:
        subprocrunner.logger.enable()

    # "tc" must be available; verify() raises otherwise.
    subprocrunner.Which("tc").verify()

    try:
        verify_netem_module()
    except ModuleNotFoundError as e:
        # NOTE(review): presumably a project-defined exception for a
        # missing kernel netem module (not the Python 3.6 builtin) —
        # confirm. A missing module is only a debug-level event here.
        logger.debug(str(e))
    except subprocrunner.CommandNotFoundError as e:
        logger.error(str(e))

    if dataproperty.is_not_empty_string(options.config_file):
        return set_tc_from_file(logger, options.config_file, options.overwrite)

    tc = TrafficControl(
        options.device,
        direction=options.direction,
        bandwidth_rate=options.bandwidth_rate,
        latency_ms=options.network_latency,
        latency_distro_ms=options.latency_distro_ms,
        packet_loss_rate=options.packet_loss_rate,
        corruption_rate=options.corruption_rate,
        network=options.network,
        src_network=options.src_network,
        port=options.port,
        is_enable_iptables=options.is_enable_iptables
    )

    try:
        tc.validate()
    except (NetworkInterfaceNotFoundError, ValueError) as e:
        logger.error(str(e))
        return 1

    if options.overwrite:
        # Best-effort delete of existing rules before re-applying.
        try:
            tc.delete_tc()
        except NetworkInterfaceNotFoundError:
            pass

    tc.set_tc()

    return 0
def __strip_empty_col(self):
    """
    Drop leading columns that consist entirely of empty values.

    Transposes the value matrix, counts leading all-empty columns, and
    re-transposes the remainder back into ``self.__all_values``.
    """

    # Materialize the transposed matrix: on Python 3 ``zip`` returns a
    # one-shot, non-subscriptable iterator, so the previous code that
    # iterated it and then sliced ``t_value_matrix[col_idx:]`` would
    # fail (the iterator is exhausted and does not support indexing).
    t_value_matrix = list(zip(*self.__all_values))

    col_idx = 0
    for col_value_list in t_value_matrix:
        if any(
                dataproperty.is_not_empty_string(value)
                for value in col_value_list):
            # First column containing real data; stop counting.
            break
        col_idx += 1

    self.__all_values = zip(*t_value_matrix[col_idx:])
def main():
    # "tcset" entry point (shaping-algorithm variant): verify
    # prerequisites, then apply traffic control from a config file or
    # from the parsed CLI options.
    options = parse_option()

    set_log_level(options.log_level)

    subprocrunner.Which("tc").verify()

    try:
        verify_netem_module()
    except ModuleNotFoundError as e:
        # Missing netem module is a debug-level event only.
        logger.debug(str(e))
    except subprocrunner.CommandNotFoundError as e:
        logger.error(str(e))

    if dataproperty.is_not_empty_string(options.config_file):
        return set_tc_from_file(logger, options.config_file, options.overwrite)

    tc = TrafficControl(
        options.device,
        direction=options.direction,
        bandwidth_rate=options.bandwidth_rate,
        latency_ms=options.network_latency,
        latency_distro_ms=options.latency_distro_ms,
        packet_loss_rate=options.packet_loss_rate,
        corruption_rate=options.corruption_rate,
        network=options.network,
        src_network=options.src_network,
        port=options.port,
        is_add_shaper=options.is_add_shaper,
        is_enable_iptables=options.is_enable_iptables,
        shaping_algorithm=options.shaping_algorithm)

    try:
        tc.validate()
    except (NetworkInterfaceNotFoundError, ValueError) as e:
        logger.error(str(e))
        return 1

    if options.overwrite:
        # Mute expected noise from deleting possibly-absent rules.
        if options.log_level == logbook.INFO:
            set_log_level(logbook.ERROR)

        try:
            tc.delete_tc()
        except NetworkInterfaceNotFoundError:
            pass

        set_log_level(options.log_level)

    tc.set_tc()

    return 0
def write_table(self):
    """
    |write_table| with Python nested list variable definition format.
    """

    self._verify_property()

    # Open the list, optionally as a named variable assignment.
    if dataproperty.is_not_empty_string(self.table_name):
        opening_line = self.table_name + u" = ["
    else:
        opening_line = u"["
    self._write_line(opening_line)

    self.inc_indent_level()
    super(PythonCodeTableWriter, self).write_table()
    self.dec_indent_level()

    self._write_line(u"]")
def write_table(self):
    """
    |write_table| with HTML table format.
    """
    self._verify_property()
    self._preprocess()

    if dataproperty.is_not_empty_string(self.table_name):
        # Use the sanitized table name as the element id and add a
        # visible <caption>.
        self._table_tag = tags.table(
            id=pathvalidate.sanitize_python_var_name(self.table_name))
        self._table_tag += tags.caption(self.table_name)
    else:
        self._table_tag = tags.table()

    self._write_header()
    self._write_body()
def to_attr_str(cls, name, operation_query=""):
    """
    :param str name: Base name of attribute.
    :param str operation_query:
        Wrapped around the attribute as a SQLite function call
        (``FUNC(attr)``) if the value is not empty.
    :return: String that is suitable for an attribute name.
    :rtype: str
    """
    # Bracket-quote names with special characters and the reserved
    # word "join"; otherwise use the name verbatim.
    if cls.__RE_TO_ATTR_STR.search(name):
        sql_name = "[%s]" % (name)
    elif name == "join":
        sql_name = "[%s]" % (name)
    else:
        sql_name = name

    if dataproperty.is_not_empty_string(operation_query):
        sql_name = "%s(%s)" % (operation_query, sql_name)

    return sql_name
def __set_network_filter(self):
    # Install the u32 filter for the configured network/port; a no-op
    # (returning 0) when neither is configured.
    if all([
        dataproperty.is_empty_string(self.network),
        self.port is None,
    ]):
        return 0

    command_item_list = [
        "tc filter add",
        "dev " + self.__device,
        "protocol ip parent 1: prio 1 u32",
        "flowid 1:1",
    ]

    if dataproperty.is_not_empty_string(self.network):
        command_item_list.append("match ip dst " + self.network)
    if self.port is not None:
        command_item_list.append("match ip dport %d 0xffff" % (self.port))

    return self.__subproc_wrapper.run(" ".join(command_item_list))
def to_append_command(self):
    """
    Build the ``iptables -A`` command line that appends this mangle
    MARK rule (protocol/source/destination included when set).

    :return: iptables command string.
    :rtype: str
    """

    IntegerType(self.mark_id).validate()

    command_item_list = [
        "iptables -A {:s} -t mangle -j MARK".format(self.chain),
        "--set-mark {}".format(self.mark_id),
    ]

    has_protocol = any([
        dataproperty.is_not_empty_string(self.protocol),
        IntegerType(self.protocol).is_type(),
    ])
    if has_protocol:
        command_item_list.append("-p {}".format(self.protocol))

    if self.__is_valid_srcdst(self.source):
        command_item_list.append("-s {:s}".format(self.source))
    if self.__is_valid_srcdst(self.destination):
        command_item_list.append("-d {:s}".format(self.destination))

    return " ".join(command_item_list)
def __set_network_filter(self, qdisc_major_id):
    # Install the u32 filter for the configured network/port under the
    # qdisc *qdisc_major_id*; no-op (returns 0) when neither is set.
    if all([
        dataproperty.is_empty_string(self.network),
        self.port is None,
    ]):
        return 0

    command_list = [
        "tc filter add",
        "dev " + self.__get_tc_device(),
        "protocol ip",
        "parent %d:" % (qdisc_major_id),
        "prio 1 u32",
        "flowid %d:%d" % (qdisc_major_id, self.__get_qdisc_minor_id()),
    ]
    if dataproperty.is_not_empty_string(self.network):
        command_list.append("match ip %s %s" % (
            self.__get_network_direction_str(), self.network))
    if self.port is not None:
        command_list.append("match ip dport %d 0xffff" % (self.port))

    return self.__subproc_wrapper.run(" ".join(command_list))
def __get_filter(self, device):
    # Collect *device*'s current tc configuration as a mapping from a
    # filter-description key to the qdisc parameters shaping it.
    if dataproperty.is_empty_string(device):
        return {}

    qdisc_parser = TcQdiscParser()
    filter_parser = TcFilterParser()

    # parse qdisc ---
    command = "tc qdisc show dev {:s}".format(device)
    qdisk_show_runner = SubprocessRunner(command)
    qdisk_show_runner.run()
    qdisc_param = qdisc_parser.parse(qdisk_show_runner.stdout)

    # parse filter ---
    command = "tc filter show dev {:s}".format(device)
    filter_show_runner = SubprocessRunner(command)
    filter_show_runner.run()

    filter_table = {}
    for filter_param in filter_parser.parse_filter(
            filter_show_runner.stdout):
        key_item_list = []

        if dataproperty.is_not_empty_string(filter_param.get("network")):
            key_item_list.append("network=" + filter_param.get("network"))

        if dataproperty.is_integer(filter_param.get("port")):
            key_item_list.append("port={:d}".format(
                filter_param.get("port")))

        filter_key = ", ".join(key_item_list)
        filter_table[filter_key] = {}
        # Join the qdisc parameters to the filter feeding it (matched
        # via flowid == parent); the "parent" key itself is dropped.
        if filter_param.get("flowid") == qdisc_param.get("parent"):
            work_qdisc_param = dict(qdisc_param)
            del work_qdisc_param["parent"]
            filter_table[filter_key] = work_qdisc_param

    return filter_table
def __set_network_filter(self, qdisc_major_id):
    # Add the u32 classifier for the configured network/port; returns 0
    # without running anything when there is nothing to match.
    if all([
        dataproperty.is_empty_string(self.network),
        self.port is None,
    ]):
        return 0

    command_list = [
        "tc filter add",
        "dev " + self.__get_tc_device(),
        "protocol ip",
        "parent %d:" % (qdisc_major_id),
        "prio 1 u32",
        "flowid %d:%d" % (qdisc_major_id, self.__get_qdisc_minor_id()),
    ]
    if dataproperty.is_not_empty_string(self.network):
        command_list.append(
            "match ip %s %s" % (
                self.__get_network_direction_str(), self.network))
    if self.port is not None:
        command_list.append("match ip dport %d 0xffff" % (self.port))

    return self.__subproc_wrapper.run(" ".join(command_list))
def __set_network_filter(self, qdisc_major_id):
    # Add the u32 classifier (hex-formatted handles) for the configured
    # network/port; no-op returning 0 when neither is configured.
    if all([
        dataproperty.is_empty_string(self.network),
        self.port is None,
    ]):
        return 0

    command_list = [
        "tc filter add",
        "dev " + self.__get_tc_device(),
        "protocol ip",
        "parent {:x}:".format(qdisc_major_id),
        "prio 1 u32",
        "flowid {:x}:{:d}".format(
            qdisc_major_id, self.__get_qdisc_minor_id()),
    ]
    if dataproperty.is_not_empty_string(self.network):
        command_list.append("match ip {:s} {:s}".format(
            self.__get_network_direction_str(), self.network))
    if self.port is not None:
        command_list.append(
            "match ip dport {:d} 0xffff".format(self.port))

    return SubprocessRunner(" ".join(command_list)).run()
def make_worksheet(self, sheet_name):
    """
    Make a worksheet to the current workbook.

    :param str sheet_name:
        Name of the worksheet to create.
        Name of the work sheet will automatically decided
        (like ``"Sheet1"``) if the ``sheet_name`` is empty.
    """
    if dataproperty.is_not_empty_string(sheet_name):
        if sheet_name in self.__sheet_table:
            # The sheet already exists: make it the active stream and
            # skip creating a duplicate.
            self.stream = self.__sheet_table.get(sheet_name)
            return

        work_sheet_name = sheet_name
    else:
        # None lets the workbook pick an automatic name.
        work_sheet_name = None

    worksheet = self.workbook.workbook.add_worksheet(work_sheet_name)
    self.__sheet_table[worksheet.name] = worksheet
    self.stream = worksheet
def __is_valid_srcdst(srcdst):
    # A src/dst value is usable only when it is a non-empty string and
    # is not the catch-all "anywhere" network.
    if not dataproperty.is_not_empty_string(srcdst):
        return False

    return srcdst != ANYWHERE_NETWORK
def test_normal(self):
    # Smoke test: the command line of the current process should always
    # be reconstructable as a non-empty string.
    assert dataproperty.is_not_empty_string(get_execution_command())
def url(ctx, url, format_name, output_path, encoding, proxy):
    """
    Fetch data from a URL and convert data to a SQLite database file.
    """
    if dataproperty.is_empty_sequence(url):
        sys.exit(ExitCode.NO_INPUT)

    con = create_database(ctx, output_path)
    verbosity_level = ctx.obj.get(Context.VERBOSITY_LEVEL)
    extractor = get_schema_extractor(con, verbosity_level)
    result_counter = ResultCounter()

    logger = logbook.Logger("sqlitebiter url")
    _setup_logger_from_context(logger, ctx.obj[Context.LOG_LEVEL])

    # Optional HTTP/HTTPS proxy configuration.
    proxies = {}
    if dataproperty.is_not_empty_string(proxy):
        proxies = {
            "http": proxy,
            "https": proxy,
        }

    # Try the requested format first; if no loader handles it, retry as
    # HTML before giving up.
    try:
        loader = ptr.TableUrlLoader(
            url, format_name, encoding=encoding, proxies=proxies)
    except ptr.LoaderNotFoundError as e:
        try:
            loader = ptr.TableUrlLoader(
                url, "html", encoding=encoding, proxies=proxies)
        except (ptr.LoaderNotFoundError, ptr.HTTPError):
            # Report the ORIGINAL failure, not the fallback's.
            logger.error(e)
            sys.exit(ExitCode.FAILED_LOADER_NOT_FOUND)
    except ptr.HTTPError as e:
        logger.error(e)
        sys.exit(ExitCode.FAILED_HTTP)

    try:
        for tabledata in loader.load():
            sqlite_tabledata = ptr.SQLiteTableDataSanitizer(
                tabledata).sanitize()

            # Per-table failures are counted but do not abort the run.
            try:
                con.create_table_from_tabledata(sqlite_tabledata)
                result_counter.inc_success()
            except (ValueError) as e:
                logger.debug(
                    u"url={}, message={}".format(url, str(e)))
                result_counter.inc_fail()
                continue

            log_message = get_success_log_format(verbosity_level).format(
                url,
                extractor.get_table_schema_text(
                    sqlite_tabledata.table_name).strip())
            logger.info(log_message)
    except ptr.InvalidDataError as e:
        logger.error(
            u"invalid data: url={}, message={}".format(url, str(e)))
        result_counter.inc_fail()

    write_completion_message(logger, output_path, result_counter)

    sys.exit(result_counter.get_return_code())