Example No. 1
File: bot.py Project: flipbug/zoey
 def __init__(self, input):
     patterns = self.fetchCommandPatterns()
     self._parser = Parser(patterns)
     if input:
         self.input = input
     else:
         print self.welcomeMessage
Example No. 2
def run(test_config):
    print test_config
    load_config = test_config["load_manager"]
    scenarios_conf = load_config["scenarios"]
    Use_Dashboard = True
    env_pre(test_config)
    if test_config["contexts"]["yardstick_ip"] is None:
        load_config["contexts"]["yardstick_ip"] =\
            conf_parser.ip_parser("yardstick_test_ip")

    if "dashboard" in test_config["contexts"].keys():
        if test_config["contexts"]["dashboard_ip"] is None:
            test_config["contexts"]["dashboard_ip"] =\
                conf_parser.ip_parser("dashboard")
        LOG.info("Create Dashboard data")
        Use_Dashboard = True

    num_vnfs = conf_parser.str_to_list(scenarios_conf["number_vnfs"])
    iterations = scenarios_conf["iterations"]
    interval = scenarios_conf["interval"]
    load_config["result_file"] = os.path.dirname(
        os.path.abspath(__file__)) + "/test_case/result"

    result = []

    for i in range(0, len(num_vnfs)):
        print i
        case_config = {
            "num_vnfs": int(num_vnfs[i]),
            "iterations": iterations,
            "interval": interval
        }
        data_reply = do_test(case_config, Use_Dashboard,
                             test_config["contexts"])
        result.append(data_reply)

    LOG.info("Finished bottlenecks testcase")
    LOG.info("The result data is %s", result)
    return result
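For orientation, the following is a minimal sketch of the test_config dictionary this run() expects, inferred only from the keys the function reads; the placeholder values and the overall shape are assumptions, not taken from a real configuration file.

# Hypothetical test_config shape, inferred from the keys read above (values are placeholders)
test_config = {
    "load_manager": {
        "scenarios": {
            "number_vnfs": "1, 2, 4",   # parsed with conf_parser.str_to_list
            "iterations": 10,
            "interval": 35,
        },
        "contexts": {},                 # receives "yardstick_ip" when it must be resolved
    },
    "contexts": {
        "yardstick_ip": None,           # resolved via conf_parser.ip_parser when None
        # "dashboard": "...", "dashboard_ip": None   # only when a dashboard is used
    },
}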
Example No. 3
 def fetch_article(self,
                   keyword,
                   page=1,
                   time=SearchArticleTime.ANYTIME,
                   article_type=SearchArticleType.ALL,
                   referer=None):
     url = SogouRequest.generate_search_article_url(keyword, page, time,
                                                    article_type)
     response = self.__get_and_unlock(
         url,
         unlock_function=self.__unlock_wechat,
         identify_image_callback=self.identify_image_callback_by_hand)
     return Parser.parse_article(response.text)
Example No. 4
 def func():
     parser = Parser()
     client = self.mode['connect']['clients'][i]
     print(f'SERVER START receiving FROM C{i}...')
     while self.connected['connected']:
         try:
             events_strs = parser.parse(client['socket'].recv(1 << 20))
         except socket.timeout:
             continue
         except json.decoder.JSONDecodeError:
             print('\tJSON Decode Error!')
             continue
         except ConnectionResetError:
             print('Connection Reset by CLIENT')
             break
         for events_str in events_strs:
             events = json.loads(events_str)
             self.players[i + 1].process_pressed(
                 events['key-pressed'], events['key-down'])
             self.pingstamp[i + 1] = max(self.pingstamp[i + 1],
                                         events['ping'])
     print(f'SERVER END receiving FROM C{i}...')
Example No. 5
def wallet_registrar():
    requestdata = request.get_json()
    # parse the JSON to obtain a valid object
    message, error = Parser.parseJson(requestdata)

    if error:
        return message, 400
    # build the query
    message = coordinator.wallet_registrar(message)

    if message['error'] and message['error']['code'] != 0:
        return message, message['error']['code']

    return jsonify(message), 200
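This endpoint, like the similar wallet and transaction endpoints in the examples that follow, parses the JSON body with Parser.parseJson, delegates to the coordinator and maps the error code to an HTTP status. A hypothetical client-side call is sketched below; the route path, host, port and payload fields are illustrative assumptions only.

# Hypothetical client call; the URL and payload fields are assumptions for illustration
import requests

resp = requests.post("http://localhost:5000/wallet", json={"owner": "alice"})
if resp.status_code == 200:
    print(resp.json())              # reply serialized by jsonify
else:
    print("error:", resp.status_code, resp.text)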
Example No. 6
def mine():
    txRequest = request.get_json()
    # parse the JSON to obtain a valid object
    message, error = Parser.parseJson(txRequest)
    if error:
        return message, 400
    # close the block
    response = coordinator.minar(message)

    if response['error'] \
            and response['error']['code'] != 0:
        return response, response['error']['code']

    return jsonify(response), 200
Example No. 7
File: bot.py Project: flipbug/zoey
class Bot:

    input = ""
    welcomeMessage = "Hello, I am a bot."
    quitMessage = "Good bye"
    commandNotFoundMessage = "I don't know what you mean"

    _parser = None

    def __init__(self, input):
        patterns = self.fetchCommandPatterns()
        self._parser = Parser(patterns)
        if input:
            self.input = input
        else:
            print self.welcomeMessage

    def __del__(self):
        if not input:
            print self.quitMessage

    def fetchCommandPatterns(self):
        commandPatterns = {}
        for plugin in PluginProvider.plugins:
            if hasattr(plugin, 'commandPatterns'):
                commandPatterns.update(plugin.commandPatterns)
        return commandPatterns

    def start(self):
        if self.input:
            self.dispatchCommand(self.input)
        else:
            self.beginMainLoop()

    def beginMainLoop(self):
        while True:
            input = raw_input(">> ")
            self.dispatchCommand(input)

    def dispatchCommand(self, input):
        try:
            command = self._parser.getCommandFromInput(input)
            for plugin in PluginProvider.plugins:
                if command.getKeyword() in plugin.commandPatterns:
                    pluginObj = plugin()
                    return pluginObj.processCommand(command)
        except NotImplementedError:
            print self.commandNotFoundMessage
            return False
Example No. 8
def test_parser_1(caplog):
    p = Parser('examples/problem.yaml', 'examples/instance.yaml')
    assert p.rawProblem.name == "labirinth"
    assert p.rawInstance.size == 2

    assert len(p.problem.types) == 2

    p.problem.setTypes(p.rawProblem.types)
    assert "position" in p.problem.types

    instantiated_variables = p.problem.instantiateVar(p.rawProblem.variables, 
                                                      p.rawInstance.variables)
    assert "treasure_0" in instantiated_variables
    assert "treasure_1" in instantiated_variables
    assert len(instantiated_variables) == 8
Example No. 9
def wallet_checkFondos():
    requestdata = request.get_json()
    # parse the JSON to obtain a valid object
    message, error = Parser.parseJson(requestdata)

    if error:
        return message, 400
    # build the query
    txregistrada = coordinator.calcularSaldos(message)

    if txregistrada['error']\
            and txregistrada['error']['code'] != 0:
        return txregistrada, txregistrada['error']['code']

    return jsonify(txregistrada), 200
Example No. 10
def transacion_create():
    txRequest = request.get_json()
    # parse the JSON to obtain a valid object
    message, error = Parser.parseJson(txRequest)

    if error:
        return message, 400
    # create a new transaction
    txregistrada = coordinator.registrarTransaccion(message)

    if txregistrada['error'] \
            and txregistrada['error']['code'] != 0:
        return txregistrada, txregistrada['error']['code']

    return jsonify(txregistrada), 200
Example No. 11
def main():
    # Initialize parser
    parser = Parser()

    # Get parsed arguments
    hosts = parser.hosts
    ports = parser.ports
    discovery = parser.host_discovery
    stype = parser.scan_type
    rand_ips = parser.rand_ips
    rand_ports = parser.rand_ports

    # Initialize scanner with user defined settings
    scanner = Scanner(discovery, stype, rand_ips, rand_ports)

    # Scan and get output from scan
    output = scanner.scan(hosts, ports)

    # Print output
    names = output['Host'].unique().tolist()
    for name in names:
        host_output = output.loc[output['Host'] == name]
        summary = host_output.groupby(['Host', 'Status'])
        summary = summary.agg({
            'Status': ['count']
        }).rename(columns={
            'Status': '',
            'count': ''
        })
        open_ports = host_output.loc[output['Status'] == 'open']
        closed_ports = host_output.loc[output['Status'] == 'closed']
        filtered_ports = host_output.loc[output['Status'] == 'filtered']

        print(summary)
        print('\n%s\t%s\t%s' % ('Port', 'Desc', 'Status'))
        for _, row in open_ports.iterrows():
            print('%d\t%s\topen' % (row['Port'], row['Description']))
        if parser.show_closed:
            for _, row in closed_ports.iterrows():
                print('%d\t%s\tclosed' % (row['Port'], row['Description']))
            for _, row in filtered_ports.iterrows():
                print('%d\t%s\tfiltered' % (row['Port'], row['Description']))

    # Save output file
    if parser.output:
        save_csv(output)
Example No. 12
def run(test_config):
    load_config = test_config["load_manager"]
    scenarios_conf = load_config["scenarios"]
    Use_Dashboard = False

    env_pre(None)
    if test_config["contexts"]["yardstick_ip"] is None:
        load_config["contexts"]["yardstick_ip"] =\
            conf_parser.ip_parser("yardstick_test_ip")

    if "dashboard" in test_config["contexts"].keys():
        if test_config["contexts"]["dashboard_ip"] is None:
            test_config["contexts"]["dashboard_ip"] =\
                conf_parser.ip_parser("dashboard")
        LOG.info("Create Dashboard data")
        Use_Dashboard = True

    cpus = conf_parser.str_to_list(scenarios_conf["cpus"])
    mems = conf_parser.str_to_list(scenarios_conf["mems"])
    pkt_size = conf_parser.str_to_list(scenarios_conf["pkt_size"])
    multistream = conf_parser.str_to_list(scenarios_conf["multistream"])
    search_interval = scenarios_conf["search_interval"]

    load_config["result_file"] = os.path.dirname(
        os.path.abspath(__file__)) + "/test_case/result"

    if len(cpus) != len(mems):
        LOG.error("the cpus and mems config lists do not have the same length!")
        os._exit(1)

    result = []

    for i in range(0, len(cpus)):
        case_config = {
            "vcpu": cpus[i],
            "memory": int(mems[i]) * 1024,
            "multistreams": multistream,
            "pktsize": pkt_size,
            "search_interval": search_interval
        }

        data_reply = do_test(case_config, Use_Dashboard,
                             test_config["contexts"])
        result.append(data_reply)

    LOG.info("Finished bottlenecks testcase")
    LOG.info("The result data is %s", result)
    return result
Example No. 13
def darknet_base(inputs, include_yolo_head=True):
    """
    Builds Darknet53 by reading the YOLO configuration file

    :param inputs: Input tensor
    :param include_yolo_head: Includes the YOLO head
    :return: A list of output layers and the network config
    """
    path = os.path.join(ROOT_DIR, 'cfg', '{}.cfg'.format(YOLO_VERSION))
    blocks = Parser.parse_cfg(path)
    x, layers, yolo_layers = inputs, [], []
    ptr = 0
    config = {}

    for block in blocks:
        block_type = block['type']

        if block_type == 'net':
            config = _read_net_config(block)

        elif block_type == 'convolutional':
            x, layers, yolo_layers, ptr = _build_conv_layer(
                x, block, layers, yolo_layers, ptr, config)

        elif block_type == 'shortcut':
            x, layers, yolo_layers, ptr = _build_shortcut_layer(
                x, block, layers, yolo_layers, ptr)

        elif block_type == 'yolo':
            x, layers, yolo_layers, ptr = _build_yolo_layer(
                x, block, layers, yolo_layers, ptr, config)

        elif block_type == 'route':
            x, layers, yolo_layers, ptr = _build_route_layer(
                x, block, layers, yolo_layers, ptr)

        elif block_type == 'upsample':
            x, layers, yolo_layers, ptr = _build_upsample_layer(
                x, block, layers, yolo_layers, ptr)

        elif block_type == 'maxpool':
            x, layers, yolo_layers, ptr = _build_maxpool_layer(
                x, block, layers, yolo_layers, ptr)

        else:
            raise ValueError(
                '{} not recognized as block type'.format(block_type))

    _verify_weights_completed_consumed(ptr)

    if include_yolo_head:
        output_layers = yolo_layers
        return tf.keras.layers.Concatenate(axis=1)(output_layers), config
    else:
        output_layers = [
            layers[i - 1] for i in range(len(layers)) if layers[i] is None
        ]

        # NOTE: Apparently TFLite doesn't like Concatenate.
        # return tf.keras.layers.Concatenate(axis=1)(output_layers), config
        return output_layers, config
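A minimal sketch of how the builder above could be wrapped into a Keras model is shown below; the 416x416x3 input resolution and the tf.keras.Model wrapper are assumptions for illustration and are not part of the example.

# Hypothetical wrapper around darknet_base; the input resolution is an assumption
import tensorflow as tf

inputs = tf.keras.Input(shape=(416, 416, 3))
outputs, net_config = darknet_base(inputs, include_yolo_head=True)
model = tf.keras.Model(inputs=inputs, outputs=outputs)
model.summary()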
Example No. 14
def load_and_verify(problem: str, instances: str):
    p = Parser(problem, instances)
    print(p.problem)
Example No. 15
from utils.parser import Parser

if __name__ == "__main__":
    data_path = "../assets/"
    parser = Parser(data_path).getInstance()
    parking_container = parser.read_parking_container()
Example No. 16
    def input_filename(self, P):
        """Check the content of the input widget to verify that it is valid
        with the rules of the application.

        Arguments:
        - P (str): Value of the entry if the edit is allowed

        Returns:
        - str: Output text processed by application rules
        """
        user_input = P

        date_format, alert = self.view.get_format_date()

        if alert:
            self.view.statusbar.var_alert.set(alert)
            self.view.statusbar.update()
        else:
            self.view.statusbar.var_alert.set("")

        counter = int(self.view.params.start_sbox.get())
        step = int(self.view.params.step_sbox.get())
        digits = self.view.params.digits_sbox.get()

        if sys.platform == "win32":
            self.view.check_valid_characters_filename(user_input)

        for index, initial in enumerate(self.initial_filenames):
            dirname, filename = os.path.split(initial)
            filename, ext = os.path.splitext(filename)

            self.parser = Parser(self.changed_filenames, user_input, filename,
                                 dirname)

            # Name [n]
            temp_input = self.parser.name_n(ext, index)

            # Name from first character [nX]
            temp_input = self.parser.name_truncate_x(temp_input, ext, index)

            # Name from last character [n-X]
            temp_input = self.parser.name_last_x(temp_input, ext, index)

            # Name from n character [n,X]
            temp_input = self.parser.name_start_x(temp_input, ext, index)

            # Add counter
            temp_input = self.parser.add_counter(temp_input, digits, counter,
                                                 ext, index)
            counter += step

            # Add date
            try:
                temp_input = self.parser.add_date(temp_input, date_format, ext,
                                                  index)
            except TypeError:
                pass

        self.replace_filename = self.changed_filenames[:]

        self.populate_treeview(self.replace_filename)
        return True
Example No. 17
class MultipleRenaming:
    """Class Multiple Renaming."""
    def __init__(self):
        self.view = View(controller=self)
        self.parser = Parser

        self.initial_filenames = list()
        self.initial_filepath = list()
        self.changed_filenames = list()
        self.replace_filename = list()

    def open_filenames(self):
        """Open files and display the number in the status bar."""
        self.initial_filepath = askopenfilenames()
        self.initial_filenames = list()

        for basename in self.initial_filepath:
            self.initial_filenames.append(basename)

        self.changed_filenames = self.initial_filenames[:]
        self.replace_filename = self.initial_filenames[:]

        self.view.statusbar.var_nbfiles.set(len(self.initial_filenames))

        self.populate_treeview()

    def populate_treeview(self, argument=None):
        """Parse filenames and send to view.display_treeview."""
        data = list()

        for initial, changed in zip(self.initial_filenames,
                                    self.changed_filenames):
            date_creation = datetime.fromtimestamp(os.path.getctime(initial))
            date_modified = datetime.fromtimestamp(os.path.getmtime(initial))
            new_name, ext = os.path.splitext(os.path.basename(changed))
            name_modified = arguments_parsing(argument, new_name, ext)

            _data = dict()
            _data["old_name"] = os.path.basename(initial)
            _data["new_name"] = name_modified
            _data["size"] = get_human_readable_size(os.path.getsize(initial))
            _data["created_at"] = datetime.strftime(date_creation,
                                                    "%Y/%m/%d %H:%M:%S")
            _data["modified_at"] = datetime.strftime(date_modified,
                                                     "%Y/%m/%d %H:%M:%S")
            _data["location"] = os.path.abspath(initial)
            data.append(_data)

        self.view.display_treeview(data)

    def input_filename(self, P):
        """Check the content of the input widget to verify that it is valid
        with the rules of the application.

        Arguments:
        - P (str): Value of the entry if the edit is allowed

        Returns:
        - str: Output text processed by application rules
        """
        user_input = P

        date_format, alert = self.view.get_format_date()

        if alert:
            self.view.statusbar.var_alert.set(alert)
            self.view.statusbar.update()
        else:
            self.view.statusbar.var_alert.set("")

        counter = int(self.view.params.start_sbox.get())
        step = int(self.view.params.step_sbox.get())
        digits = self.view.params.digits_sbox.get()

        if sys.platform == "win32":
            self.view.check_valid_characters_filename(user_input)

        for index, initial in enumerate(self.initial_filenames):
            dirname, filename = os.path.split(initial)
            filename, ext = os.path.splitext(filename)

            self.parser = Parser(self.changed_filenames, user_input, filename,
                                 dirname)

            # Name [n]
            temp_input = self.parser.name_n(ext, index)

            # Name from first character [nX]
            temp_input = self.parser.name_truncate_x(temp_input, ext, index)

            # Name from last character [n-X]
            temp_input = self.parser.name_last_x(temp_input, ext, index)

            # Name from n character [n,X]
            temp_input = self.parser.name_start_x(temp_input, ext, index)

            # Add counter
            temp_input = self.parser.add_counter(temp_input, digits, counter,
                                                 ext, index)
            counter += step

            # Add date
            try:
                temp_input = self.parser.add_date(temp_input, date_format, ext,
                                                  index)
            except TypeError:
                pass

        self.replace_filename = self.changed_filenames[:]

        self.populate_treeview(self.replace_filename)
        return True

    def search_and_replace(self, event):
        """Search and replace function.

        Arguments:
        - event (dict): Bind event.
        """
        search_expr = self.view.params.find_entry.get()
        replace_expr = self.view.params.replace_entry.get()

        if sys.platform == "win32":
            self.view.check_valid_characters_filename(replace_expr)

        if len(search_expr) > 0:
            self.changed_filenames = self.replace_filename[:]
            for index, word in enumerate(self.replace_filename):
                _dirname = os.path.dirname(word)
                _basename = os.path.basename(word)

                if search_expr in word:
                    self.changed_filenames[index] = os.path.join(
                        _dirname, _basename.replace(search_expr, replace_expr))
        else:
            self.changed_filenames = self.replace_filename[:]
        self.populate_treeview(self.changed_filenames)

    def rename(self):
        """Execute file renaming."""
        for index, (initial, modified) in enumerate(
                zip(self.initial_filenames, self.changed_filenames)):
            dirname = os.path.dirname(initial)
            basename_initial = os.path.basename(initial)
            extension_initial = os.path.splitext(basename_initial)[1]
            for key, value in self.view.arguments_dict.items():
                if self.view.params.arguments_cbox.get() in value:
                    arg_key = key

            # Apply argument options
            modified = arguments_parsing(arg_key,
                                         os.path.splitext(modified)[0],
                                         extension_initial)

            os.rename(initial, os.path.join(dirname, modified))

            # Convert tuple to list.
            self.initial_filenames = list(self.initial_filenames)

            # Update renamed file
            self.initial_filenames[index] = os.path.join(dirname, modified)

        self.populate_treeview()
        self.view.params.filename_entry.focus()

        if self.view.params.close_var.get():
            sys.exit()
Example No. 18
def run(test_config):
    con_dic = test_config["load_manager"]
    Use_Dashboard = False
    env_pre(None)
    if test_config["contexts"]["yardstick_ip"] is None:
        con_dic["contexts"]["yardstick_ip"] =\
            conf_parser.ip_parser("yardstick_test_ip")

    if "dashboard" in test_config["contexts"].keys():
        if test_config["contexts"]["dashboard_ip"] is None:
            test_config["contexts"]["dashboard_ip"] =\
                conf_parser.ip_parser("dashboard")
        LOG.info("Create Dashboard data")
        Use_Dashboard = True
        DashBoard.dashboard_system_bandwidth(test_config["contexts"])

    data = {}
    rx_pkt_a = con_dic['scenarios']['rx_pkt_sizes'].split(',')
    tx_pkt_a = con_dic['scenarios']['tx_pkt_sizes'].split(',')
    data["rx_pkt_sizes"] = rx_pkt_a
    data["tx_pkt_sizes"] = tx_pkt_a
    con_dic["result_file"] = os.path.dirname(
        os.path.abspath(__file__)) + "/test_case/result"
    cur_role_result = 1
    pre_role_result = 1
    pre_reply = {}
    data_return = {}
    data_max = {}
    data_return["throughput"] = 1

    for test_x in data["tx_pkt_sizes"]:
        data_max["throughput"] = 1
        bandwidth_tmp = 1
        for test_y in data["rx_pkt_sizes"]:
            case_config = {
                "tx_msg_size": float(test_x),
                "rx_msg_size": float(test_y),
                "test_time": con_dic['scenarios']['test_times'],
                "pod_info": conf_parser.bottlenecks_config["pod_info"]
            }
            data_reply = do_test(case_config, Use_Dashboard,
                                 test_config["contexts"])

            conf_parser.result_to_file(data_reply, test_config["out_file"])
            bandwidth = data_reply["throughput"]
            if (data_max["throughput"] < bandwidth):
                data_max = data_reply
            if (abs(bandwidth_tmp - bandwidth) / bandwidth_tmp < 0.025):
                LOG.info("this group of data has reached top output")
                break
            else:
                pre_reply = data_reply
                bandwidth_tmp = bandwidth
        cur_role_result = float(pre_reply["throughput"])
        if (abs(pre_role_result - cur_role_result) / pre_role_result < 0.025):
            LOG.info("The performance increases slowly")
        if data_return["throughput"] < data_max["throughput"]:
            data_return = data_max
        pre_role_result = cur_role_result
    LOG.info("Find bottlenecks of this config")
    LOG.info("The max data is %d", data_return["throughput"])
    return data_return
Example No. 19
from flask import Flask
from sklearn.neighbors import KNeighborsClassifier
from sklearn.naive_bayes import GaussianNB
from sklearn.preprocessing import StandardScaler

from ble.subscriber_thread import MQTTSubscriber
from localization.WifiLocalizer import WifiLocalizer
from map.elements.planimetry.sd_instance import SmartDirectionInstance
from utils.parser import Parser

from fingerprinting.main import getPosition
data_path = "/Users/filipkrasniqi/PycharmProjects/smartdirections/assets/smart_directions/"
app = Flask(__name__)
id_sds = {}
subscribers = {}

parser = Parser(data_path).getInstance()


def activate_sd_instance(id_sd, id_device, id_building, id_POI):
    if id_sd not in subscribers:
        subscriberThread = MQTTSubscriber("MQTT", id_sd)
        subscriberThread.start()
        subscribers[id_sd] = subscriberThread
    else:
        subscriberThread = subscribers[id_sd]
    # device gets tracked: needed to have info about anchors
    if id_device != -1 and id_device not in id_sds:
        id_sds[id_device] = id_sd
    if id_building >= 0 and id_POI >= 0:
        subscriberThread.activate_device(id_device, id_building, id_POI)
Example No. 20
 def generator():
     for soup in self.soup_list:
         yield Parser(soup)
Example No. 21
 def fetch_gzh_info(self, keyword):
     url = SogouRequest.generate_search_gzh_url(keyword)
     response = self.__get_and_unlock(url,
                                      self.__unlock_sogou,
                                      self.identify_image_callback_by_hand)
     return Parser.parse_gzh(response.text)
Example No. 22

if __name__ == '__main__':
    parser = argparse.ArgumentParser()
    parser.add_argument('indir', help='Input directory (blocks)')
    parser.add_argument('outdir', help='Output directory')
    parser.add_argument('lang', help='Language code')
    parser.add_argument('--nworker',
                        '-n',
                        default=1,
                        help='Number of workers (default=1)')
    args = parser.parse_args()

    nworker = int(args.nworker)
    os.makedirs(args.outdir, exist_ok=True)
    parser = Parser(args.lang)

    logger.info('processing...')
    pool = multiprocessing.Pool(processes=nworker)
    logger.info('# of workers: %s' % nworker)
    logger.info('parent pid: %s' % os.getpid())
    for i in sorted(os.listdir(args.indir),
                    key=lambda x: os.path.getsize('%s/%s' % (args.indir, x)),
                    reverse=True):
        inpath = '%s/%s' % (args.indir, i)
        outpath = '%s/%s.pp' % (args.outdir, i)
        pool.apply_async(
            process_block,
            args=(
                inpath,
                outpath,
Example No. 23
 def fetch_history_urls_from_profile(self, profile_url):
     response = self.__get_and_unlock(profile_url,
                                      unlock_function=self.__unlock_wechat,
                                      identify_image_callback=self.identify_image_callback_by_hand)
     return Parser.parse_urls_from_profile(response.text)
Example No. 24
 def delete(self):
     to_delete = self.sd_instances.pop(self.indexSelected)
     Parser().getInstance().clear_sd(to_delete)
     self.saveSDInstances()
     self.showList()
Example No. 25
def run(test_config):
    con_dic = test_config["load_manager"]
    scenarios_conf = con_dic["scenarios"]

    if test_config["contexts"]["yardstick_ip"] is None:
        con_dic["contexts"]["yardstick_ip"] =\
            conf_parser.ip_parser("yardstick_test_ip")

    env_pre(test_config)
    LOG.info("yardstick environment prepare done!")

    stack_num = scenarios_conf["num_stack"]
    test_num = conf_parser.str_to_list(scenarios_conf["num_thread"])
    rw = scenarios_conf["rw"]
    bs = scenarios_conf["bs"]
    size = scenarios_conf["size"]
    rwmixwrite = scenarios_conf["rwmixwrite"]
    numjobs = scenarios_conf["num_jobs"]
    direct = scenarios_conf["direct"]
    volume_num = scenarios_conf["volume_num"]
    volume_size = scenarios_conf["volume_size"]

    for value in test_num:
        result = []
        out_num = 0
        num = int(value)
        # pool = multiprocessing.Pool(processes=num)
        threadings = []
        LOG.info("begin to run %s thread" % num)

        starttime = datetime.datetime.now()

        for i in xrange(0, num):
            case_config = {
                "stack_num": int(stack_num),
                "volume_num": volume_num,
                "rw": rw,
                "bs": bs,
                "size": size,
                "rwmixwrite": rwmixwrite,
                "numjobs": numjobs,
                "direct": direct,
                "volume_size": int(volume_size)
            }
            tmp_thread = threading.Thread(target=func_run,
                                          args=(case_config, ))
            threadings.append(tmp_thread)
            tmp_thread.start()

        for one_thread in threadings:
            one_thread.join()
        while not q.empty():
            result.append(q.get())
        for item in result:
            out_num = out_num + float(item)

        print(result)

        endtime = datetime.datetime.now()
        LOG.info("%s thread success %d times" % (num, out_num))
        during_date = (endtime - starttime).seconds

        data_reply = config_to_result(num, out_num, during_date)
        conf_parser.result_to_file(data_reply, test_config["out_file"])

    LOG.info('END POSCA stress multistack storage test')
    return data_reply
Example No. 26
 def saveSDInstances(self):
     Parser().getInstance().write_sd_instances(self.sd_instances)
Example No. 27
 def getSDInstances(self):
     return Parser().getInstance().read_smartdirections_instances()
Example No. 28
class SongsPK(object):
    def __init__(self):
        self.BASE_URL = "https://songspk.shop"
        self.SEARCH_ALBUM_URL = self.BASE_URL + "/search?type=albums&q="
        self.parser = Parser()

    def search_for_albums(self, q):
        return self.parser.parse_data(self.SEARCH_ALBUM_URL + q, {
            'items': [
                {
                    'name': 'albums',
                    'has_children': True,
                    'child_selector': {
                        'type': 'tag',
                        'value': 'figure'
                    },
                    'type': 'selector',
                    'value': 'body > section > main > content > div.archive-body > div.col-body'
                }
            ],
            'keys': {
                'albums': {
                    'image': {
                        'selector': 'div.thumb-image > a > img',
                        'attr': 'src',
                        'prefix': self.BASE_URL,
                        'type': 'selector'
                    },
                    'title': {
                        'selector': 'figcaption > h3 > a',
                        'attr': 'text',
                        'type': 'selector'
                    },
                    'url': {
                        'selector': 'figcaption > h3 > a',
                        'attr': 'href',
                        'prefix': self.BASE_URL,
                        'type': 'selector'
                    },
                }
            }
        })

    def fetch_album_details(self, url):
        return self.parser.parse_data(url, {
            'items': [
                {
                    'name': 'tracks',
                    'has_children': True,
                    'child_selector': {
                        'type': 'tag',
                        'value': 'li'
                    },
                    'type': 'selector',
                    'value': '.page-tracklist-body > ul'
                },
                {
                    'name': 'title',
                    'has_children': False,
                    'type': 'selector',
                    'selector': 'body > section > main > content > div.page-meta-wrapper > div > div.col-md-9.page-meta > ul > li:nth-of-type(1) > div.col-md-9.col-xs-6.text-left',
                    'attr': 'text'
                },
                {
                    'name': 'url',
                    'has_children': False,
                    'type': 'finder',
                    'selector': {
                        'tag': 'link',
                        'extra': {
                            'rel': 'canonical'
                        }
                    },
                    'attr': 'href',
                },
                {
                    'name': 'image',
                    'has_children': False,
                    'type': 'selector',
                    'selector': 'body > section > main > content > div.page-meta-wrapper > div > div.col-md-3.page-cover > img',
                    'attr': 'src',
                    'prefix': self.BASE_URL
                }
            ],
            'keys': {
                'tracks': {
                    'title': {
                        'selector': 'div.col-md-7.col-xs-10.col-text > h3 > a',
                        'attr': 'text',
                        'type': 'selector'
                    },
                    'url': {
                        'selector': 'div.col-md-4.col-xs-5.col-download > a',
                        'attr': 'href',
                        'type': 'selector'
                    }
                }
            }
        })
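A hypothetical usage sketch for the class above; the search query and the shape of the returned data (a mapping that contains an 'albums' list and, for album pages, a 'tracks' list, as suggested by the selector configuration) are assumptions rather than behavior confirmed by the example.

# Hypothetical usage; the result structure is assumed from the selector config above
spk = SongsPK()
results = spk.search_for_albums("love")
for album in results.get("albums", []):
    details = spk.fetch_album_details(album["url"])
    print(album["title"], "->", len(details.get("tracks", [])), "tracks")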
Example No. 29
def run(test_config):
    con_dic = test_config["load_manager"]

    env_pre(test_config)
    LOG.info("yardstick environment prepare done!")

    test_duration = float(con_dic["scenarios"]["test_duration_hours"]) * 3600
    vim_pair_ttl = float(con_dic["scenarios"]["vim_pair_ttl"])
    vim_pair_lazy_cre_delay = float(
        con_dic["scenarios"]["vim_pair_lazy_cre_delay"])
    vim_pair_num = int(
        math.ceil((test_duration - vim_pair_ttl) / vim_pair_lazy_cre_delay) +
        1)

    threadings = []
    result = []
    vim_pair_success_num = 0

    start_time = datetime.datetime.now()

    LOG.info("Data-path test duration are %i seconds", test_duration)
    LOG.info("TTL of each VM pair are %i seconds", vim_pair_ttl)
    LOG.info("Creation delay between VM pairs are %i seconds",
             vim_pair_lazy_cre_delay)
    LOG.info("Number of VM pairs to be created are %i", vim_pair_num)

    for vim_pair_index in xrange(0, vim_pair_num):
        index_thread = threading.Thread(target=func_run, args=(con_dic, ))
        threadings.append(index_thread)
        index_thread.start()
        vim_pair_error = False
        for wait_time in xrange(0, int(vim_pair_lazy_cre_delay)):
            time.sleep(1)
            while not q.empty():
                result.append(q.get())
            for one_result in result:
                if '0' == one_result[0]:
                    vim_pair_error = True
                break
        if vim_pair_error:
            break
    for one_thread in threadings:
        one_thread.join()
    while not q.empty():
        result.append(q.get())
    for item in result:
        vim_pair_success_num += int(item[0])

    end_time = datetime.datetime.now()
    added_duration = (end_time - start_time).seconds
    LOG.info("Number of success VM pairs/threads are %s out %s ",
             vim_pair_success_num, vim_pair_num)

    return_result = config_to_result(test_duration, added_duration,
                                     vim_pair_ttl, vim_pair_lazy_cre_delay,
                                     vim_pair_num, vim_pair_success_num,
                                     result)

    conf_parser.result_to_file(return_result, test_config["out_file"])

    return vim_pair_error
Example No. 30
 def __init__(self):
     self.BASE_URL = "https://songspk.shop"
     self.SEARCH_ALBUM_URL = self.BASE_URL + "/search?type=albums&q="
     self.parser = Parser()
Example No. 31
            'type': float,
            'default': 2.0,
            'help': 'Regularization for the matrix sqrt algorithm'
        }],
        [('--reg2', ), {
            'type': float,
            'default': 1E-8,
            'help': 'Regularization for the gradient of the bures metric'
        }],
        [('--log_period', '-lp'), {
            'type': int,
            'default': 50,
            'help': 'Logging period'
        }]]

argparser = Parser("Deep Elliptical Embeddings")
argparser.add_arguments(args)
opt = argparser.get_dictionary()

viz = Visdom(port=8098)
vm = VisualManager(viz, 'marco')

root = r'/mnt/DATA/Prob_IR/'
context_dataset_name = r'context_data'
encoded_docs_filename = r'encoded_docs_model'
word_index_filename = r'word_index'
emb_filename = r'embeddings_dim_' + str(opt['dim']) + '_margin_' + str(
    opt['margin'])
emb_path = os.path.join(root, emb_filename)

context_dataset_path = os.path.join(root, context_dataset_name)
Example No. 32
class System(object):
    def __init__(self, listen_port, path):

        self._controller_listen_port = listen_port
        # Root directory of the controller
        self._sys_root = os.path.dirname(os.path.abspath(__file__))
        # Load system configuration
        self._system_conf = self._sys_root + "/" + path
        self._federazione = None
        self._customers = None
        self._dp_to_customer = {
        }  # key: (datapath id, ingress_port), value: customer
        self._controller = None

        # Create parser object
        self._parser = Parser(self._controller_listen_port)

    def load_system_configuration(self):
        # Start parser
        self._parser.load(self._system_conf)
        self._federazione = self._parser.get_federazione()
        self._customers = self._parser.get_customers()
        self._controller = self._parser.get_controller()

    def get_controller_info(self):
        return self._controller

    def init(self, public_to_private_a, public_to_private_b):

        server = Server(self._controller_listen_port,
                        self._controller.get_ip(), self._controller,
                        public_to_private_a, public_to_private_b)
        eventlet.spawn(server.run)

    def add_node(self, datapath, ip):
        print "Controller : " + self._controller.get_name(
        ) + " ip: " + self._controller.get_ip(
        ) + " Pool ip pubblici: " + self._controller.get_public_subnet(
        ).__str__()
        for customer in self._controller.get_customers():
            # Identify the datapath by its IP
            if customer.get_ip_datapath() == ip:
                customer.set_datapath(datapath)
                self._dp_to_customer[datapath.id,
                                     customer.get_ingress_port()] = customer
                # print "Aggiunto datapath id: " + str(
                # datapath.id) + " ip: " + ip + " porta di ingresso: " + customer.get_ingress_port() + " al customer: " + customer.get_name()

        print "Federation: "
        print self._federazione
        print "Customer: "
        print self._customers
        #   print self._dp_to_customer
        # Ask for the CE MAC address in order to send DNS queries ---> Habib
        cs = self._controller.get_customers()
        #print '-------cs-------' ,cs[0]
        # print 'ofproto', cs[0].get_datapath()
        if cs[0].get_ns_domain_name() is None:
            self.send_arp_request(cs[0].get_datapath(), cs[0].get_next_hop(),
                                  cs[0].get_ingress_port())
        else:
            self.send_arp_request(cs[0].get_datapath(), cs[0].get_next_hop(),
                                  cs[0].get_ingress_port())
            self.send_arp_request(cs[0].get_datapath(), cs[0].get_router(),
                                  cs[0].get_out_port())

    def handle_packet(self, pkt, dpid, in_port, data, datapath,
                      public_to_private_a, public_to_private_b):
        fph = FactoryPacketHandler.get_instance()
        handler = fph.create_handler(pkt, self._dp_to_customer, in_port, data,
                                     datapath, self._controller,
                                     self._federazione, public_to_private_a,
                                     public_to_private_b)
        # print '@@@@@@@@@@@@@@@@@@@@@@@ Handler', type(handler)
        handler.handle_packet()

    # Send an ARP request to get the MAC needed for DNS packets ---> Habib
    def send_arp_request(self, datapath, dstip, port):
        # Build an ARP request so the controller can resolve the MAC address needed to forward DNS packets
        new_pkt = packet.Packet()
        new_pkt.add_protocol(
            ethernet.ethernet(ethertype=ether.ETH_TYPE_ARP,
                              src='9f:ff:ff:ff:ff:ff',
                              dst='ff:ff:ff:ff:ff:ff'))
        new_pkt.add_protocol(
            arp.arp(hwtype=1,
                    proto=0x800,
                    hlen=6,
                    plen=4,
                    opcode=arp.ARP_REQUEST,
                    src_mac='9f:ff:ff:ff:ff:ff',
                    dst_mac='ff:ff:ff:ff:ff:ff',
                    src_ip=self._controller.get_ip(),
                    dst_ip=dstip))
        self.send_arp_packet(datapath, new_pkt, port)

    # Serialize the ARP packet and send it out of the given port
    def send_arp_packet(self, datapath, pkt, port):
        ofproto = datapath.ofproto
        parser = datapath.ofproto_parser
        pkt.serialize()
        data = pkt.data
        actions = [parser.OFPActionOutput(port=int(port))]
        out = datapath.ofproto_parser.OFPPacketOut(
            datapath=datapath,
            buffer_id=ofproto.OFP_NO_BUFFER,
            in_port=datapath.ofproto.OFPP_CONTROLLER,
            actions=actions,
            data=data)
        datapath.send_msg(out)