Example No. 1
def test_process_events(process_event_task_mock):
    events = [{'wait': 0.01, 'type': 'edit'} for x in range(5)]
    p = Processor()
    assert p.async_results == deque()
    p._process_events(events)
    assert len(p.async_results) == 5
    process_event_task_mock.assert_called_with(events[0])
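For context, a minimal sketch of how a process_event_task_mock fixture might be wired up with unittest.mock; the patch target 'processor.process_event_task' and the module layout are assumptions, not taken from the project above.

import pytest
from unittest.mock import patch


@pytest.fixture
def process_event_task_mock():
    # Hypothetical patch target; the real dotted path depends on where
    # Processor._process_events looks up process_event_task.
    with patch('processor.process_event_task') as mock_task:
        yield mock_task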
Example No. 2
    def __init__(self, args):
        # Ignore warnings about SSL connections
        warnings.simplefilter("ignore", ResourceWarning)

        self.processor = Processor()
        self.user_id = args["user"]
        self.resultsLimit = args["top"]
        self.table = None
Example No. 3
def test_list_instances(client_mock):
    client_mock().describe_instances.return_value = {
        'Reservations': [{
            'Instances': []
        }]
    }
    processor = Processor()
    actual = processor.list_instances()
    assert actual == {'instances': []}
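One plausible shape for the client_mock fixture used above, assuming Processor obtains its EC2 client via boto3.client inside a module named processor; both names are assumptions.

import pytest
from unittest.mock import patch


@pytest.fixture
def client_mock():
    # Hypothetical target: patch boto3.client where the processor module sees
    # it, so client_mock() in the test and Processor share the same stub client.
    with patch('processor.boto3.client') as mock_client:
        yield mock_client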
Example No. 4
def main():
    # Loading Parameters
    parser = init_parameters()
    args, _ = parser.parse_known_args()

    # Updating Parameters (cmd > yaml > default)
    args = update_parameters(parser, args)

    # Setting save_dir
    save_dir = get_save_dir(args)
    U.set_logging(save_dir)
    with open('{}/config.yaml'.format(save_dir), 'w') as f:
        yaml.dump(vars(args), f)

    # Processing
    if args.generate_data or args.generate_label:
        g = Generator(args)
        g.start()

    elif args.extract or args.visualization:
        if args.extract:
            p = Processor(args, save_dir)
            p.extract()
        if args.visualization:
            v = Visualizer(args)
            v.start()

    else:
        p = Processor(args, save_dir)
        p.start()
Example No. 5
def predict():
    """Predict url is for an API calls to predict the price of
    a listing and returns json format predictions list
    :return: json format price prediction
    """
    try:
        processor = Processor()
        input_params = processor.process_input(request.data)
        predictions = processor.predict(input_params)
        json_input = json.loads(request.data)
        insert_inference(json.dumps(json_input), json.dumps(predictions.tolist()))
        return json.dumps({"Predicted Price": predictions.tolist()})
    except (KeyError, json.JSONDecodeError, AssertionError, ValueError):
        return json.dumps({"error": "CHECK INPUT"}), 400
    except Exception as err:
        return json.dumps({"error": "PREDICTION FAILED", "message": str(err)}), 500
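A hedged client-side sketch of exercising an endpoint like this with the requests library; the host, port, route, and payload fields are illustrative assumptions.

import requests

# Hypothetical payload and URL; the real feature names depend on the model.
payload = {"bedrooms": 2, "bathrooms": 1, "accommodates": 3}
resp = requests.post("http://localhost:5000/predict", json=payload)
print(resp.status_code, resp.json())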
Example No. 6
def process_file(cpp_path, build):
    if not file_is_cpp(cpp_path):
        return
    print(cpp_path, "... ", end="")
    fname = os.path.basename(cpp_path)
    gml_path, is_script = reconstruct_gml_path(build, fname)
    if not gml_path:
        cprint("cannot find GML source, skipping!", "yellow")
        return
    try:
        Processor.inject_types(cpp_path, gml_path)
        if is_script:
            Processor.handle_threading(cpp_path, gml_path)
        cprint("processed", "green")
    except FileNotFoundError:
        pass
Example No. 7
def home():
    """Homepage that has an input form where you can enter data
    and it returns the prediction of a listing.
    :return: html template based on GET or POST request
    """
    if request.method == "POST":
        try:
            processor = Processor()
            processed_form = processor.process_form(request.form.to_dict())
            json_input, input_params = processed_form
            prediction = processor.predict(input_params)
            insert_inference(json_input, json.dumps(prediction.tolist()))
            return render_template("prediction.html", prediction=prediction)
        except Exception:
            return "Something went wrong, please try again later", 400
    else:
        return render_template("home.html")
Example No. 8
 def generate_processors(self, processors_amount):
     self.processors = []
     for i in range(0, processors_amount):
         self.processors.append(
             Processor(id=i,
                       tasks_list=self.tasks,
                       strategy=self.strategy,
                       arbitrationRule=self.arbitration,
                       expropriation=self.expropriation))
Example No. 9
def test_partition_key_generation():

    sample_request_id = '156be180-becf-11e9-890a-dfff977ea657'
    event = {'requestContext': {'requestId': sample_request_id}}
    partition_key = Processor.generate_partition_key(event)

    assert type(partition_key) is str
    assert len(partition_key) == 64
    assert partition_key.isalnum()
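The assertions above (64 characters, alphanumeric) are consistent with a SHA-256 hex digest of the request id; a sketch of such a helper, shown here as a plain function rather than the Processor static method used in the test, and offered as an assumption rather than the project's actual code.

import hashlib


def generate_partition_key(event):
    # Hypothetical implementation: a SHA-256 hex digest of the API Gateway
    # request id is 64 hexadecimal (hence alphanumeric) characters.
    request_id = event['requestContext']['requestId']
    return hashlib.sha256(request_id.encode('utf-8')).hexdigest()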
Example No. 10
def test_inactivity_value_no_file():
    # this file does not exist
    inactivity_file_name = TEST_BASE + 'input/inactivity_period_bad_file.txt'
    output_file = ''

    try:
        Processor(inactivity_file_name, output_file)
    except IOError:
        pass
    else:
        assert False, 'Expected IOError for a missing inactivity file'
Example No. 11
def test_inactivity_value_bad():
    # this file has a non-numeric value
    inactivity_file_name = TEST_BASE + 'input/inactivity_period_bad_value.txt'
    output_file = ''

    try:
        Processor(inactivity_file_name, output_file)
    except ValueError:
        pass
    else:
        assert False, 'Expected ValueError for a non-numeric inactivity value'
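These tests imply the inactivity file holds a single integer number of seconds; a minimal sketch of the parsing they exercise, assuming that file format (not taken from the project itself).

def read_inactivity_period(path):
    # Hypothetical helper: open() raises an OSError/IOError for a missing file
    # and int() raises a ValueError for a non-numeric value, matching the tests.
    with open(path) as f:
        return int(f.read().strip())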
Example No. 12
 def __init__(self, port):
     print('initializing')
     self.init_server(('localhost', port))
     self.processor = Processor(self.sample_rate)
     self.s2t = S2T(self.sample_rate)
     self.chat = ChatMan('ckpt/39900_checkpoint.tar')
     self.t2s = T2S(self.sample_rate)
     # TODO: speaker independence
     self.speaker = Audio('output', self.sample_rate, 1.0)
     self.listen()
Example No. 13
def test_inactivity_value():
    inactivity_file_name = TEST_BASE + 'input/inactivity_period.txt'
    output_file = ''

    try:
        proc = Processor(inactivity_file_name, output_file)
    except ValueError:
        assert False
    else:
        assert proc.inactivity_time == 2
Example No. 14
def execute_and_print(processor: Processor):
    c = processor.clock_update()
    clear()
    print(f"END OF CLOCK CYCLE {processor.clock_time}", end=' ')
    print("RESULTS")
    processor.first_stage.print_info_to_console()
    processor.second_stage.print_to_console()
    processor.third_stage.print_to_console()
    processor.fourth_stage.print_to_console()
    processor.fifth_stage.print_to_console()
    return c
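The clear() call above is assumed to wipe the console between cycles; a common cross-platform sketch of such a helper (an assumption, not the project's definition).

import os


def clear():
    # Hypothetical console-clearing helper: 'cls' on Windows, 'clear' elsewhere.
    os.system('cls' if os.name == 'nt' else 'clear')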
Example No. 15
def test_output():
    output_file_name = TEST_BASE + 'output/sessionization.txt'
    raw_data = get_raw_log_data(TEST_BASE + 'input/log.csv')
    inactivity_file_name = TEST_BASE + 'input/inactivity_period.txt'

    proc = Processor(inactivity_file_name, output_file_name)
    proc.process_logs(raw_data)

    with open(output_file_name) as f:
        results = f.readlines()

    with open(TEST_BASE + 'output/ref.txt') as f:
        expected = f.readlines()

    assert len(results) == len(expected), 'Incorrect number of result items'

    for i, r in enumerate(results):
        expected_split = expected[i].split(',')
        r_split = r.split(',')
        for j, piece in enumerate(r_split):
            assert piece == expected_split[j], \
                'Incorrect value for piece {}'.format(j)
Example No. 16
def main():
    parser = Init_parameters()

    # Update parameters by yaml
    args = parser.parse_args()
    if os.path.exists('/home/aayadi/projet/RA-GCNv22/configs/' + args.config +
                      '.yaml'):
        with open(
                '/home/aayadi/projet/RA-GCNv22/configs/' + args.config +
                '.yaml', 'r') as f:
            yaml_arg = yaml.load(f, Loader=yaml.FullLoader)
            default_arg = vars(args)
            for k in yaml_arg.keys():
                if k not in default_arg.keys():
                    raise ValueError('Unknown parameter: {}'.format(k))
            parser.set_defaults(**yaml_arg)
    else:
        raise ValueError('Config does not exist: {}'.format(args.config))

    # Update parameters by cmd
    args = parser.parse_args()

    # Show parameters
    print('\n************************************************')
    print('The running config is presented as follows:')
    v = vars(args)
    for i in v.keys():
        print('{}: {}'.format(i, v[i]))
    print('************************************************\n')

    # Processing
    os.environ['CUDA_VISIBLE_DEVICES'] = ','.join(list(map(str, args.gpus)))
    if args.visualization:
        if args.extract:
            p = Processor(args)
            p.extract()

        print('Starting visualizing ...')
        v = Visualizer(args)
        v.show_wrong_sample()
        v.show_important_joints()
        v.show_heatmap()
        v.show_skeleton()
        print('Finish visualizing!')

    else:
        p = Processor(args)
        p.start()
Example No. 17
def test_next_result_ready():
    a = Mock()
    a.ready.return_value = False
    p = Processor()
    p.async_results = deque([a])
    res = p._next_result_ready()
    assert res is False
    a.ready.return_value = True
    p.async_results = deque([a])
    res = p._next_result_ready()
    assert res is True
Example No. 18
def list_instances(event, context):
    processor = Processor()
    instances = processor.list_instances()
    return make_response(instances, 200)
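Assuming this handler runs on AWS Lambda behind an API Gateway proxy integration, make_response would likely shape the response dict accordingly; a hedged sketch, since the helper's real definition is not shown above.

import json


def make_response(body, status_code):
    # Hypothetical helper producing a Lambda proxy integration response.
    return {
        "statusCode": status_code,
        "headers": {"Content-Type": "application/json"},
        "body": json.dumps(body),
    }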
Example No. 19
class Metrics:
    def __init__(self, args):
        # Ignore warnings about SSL connections
        warnings.simplefilter("ignore", ResourceWarning)

        self.processor = Processor()
        self.user_id = args["user"]
        self.resultsLimit = args["top"]
        self.table = None

    def _load_table(self, event):
        table = agate.Table.from_object(list(self.processor.messagesQueue))

        event.set()

        self.table = table

        return

    def _analyze_senders(self, event):
        data = (self.table.pivot("fields/from").where(
            lambda row: row["fields/from"] is not None).order_by(
                "Count", reverse=True).limit(self.resultsLimit))

        _values = data.columns.values()

        data_keys = list(_values[0].values())
        data_count = [[i] for i in list(map(int, list(_values[1].values())))]

        event.set()

        print(f"\n\n{helpers.h1_icn} Senders (top {self.resultsLimit})\n")
        args = {
            "stacked": False,
            "width": 55,
            "no_labels": False,
            "format": "{:<,d}",
            "suffix": "",
            "vertical": False,
            "different_scale": False,
        }

        chart(colors=[94], data=data_count, args=args, labels=data_keys)

    def _analyze_count(self, event):
        # Average emails per day
        total = self.table.aggregate([("total", agate.Count())])["total"]
        total_senders = (
            self.table.distinct("fields/from").select("fields/from").aggregate(
                [("total", agate.Count())])["total"])

        if total == 0:
            first_email_date = ""
            last_email_date = None
        else:
            date_data = self.table.where(
                lambda row: row["fields/date"] is not None).compute([(
                    "reduce_to_datetime",
                    agate.Formula(
                        agate.DateTime(datetime_format="%Y-%m-%d %H:%M:%S"),
                        lambda row: helpers.reduce_to_datetime(row[
                            "fields/date"]),
                    ),
                )])
            first_email_date = (date_data.order_by("reduce_to_datetime").limit(
                1).columns["fields/date"].values()[0])
            last_email_date = (date_data.order_by(
                "reduce_to_datetime",
                reverse=True).limit(1).columns["fields/date"].values()[0])
        event.set()

        metrics = [
            ["Total emails", total],
            ["Senders", total_senders],
            ["First Email Date", first_email_date],
        ]

        if last_email_date:
            date_delta = helpers.convert_date(
                last_email_date) - helpers.convert_date(first_email_date)
            avg_email_per_day = total / date_delta.days
            metrics.append(["Avg. Emails/Day", f"{avg_email_per_day:.2f}"])

        print(f"\n\n{helpers.h1_icn} Stats\n")
        print(termtables.to_string(metrics))

    def _analyze_date(self, event):
        table = self.table.where(
            lambda row: row["fields/date"] is not None).compute([
                (
                    "reduce_to_date",
                    agate.Formula(
                        agate.Text(),
                        lambda row: helpers.reduce_to_date(row["fields/date"]),
                    ),
                ),
                (
                    "reduce_to_year",
                    agate.Formula(
                        agate.Number(),
                        lambda row: helpers.reduce_to_year(row["fields/date"]),
                    ),
                ),
                (
                    "reduce_to_time",
                    agate.Formula(
                        agate.Number(),
                        lambda row: helpers.reduce_to_time(row["fields/date"]),
                    ),
                ),
            ])

        years = table.distinct(
            "reduce_to_year").columns["reduce_to_year"].values()

        _data = {}

        for year in years:
            _data[year] = (table.where(lambda row: row[
                "reduce_to_year"] == year).select("reduce_to_date").pivot(
                    "reduce_to_date").order_by("reduce_to_date"))

        event.set()

        print(f"\n\n{helpers.h1_icn} Date\n")

        for year in years:
            data_keys = list(_data[year].columns["reduce_to_date"].values())
            _counts = list(
                map(int, list(_data[year].columns["Count"].values())))
            _sum = sum(_counts)
            data_count = [[i] for i in _counts]

            args = {
                "color": False,
                "custom_tick": False,
                "start_dt": f"{year}-01-01"
            }

            print(f"\n{helpers.h2_icn} Year {year} ({_sum:,} emails)\n")
            calendar_heatmap(data=data_count, args=args, labels=data_keys)

    def analyse(self):
        """
        read from the messages queue, and generate:
        1. Counter for From field
        2. Counter for Time field (by hour)
        """

        # {'id': '16f39fe119ee8427', 'labels': ['UNREAD', 'CATEGORY_UPDATES', 'INBOX'], 'fields': {'from': 'Coursera <*****@*****.**>', 'date': 'Tue, 24 Dec 2019 22:13:09 +0000'}}

        with concurrent.futures.ThreadPoolExecutor() as executor:
            progress = Spinner(f"{helpers.loader_icn} Loading messages ")

            event = Event()

            future = executor.submit(self._load_table, event)

            while not event.is_set() and future.running():
                progress.next()
                time.sleep(0.1)

            progress.finish()

            progress = Spinner(f"{helpers.loader_icn} Analysing count ")

            event = Event()

            future = executor.submit(self._analyze_count, event)

            while not event.is_set() and future.running():
                progress.next()
                time.sleep(0.1)

            progress.finish()

            progress = Spinner(f"{helpers.loader_icn} Analysing senders ")

            event = Event()

            future = executor.submit(self._analyze_senders, event)

            while not event.is_set() and future.running():
                progress.next()
                time.sleep(0.1)

            progress.finish()

            progress = Spinner(f"{helpers.loader_icn} Analysing dates ")

            event = Event()

            future = executor.submit(self._analyze_date, event)

            while not event.is_set() and future.running():
                progress.next()
                time.sleep(0.1)

            progress.finish()

    def start(self):
        messages = self.processor.get_messages()

        self.processor.get_metadata(messages)

        self.analyse()
Example No. 20
def test_processor_init():
    p = Processor()
    assert p.async_results is not None
    assert isinstance(p.async_results, deque)
Example No. 21
def test_processor_process(process_events_mock, write_outfile_mock):
    events = [{'wait': 0.01, 'type': 'edit'} for x in range(5)]
    p = Processor()
    p.process(events=events)
    process_events_mock.assert_called_once()
    write_outfile_mock.assert_called_once()
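One plausible way to provide process_events_mock and write_outfile_mock is to patch the corresponding Processor methods with patch.object; the import path and the method name _write_outfile are assumptions inferred from the tests above.

import pytest
from unittest.mock import patch

from processor import Processor  # hypothetical import path


@pytest.fixture
def process_events_mock():
    # Patches the method exercised indirectly in Example No. 1.
    with patch.object(Processor, '_process_events') as m:
        yield m


@pytest.fixture
def write_outfile_mock():
    # Hypothetical method name; labeled as an assumption.
    with patch.object(Processor, '_write_outfile') as m:
        yield m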
Example No. 22
from src.utils import _hash, log

url = 'tcp://10.9.70.170:4004'
family_name = 'poc-blockchain'
version = ['1.0']


def func(transaction, context):
    print('----')
    print(transaction)
    try:
        payload = structure.Payload()
        payload.ParseFromString(transaction.payload)
        print(payload)
        log.info(payload)
        info = Actions[payload.action](transaction, context, payload,
                                       family_name)
        log.info(info)
    except Exception as e:
        print(e)
        log.exception(e)


if __name__ == '__main__':
    handler = Handler(family_name, version, func)
    processor = Processor(handler, url)
    processor.start()
    # entity = structure.Entity()
    # entity1 = structure.Entity()
    # entity.childs.append(entity1)
Example No. 23
    print("RESULTS")
    processor.first_stage.print_info_to_console()
    processor.second_stage.print_to_console()
    processor.third_stage.print_to_console()
    processor.fourth_stage.print_to_console()
    processor.fifth_stage.print_to_console()
    return c


if __name__ == '__main__':
    print(
        "*****************WELCOME TO THE MIPS PROCESSOR*****************************"
    )
    var = input("ENTER X to EXIT, OR PRESS ENTER TO CONTINUE: ")
    while var != 'x' and var != 'X':
        main_processor = Processor()
        clear()
        file_found = False
        var3 = 'w'
        while not file_found and var3 != 'x' and var3 != 'X':
            file_found = True
            print(
                "TIP: ENTER PATHS RELATIVE TO src, OTHERWISE ENTER FULL PATH")
            file_path = input("Please Enter asm file: ")
            try:
                main_processor.load_file_into_im(file_path)
            except FileNotFoundError:
                file_found = False
                clear()
                var3 = input(
                    "FILE NOT FOUND:\nPRESS X TO EXIT OR PRESS ENTER TO RETRY ENTERING FILE PATH: ")
Example No. 24
    def run(self):

        # Necessities
        single_core = Processor()

        # Create User Output
        HELPER[0].schedule_start_message(self, self.schedule_mode)

        # Scheduling - loop
        while len(PROCESS_LIST) > 0:

            # Determine which scheduler to use and get the decision accordingly
            if self.schedule_mode == FCFS:
                process_found, active_index, finish = first_come_first_serve(
                    single_core)

            elif self.schedule_mode == SJF:
                process_found, active_index, finish = smallest_job_first(
                    single_core)

            elif self.schedule_mode == SRTF:
                process_found, active_index, finish = shortest_remaining_time_first(
                    single_core)

            elif self.schedule_mode == RR:
                process_found, active_index, finish = round_robin(single_core)

            elif self.schedule_mode == PBS:
                process_found, active_index, finish = priority_based(
                    single_core)

            elif self.schedule_mode == EDF:
                process_found, active_index, finish = earliest_deadline_first(
                    single_core)

            elif self.schedule_mode == HRRN:
                process_found, active_index, finish = highest_response_ratio_next(
                    single_core)

            else:
                self.window.display_text("Error occurred! Please try again!")
                return

            # CPU got to wait, no Process arrived yet
            if not process_found:
                self.window.display_text(
                    "No Process in Queue. Running Empty...")
                self.window.display_text(
                    f"System-Clock: {single_core.get_clock_time_step()}")
                single_core.work()

            else:

                # Put the CPU to work according to the decisions made
                single_core.work_process(self, active_index,
                                         self.schedule_mode, single_core,
                                         finish)

        # Finish Message
        self.window.display_end_line()
        self.window.display_text("Finished all Processes!")
        self.window.display_text(
            f"Average Waiting Time: {single_core.get_average_waiting()}")

        return
Example No. 25
def main():
    parser = Init_parameters()

    # Update parameters by yaml
    args = parser.parse_args()
    if os.path.exists('./configs/' + args.config + '.yaml'):
        with open('./configs/' + args.config + '.yaml', 'r') as f:
            yaml_arg = yaml.load(f, Loader=yaml.FullLoader)
            default_arg = vars(args)
            for k in yaml_arg.keys():
                if k not in default_arg.keys():
                    raise ValueError('Unknown parameter: {}'.format(k))
            parser.set_defaults(**yaml_arg)
    else:
        raise ValueError('Config does not exist: {}'.format(args.config))
    # Update parameters by cmd
    args = parser.parse_args()
    # Show parameters
    print('\n************************************************')

    #if type(args.gpus) == int:
    #    n = args.gpus
    #    if n == 4:
    #        args.gpus = [0, 1, 2, 3]
    #    else:
    #        args.gpus = [0]

    print('The running config is presented as follows:')
    print_default_keys = ['config', 'batch_size', 'pretrained', 'model_stream']
    print_eval_keys = [
        'occlusion_part', 'occlusion_time', 'occlusion_block',
        'occlusion_rand', 'jittering_joint', 'jittering_frame', 'sigma'
    ]

    v = vars(args)
    if '-g' in sys.argv or '--gpus' in sys.argv:
        aa = args.gpus
        args.gpus = [int(x) for x in aa.split(',')]
    else:
        if node == 'obama':
            args.gpus = [0, 1, 2, 3]
        elif node == 'puma':
            args.gpus = [0]
        else:
            args.gpus = [0]

    for i in v.keys():
        if i in print_default_keys:
            print('{}: {}'.format(i, v[i]))

    if args.evaluate:
        for i in v.keys():
            if i in print_eval_keys:
                if v[i]:
                    print('{}: {}'.format(i, v[i]))

    print('************************************************\n')
    # Processing
    os.environ['CUDA_VISIBLE_DEVICES'] = ','.join(list(map(str, args.gpus)))
    if args.visualization:
        if args.extract:
            p = Processor(args)
            p.extract()

        print('Starting visualizing ...')
        v = Visualizer(args)
        v.show_wrong_sample()
        v.show_important_joints()
        v.show_heatmap()
        v.show_skeleton()
        print('Finish visualizing!')

    else:

        if args.baseline:
            p = Processor_BS(args)
        else:
            p = Processor(args)
        p.start()