def queue_delete(self, queue_name, if_unused=False, if_empty=False):
     try:
         return self.channel.queue_delete(queue_name,
                                          if_unused=if_unused,
                                          if_empty=if_empty)
     except pika.exceptions.AMQPError as err:
         Logger.error(__file__, err.args)
 def exchange_unbind(self, destination, source, routing_key=''):
     try:
         return self.channel.exchange_unbind(destination=destination,
                                             source=source,
                                             routing_key=routing_key)
     except pika.exceptions.AMQPError as err:
         Logger.error(__file__, err.args)
 def queue_unbind(self, queue_name, exchange_name, routing_key=None):
     try:
         return self.channel.queue_unbind(queue_name,
                                          exchange_name,
                                          routing_key=routing_key)
     except pika.exceptions.AMQPError as err:
         Logger.error(__file__, err.args)
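
# Usage sketch (not from the original source): tearing down RMQ topology with
# the wrappers above. Assumes `rmq` is an opened instance of this wrapper
# class; queue and exchange names are placeholders.
def _teardown_example(rmq):
    rmq.queue_unbind('orders_queue', 'orders_exchange', routing_key='green')
    rmq.exchange_unbind('dest_exchange', 'src_exchange')
    # Each wrapper returns pika's reply frame, or None if an AMQPError was
    # caught and logged.
    return rmq.queue_delete('orders_queue', if_unused=False, if_empty=True)
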
    def __generate_orders(self):
        '''
        Generate green zone order records according to the generator configurations
        '''

        Logger.info(__file__, 'Started generating order history in green zone')

        previous_time = datetime.datetime.now()
        for period in range(len(self.configs.orders_volumes_for_generation)):
            zone_index = 0
            for zone in self.configs.orders_volumes_for_generation[period]:
                for i in range(
                        int(self.configs.orders_volumes_for_generation[period]
                            [zone])):
                    order = self.__generate_general_order_information()

                    self.history.orders[order.id] = order
                    records = self.__get_order_in_zone(
                        order.id, order.status_sequence, period, zone,
                        order.statuses_in_blue_zone)
                    self.history.records.extend(records)
                    self.inc_statistic('Generated orders', 1)
                    self.inc_statistic('Generated records', len(records))

                    if len(self.history.orders) % self.configs.settings[
                            Values.GENERAL_SECTION_NAME][
                                Values.BATCH_SIZE] == 0:
                        self.add_time_statistic(
                            'Order history generation',
                            (datetime.datetime.now() -
                             previous_time).total_seconds() * 1000)
                        self.send_to_rmq()
                        previous_time = datetime.datetime.now()
                zone_index += 1
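
# Sketch (assumption, not from the source): the modulo check above flushes one
# batch to RMQ every BATCH_SIZE generated orders. The same boundary logic in
# isolation:
def _batch_boundaries_example(total_orders=10, batch_size=4):
    # send_to_rmq() would fire after orders 4 and 8 here -> [4, 8]
    return [n for n in range(1, total_orders + 1) if n % batch_size == 0]
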
    @staticmethod
    def report(data):
        print()
        Logger.info(__file__, 'Start reporting')

        print()
        print('========== REPORT ==========')

        for key, item in data.items():
            print(f'----- {key} -----')
            if isinstance(item, list):
                if len(item) > 1:
                    max_value = max(item)
                    min_value = min(item)
                    avg_value = sum(item) / len(item)
                    total = sum(item)

                    print(f'Max: {max_value} ms')
                    print(f'Min: {min_value} ms')
                    print(f'Avg: {avg_value} ms')
                    print(f'Total: {total} ms')
                elif item:  # guard against empty timing lists
                    print(f'{key}: {item[0]} ms')
            else:
                print(item)
            print()

        print('========== REPORT END ==========')
        print()
        Logger.info(__file__, 'Reporting finished')
        print()
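
# Usage sketch: `report` prints scalar statistics directly and summarizes
# lists of millisecond timings with max/min/avg/total. The sample data below
# is hypothetical.
def _report_example():
    ConsoleReporter.report({
        'Generated orders': 1000,
        'Order history generation': [12.5, 9.8, 14.1],  # ms per batch
    })
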
    def declare_exchange(self,
                         exchange_name,
                         exchange_type,
                         passive=False,
                         durable=True,
                         auto_delete=False):
        '''
        Declare new exchange

        :param exchange_name: exchange name
        :param exchange_type: exchange type
        :param passive: only check whether the exchange exists
        :param durable: survive broker restarts
        :param auto_delete: delete the exchange once no queues are bound to it
        :return:
        '''

        Logger.debug(
            __file__, 'Declaring exchange {} with exchange type {}'.format(
                exchange_name, exchange_type.value))

        return self.conn.declare_exchange(exchange_name,
                                          exchange_type,
                                          passive=passive,
                                          durable=durable,
                                          auto_delete=auto_delete)
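
# Usage sketch (assumption: `exchange_type` is the enum whose `.value` is
# logged above, and `rmq_service` wraps an open connection):
def _declare_exchange_example(rmq_service, exchange_type):
    return rmq_service.declare_exchange('orders_exchange',
                                        exchange_type,
                                        durable=True,
                                        auto_delete=False)
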
    def __calculate_first_generation_period_start_date(self):
        '''
        Calculate start date of the first generation period
        '''

        Logger.info(
            __file__,
            'Calculating start date of the first order generation period')

        current_date = datetime.datetime.today().replace(hour=0,
                                                         minute=0,
                                                         second=0,
                                                         microsecond=0)
        day_of_week = current_date.weekday() + 1

        self.configs.is_current_date_in_trading_period = day_of_week in (1, 2,
                                                                         5)
        days_to_last_period = day_of_week + 7 - 5 if day_of_week < 5 else day_of_week - 5
        days_to_start_period = days_to_last_period + (
            7 * (len(self.configs.orders_volumes_for_generation)))
        self.configs.start_date = current_date + datetime.timedelta(
            days=-days_to_start_period)

        Logger.info(
            __file__,
            'Start date of the first order generation period calculated: {}'.
            format(self.configs.start_date))
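
# Worked example of the arithmetic above (illustrative only): weekday() is
# 0-based, so day_of_week runs 1 (Mon) .. 7 (Sun) and 5 is Friday, the end of
# a trading period.
def _start_date_offset_example(day_of_week=3, periods=2):
    # Wednesday (3): back to the previous Friday is 3 + 7 - 5 = 5 days,
    # plus 7 days per generation period: 5 + 7 * 2 = 19 days before today.
    days_to_last_period = (day_of_week + 7 - 5
                           if day_of_week < 5 else day_of_week - 5)
    return days_to_last_period + 7 * periods
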
    def __add_parameter(self, cfg_parser, section_name, parameter_name, value):
        cfg_parser.set(section_name, parameter_name, str(value))

        Logger.add_to_journal(
            __file__, Level.DEBUG,
            "Added parameter '{}' to section {} with value: {}".format(
                parameter_name, section_name, value))
    def execute_generation(self):
        '''
        Execute generation orders history
        '''

        self.history.clear_history()

        Logger.info(__file__, 'Generating order history started')

        self.__generate_orders()

        Logger.info(__file__, 'Generating order history finished')

        self.rmq.publish(
            self.configs.settings[Values.RMQ_SECTION_NAME][
                Values.RMQ_EXCHANGE_NAME],
            self.configs.settings[Values.RMQ_SECTION_NAME][
                Values.RMQ_EXCHANGE_GREEN_RECORDS_ROUTING_KEY], 'stop')

        self.rmq.publish(
            self.configs.settings[Values.RMQ_SECTION_NAME][
                Values.RMQ_EXCHANGE_NAME],
            self.configs.settings[Values.RMQ_SECTION_NAME][
                Values.RMQ_EXCHANGE_RED_RECORDS_ROUTING_KEY], 'stop')

        self.rmq.publish(
            self.configs.settings[Values.RMQ_SECTION_NAME][
                Values.RMQ_EXCHANGE_NAME],
            self.configs.settings[Values.RMQ_SECTION_NAME][
                Values.RMQ_EXCHANGE_BLUE_RECORDS_ROUTING_KEY], 'stop')

        self.finish_event.set()
    def __msg_consumer(self, channel, method, header, body):
        if body == b'stop':
            self.stop_messages_count += 1
            if self.stop_messages_count == 3:
                self.send_consumed_data_to_mysql()

                self.rmq.stop_consuming()
                self.finish_event.set()
        else:
            self.set_stat('Consumed messages', 1)
            order_record = Entities.Protobuf.OrderInformation_pb2.OrderInformation()
            order_record.ParseFromString(body)

            self.to_data_list(order_record)

            if len(self.consumed_data) == self.configs.settings[
                    Values.GENERAL_SECTION_NAME][Values.BATCH_SIZE]:
                OrderHistoryMaker.add_time_statistic(
                    'Consuming data from RabbitMQ',
                    (datetime.datetime.now() -
                     self.previous_time).total_seconds() * 1000)
                Logger.info(__file__, "Batch size data consumed")
                self.send_consumed_data_to_mysql()
                self.previous_time = datetime.datetime.now()

        channel.basic_ack(delivery_tag=method.delivery_tag)
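
# Sketch (assumption): the callback above is registered through the `consume`
# wrapper shown later in this collection, then pika's blocking consume loop is
# started. The 'stop' sentinel is published once per zone routing key, hence
# the stop_messages_count == 3 check.
def _register_consumer_example(rmq, callback):
    rmq.consume('green_records_queue', callback)  # queue name is a placeholder
    rmq.channel.start_consuming()  # pika BlockingChannel consume loop
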
    def get_db_report_date(cls):
        from Service.LoggerService.Implementation.DefaultPythonLoggingService import \
            DefaultPythonLoggingService as Logger
        from Config.Configurations import Configuration
        from Config.Configurations import ValuesNames as Values
        from Service.DbService.Implementation.MySqlService import MySqlService
        from Entities.StatisticsDataStorage import StatisticsDataStorage

        Logger.info(__file__, 'Getting statistic from db started')
        mysql_settings = Configuration().settings[Values.MYSQL_SECTION_NAME]

        mysql = MySqlService(user=mysql_settings[Values.MYSQL_USER],
                             password=mysql_settings[Values.MYSQL_PASSWORD],
                             host=mysql_settings[Values.MYSQL_HOST],
                             port=mysql_settings[Values.MYSQL_PORT],
                             database=mysql_settings[Values.MYSQL_DB_NAME])
        try:
            mysql.open_connection()

            for (value, name) in mysql.execute(Values.MYSQL_GET_REPORT_QUERY,
                                               select=True):
                if name == '1':
                    name = 'Red zone orders avg amount'
                elif name == '2':
                    name = 'Green zone orders avg amount'
                elif name == '3':
                    name = 'Blue zone orders avg amount'

                StatisticsDataStorage.statistics[name] = value
            Logger.info(__file__, 'Report data loaded from database')

        except AttributeError as er:
            Logger.error(__file__, er.args)
            Logger.info(__file__, 'Sending records to MySQL aborted')
        Logger.info(__file__, 'Getting statistic from db finished')
    def set_general_order_info(self, general_order_info):
        if isinstance(general_order_info, GeneralOrderInformation):
            self.__order_general_information = general_order_info

            Logger.debug(
                __file__, 'Set general order info parameter to {}'.format(
                    self.__order_general_information))
        return self
    def open(self,
             user=pika.connection.Parameters.DEFAULT_USERNAME,
             password=pika.connection.Parameters.DEFAULT_PASSWORD,
             host=pika.connection.Parameters.DEFAULT_HOST,
             port=pika.connection.Parameters.DEFAULT_PORT,
             virtual_host=pika.connection.Parameters.DEFAULT_VIRTUAL_HOST):
        try:
            self.__user = user
            self.__password = password
            self.__host = host
            self.__port = port
            self.__virtual_host = virtual_host
            credentials = pika.PlainCredentials(username=user,
                                                password=password)
            params = pika.ConnectionParameters(host=host,
                                               port=port,
                                               virtual_host=virtual_host,
                                               credentials=credentials)

            self.connection = pika.BlockingConnection(params)
            self.channel = self.connection.channel()

        except pika.exceptions.AMQPError as err:
            Logger.error(__file__, err.args)
            return None
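
# Usage sketch (assumption: `connection_cls` stands in for the class the
# `open` method above belongs to; the credentials are placeholders matching
# pika's defaults):
def _open_connection_example(connection_cls):
    conn = connection_cls()
    conn.open(user='guest', password='guest', host='localhost', port=5672)
    return conn  # conn.channel is ready for declare/bind/publish calls
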
 def queue_unbind(self, queue_name, exchange_name, routing_key=None):
     Logger.debug(
         __file__,
         'Unbinding queue {} from exchange {} with routing key {}'.format(
             queue_name, exchange_name, routing_key))
     return self.conn.queue_unbind(queue_name=queue_name,
                                   exchange_name=exchange_name,
                                   routing_key=routing_key)
 def report(self):
     Logger.info(__file__, 'Start reporting')
     print('Start getting reporting data at {}'.format(
         datetime.datetime.now()))
     Utils.get_db_report_date()
     ConsoleReporter.report(StatisticsDataStorage.statistics)
     print('Reporting finished at {}'.format(datetime.datetime.now()))
     Logger.info(__file__, 'Reporting finished')
 def commit(self):
     try:
         self.__conn.commit()
         return True
     except mysql.connector.Error as err:
         Logger.error(__file__, err.msg)
         self.rollback()
         return False
 def get_cursor(self):
     try:
         return self.__conn.cursor()
     except mysql.connector.Error as err:
         Logger.error(__file__, err.msg)
         if not self.is_available():
             self.reconnect()
             return self.get_cursor()
    def consume(self, queue_name, on_consume_callback, reconnect=False):
        try:
            if not reconnect:
                self.consume_info[queue_name] = on_consume_callback
            self.channel.basic_consume(queue=queue_name,
                                       on_message_callback=on_consume_callback)

        except pika.exceptions.AMQPError as err:
            Logger.error(__file__, err.args)
 def start(self):
     Logger.add_to_journal(__file__, Level.INFO, 'Launcher started')
     self.__load_configs()
     self.__execute()
     self.generator_and_publisher_thread = None
     self.consumer_thread = None
     self.generator_and_publisher_event = None
     self.consumer_event = None
     self.report_thread = None
 def __open_conn(self):
     try:
         self.__conn = None
         self.__conn = mysql.connector.connect(user=self.__user,
                                               password=self.__password,
                                               host=self.__host,
                                               port=self.__port,
                                               database=self.__database)
     except mysql.connector.Error as err:
         Logger.error(__file__, err.msg)
    def __get_friends_list(self):
        Logger.info(__file__, "Load friends list web elements")

        friends = self.get_elements(by=By.XPATH,
                                    value=self.__FRIENDS_XPATH,
                                    wait=self.configs.settings[Values.SETTINGS][
                                        Values.ELEMENT_WAIT_TIME])

        Logger.debug(__file__, f"Size of loaded friends list: {len(friends)}")
        return friends
    def queue_purge(self, queue_name):
        '''
        Purge queue

        :param queue_name: queue name
        :return:
        '''

        Logger.debug(__file__, 'Purging queue {}'.format(queue_name))
        return self.conn.queue_purge(queue_name)
    def open(self):
        self.__open_conn()
        if not self.is_available():
            self.reconnect()
        else:
            Logger.debug(
                __file__, 'Created mysql connection with params {} {} {} {}'.format(
                    self.__user, self.__password, self.__host, self.__database))
    def declare_queue(self, queue_name, durable):
        '''
        Declare new queue

        :param queue_name: queue name
        :param durable: survive broker restarts
        :return:
        '''

        Logger.debug(__file__,
                     'Declaring queue with name {}'.format(queue_name))
        return self.conn.declare_queue(queue_name=queue_name, durable=durable)
 def declare_queue(self,
                   queue_name,
                   durable=True,
                   exclusive=False,
                   auto_delete=False):
     try:
         return self.channel.queue_declare(queue=queue_name,
                                           durable=durable,
                                           exclusive=exclusive,
                                           auto_delete=auto_delete)
     except pika.exceptions.AMQPError as err:
         Logger.error(__file__, err.args)
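
# Usage sketch: declaring a durable work queue with the wrapper above
# (assumption: `conn` is an opened instance of this connection class).
def _declare_queue_example(conn):
    frame = conn.declare_queue('orders_queue', durable=True)
    # pika's queue_declare reply carries the (possibly server-generated) name
    return frame.method.queue if frame else None
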
    def exchange_delete(self, exchange_name=None, if_unused=False):
        '''
        Delete exchange

        :param exchange_name: exchange name
        :param if_unused: only delete the exchange if it has no bindings
        :return:
        '''

        Logger.debug(__file__, 'Deleting exchange {}'.format(exchange_name))
        return self.conn.exchange_delete(exchange_name=exchange_name,
                                         if_unused=if_unused)
    def exchange_unbind(self, destination, source, routing_key=''):
        '''
        Unbind exchange
        '''

        Logger.debug(
            __file__,
            'Unbinding exchange: destination {}, source {}, routing key {}'.format(
                destination, source, routing_key))
        return self.conn.exchange_unbind(destination=destination,
                                         source=source,
                                         routing_key=routing_key)
    def __init__(self,
                 user='******',
                 password='',
                 host='127.0.0.1',
                 port='3306',
                 database=''):
        self.__connection = MySqlConnection(user=user,
                                            password=password,
                                            host=host,
                                            port=port,
                                            database=database)

        Logger.debug(__file__, 'Created mysql service')
    def open(self, path, mode='r'):
        if path is None:
            if self.path is None:
                raise TypeError('Path parameter of file descriptor must not be None')
            else:
                path = self.path
        if mode is None:
            raise TypeError('Mode parameter of file descriptor must not be None')

        try:
            self.__file = open(file=path, mode=mode, newline='')
        except Exception as err:
            Logger.error(__file__, err.__str__())
    def send_consumed_data_to_mysql(self):
        Logger.info(__file__, 'Sending consumed batch of records to MySQL')
        self.previous_time = datetime.datetime.now()

        self.mysql.execute_multiple(Values.MYSQL_INSERT_QUERY,
                                    self.consumed_data)

        OrderHistoryMaker.add_time_statistic(
            'Send data to MySQL',
            (datetime.datetime.now() - self.previous_time).total_seconds() *
            1000)

        self.consumed_data.clear()
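
# Sketch (assumption): `execute_multiple` presumably maps onto
# cursor.executemany-style batching. A minimal equivalent against the raw
# mysql-connector API:
def _executemany_example(conn, query, rows):
    cursor = conn.cursor()
    cursor.executemany(query, rows)  # one batched round trip per flush
    conn.commit()
    cursor.close()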