def cache_line(line_data):
    """Cache a single output line and return its de-duplication references.

    Args:
        line_data: a 2-tuple ``(order_id, line)`` where ``line`` is the text
            to cache; it is de-duplicated by MD5 digest against LinesCache.

    Returns:
        Tuple ``(output_ref, order_id, line_ref)``: ``output_ref`` is the
        ORDER_ID of an existing LinesCacheMap row pointing at the same line
        (``None`` when the line is not mapped yet), ``line_ref`` is the
        LINE_ID of the cached line (existing or freshly inserted).

    Raises:
        The original exception type, re-raised with file/line context and
        chained to the original (``from e``) so the traceback is preserved.
    """
    order_id, line = line_data
    output_ref = None
    is_output_required = True
    try:
        # MD5 is used purely as a de-duplication key, not for security.
        hash_tag = hashlib.md5(line.encode('utf-8')).hexdigest()
        # hash_tag is a hex digest, so interpolating it cannot inject SQL;
        # a parameterized query would still be preferable if execute() supports it.
        entry = DataHandlerService().execute(
            f"SELECT LINE_ID FROM LinesCache WHERE HashTag == '{hash_tag}'"
        )
        if not entry:
            # Line not cached yet: insert it and take the generated rowid.
            DataHandlerService().execute(
                insert_sql('LinesCache', ['LINE_ID', 'HashTag', 'Line']),
                *(None, hash_tag, line))
            line_ref = DataHandlerService().get_last_row_id
        else:
            line_ref = entry[0][0]

        if is_output_required:
            entry1 = DataHandlerService().execute(
                f"SELECT ORDER_ID FROM LinesCacheMap WHERE LINE_REF == {line_ref}"
            )
            if entry1:
                output_ref = entry1[0][0]
                if output_ref != order_id:
                    is_output_required = False

    except Exception as e:
        f, li = get_error_info()
        # Chain the original exception so debugging keeps the real traceback
        # (the original code re-raised without "from e" and lost it).
        raise type(e)(f"Unexpected error: {e}; File: {f}:{li}") from e
    return output_ref, order_id, line_ref
# 예제 #2 (Example #2) — snippet separator from the scrape source; the stray "0" was extraction residue.
 def _start_period(self, period_name=None, alias=None):
     """Record the start of a monitoring period as a new row in Points.

     The period is attributed to the module resolved from *alias*; when
     *period_name* is falsy, the module's own alias is used as the name.
     The end-time column is left NULL (period still open).
     """
     host_module: HostModule = self._modules.get_connection(alias)
     points_table = db.TableSchemaService().tables.Points
     started_at = datetime.now().strftime(DB_DATETIME_FORMAT)
     db.DataHandlerService().execute(
         insert_sql(points_table.name, points_table.columns),
         host_module.host_id,
         period_name or host_module.alias,
         started_at,
         None)
    def start(self):
        """Register this host in the TraceHost table and keep its row id.

        Also installs a fresh threading Event into the shared configuration
        under the 'event' key before the insert.
        """
        self._configuration.update({'event': Event()})
        trace_table = db.TableSchemaService().tables.TraceHost
        insert_values = (None, self.alias)
        db.DataHandlerService().execute(
            insert_sql(trace_table.name, trace_table.columns), *insert_values)

        self._host_id = db.DataHandlerService().get_last_row_id
def cache_timestamp(timestamp):
    """Return the TimeLine row id for *timestamp*, inserting it if absent.

    Args:
        timestamp: the timestamp value to look up / store in TimeLine.

    Returns:
        The rowid of the existing or newly inserted TimeLine entry.
    """
    table = TableSchemaService().tables.TimeLine
    # NOTE(review): the timestamp is spliced into the SQL via str.format;
    # safe only if timestamps are trusted/numeric — confirm upstream callers.
    rows = DataHandlerService().execute(
        table.queries.select_last.sql.format(timestamp=timestamp))
    if not rows:
        # Not cached yet: insert (rowid auto-assigned via the leading None).
        DataHandlerService().execute(insert_sql(table.name, table.columns),
                                     *(None, timestamp))
        return DataHandlerService().get_last_row_id
    return rows[0][0]
    def upload(self, output, max_workers: int = DEFAULT_MAX_WORKERS):
        """Cache all lines of *output* and map any unmapped ones to an output id.

        Args:
            output: the output whose lines are cached (line by line).
            max_workers: when > 1, lines are cached concurrently; otherwise
                they are processed as a sequence.

        Returns:
            ``self.output_ref`` — a freshly allocated output id when at least
            one line was not mapped yet, otherwise the previously stored one.
        """
        logger.debug(
            f"Cache invoked {'concurrently' if max_workers > 1 else 'as sequence'}"
        )
        lines_cache = list(
            self.concurrent_lines_cache(output, max_workers)
            if max_workers > 1 else self.sequence_line_cache(output))

        # A None output_ref means that line is not mapped yet; use a generator
        # (not a list) so any() can short-circuit without building a temp list.
        if any(_ref[0] is None for _ref in lines_cache):
            output_data = DataHandlerService().execute(TableSchemaService(
            ).tables.LinesCacheMap.queries.last_output_id.sql)
            # [(None,)] is what the query yields when the table is empty,
            # so the very first output gets id 0.
            self.output_ref = output_data[0][0] + 1 if output_data != [(None, )
                                                                       ] else 0
            DataHandlerService().execute(
                insert_sql('LinesCacheMap',
                           ['OUTPUT_REF', 'ORDER_ID', 'LINE_REF']),
                [[self.output_ref] + lr[1:] for lr in lines_cache])
        return self.output_ref
 def __str__(self):
     """Render this object as the INSERT statement for its table."""
     statement = insert_sql(self._table.name, self._table.columns)
     return statement
 def emit(self, record):
     """Persist a log *record* as one row of this handler's table."""
     statement = insert_sql(self._table.name, self._table.columns)
     row_values = log.format_record(record)
     DataHandlerService().execute(statement, *row_values)