Example #1
    def simple_query(self, query, params, condition=''):
        """
         Simple database query
         Input parameters:
           query - SQL query string (one of the QUERY_... constants)
           params - parameter tuple
                   when only one parameter is required specify as (parameter,)
         Return value:
             success:
               - First row of query result
             failure:
               - raise error
        """
        if self.__cnx is None:
            raise DbException.CONNECTION_FAILED
        try:
            complete_query = query + condition
            self.__cursor.execute(complete_query, params)
            result = self.__cursor.fetchone()
            self.__cnx.commit()
            if result is None:
                raise DbException.QUERY_NO_RESULT
        except mysql.connector.Error:
            LOG.error("Query: '%s', parameters (%s)" % (complete_query, str(params)))
            raise DbException.MYSQL_CONNECTOR

        return result
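
A minimal usage sketch for simple_query; db and Queries.SELECT_CLIENT are hypothetical names, not taken from the code above:

# Hypothetical: db is an already connected instance of the class defining simple_query,
# and Queries.SELECT_CLIENT is assumed to be one of the QUERY_... constants.
# A single parameter must be passed as a one-element tuple, e.g. (42,).
row = db.simple_query(Queries.SELECT_CLIENT, (42,), condition=' LIMIT 1')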
Example #2
    def load_label(self, index):
        """Load the ground-truth boxes for one image; each annotation line is
        'class_name x1 y1 x2 y2' and is returned as [class_idx, x1, y1, x2, y2]."""
        labels = []
        try:
            with open(self.label_files[index], 'r') as f:
                for line in f:
                    target = line.split(' ')
                    # class_to_idx is a dict (built in the dataset __init__), so it is indexed, not called
                    labels.append([self.class_to_idx[target[0]],
                                   int(target[1]), int(target[2]), int(target[3]), int(target[4])])
        except OSError:
            LOG.error('annotation file {} non-existent'.format(self.label_files[index]))
        return labels
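
The annotation format implied by load_label is one object per line, written as 'class_name x1 y1 x2 y2' with single spaces. A small sketch with a hypothetical dataset instance and file contents:

# Hypothetical contents of labels/img_0001.txt:
#   person 10 20 110 220
#   car 50 60 150 160
boxes = dataset.load_label(0)
# -> [[1, 10, 20, 110, 220], [2, 50, 60, 150, 160]]  (indices depend on class_to_idx)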
Example #3
    def log(self, ip, msg, resultcode, cid, stage):
        """ Logs CLIENT connection to the database using inform parameters """

        try:
            # Always [try to] log to the log table
            self.__cursor.execute(Queries.INSERT_CLIENTLOG,
                (cid, resultcode, ip, msg))
            self.__cursor.fetchone()
            self.__cnx.commit()

        except mysql.connector.Error as err:
            LOG.error("Database: Error during LOG, %s" % err.msg)
Example #4
 def disconnect(self):
     """
      Disconnect from database
      No input parameters
     """
     if self.__cnx is None:
         LOG.error("Error: disconnecting when not connected")
         return
     try:
         # Try to clean up database cursor object. If that fails the garbage
         # collector will automatically deal with it after connection has
         # been closed.
         self.__cursor.close()
     except mysql.connector.Error as err:
         LOG.error("Error closing cursor: %s" % err.msg)
     finally:
         # Closing the connection with the database server may lead to
         # exceptions but they are handled in such a way that they never lead
         # to an error disturbing normal operation. Closing the connection
         # can thus safely be done here.
         self.__cnx.close()
Example #5
    def connect(self):
        """
         Connect to the database and create a cursor
         No input parameters
        """
        config = {
            'user': MYSQL_USER,
            'password': MYSQL_PASSWORD,
            'database': MYSQL_DATABASE,
            'unix_socket': '/run/mysqld/mysqld.sock',
            'raise_on_warnings': False,
            'charset': 'latin1',
            'collation': 'latin1_general_ci',
            'buffered': False,
            }

        try:
            self.__cnx = mysql.connector.connect(**config)
        except mysql.connector.Error as err:
            if err.errno == errorcode.ER_ACCESS_DENIED_ERROR:
                LOG.error("Cannot connect to database: No access")
            elif err.errno == errorcode.ER_BAD_DB_ERROR:
                LOG.error("Cannot connect to database: Unknown database")
            else:
                LOG.error("Cannot connect to database: %s" % err)

            raise DbException.CONNECTION_FAILED

        self.__cursor = self.__cnx.cursor(prepared=True)
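
A sketch of how connect and disconnect pair up, with Database as a hypothetical name for the wrapper class, and assuming DbException.CONNECTION_FAILED is an exception class (as its use with raise suggests):

db = Database()
try:
    db.connect()        # logs the errno-specific reason and raises on failure
except DbException.CONNECTION_FAILED:
    ...                 # no cursor was created; nothing to clean up
else:
    ...                 # run queries here
    db.disconnect()     # closes the cursor if possible, then always closes the connection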
Example #6
    def load_mosaic(self, index):
        # mosaic augmentation
        labels4 = []
        s = self.img_size
        # centre point where the four mosaic tiles meet
        yc, xc = [int(random.uniform(-x, 2 * s + x)) for x in self.mosaic_border]
        # randomly pick the other three images
        indices = [index] + [random.randint(0, len(self.img_files) - 1) for _ in range(3)]
        indices = [index] + [random.randint(0, len(self.img_files) - 1) for _ in range(3)]
        for i, index in enumerate(indices):
            # Load image
            img = cv2.imread(self.img_files[index])
            assert img is not None, LOG.error('Image not found ' + self.img_files[index])
            h, w = img.shape[:2]

            # place img in img4
            if i == 0:  # top left
                img4 = np.full((s * 2, s * 2, img.shape[2]), 114, dtype=np.uint8)  # base image with 4 tiles
                x1a, y1a, x2a, y2a = max(xc - w, 0), max(yc - h, 0), xc, yc  # xmin, ymin, xmax, ymax (large image)
                x1b, y1b, x2b, y2b = w - (x2a - x1a), h - (y2a - y1a), w, h  # xmin, ymin, xmax, ymax (small image)
            elif i == 1:  # top right
                x1a, y1a, x2a, y2a = xc, max(yc - h, 0), min(xc + w, s * 2), yc
                x1b, y1b, x2b, y2b = 0, h - (y2a - y1a), min(w, x2a - x1a), h
            elif i == 2:  # bottom left
                x1a, y1a, x2a, y2a = max(xc - w, 0), yc, xc, min(s * 2, yc + h)
                x1b, y1b, x2b, y2b = w - (x2a - x1a), 0, max(xc, w), min(y2a - y1a, h)
            elif i == 3:  # bottom right
                x1a, y1a, x2a, y2a = xc, yc, min(xc + w, s * 2), min(s * 2, yc + h)
                x1b, y1b, x2b, y2b = 0, 0, min(w, x2a - x1a), min(y2a - y1a, h)

            img4[y1a:y2a, x1a:x2a] = img[y1b:y2b, x1b:x2b]  # img4[ymin:ymax, xmin:xmax]
            padw = x1a - x1b
            padh = y1a - y1b

            # Labels
            # load_label is a method returning a list; convert it to an (n, 5) array so the
            # .size check and slicing below work (reshape keeps empty labels as shape (0, 5))
            x = np.array(self.load_label(index)).reshape(-1, 5)
            labels = x.copy()
            if x.size > 0:  # xyxy format
                labels[:, 1] = x[:, 1] + padw
                labels[:, 2] = x[:, 2] + padh
                labels[:, 3] = x[:, 3] + padw
                labels[:, 4] = x[:, 4] + padh
            labels4.append(labels)

        # Concat/clip labels
        if len(labels4):
            labels4 = np.concatenate(labels4, 0)
            np.clip(labels4[:, 1:], 0, 2 * s, out=labels4[:, 1:])

        return img4, labels4
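
A sketch of how load_mosaic might be wired into __getitem__; the transform call signature is an assumption, not taken from the code above:

    def __getitem__(self, index):
        # Assumed wiring: build the 2s x 2s mosaic, then run the augmentation
        # pipeline from the constructor (its (image, labels) signature is assumed).
        img4, labels4 = self.load_mosaic(index)
        if self.transform is not None:
            img4, labels4 = self.transform(img4, labels4)
        return img4, labels4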
Example #7
    def __init__(self, path, class_names, hyp, img_size = 640, transform=None):
        super(FreeDataset, self).__init__()
        self.transform = transform if transform else FreeDatasetAugment(hyp)
        self.img_size = img_size
        # border offsets used by the mosaic augmentation
        self.mosaic_border = [-img_size // 2, -img_size // 2]
        self.root = str(Path(path)) + os.sep
        assert (os.path.isdir(self.root)), LOG.error("dataset path is not a directory.")

        # collect all files and filter out unsupported formats
        files = glob.glob(self.root + 'imgs' + os.sep + '*.*')
        self.img_files = [x for x in files if os.path.splitext(x)[-1].lower() in img_formats]
        label_path = self.root + 'labels' + os.sep
        self.label_files = [label_path + x.split(os.sep)[-1].replace(os.path.splitext(x)[-1], '.txt')
                            for x in self.img_files]

        assert (len(self.img_files) == len(self.label_files)), LOG.error("image and annotation counts do not match.")

        # map class names to indices
        self.classes = []
        self.classes.append('__background__')
        self.classes += [i.strip() for i in class_names.split(',')]
        self.class_to_idx = dict(zip(self.classes, range(len(self.classes))))
        LOG.info("class to idx: {}".format(self.class_to_idx))
Example #8
def session_handler(reader, writer):
    """
        This is a callback routine for an SSL session
        Read messages from the device, handle them and write the responses
    """

    # initialize defaults
    session = {
        'queue': [],
        'rpc': {
            'id': 0
        },
        'cpe': {},
        'dm': {},
        'rtwan_layer3_keep': False,
        'voice_layer3_keep': False
    }

    # start authentication of client
    state = RPCS.Authenticating
    state = state.prepare_session(writer, session)

    try:
        LOG.debug("Open connection with: %s", session['cpe']['ip'])

        # RPC SERVER
        while state in RPCS.RPC_SERVER_STATES:
            response = None
            request = yield from asyncio.wait_for(reader.readline(),
                                                  timeout=RPCSD_TIMEOUT)
            if reader.at_eof():
                # Received disconnect signal from CPE
                update_northbound_handler(session)
                session['log'] = {
                    'rc': 'error',
                    'msg': 'Connection terminated by client'
                }
                LOG.debug("CPE closed connection with RPC Server")
                break

            session['rpc']['message'] = parse_message(request, 'request')
            # Respond with error to invalid requests, when possible
            if 'error' in session['rpc']['message']:
                if session['rpc']['message']['error'] is not None:
                    response = session['rpc']['message']
            # Handle message and move between states
            else:
                state, response = state.run(session)
            # Prepare and send response
            if response is not None:
                LOG.debug("Message to cpe: " + str(response))
                send_rpc(response, writer)

        # RPC CLIENT
        while state in RPCS.RPC_CLIENT_STATES:
            request = None
            rpc = None
            # Only listen for messages during 'ExpectResponse' state
            if state == RPCS.ExpectResponse:
                request = yield from asyncio.wait_for(reader.readline(),
                                                      timeout=RPCSD_TIMEOUT)
                if reader.at_eof():
                    # Received disconnect signal from CPE
                    update_northbound_handler(session)
                    session['log'] = {
                        'rc': 'error',
                        'msg': 'Connection terminated by client'
                    }
                    LOG.debug("CPE closed connection with RPC Server")
                    break

                session['rpc']['message'] = parse_message(request, 'response')

            # Either check if there are RPCs in queue to send, or handle CPE
            # response
            state, rpc = state.run(session)
            if rpc is not None:
                LOG.debug("Message to cpe: " + str(rpc))
                send_rpc(rpc, writer)

    except asyncio.TimeoutError:
        LOG.debug("CPE connection timed out")
        # Write an error to the database
        session['log'] = {'rc': 'error', 'msg': 'Connection timeout'}
        reader.feed_eof()
    except ConnectionResetError:
        LOG.debug("CPE closed the connection with RPC Server")
        session['log'] = {'rc': 'error', 'msg': 'Connection reset by peer'}
        reader.feed_eof()
    except Exception:
        # Catch everything else, prevents daemon from crashing
        err = sys.exc_info()[0]
        traceback.print_exc()
        LOG.error("Internal server error: %s", err)
        reader.feed_eof()
    finally:
        # Make sure session is always closed tidily even if an error occurs
        if 'db' in session:
            log_to_database(session)
            session['db'].disconnect()
        writer.close()
        # Get any response left and throw away
        while not reader.at_eof():
            yield from reader.read()
        LOG.debug("Close connection with: %s", session['cpe']['ip'])

        # Running a forceful full garbage collection
        collected = gc.collect()
        LOG.debug("Unreachable objects after forceful garbage collection: %d",
                  collected)
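
session_handler is written in the old generator-based coroutine style (yield from), so it would typically be registered as a streams callback on an SSL-wrapped server. A sketch with placeholder certificate paths, bind address, and port; the real daemon's values are not shown in this example:

import asyncio
import ssl

ssl_ctx = ssl.SSLContext(ssl.PROTOCOL_TLS_SERVER)
ssl_ctx.load_cert_chain('server.crt', 'server.key')   # placeholder certificate files

loop = asyncio.get_event_loop()
server = loop.run_until_complete(
    asyncio.start_server(session_handler, '0.0.0.0', 7547, ssl=ssl_ctx))  # placeholder port
try:
    loop.run_forever()
finally:
    server.close()
    loop.run_until_complete(server.wait_closed())
    loop.close()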
Example #9
            LOG.info(s + '%10.4g' * 4 % results)

        # save model checkpoint
        if (epoch + 1) % 10 == 0 or (epoch + 1) == args.epochs:
            epoch_model = Path(args.save_path) / ('yolov5_ep%g.pth' % (epoch + 1))
            torch.save(ema.ema, epoch_model)
        fi = fitness(np.array(results).reshape(1, -1))  # fitness_i = weighted combination of [P, R, mAP, F1]
        if fi > best_fitness:
            best_fitness = fi
            torch.save(ema.ema, best)

    if args.local_rank in [-1, 0]:
        LOG.info('%g epochs completed in %.3f hours.\n' % (epoch - start_epoch + 1, (time.time() - t0) / 3600))

    if args.local_rank not in [-1, 0]:
        dist.destroy_process_group()
    torch.cuda.empty_cache()


if __name__ == '__main__':
    config.log_setting(config.logger["file"], config.logger["level"], config.logger["format"], 
        config.logger["backupCount"], config.logger["interval"])
    LOG.info("*******************train start.*******************")
    LOG.info(args)
    LOG.info(config.hyp)
    LOG.info(config.dataset)
    LOG.info(config.logger)
    if args.labels is None:
        LOG.error("labels is none.")
    LOG.info("***************training***************")
    train()
    LOG.info("*******************exited.*******************")