Пример #1
0
    def _write_finished(self):
        """
        Finalize the file write.

        Closes every open handle, verifies the md5 checksum when one was
        supplied, promotes the temp file to the target file name and removes
        the helper (info/lock) files.

        @throws {Md5VerifyError} - raised when the md5 verification fails
        """
        # Release all file handles before touching the files on disk
        self._info_file_handle.close()
        self._info_file_handle = None
        self._tmp_file_handle.close()
        self._tmp_file_handle = None
        os.close(self._lock_file_handle)
        self._lock_file_handle = None

        # Verify the md5 checksum when one was provided
        _expected_md5 = self._info['md5']
        if _expected_md5 != '':
            if _expected_md5 != NetTool.get_file_md5(self._temp_file):
                raise Md5VerifyError('md5 verify error')

        # Rename the temp file to the final target name
        os.rename(self._temp_file, self._file)

        # Clean up the helper files
        FileTool.remove_file(self._info_file)
        FileTool.remove_file(self._lock_file)
Пример #2
0
    def screenshot(self, filename: str = None) -> Image:
        """
        Take a screenshot of the device screen.

        @param {str} filename=None - path to save the image to; when None a
            temporary file is used and removed after the image is loaded

        @returns {PIL.Image} - the loaded image object
        """
        # Trigger the screenshot tool on the device
        self.adb_run_inner(
            'shell uiautomator runtest UiTestTools.jar -c com.snaker.testtools.uiScreenShot'
        )

        # Pull the image file from the device to the local path
        if filename is None:
            _filename = os.path.join(self.tmp_path, 'uiShot.png')
        else:
            _filename = filename
        self.adb_run_inner('pull /data/local/tmp/uiShot.png %s' % _filename)

        # Load the file content into a PIL image object
        with open(_filename, 'rb') as _f:
            _image = Image.open(BytesIO(_f.read()))

        # No explicit path given: drop the temporary local file
        if filename is None:
            FileTool.remove_file(_filename)

        return _image
Пример #3
0
def test_case2():
    """
    Test how multiple loggers that share a logger name affect each other.

    ConsoleAndFile is configured with level DEBUG, but its ConsoleHandler level
    is DEBUG while its FileHandler level is INFO; whether a record is emitted is
    decided by the stricter of the logger level and the handler level.  The
    root logger's handlers must stay empty, otherwise every record is also
    emitted through root and shows up twice.
    """
    # Best-effort cleanup of artifacts from previous runs
    # (narrowed from bare `except:` so SystemExit/KeyboardInterrupt pass through)
    for _conf in ('/test_case2.json', '/test_case2-1.json'):
        try:
            FileTool.remove_file(_TEMP_DIR + _conf)
        except Exception:
            pass

    for _regex in ('test_case2*', 'test_case2-1*'):
        try:
            FileTool.remove_files(path=_TEMP_DIR + '/log/', regex_str=_regex)
        except Exception:
            pass

    _logger = simple_log.Logger(conf_file_name=_TEMP_DIR + '/test_case2.json',
                                logger_name=simple_log.EnumLoggerName.ConsoleAndFile,
                                config_type=simple_log.EnumLoggerConfigType.JSON_FILE,
                                logfile_path=_TEMP_DIR + '/log/test_case2.log')
    _logger.log(simple_log.DEBUG,
                'test_case2:write_log:DEBUG:1:界面应显示本日志,文件不应显示本日志')
    _logger.debug('test_case2:write_log:DEBUG:1-1:界面应显示本日志,文件不应显示本日志')
    _logger.log(simple_log.INFO,
                'test_case2:write_log:INFO:2:界面应显示本日志,文件应显示本日志')
    _logger.info('test_case2:write_log:INFO:2-1:界面应显示本日志,文件应显示本日志')

    # A second logger with the same logger name: the two loggers influence
    # each other, and shared handlers are affected as well
    _logger1 = simple_log.Logger(conf_file_name=_TEMP_DIR + '/test_case2-1.json',
                                 logger_name=simple_log.EnumLoggerName.ConsoleAndFile,
                                 config_type=simple_log.EnumLoggerConfigType.JSON_FILE,
                                 logfile_path=_TEMP_DIR + '/log/test_case2-1.log')
    _logger1.setLevel(simple_log.DEBUG)

    _logger.log(simple_log.DEBUG,
                'test_case2:write_log:DEBUG:3:界面应显示本日志,文件不应显示本日志,但实际受logger1影响,也记录了日志;本应记录在日志1中,但受影响记录在日志2中')
    _logger.log(simple_log.INFO,
                'test_case2:write_log:INFO:4:界面应显示本日志,文件应显示本日志;本应记录在日志1中,但受影响记录在日志2中')
    _logger1.log(simple_log.DEBUG,
                 'test_case2:write_log:DEBUG:5-1:界面应显示本日志,文件应显示本日志')
    _logger1.log(simple_log.INFO,
                 'test_case2:write_log:INFO:6-1:界面应显示本日志,文件应显示本日志')

    del _logger

    _logger1.log(simple_log.DEBUG,
                 'test_case2:write_log:DEBUG:6-1:界面应显示本日志,文件应显示本日志')
    _logger1.log(simple_log.INFO,
                 'test_case2:write_log:INFO:7-1:界面应显示本日志,文件应显示本日志')

    del _logger1
Пример #4
0
    def product_info_to_xls(cls, path: str) -> bool:
        """
        Write the product information to an excel file.

        @param {str} path - directory to collect the product information from

        @returns {bool} - whether the processing succeeded
        """
        try:
            # Map each property name to its column index
            # (columns 0 and 1 are reserved for the fixed title cells)
            _title = {
                _key: _col
                for _col, _key in enumerate(PROP_NAME_TRAN_DICT.keys(), start=2)
            }

            # Remove a previously generated excel file
            _xls_file = os.path.join(path, 'product_info_list.xls')
            if os.path.exists(_xls_file):
                FileTool.remove_file(_xls_file)

            # Create a new workbook with a single sheet
            _book = xlwt.Workbook()
            _sheet = _book.add_sheet('product_info')

            # Write the title row
            # (fixed: removed a stray debug `print()` that emitted a blank
            # line per column)
            _sheet.write(0, 0, '网站产品ID')
            _sheet.write(0, 1, '产品目录')
            for _word, _col in _title.items():
                _sheet.write(0, _col, _word)

            _current_row = [1]  # current row, wrapped in a list so the helper can advance it

            # Write every product, one per row
            cls._write_product_info_to_xls(path, _sheet, _title, _current_row)

            # Save the workbook
            _book.save(_xls_file)
            return True
        except Exception:
            print('product_info_to_xls error:\r\n%s' %
                  (traceback.format_exc(), ))
            return False
Пример #5
0
    def labelimg_del_not_rgb_pic(cls, path: str):
        """
        Delete pictures whose colour mode is not 3-channel RGB.
        (works around the "image_size must contain 3 elements[4]" error)

        @param {str} path - path to process

        Non-RGB pictures are removed together with their matching .xml label
        file when one exists.
        """
        _path = os.path.realpath(path)
        # Recurse into every sub directory first
        for _dir in FileTool.get_dirlist(path=_path, is_fullpath=True):
            cls.labelimg_del_not_rgb_pic(_dir)

        # Check the pictures directly under this directory
        for _file in FileTool.get_filelist(path=_path, is_fullname=False):
            if FileTool.get_file_ext(_file) == 'xml':
                # label files are left untouched
                continue

            # Inspect the colour mode; use a context manager so the handle is
            # released even on error, and is closed before the file is removed
            # (deleting an open file fails on Windows)
            _img_file = os.path.join(_path, _file)
            with open(_img_file, 'rb') as _fp:
                _is_rgb = Image.open(_fp).mode == 'RGB'

            if not _is_rgb:
                # Remove the picture and its label file (when present)
                _xml_file = os.path.join(
                    _path,
                    FileTool.get_file_name_no_ext(_file) + '.xml')
                print('delete %s' % _img_file)
                FileTool.remove_file(_img_file)
                if os.path.exists(_xml_file):
                    FileTool.remove_file(_xml_file)
Пример #6
0
    def page_source(self) -> str:
        """
        Get the XML source of the current page.

        @property {str}
        """
        # Generate the ui dump on the device
        _cmd = 'shell uiautomator runtest UiTestTools.jar -c com.snaker.testtools.uiDumpXml'
        self.adb_run_inner(_cmd)

        # Pull the dump file from the device
        # (fixed: the path previously read '/data/local/tmp/local/tmp/...',
        # duplicating 'local/tmp' - the sibling screenshot() pulls from
        # '/data/local/tmp/', which is where the dump tool writes its output)
        _cmd = 'pull /data/local/tmp/uidump.xml %s' % self.tmp_path
        self.adb_run_inner(_cmd)

        # Read the pulled file
        _page_source = ''
        _file = os.path.join(self.tmp_path, 'uidump.xml')
        with open(_file, 'r', encoding='utf-8') as _f:
            _page_source = _f.read()

        # Remove the temporary local file
        FileTool.remove_file(_file)

        return _page_source
Пример #7
0
    def UploadFile(cls,
                   upload_type: str,
                   note: str,
                   interface_seq_id: str,
                   methods=['POST']):
        """
        Upload a file (single file upload)  (/api/Qa/UploadFiles/<upload_type>/<note>/<interface_seq_id>)

        @param {str} upload_type - file type; must be configured in the UploadFileConfig table
        @param {str} note - file annotation
        @param {str} interface_seq_id - client sequence id; clients may pass it to support async calls

        @return {str} - json string of the answer
            status : processing status
                00000 - success, one answer returned
                10001 - no upload file specified
                2XXXX - processing failed
            msg : description matching the status
            answer_type: 'text' or 'json' - whether answers is a text array or a json object
            answers : answer content
            url : url of the uploaded file, including file name and url path
        """
        _ret_json = {
            'interface_seq_id': interface_seq_id,
            'status': '00000',
            'msg': 'success',
            'answer_type': 'text',
            'answers': [],
            'url': ''
        }
        _qa_loader = RunTool.get_global_var('QA_LOADER')
        try:
            if 'file' not in request.files or request.files[
                    'file'].filename == '':
                _ret_json['status'] = '10001'
                _ret_json['msg'] = 'No file upload!'
                return jsonify(_ret_json)

            # Fetch the configuration for this upload type
            _upload_config = UploadFileConfig.get_or_none(
                UploadFileConfig.upload_type == upload_type)
            if _upload_config is None:
                _ret_json['status'] = '10002'
                _ret_json['msg'] = 'upload type not exists!'
                return jsonify(_ret_json)

            # Check the file size limit (configured size is in MB)
            if _upload_config.size > 0:
                if request.content_length > _upload_config.size * 1024 * 1024:
                    _ret_json['status'] = '10003'
                    _ret_json['msg'] = 'upload file size to large!'
                    return jsonify(_ret_json)

            # Check whether the file extension is allowed
            # NOTE(review): eval() on config fields (exts/after/rename args) is
            # dangerous if the UploadFileConfig table can hold untrusted data -
            # confirm the table is admin-only
            _file = request.files['file']
            _old_filename = _file.filename
            _file_ext = FileTool.get_file_ext(_old_filename)
            _allow_ext = eval(_upload_config.exts.upper())
            if len(_allow_ext) > 0 and _file_ext.upper() not in _allow_ext:
                _ret_json['status'] = '10004'
                _ret_json['msg'] = 'Type [%s] not allow upload [.%s] file!' % (
                    upload_type, _file_ext)
                return jsonify(_ret_json)

            # Build the new file name by expanding {$...$} template variables
            def _replace_var_fun(m):
                _match_str = m.group(0)
                _value = None
                if _match_str.startswith('{$datetime='):
                    # current time rendered with the given format string
                    _key = _match_str[11:-2]
                    _value = datetime.datetime.now().strftime(_key)
                elif _match_str.startswith('{$uuid='):
                    # uuid of the given type
                    # (fixed: removed a dead `str(uuid.uuid1())` statement
                    # whose result was discarded)
                    _key = _match_str[7:-2]
                    _value = eval('str(uuid.uuid%s())' % _key)
                elif _match_str.startswith('{$random'):
                    # random integer between the two given bounds, left-padded
                    # with zeros to the width of the upper bound
                    # (fixed: len() was applied to the int bound itself, which
                    # raises TypeError - the bound must be stringified first)
                    _key = _match_str[8:-2]
                    _args = eval('(%s)' % _key)
                    _value = StringTool.fill_fix_string(
                        str(random.randint(*_args)), len(str(_args[1])), '0')
                elif _match_str.startswith('{$file_ext='):
                    # extension of the original file
                    _value = _file_ext
                elif _match_str.startswith('{$file_name='):
                    # slice of the original file name at the given positions
                    _key = _match_str[12:-2]
                    _args = eval('(%s)' % _key)
                    if len(_args) > 1:
                        _value = _old_filename[_args[0]:_args[1]]
                    else:
                        _value = _old_filename[_args[0]:]

                if _value is not None:
                    return str(_value)
                else:
                    return _match_str

            if _upload_config.rename == '':
                _new_filename = _old_filename
            else:
                # fixed: re.M was previously passed as the positional `count`
                # argument of re.sub (re.M == 8), silently capping the number
                # of substitutions at 8 - it must be passed as `flags`
                _new_filename = re.sub(r'\{\$.+?\$\}', _replace_var_fun,
                                       _upload_config.rename, flags=re.M)

            # Build the url path and the local save path
            if _upload_config.url != '':
                _ret_json['url'] = '%s/%s' % (_upload_config.url,
                                              _new_filename)

            _save_path = os.path.realpath(
                os.path.join(_qa_loader.execute_path, _upload_config.save_path,
                             _new_filename))

            # Make sure the target directory exists
            FileTool.create_dir(os.path.split(_save_path)[0], exist_ok=True)

            # Save the uploaded file
            _file.save(_save_path)

            # Post-upload processing hook
            _after = eval(_upload_config.after)
            if len(_after) > 0:
                _after_fun = _qa_loader.plugins['upload_after'][_after[0]][
                    _after[1]]
                _status, _msg, _answers = _after_fun(upload_type, note,
                                                     _new_filename, _save_path,
                                                     _ret_json['url'],
                                                     **_after[2])
                _ret_json['status'] = _status
                _ret_json['msg'] = _msg
                if len(_answers) > 0 and type(_answers[0]) == dict:
                    _ret_json['answer_type'] = 'json'
                    _ret_json['answers'] = _answers[0]
                else:
                    _ret_json['answers'] = _answers
                if _ret_json['status'] != '00000':
                    # post-processing failed: remove the saved file
                    FileTool.remove_file(_save_path)
                    if _qa_loader.logger:
                        _qa_loader.logger.debug(
                            'remove upload file [dest:%s][source:%s] when after deal error[%s]: %s'
                            % (_new_filename, _old_filename, _status, _msg))
        except Exception:
            if _qa_loader.logger:
                _qa_loader.logger.error('Exception: %s' %
                                        traceback.format_exc(),
                                        extra={'callFunLevel': 1})
            _ret_json = {
                'interface_seq_id': interface_seq_id,
                'status': '20001',
                'msg': '上传文件异常'
            }

        return jsonify(_ret_json)
Пример #8
0
    def labelimg_pic_deal(cls, path: str):
        """
        TFRecord picture compatibility processing:
        1. delete pictures whose colour mode is not 3-channel RGB
        (works around the "image_size must contain 3 elements[4]" error)
        2. convert png/gif pictures to jpg
        3. check that the file name and path inside the xml label file are correct

        @param {str} path - path to process
        """
        _path = os.path.realpath(path)
        # Recurse into every sub directory first
        _sub_dirs = FileTool.get_dirlist(path=_path, is_fullpath=True)
        for _dir in _sub_dirs:
            # recursively process the sub directory
            cls.labelimg_pic_deal(_dir)

        # Check the pictures directly under this directory
        _files = FileTool.get_filelist(path=_path, is_fullname=False)
        for _file in _files:
            _file_ext = FileTool.get_file_ext(_file)
            if _file_ext == 'xml':
                # label files are left untouched
                continue

            _img_file = os.path.join(_path, _file)
            _file_no_ext = FileTool.get_file_name_no_ext(_file)

            if _file_ext in ('png', 'gif'):
                # Convert the picture to jpg
                _fp = open(_img_file, 'rb')
                _img = Image.open(_fp)
                _rgb_im = _img.convert('RGB')

                _rgb_im.save(os.path.join(_path, _file_no_ext + '.jpg'))
                _fp.close()

                # Delete the original picture and update the file name in the xml
                # NOTE(review): _xml_file is already an absolute path, so the
                # extra os.path.join(_path, _xml_file) calls below are no-ops
                # (os.path.join returns the second argument when it is absolute)
                FileTool.remove_file(_img_file)
                _xml_file = os.path.join(_path, _file_no_ext + '.xml')
                if os.path.exists(_xml_file):
                    _tree = ET.parse(os.path.join(_path, _xml_file))
                    _root = _tree.getroot()
                    _root.find('filename').text = _file_no_ext + '.jpg'
                    _root.find('path').text = os.path.join(
                        _path, _file_no_ext + '.jpg')
                    _tree.write(os.path.join(_path, _xml_file),
                                encoding='utf-8',
                                method="xml",
                                xml_declaration=None)

                # Point the variable at the converted jpg file
                _img_file = os.path.join(_path, _file_no_ext + '.jpg')

            # Open the picture to inspect its colour mode
            _fp = open(_img_file, 'rb')
            _img = Image.open(_fp)
            if _img.mode != 'RGB':
                # not RGB: close the handle first (required on Windows before
                # deletion), then remove the picture and its label file
                _fp.close()
                _xml_file = os.path.join(
                    _path,
                    FileTool.get_file_name_no_ext(_file) + '.xml')
                print('delete %s' % _img_file)
                FileTool.remove_file(_img_file)
                if os.path.exists(_xml_file):
                    FileTool.remove_file(_xml_file)
            else:
                _fp.close()

            # Check the xml label file
            # NOTE(review): this rewrites filename/path to '<name>.jpg' even
            # for pictures that were not converted (e.g. a .jpeg or .bmp file)
            # - confirm every remaining picture here is expected to be a .jpg
            _xml_file = os.path.join(_path, _file_no_ext + '.xml')
            if os.path.exists(_xml_file):
                _tree = ET.parse(os.path.join(_path, _xml_file))
                _root = _tree.getroot()
                if _root.find('filename'
                              ).text != _file_no_ext + '.jpg' or os.path.split(
                                  _root.find('path').text)[0] != _path:
                    _root.find('filename').text = _file_no_ext + '.jpg'
                    _root.find('path').text = os.path.join(
                        _path, _file_no_ext + '.jpg')
                    _tree.write(os.path.join(_path, _xml_file),
                                encoding='utf-8',
                                method="xml",
                                xml_declaration=None)
Пример #9
0
    def __init__(self,
                 file: str,
                 is_resume: bool = True,
                 file_size: int = None,
                 md5: str = None,
                 is_overwrite: bool = False,
                 temp_ext: str = 'tmp',
                 info_ext: str = 'info',
                 extend_info: dict = None,
                 thread_num: int = 1,
                 block_size: int = 4096,
                 cache_size: int = 1024,
                 auto_expand: bool = True):
        """
        Initialize the file saving object.

        @param {str} file - target file path (including the file name)
        @param {bool} is_resume=True - whether to resume a previous transfer (auto-detects
            the already downloaded info); when False any existing temp files are deleted
            Note: when resuming and the old temp file is found, the stored values are
            used for the parameters below; values passed in are only checked for
            consistency: file_size, md5
        @param {int} file_size=None - file size in bytes; None means the size is unknown,
            in which case auto_expand is forced to True
        @param {str} md5=None - md5 string used to verify the file; None disables verification
        @param {bool} is_overwrite=False - whether to overwrite an existing target file;
            when False an exception is raised if the target file already exists
        @param {str} temp_ext='tmp' - extension of the temporary file used during processing
        @param {str} info_ext='info' - extension of the info file used during processing
        @param {dict} extend_info=None - extra info to persist during processing, e.g.
            download url, referer page, ...
        @param {int} thread_num=1 - number of writer threads
        @param {int} block_size=4096 - size of each write block, in bytes
        @param {int} cache_size=1024 - per-thread cache size in kb (the real total cache
            size is this value multiplied by the thread count)
        @param {bool} auto_expand=True - whether to expand the file on demand (otherwise
            a file of the specified size is created up front)

        @throws {FileExistsError} - raised when the target file exists and overwrite is not allowed
        @throws {FileNotFoundError} - raised when resuming and the temp file is missing
        @throws {InfoFileLockError} - raised when the info file is already locked for processing
        """
        # Check whether the target file already exists
        self._file = os.path.abspath(file)
        self._path, self._filename = os.path.split(self._file)
        if os.path.exists(self._file):
            # target file exists
            if is_overwrite:
                FileTool.remove_file(self._file)
            else:
                raise FileExistsError('file exists: %s' % self._file)
        else:
            # make sure the target directory exists
            FileTool.create_dir(self._path, exist_ok=True)

        # File info dict: basic file information and write progress
        self._info: dict = None

        # Lock file - prevents the same file being processed by multiple instances;
        # O_CREAT|O_EXCL makes the open fail (and raise) when the lock already exists
        self._lock_file = os.path.join(self._path,
                                       '%s.%s' % (self._filename, 'lock'))
        try:
            self._lock_file_handle = os.open(
                self._lock_file, os.O_CREAT | os.O_EXCL | os.O_RDWR)
        except:
            raise InfoFileLockError('info file is locked')

        try:
            # Check whether debug mode is on
            self._debug_on = DebugTool.is_debug_on()
            self._lock_print_timeout = None
            if self._debug_on:
                self._lock_print_timeout = 5.0  # lock-wait timeout (seconds) before printing

            # Locations of the info dict, temp file and info file
            self._temp_file = os.path.join(
                self._path, '%s.%s' % (self._filename, temp_ext))
            self._info_file = os.path.join(
                self._path, '%s.%s' % (self._filename, info_ext))
            self._auto_expand = auto_expand
            self._thread_num = thread_num
            self._block_size = block_size
            self._cache_size = cache_size * 1024

            # Data processing locks
            self._cache_info_lock = threading.RLock()  # cache info update lock
            self._tmp_file_lock = threading.RLock()  # temp file write lock
            self._is_finished = False  # controlled finished state
            self._dealed_finished_lock = threading.RLock()  # guards the finished-state update across threads
            self._dealed_finished = False  # ensures the finish routine runs only once

            if is_resume and os.path.exists(self._info_file):
                # Resume: reload the info dict from the info file
                self._info_file_handle = open(self._info_file,
                                              'r+',
                                              encoding='utf-8')
                self._info_file_handle.seek(0)
                self._info = json.loads(self._info_file_handle.read())

                # Check that the passed-in values match the stored info
                if file_size is not None and file_size != self._info[
                        'file_size']:
                    raise AttributeError(
                        'resume info [file_size] inconsistency, info file [%s], now [%s]'
                        % (str(self._info['file_size']), str(file_size)))

                if md5 is not None and md5 != self._info['md5']:
                    raise AttributeError(
                        'resume info [md5] inconsistency, info file [%s], now [%s]'
                        % (self._info['md5'], md5))

                # Check the temp file
                self._temp_file = os.path.join(self._path,
                                               self._info['tmp_file'])
                if not os.path.exists(self._temp_file):
                    # temp file missing - cannot resume
                    raise FileNotFoundError('temp file is not found: %s' %
                                            self._temp_file)

                self._tmp_file_handle = open(self._temp_file, 'rb+')
                self._tmp_file_handle.seek(0)
            else:
                # Fresh start: delete any existing temp/info files
                if os.path.exists(self._temp_file):
                    FileTool.remove_file(self._temp_file)

                if os.path.exists(self._info_file):
                    FileTool.remove_file(self._info_file)

                # Build the info dict
                self._info = {
                    'tmp_file': '%s.%s' % (self._filename, temp_ext),  # temp file name
                    'file_size':
                    -1 if file_size is None else file_size,  # file size (-1 = unknown)
                    'write_size': 0,  # bytes already written
                    'md5': '' if md5 is None else md5,  # md5 checksum
                    'extend_info':
                    {} if extend_info is None else extend_info,  # caller-provided extra info
                    # Store index: ordered list of [start, end] ranges not yet written
                    # NOTE(review): when file_size is None this evaluates
                    # `None - 1` and raises TypeError - confirm callers always
                    # pass a size, or whether the stored -1 should be used here
                    'store_index': [[0, file_size - 1]]
                }

                # Create the temp file
                self._tmp_file_handle = open(self._temp_file, 'wb')
                if not auto_expand and file_size is not None:
                    # pre-allocate the file at its full size
                    self._tmp_file_handle.seek(file_size - 1)  # jump to the last byte
                    self._tmp_file_handle.write(b'\x00')  # must write one byte or the seek has no effect
                    self._tmp_file_handle.flush()

                # Persist the info dict to the info file
                self._info_file_handle = open(self._info_file,
                                              'w',
                                              encoding='utf-8')
                self._write_info_file()

            # Merge the store index: coalesce fragments into larger ranges
            self._info['store_index'] = self._f_merge_store_index(
                self._info['store_index'])

            # Initialize the cache bookkeeping
            if self._info['file_size'] == -1:
                # Unknown file size: multiple writer threads and up-front
                # allocation are not supported
                self._thread_num = 1
                self._auto_expand = True

            # Per-thread cache setup
            self._max_cache_pos = [
                -1,
            ]  # highest file position assigned to a cache so far
            self._cache = dict()
            for _i in range(self._thread_num):
                self._cache[_i] = {
                    'start': -1,  # file position the cached data writes to, -1 = unset
                    'size': 0,  # size of the cached data
                    'buffer': bytes(),  # the cached data itself
                    'end_pos': -1,  # end position of the file block this thread handles
                    'lock': threading.RLock(),  # lock guarding this thread's cache
                    'get_start': -1,  # start position of the data currently being fetched
                    'get_size': 0,  # size of the data currently being fetched
                }

            # Assign each cache its file region
            for _i in range(self._thread_num):
                self._set_cache_area(_i)
        except:
            # Initialization failed: release file handles and the lock file
            self._clear_file_handle_and_lock()
            raise
Пример #10
0
    def test_grpc_to_local(self):
        if not TEST_FLAG['test_grpc_to_local']:
            return

        print('测试获取gRpc远程文件')

        # DebugTool.set_debug(set_on=True)
        _copy_file = os.path.join(_temp_path, 'grpc_to_local.bin')

        # 删除临时文件
        if os.path.exists(_copy_file):
            FileTool.remove_file(_copy_file)
        for _ext in ('.lock', '.tmp', '.info'):
            if os.path.exists(_copy_file + _ext):
                FileTool.remove_file(_copy_file + _ext)

        # 连接参数
        _connect_para = SimpleGRpcConnection.generate_connect_para(
            conn_str='127.0.0.1:50051')

        # 单线程-缓存大于文件大小
        _tips = '单线程-缓存大于文件大小'
        with GRpcPullProtocol(_temp_file,
                              _copy_file,
                              is_resume=True,
                              is_overwrite=True,
                              block_size=40960,
                              connect_para=_connect_para) as _protocol:
            _reader = Transfer(
                _protocol,
                show_process_bar_fun=ProgressRate.show_cmd_process_bar,
                process_bar_label=_tips,
                thread_interval=0.0)
            _status = _reader.start(wait_finished=True)
            self.assertTrue(_status == 'finished',
                            msg="本地gRpc推送-%s: %s" % (_tips, _status))

        # 单线程-缓存小于文件大小
        _tips = '单线程-缓存小于文件大小'
        with GRpcPullProtocol(_temp_file,
                              _copy_file,
                              is_resume=True,
                              is_overwrite=True,
                              cache_size=2,
                              block_size=40960,
                              connect_para=_connect_para) as _protocol:
            _reader = Transfer(
                _protocol,
                show_process_bar_fun=ProgressRate.show_cmd_process_bar,
                process_bar_label=_tips,
                thread_interval=0.0)
            _status = _reader.start(wait_finished=True)
            self.assertTrue(_status == 'finished',
                            msg="本地gRpc推送-%s: %s" % (_tips, _status))

        # 单线程-暂停重复
        _tips = '单线程-暂停重复'
        with GRpcPullProtocol(_temp_file,
                              _copy_file,
                              is_resume=True,
                              is_overwrite=True,
                              cache_size=2,
                              block_size=40960,
                              connect_para=_connect_para) as _protocol:
            _reader = Transfer(
                _protocol,
                show_process_bar_fun=ProgressRate.show_cmd_process_bar,
                process_bar_label=_tips,
                thread_interval=0.1)
            _status = _reader.start()
            time.sleep(1)
            _reader.stop()
            self.assertTrue(_reader.status == 'stop',
                            msg="本地gRpc推送-%s(暂停): %s" %
                            (_tips, _reader.status))
            _reader.thread_interval = 0.0
            _status = _reader.start(wait_finished=True)
            self.assertTrue(_status == 'finished',
                            msg="本地gRpc推送-%s: %s" % (_tips, _status))

        # 单线程-停止后续传
        _tips = '单线程-停止后续传'
        with GRpcPullProtocol(_temp_file,
                              _copy_file,
                              is_resume=True,
                              is_overwrite=True,
                              cache_size=2,
                              block_size=40960,
                              connect_para=_connect_para) as _protocol:
            _reader = Transfer(
                _protocol,
                show_process_bar_fun=ProgressRate.show_cmd_process_bar,
                process_bar_label=_tips,
                thread_interval=0.1)
            _status = _reader.start()
            time.sleep(1)
            _reader.stop()
        self.assertTrue(_reader.status == 'stop',
                        msg="本地gRpc推送-%s(暂停): %s" % (_tips, _reader.status))
        # 续传处理
        with GRpcPullProtocol(_temp_file,
                              _copy_file,
                              is_resume=True,
                              is_overwrite=True,
                              cache_size=2,
                              block_size=40960,
                              connect_para=_connect_para) as _protocol:
            _reader = Transfer(
                _protocol,
                show_process_bar_fun=ProgressRate.show_cmd_process_bar,
                process_bar_label=_tips,
                thread_interval=0.0)
            _status = _reader.start(wait_finished=True)
            self.assertTrue(_status == 'finished',
                            msg="本地gRpc推送-%s: %s" % (_tips, _status))

        # 多线程
        _tips = '多线程'
        with GRpcPullProtocol(_temp_file,
                              _copy_file,
                              is_resume=True,
                              is_overwrite=True,
                              cache_size=2,
                              thread_num=230,
                              block_size=40960,
                              connect_para=_connect_para) as _protocol:
            _reader = Transfer(
                _protocol,
                show_process_bar_fun=ProgressRate.show_cmd_process_bar,
                process_bar_label=_tips,
                thread_interval=0.0)
            _status = _reader.start(wait_finished=True)
            self.assertTrue(_status == 'finished',
                            msg="本地gRpc推送-%s: %s" % (_tips, _status))

        # 多线程-缓存大于文件大小
        _tips = '多线程-缓存大于文件大小'
        with GRpcPullProtocol(_temp_file,
                              _copy_file,
                              is_resume=True,
                              is_overwrite=True,
                              thread_num=5,
                              block_size=40960,
                              connect_para=_connect_para) as _protocol:
            _reader = Transfer(
                _protocol,
                show_process_bar_fun=ProgressRate.show_cmd_process_bar,
                process_bar_label=_tips,
                thread_interval=0.0)
            _status = _reader.start(wait_finished=True)
            self.assertTrue(_status == 'finished',
                            msg="本地gRpc推送-%s: %s" % (_tips, _status))

        # 多线程-每次传输块大小大于文件
        _tips = '多线程-每次传输块大小大于文件'
        with GRpcPullProtocol(_temp_file,
                              _copy_file,
                              is_resume=True,
                              is_overwrite=True,
                              thread_num=5,
                              block_size=900000,
                              connect_para=_connect_para) as _protocol:
            _reader = Transfer(
                _protocol,
                show_process_bar_fun=ProgressRate.show_cmd_process_bar,
                process_bar_label=_tips,
                thread_interval=0.0)
            _status = _reader.start(wait_finished=True)
            self.assertTrue(_status == 'finished',
                            msg="本地gRpc推送-%s: %s" % (_tips, _status))

        # 多线程-暂停重复
        _tips = '多线程-暂停重复'
        with GRpcPullProtocol(_temp_file,
                              _copy_file,
                              is_resume=True,
                              is_overwrite=True,
                              cache_size=2,
                              thread_num=5,
                              block_size=40960,
                              connect_para=_connect_para) as _protocol:
            _reader = Transfer(
                _protocol,
                show_process_bar_fun=ProgressRate.show_cmd_process_bar,
                process_bar_label=_tips,
                thread_interval=0.3)
            _status = _reader.start()
            time.sleep(1)
            _reader.stop()
            self.assertTrue(_reader.status == 'stop',
                            msg="本地gRpc推送-%s(暂停): %s" %
                            (_tips, _reader.status))
            _reader.thread_interval = 0.0
            _status = _reader.start(wait_finished=True)
            if _status != 'finished':
                print(NetTool.get_file_md5(_temp_file))
                print(NetTool.get_file_md5(_copy_file + '.tmp'))
            self.assertTrue(_status == 'finished',
                            msg="本地gRpc推送-%s: %s" % (_tips, _status))

        # 多线程-停止后续传
        _tips = '多线程-停止后续传'
        with GRpcPullProtocol(_temp_file,
                              _copy_file,
                              is_resume=True,
                              is_overwrite=True,
                              cache_size=2,
                              thread_num=5,
                              block_size=40960,
                              connect_para=_connect_para) as _protocol:
            _reader = Transfer(
                _protocol,
                show_process_bar_fun=ProgressRate.show_cmd_process_bar,
                process_bar_label=_tips,
                thread_interval=0.3)
            _status = _reader.start()
            time.sleep(1)
            _reader.stop()
        self.assertTrue(_reader.status == 'stop',
                        msg="本地gRpc推送-%s(暂停): %s" % (_tips, _reader.status))

        # 续传处理
        with GRpcPullProtocol(_temp_file,
                              _copy_file,
                              is_resume=True,
                              is_overwrite=True,
                              cache_size=2,
                              thread_num=5,
                              block_size=40960,
                              connect_para=_connect_para) as _protocol:
            _reader = Transfer(
                _protocol,
                show_process_bar_fun=ProgressRate.show_cmd_process_bar,
                process_bar_label=_tips,
                thread_interval=0.0)
            _status = _reader.start(wait_finished=True)
            if _status != 'finished':
                print(NetTool.get_file_md5(_temp_file))
                print(NetTool.get_file_md5(_copy_file + '.tmp'))

            self.assertTrue(_status == 'finished',
                            msg="本地gRpc推送-%s: %s" % (_tips, _status))
# Пример #11 (snippet separator)
    def test_local_to_local(self):
        """
        本地文件复制测试 (local-to-local copy via LocalProtocol).

        Covers single/multi-thread transfers with varying cache and block
        sizes, plus pause-resume and stop-then-resume scenarios. The repeated
        transfer pattern is factored into the _local_copy_* helpers below.
        """
        if not TEST_FLAG['test_local_to_local']:
            return

        print('测试本地文件复制')

        _copy_file = os.path.join(_temp_path, 'local_to_local_copy.bin')

        # Remove the target file and any transfer bookkeeping files
        # (.lock/.tmp/.info) left over from a previous run.
        if os.path.exists(_copy_file):
            FileTool.remove_file(_copy_file)
        for _ext in ('.lock', '.tmp', '.info'):
            if os.path.exists(_copy_file + _ext):
                FileTool.remove_file(_copy_file + _ext)

        # --- single-thread scenarios ---
        self._local_copy_to_finish(_copy_file, '单线程-缓存大于文件大小')
        self._local_copy_to_finish(_copy_file, '单线程-每次传输块大小大于文件',
                                   block_size=600000, auto_expand=False)
        self._local_copy_to_finish(_copy_file, '单线程-缓存小于文件大小',
                                   cache_size=2)
        self._local_copy_pause_resume(_copy_file, '单线程-暂停重复', 0.1,
                                      cache_size=2)
        self._local_copy_stop(_copy_file, '单线程-停止后续传', 0.1,
                              cache_size=2)
        # resume after the stop above with a fresh protocol
        self._local_copy_to_finish(_copy_file, '单线程-停止后续传',
                                   cache_size=2)

        # --- multi-thread scenarios ---
        self._local_copy_to_finish(_copy_file, '多线程',
                                   cache_size=2, thread_num=200)
        self._local_copy_to_finish(_copy_file, '多线程-缓存大于文件大小',
                                   thread_num=5)
        self._local_copy_to_finish(_copy_file, '多线程-每次传输块大小大于文件',
                                   thread_num=5, block_size=600000)
        self._local_copy_pause_resume(_copy_file, '多线程-暂停重复', 0.3,
                                      print_md5_on_fail=True,
                                      cache_size=2, thread_num=5)
        self._local_copy_stop(_copy_file, '多线程-停止后续传', 0.3,
                              cache_size=2, thread_num=5)
        # resume after the stop above with a fresh protocol
        self._local_copy_to_finish(_copy_file, '多线程-停止后续传',
                                   print_md5_on_fail=True,
                                   cache_size=2, thread_num=5)

    def _local_copy_to_finish(self, copy_file, tips, print_md5_on_fail=False,
                              **protocol_kwargs):
        """
        Run one local copy to completion and assert it finished.

        @param {str} copy_file - target file path
        @param {str} tips - progress bar label / assert message tag
        @param {bool} print_md5_on_fail=False - dump the md5 of the source
            and of the partial .tmp file before the assert fires, to help
            diagnose a failed (resumed) transfer
        @param {dict} protocol_kwargs - extra LocalProtocol keyword arguments
            (cache_size, thread_num, block_size, auto_expand, ...)
        """
        with LocalProtocol(_temp_file, copy_file, is_resume=True,
                           is_overwrite=True, **protocol_kwargs) as _protocol:
            _reader = Transfer(
                _protocol,
                show_process_bar_fun=ProgressRate.show_cmd_process_bar,
                process_bar_label=tips,
                thread_interval=0.0)
            _status = _reader.start(wait_finished=True)
            if print_md5_on_fail and _status != 'finished':
                # diagnostic output for a transfer that did not complete
                print(NetTool.get_file_md5(_temp_file))
                print(NetTool.get_file_md5(copy_file + '.tmp'))
            self.assertTrue(_status == 'finished',
                            msg="本地文件复制-%s: %s" % (tips, _status))

    def _local_copy_pause_resume(self, copy_file, tips, thread_interval,
                                 print_md5_on_fail=False, **protocol_kwargs):
        """
        Start a copy, stop it after 1 second, then resume the same reader
        to completion, asserting both the stopped and finished states.

        @param {float} thread_interval - per-thread interval used for the
            initial (deliberately slow) run, so the stop lands mid-transfer
        """
        with LocalProtocol(_temp_file, copy_file, is_resume=True,
                           is_overwrite=True, **protocol_kwargs) as _protocol:
            _reader = Transfer(
                _protocol,
                show_process_bar_fun=ProgressRate.show_cmd_process_bar,
                process_bar_label=tips,
                thread_interval=thread_interval)
            _reader.start()
            time.sleep(1)
            _reader.stop()
            self.assertTrue(_reader.status == 'stop',
                            msg="本地文件复制-%s(暂停): %s" % (tips, _reader.status))
            # speed the resumed run up, then wait for it to finish
            _reader.thread_interval = 0.0
            _status = _reader.start(wait_finished=True)
            if print_md5_on_fail and _status != 'finished':
                print(NetTool.get_file_md5(_temp_file))
                print(NetTool.get_file_md5(copy_file + '.tmp'))
            self.assertTrue(_status == 'finished',
                            msg="本地文件复制-%s: %s" % (tips, _status))

    def _local_copy_stop(self, copy_file, tips, thread_interval,
                         **protocol_kwargs):
        """
        Start a copy, stop it after 1 second and leave it unfinished;
        the caller resumes it later with a fresh protocol instance.
        """
        with LocalProtocol(_temp_file, copy_file, is_resume=True,
                           is_overwrite=True, **protocol_kwargs) as _protocol:
            _reader = Transfer(
                _protocol,
                show_process_bar_fun=ProgressRate.show_cmd_process_bar,
                process_bar_label=tips,
                thread_interval=thread_interval)
            _reader.start()
            time.sleep(1)
            _reader.stop()
        # checked outside the with-block: status must survive protocol close
        self.assertTrue(_reader.status == 'stop',
                        msg="本地文件复制-%s(暂停): %s" % (tips, _reader.status))
# Пример #12 (snippet separator)
    def _clean_file_path(cls, path: str, class_path: str):
        """
        Clean up the files of one product directory.

        Reads the directory's info.json, deletes unwanted images, renames the
        remaining pictures to '<product_num>_<main|detail>_<order>.<ext>', and
        finally moves the whole directory under its class sub-directory.

        @param {str} path - directory to process
        @param {str} class_path - root directory of the classified output
        """
        # Load the product info file first; its content is parsed with eval,
        # so it is presumably a Python dict literal — TODO confirm the format.
        _info = dict()
        _info_file = os.path.join(path, 'info.json')
        if os.path.exists(_info_file):
            with open(_info_file, 'rb') as f:
                _eval = str(f.read(), encoding='utf-8')
                # NOTE(review): eval on file content is unsafe if info.json can
                # come from an untrusted source — consider ast.literal_eval.
                _info = eval(_eval)

            # shop name ('店名'), used to match the size blacklist below
            _shop_name = _info['店名']
            # (disabled filter kept for reference)
            # if _info['款式'] == '挂件' and _info['挂件类型'] == '':
            #     return

            # walk the directory's files and normalize them
            _product_num = FileTool.get_dir_name(path)
            _files = FileTool.get_filelist(path)
            _order = 1
            for _file in _files:
                _file_ext = FileTool.get_file_ext(_file).lower()
                if _file_ext not in ['jpg', 'jpeg', 'png', 'bmp']:
                    # not an image type we keep
                    continue

                # delete files with '(' in the name — presumably duplicate
                # downloads like 'x(1).jpg'; verify against the downloader
                if _file.find('(') >= 0:
                    FileTool.remove_file(_file)
                    continue

                # delete pictures whose byte size matches the per-shop
                # blacklist DEL_SHOP_PIC_SIZE
                if _shop_name in DEL_SHOP_PIC_SIZE.keys() and os.path.getsize(
                        _file) in DEL_SHOP_PIC_SIZE[_shop_name]:
                    FileTool.remove_file(_file)
                    continue

                # rename to the normalized form unless already prefixed with
                # the product number; 'main' when the name contains 主图/main
                if not FileTool.get_file_name(_file).startswith(_product_num):
                    os.rename(
                        _file,
                        os.path.join(
                            path, '%s_%s_%d.%s' %
                            (_product_num, 'main' if _file.find('主图') >= 0
                             or _file.find('main') >= 0 else 'detail', _order,
                             _file_ext)))

                # next file (counter advances even for already-renamed files)
                _order += 1

            # move the directory into its class folder; the class name
            # ('款式') may be translated via PROP_TYPE_TRAN_DICT
            _class_path = _info['款式']
            if _class_path in PROP_TYPE_TRAN_DICT.keys():
                _class_path = PROP_TYPE_TRAN_DICT[_info['款式']]
            shutil.move(path,
                        os.path.join(class_path, _class_path, _product_num))

        # done
        return
# Пример #13 (snippet separator)
def test_case1():
    """
    Basic single-logger test: log output, level changes and format changes.

    Exercises the interaction between the logger level and each handler's
    level, then switches logger name and output format.
    """
    # Remove leftover config / log files from a previous run. Cleanup is
    # best-effort: the files may simply not exist yet, so failures are
    # swallowed — but only Exception, never SystemExit/KeyboardInterrupt.
    try:
        FileTool.remove_file(_TEMP_DIR + '/test_case1.json')
        print('del %s%s' % (_TEMP_DIR, '/test_case1.json'))
    except Exception:
        pass

    try:
        FileTool.remove_files(path=_TEMP_DIR + '/log/', regex_str='test_case1*')
    except Exception:
        pass

    _logger = simple_log.Logger(conf_file_name=_TEMP_DIR + '/test_case1.json',
                                logger_name=simple_log.EnumLoggerName.ConsoleAndFile,
                                config_type=simple_log.EnumLoggerConfigType.JSON_FILE,
                                logfile_path=_TEMP_DIR + '/log/test_case1.log')
    # ConsoleAndFile's logger level is DEBUG, but its ConsoleHandler level is
    # DEBUG and its FileHandler level is INFO. Whether a record is emitted is
    # decided by the stricter of the logger level and each handler's level.
    # The root logger should have no handlers of its own, otherwise every
    # record would also be emitted by root, duplicating our output.
    _logger.log(simple_log.DEBUG,
                'test_case1:write_log:DEBUG:1:界面应显示本日志,文件不应显示本日志')
    _logger.debug('test_case1:write_log:DEBUG:1-1:界面应显示本日志,文件不应显示本日志')
    _logger.log(simple_log.INFO,
                'test_case1:write_log:INFO:2:界面应显示本日志,文件应显示本日志')
    _logger.info('test_case1:write_log:INFO:2-1:界面应显示本日志,文件应显示本日志')

    # Raise the logger level to INFO; the handler levels are unchanged.
    _logger.setLevel(simple_log.INFO)
    _logger.log(simple_log.DEBUG,
                'test_case1:write_log:DEBUG:3:界面不应显示本日志,文件不应显示本日志')
    _logger.log(simple_log.INFO,
                'test_case1:write_log:INFO:4:界面应显示本日志,文件应显示本日志')

    # Logger level back to DEBUG, FileHandler level raised to WARNING.
    _logger.setLevel(simple_log.DEBUG)
    for _handler in _logger.base_logger.handlers:
        if _handler.name == 'FileHandler':
            _logger.set_handler_log_level(_handler, simple_log.WARNING)
    _logger.log(simple_log.DEBUG,
                'test_case1:write_log:DEBUG:5:界面应显示本日志,文件不应显示本日志')
    _logger.log(simple_log.WARNING,
                'test_case1:write_log:WARNING:6:界面应显示本日志,文件应显示本日志')

    # Raise the whole logger to INFO again.
    _logger.setLevel(simple_log.INFO)
    _logger.log(simple_log.DEBUG,
                'test_case1:write_log:DEBUG:7:界面不应显示本日志,文件不应显示本日志')
    _logger.log(simple_log.INFO,
                'test_case1:write_log:INFO:8:界面应显示本日志,文件应显示本日志')

    # Switch to the Console logger; the level should be restored from the
    # config file (back to DEBUG).
    _logger.change_logger_name(logger_name=simple_log.EnumLoggerName.Console)
    _logger.log(simple_log.DEBUG,
                'test_case1:write_log:DEBUG:9:界面应显示本日志,文件不应显示本日志')

    # Change the output format string.
    _logger.set_logger_formater(format_str='[%(asctime)s]%(message)s')
    _logger.log(simple_log.DEBUG,
                'test_case1:write_log:DEBUG:9:格式发生变化,界面应显示本日志,文件不应显示本日志')

    del _logger