def _crypto(cls, val, fstd: OutputFieldConfig) -> str:
    """Apply the configured crypto encoding to val and return the encoded string.
    val may be a str or bytes.
    """
    res: str = None
    if val == "":
        res = val
        return res
    if not isinstance(fstd, OutputFieldConfig):
        raise Exception("Invalid OutputFieldConfig: {}".format(fstd))
    crypto: ECrypto = fstd._crypto
    if not isinstance(crypto, ECrypto):
        raise Exception("Unknown cryptographic method: {}".format(crypto))
    enc: str = fstd._cryptoenc
    if not charsets.contains_charset(enc):
        raise Exception("Unknown charset for encoding: {}".format(enc))

    # Some data arrives already encoded (e.g. it is already a '=?utf-8?b?'
    # MIME encoded-word) or is not a string at all, so pass it through unchanged.
    if crypto == ECrypto.Null or not isinstance(
            val, str) or val.startswith('=?utf-8?b?'):
        res = val
    elif crypto == ECrypto.Base64:
        res = cls._base64_encrypt(val, enc)
    else:
        raise Exception("Unknown cryptographic method: {}".format(crypto))
    return res
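# Note: the '=?utf-8?b?' prefix checked above is the MIME encoded-word marker
# (RFC 2047), which suggests Base64 values are emitted in that form.
# cls._base64_encrypt itself is not shown here; the helper below is a
# hypothetical, self-contained sketch of what it plausibly produces, not the
# actual method.
import base64


def _base64_encrypt_sketch(val, enc: str = "utf-8") -> str:
    """Hypothetical stand-in for cls._base64_encrypt (assumption): wrap val
    as a MIME encoded-word '=?<charset>?b?<base64>?='."""
    raw = val.encode(enc) if isinstance(val, str) else val  # accept str or bytes
    return "=?{}?b?{}?=".format(enc, base64.b64encode(raw).decode("ascii"))


# _base64_encrypt_sketch("hello") -> '=?utf-8?b?aGVsbG8=?='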
def __init__(
        self,
        description: str,
        platform: str,
        datamatcher: DataMatcher = None,
        maxsegcount: int = 1000,
        enc: str = "utf-8",
):
    if not isinstance(description, str) or description == "":
        raise Exception("Invalid description for outputer")
    if not isinstance(platform, str) or platform == "":
        raise Exception("Invalid platform for outputer")
    self._description: str = description  # unique description of this outputer
    self._platform: str = platform  # platform this outputer belongs to
    self._maxsegcount: int = 1000  # maximum number of data segments per single send
    if isinstance(maxsegcount, int) and maxsegcount > 0:
        self._maxsegcount = maxsegcount
    self._datamacher: DataMatcher = DataMatcher()
    if isinstance(datamatcher, DataMatcher):
        self._datamacher = datamatcher
    if not isinstance(
            enc, str) or enc == "" or not charsets.contains_charset(enc):
        raise Exception("Invalid charset param 'enc' for OutputerBase")
    self._enc: str = enc
    self._logger: MsLogger = MsLogManager.get_logger("Output_{}".format(
        self._platform))
def __init__(self,
             source: str,
             platform: str,
             oncomplete: callable,
             srcmark: str,
             encoding: str = 'utf-8'):
    InputData.__init__(self, source, platform, oncomplete, srcmark)
    if not isinstance(encoding, str) or not charsets.contains_charset(encoding):
        raise Exception("Invalid charset '%s' for initializing InputFileData" %
                        encoding)
    self._encoding = encoding
def _fields_to_bytes(cls, fields: dict, enc: str = "utf-8") -> bytes:
    """Serialize the fields dict into bytes."""
    enc_ = "utf-8"
    if isinstance(enc, str) and charsets.contains_charset(enc):
        enc_ = enc
    res: bytes = bytes()
    for f in fields.items():
        try:
            s = "{}:{}\r\n".format(f[0], f[1])
            res += s.encode(enc_)
        except Exception as ex:
            # Do not swallow or work around errors here: incoming fields must
            # already be convertible to bytes. If a field is not, fix the
            # configuration so that fields requiring base64 are encoded
            # before they reach this point.
            # s = "{}:{}\r\n".format(f[0], helper_str.repr_str(f[1]))
            # res += s.encode(enc_)
            raise ex
    res += "\r\n".encode(enc_)
    return res
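# Illustrative only: the layout produced by _fields_to_bytes is one
# "name:value\r\n" line per field, terminated by a blank "\r\n" line.
# The field names below are hypothetical examples.
fields_example = {"From": "alice@example.com", "Subject": "hello"}
payload = b"".join(
    "{}:{}\r\n".format(k, v).encode("utf-8") for k, v in fields_example.items())
payload += b"\r\n"
# payload == b'From:alice@example.com\r\nSubject:hello\r\n\r\n'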
def __init__(self,
             uniqueName: str,
             inputdir: str,
             platform: str,
             clasifiers: dict = None,
             maxDealingQueue: int = 20,
             encoding='utf-8',
             succ_file_delete: bool = True,
             succdir: str = './_serversucc',
             succfilekeepcount: int = 1000,
             succfilekeepdays: int = 3,
             error_file_delete: bool = False,
             errordir: str = './_servererror',
             errorfilekeepcount: int = 1000,
             errorfilekeepdays: int = 3):
    InputerBase.__init__(self, uniqueName, inputdir, platform, clasifiers,
                         error_file_delete, succ_file_delete)

    # inputer attributes
    self._inputdir = os.path.abspath(inputdir)
    os.makedirs(self._inputdir, exist_ok=True)
    if not isinstance(
            maxDealingQueue,
            int) or maxDealingQueue < 1 or maxDealingQueue > 99999:
        raise Exception("Param maxDealingQueue is incorrect.")
    self._max_dealing_queue_count = maxDealingQueue

    # charset used when reading files
    self._encoding = 'utf-8'
    if not isinstance(encoding, str) or not charsets.contains_charset(encoding):
        self._logger.warn(
            "Specified charset is invalid: '%s', will use 'utf-8' instead." %
            encoding)
    else:
        self._encoding = encoding

    # completed (success) directory
    self._succdirroot = os.path.abspath('./_servercomplete')
    if isinstance(succdir, str) and not succdir == "":
        self._succdirroot = os.path.abspath(succdir)
    self.__succdir = os.path.join(self._succdirroot, self._get_date())
    self._last_succ_dir_time = datetime.datetime.now(
        pytz.timezone('Asia/Shanghai')).timestamp()
    self._succdir_locker = threading.RLock()
    self._succfile_keepcount: int = 1000
    if isinstance(succfilekeepcount, int):
        self._succfile_keepcount = succfilekeepcount
    self._succfile_keepdays: int = 3
    if type(succfilekeepdays) in [int, float]:
        self._succfile_keepdays = succfilekeepdays
    self._t_completefi_reduce: threading.Thread = helper_file.directory_file_reduce(
        self._succdirroot, self._succfile_keepcount,
        self._succfile_keepdays, self._complete_file_reduce_log)

    # error directory
    self._errordirroot = os.path.abspath('./_servererror')
    if isinstance(errordir, str) and not errordir == "":
        self._errordirroot = os.path.abspath(errordir)
    self.__errordir = os.path.join(self._errordirroot, self._get_date())
    self._last_error_dir_time = datetime.datetime.now(
        pytz.timezone('Asia/Shanghai')).timestamp()
    self._errordir_locker = threading.RLock()
    self._errorfile_keepcount: int = 1000
    if isinstance(errorfilekeepcount, int):
        self._errorfile_keepcount = errorfilekeepcount
    self._errorfile_keepdays: int = 3
    if type(errorfilekeepdays) in [int, float]:
        self._errorfile_keepdays = errorfilekeepdays
    self._t_errorfi_reduce: threading.Thread = helper_file.directory_file_reduce(
        self._errordirroot, self._errorfile_keepcount,
        self._errorfile_keepdays, self._error_file_reduce_log)

    # file-monitoring thread
    # indicates whether the file-monitoring thread has stopped
    self._stoped = True
    self._stopedLocker = threading.Lock()
    self._tmonitor = threading.Thread(
        target=self._monitor, name=self._uniquename)
    self._dealing_queue: dict = {}
    self._dealing_queue_locker = threading.Lock()
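# The success/error roots above each get a date-keyed subdirectory via
# self._get_date(), whose implementation is not shown. Below is a hypothetical,
# self-contained sketch of that rollover pattern; the date format is an
# assumption.
import datetime
import os

import pytz


def _dated_subdir_sketch(root: str) -> str:
    """Hypothetical illustration of the date-keyed subdirectory scheme."""
    today = datetime.datetime.now(
        pytz.timezone('Asia/Shanghai')).strftime('%Y-%m-%d')
    path = os.path.join(os.path.abspath(root), today)
    os.makedirs(path, exist_ok=True)  # mirror the makedirs call on the input dir
    return path


# _dated_subdir_sketch('./_servercomplete') -> '.../_servercomplete/<yyyy-mm-dd>'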