def dialup():
    # SSLv3 is broken (POODLE) and absent from modern ssl builds; PROTOCOL_TLS
    # negotiates the highest TLS version both sides support.
    context = SSLContext(PROTOCOL_TLS)

    def _ssl():
        sslsession = SMTP_SSL(context=context)
        ext_log(sslsession.connect(getconf('smtp_server'), getconf('smtp_port')),
                'SSL connection')
        return sslsession

    def _starttls():
        tlssession = SMTP()
        ext_log(tlssession.connect(getconf('smtp_server'), getconf('smtp_port')),
                'startTLS connection')
        tlssession.ehlo()
        if tlssession.has_extn('STARTTLS'):
            ext_log(tlssession.starttls(context=context), 'startTLS')
            tlssession.ehlo()
        else:
            logger.warning('plaintext connection')
        return tlssession

    try:
        session = _ssl() if getconf('smtp_use_ssl') else _starttls()
        ext_log(session.login(getconf('smtp_user'), getconf('smtp_password')), 'login')
        return session
    except (SMTPException, SSLError) as ex:
        logger.error('SMTP error: %s' % ex)
def is_file_valid(model_path, save_file=False):
    # assert's message argument is only evaluated on failure, and logger.error()
    # returns None, so `assert cond, logger.error(...)` raised with an empty
    # message; log first, then raise with a real message.
    if not model_path.endswith('.npy'):
        logger.error('Invalid file provided {}'.format(model_path))
        raise AssertionError('Invalid file provided {}'.format(model_path))
    if not save_file and not os.path.exists(model_path):
        logger.error('file not found: {}'.format(model_path))
        raise AssertionError('file not found: {}'.format(model_path))
    logger.info('[LOAD/SAVE] checkpoint path is {}'.format(model_path))
def send_mail(to, messagetext, subject=None, **args):
    cc = args.get('cc', [])
    bcc = args.get('bcc', [])
    recipients = list(chain(to, cc, bcc))
    # fall back to the configured default when the kwarg is missing or empty
    sender = args.get('sender') or getconf('email_sender')
    footer = args.get('footer') or getconf('email_footer')
    subjecttag = args.get('subjecttag', getconf('email_defaulttag'))
    subjectdate = args.get('subjectdate', getconf('email_subject_date'))
    files = args.get('files', [])
    logger.info('~' * 23)
    logger.info('sending new mail using %s:\n%d recipients ~ %d cc, %d bcc, %d files'
                % (sender, len(recipients), len(cc), len(bcc), len(files)))
    message = make_header(to, sender, cc, subject, subjecttag, subjectdate)
    message.attach(make_mime_text(messagetext, footer))
    for f in files:  # attach in a plain loop, not a throwaway list comprehension
        message.attach(make_mime_file(f))
    session = dialup()
    if session is not None:
        try:
            session.sendmail(sender, recipients, message.as_string().encode('UTF-8'))
        except SMTPException as ex:
            logger.error('smtp error: %s' % ex)
            return ex
        else:
            logger.info('mail sent')
            return True
        finally:
            ext_log(session.quit(), 'quit')
            logger.info('end mail')  # log completion during cleanup; was dead code after the returns
def pybackup(**options):
    """Database backup."""
    # check free disk space; exit non-zero so callers can detect the failure
    if not util.check.check_disk_free():
        logger.error('Insufficient disk space, please free some space')
        sys.exit(1)
    # parse the config file and gather the parameters
    backup_setting = util.config.get_config(options)
    # record the parameters used for this run
    logger.debug(backup_setting)
    # create a record-database instance
    rdb = util.record.RecordDB(backup_setting['record-type'],
                               backup_setting['record-db-sqlite'],
                               backup_setting['record-db-mysql'])
    backup_setting.update({'rdb': rdb})
    # validate the parameters
    check_error = util.check.check_option(backup_setting)
    if check_error:
        logger.error(check_error)
        sys.exit(1)
    # run the backup or restore flow
    if options['restore']:
        if 'backup-package' in backup_setting:
            # restore from the specified backup package
            restore_from_specified_packages(backup_setting,
                                            backup_setting['backup-package'])
        else:
            # look up the restore information in the record database
            restore_process(backup_setting, backup_setting['host'],
                            backup_setting['port'], backup_setting['recover-point'])
    else:
        backup_process(backup_setting)
def upload_dir(local, remote):
    from util import logger
    try:
        transport = paramiko.Transport((host, port))
        transport.connect(username=username, password=password)
        sftp = paramiko.SFTPClient.from_transport(transport)
        for root, dirs, files in os.walk(local):
            for filespath in files:
                local_file = os.path.join(root, filespath)
                # use a true relative path; str.replace() left a leading
                # separator, which made os.path.join discard `remote`
                rel_path = os.path.relpath(local_file, local)
                remote_file = os.path.join(remote, rel_path)
                try:
                    sftp.put(local_file, remote_file)
                except Exception:
                    # parent directory missing on the remote side: create it, retry
                    sftp.mkdir(os.path.split(remote_file)[0])
                    sftp.put(local_file, remote_file)
            for name in dirs:
                local_path = os.path.join(root, name)
                remote_path = os.path.join(remote, os.path.relpath(local_path, local))
                try:
                    sftp.mkdir(remote_path)
                except Exception as e:
                    print(e)
        sftp.close()
    except Exception as e:
        logger.error('error occurred when uploading dir %s to %s', local, remote)
        print(e)
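# Why the relative-path fix in upload_dir() matters: os.path.join discards
# everything before a component that begins with a separator, so a leading
# slash silently drops the remote prefix. A minimal runnable illustration:
import os.path

print(os.path.join('/srv/backup', '/sub/f.txt'))  # '/sub/f.txt' -- remote prefix lost
print(os.path.join('/srv/backup', 'sub/f.txt'))   # '/srv/backup/sub/f.txt'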
def run(self):
    self._build_session()
    with self._session as sess:
        self._build_model()
        while True:
            next_task = self._task_queue.get(block=True)
            if next_task[0] == parallel_util.WORKER_RUNNING:
                self._num_envs_required = int(next_task[1])
                # collect rollouts
                traj_episode = self._play()
                self._task_queue.task_done()
                for episode in traj_episode:
                    self._result_queue.put(episode)
            # elif, not a fresh if: a second chain made plain WORKER_RUNNING
            # tasks fall through to the "invalid task" branch below
            elif next_task[0] == parallel_util.WORKER_RUNNING_MT:
                self._num_envs_required = int(next_task[1])
                # collect rollouts
                traj_episode = self._play()
                self._task_queue.task_done()
                for episode in traj_episode:
                    self._result_queue.put({**episode, "task name": self.task_name})
            elif next_task[0] == parallel_util.AGENT_SET_WEIGHTS:
                # set parameters of the actor policy
                self._set_weights(next_task[1])
                time.sleep(0.001)  # yield the process
                self._task_queue.task_done()
            elif next_task[0] in (parallel_util.END_ROLLOUT_SIGNAL,
                                  parallel_util.END_SIGNAL):
                logger.info("kill message for worker")
                self._task_queue.task_done()
                break
            elif next_task[0] == parallel_util.AGENT_SET_WEIGHTS_MULTI:
                weights = next_task[1]
                self._set_weights(weights[self.task_name])
                time.sleep(0.001)  # yield the process
                self._task_queue.task_done()
            elif next_task[0] == parallel_util.AGENT_RENDER:
                pass  # rendering disabled
            else:
                logger.error('Invalid task type {}'.format(next_task[0]))
    return
def install_it(pkg, system='debian'):
    """Install a package only if it isn't already installed.

    Returns True on success (or if the package was already present),
    False if the installation failed.

    Possible values of system are:
    * debian
    * archlinux
    """
    if system == 'debian':
        cmd = 'sudo apt-get --assume-yes install ' + pkg
    elif system == 'archlinux':
        cmd = 'sudo pacman -S ' + pkg
    else:
        raise ValueError('unsupported system: ' + system)  # cmd was unbound here
    if is_pack_installed(pkg):
        return True
    info('Installing the package ' + pkg + ' ...')
    try:
        (st, out) = subprocess.getstatusoutput(cmd)
    except OSError:
        error('Maybe apt is not installed in this system.')
        st = 32512  # shell exit status for 'command not found'
    return st == 0
def read_power(self, SerialNumber):
    # page 53, table A3
    if SerialNumber not in self.caches_time:
        self.caches_time[SerialNumber] = 0
        self.caches_val[SerialNumber] = 0
    if time.time() - self.caches_time[SerialNumber] <= init.SERIAL_CACHE_TTL:
        # cache hit: answer from the cache instead of touching the bus
        return self.caches_val[SerialNumber]
    # crude spin lock on the serial channel; the check-then-set is not atomic,
    # so a threading.Lock would be safer under real concurrency
    while self.ser_lock:
        time.sleep(init.SER_LOCK_RECHECK_TIME)
    addr = self.ser2addr(SerialNumber)
    self.ser_lock = True
    rsp = self.chn.read_power(addr)
    self.ser_lock = False
    if rsp is not None:
        self.caches_val[SerialNumber] = rsp
        self.caches_time[SerialNumber] = time.time()
        return rsp
    logger.error("COM %s, Addr %s no resp!" % (self.COM_PORT, SerialNumber))
    return None
def send_stop_timelapse(key):
    try:
        task = celery_app.send_task('cam_task.stop_timelapse_task', args=[key])
        return task.get()
    except Exception:
        logger.error(traceback.format_exc())
        raise
def check_xtrabackup_completed(log_text):
    """Check whether xtrabackup succeeded, based on its log output."""
    last_row = log_text[log_text.rfind('\n') + 1:]
    if not last_row.endswith('completed OK!'):
        logger.error('-' * 15 +
                     ' xtrabackup (innobackupex) backup verification failed ' +
                     '-' * 15)
        sys.exit(1)
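# What check_xtrabackup_completed() looks for, on a hypothetical log tail:
# a successful xtrabackup/innobackupex run ends with a 'completed OK!' line.
sample_log = '...\n210101 00:00:00 completed OK!'
last_row = sample_log[sample_log.rfind('\n') + 1:]
print(last_row.endswith('completed OK!'))  # True -> backup verified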
def __init__(self, url, method="GET", parameters=None, cookie=None, headers=None):
    try:
        # parse the url; accept both bytes and str
        if isinstance(url, bytes):
            self.__url = url.decode("utf-8")
        else:
            self.__url = url
        logger.debug(self.__url)
        # split off the scheme, then the host and path
        scheme, rest = urllib.parse.splittype(self.__url)
        self.__host_absolutely, self.__path = urllib.parse.splithost(rest)
        host_list = self.__host_absolutely.split(":")
        if len(host_list) == 1:
            self.__host = host_list[0]
            self.__port = 80
        elif len(host_list) == 2:
            self.__host = host_list[0]
            self.__port = int(host_list[1])  # keep the port numeric
        # normalize the remaining arguments
        self.__method = method
        self.__data = parameters
        self.__cookie = cookie
        if parameters is not None:
            self.__parameters_urlencode_deal = urllib.parse.urlencode(parameters)
        else:
            self.__parameters_urlencode_deal = ""
        self.__jdata = simplejson.dumps(parameters, ensure_ascii=False)
        self.__headers = headers if headers is not None else {}  # no shared mutable default
    except Exception as e:
        logger.error(e)
        logger.exception("Caught the following error:")
def send_aspsms(to, messagetext, **args):
    # fall back to the configured default when the kwarg is missing or empty
    originator = args.get('originator') or getconf('aspsms_originator')
    flashing = args.get('flashing', getconf('aspsms_flashing'))
    maxchars = args.get('maxchars', getconf('aspsms_maxchars'))
    logger.info('~' * 23)
    logger.info('sending new aspsms using %s:\n%d recipients ~ flashing: %s'
                % (originator, len(to), flashing))
    message = wrap(messagetext, maxchars)
    try:
        for recipient in to:
            for text in message:
                payload = make_xml(recipient, originator, text, flashing)
                response = response_xml(post_xml(payload))
                if response['ErrorCode'] != '1':
                    raise Exception('aspsms error: %s' % response['ErrorDescription'])
    except Exception as ex:
        logger.error('error: %s' % ex)
        return ex
    else:
        logger.info('aspsms sent')
        return True
    finally:
        logger.info('end aspsms')  # log completion during cleanup; was dead code after the returns
def parseMessage(msgData, authors):
    """Parse the message contained in msgData.

    Authors should be a dict to provide a correspondence between the IDs as
    present in the msgData and eventually preferred aliases. If a key is not
    present, the ID itself is used as alias for all successive messages.

    :return: A string representing the parsed messages, or None if the value
             for a specific key was not found
    """
    try:
        localTimestamp = time.localtime(msgData["timestamp"] / 1000)
        dateAndTime = time.strftime("%Y.%m.%d %H:%M:%S", localTimestamp)
        body = msgData["body"].replace("\n", " ")
        authorId = msgData["author"].split(":")[1]
        if authorId not in authors:
            logger.warning("Missing value for author ID {}. Using directly the ID "
                           "for all successive messages".format(authorId))
            authors[authorId] = str(authorId)
        author = authors[authorId]
        message = str(dateAndTime) + " " + author + " " + body
        return message
    except KeyError:
        logger.error("Parsing message. KeyError")
        logger.error(msgData)
        return None
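# A hypothetical msgData illustrating what parseMessage() above consumes,
# assuming the imports the function itself needs (time, logger) are in scope:
# the author field carries an "<origin>:<id>" pair and newlines in the body
# are flattened to spaces.
msg = {"timestamp": 1400000000000, "body": "hello\nworld", "author": "fbid:42"}
print(parseMessage(msg, {"42": "Alice"}))  # e.g. "2014.05.13 18:53:20 Alice hello world"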
def send_command(self, cmd_type, **kwds):
    '''Serialize a command packet as JSON and send it over the connection.'''
    import json
    packet = {'Cmd': cmd_type, 'Seq': self.seq}
    for key in kwds:
        packet[key] = kwds[key]
    data = json.dumps(packet) + "\n"
    if isinstance(data, str):
        data = data.encode('utf8')  # sockets want bytes (the py2 original checked unicode)
    time0 = time.time()
    self._lock.acquire()
    delta = time.time() - time0
    if self._enable_log and delta >= 0.05:
        logger.info('send wait %s S' % delta)
    if self._enable_log:
        logger.debug('send: %s' % data[:512])
    try:
        result = self.send(data)
    except Exception as e:
        # swallow the exception so it cannot leave the lock held (deadlock)
        logger.error('send %r error: %s' % (data, e))
        result = None
    finally:
        self._lock.release()  # release on every path
    return result
def direct_clip(stacking, band_name, clip_extents, tile_id, rename, workdir):
    """Clip datatypes which require no special processing."""
    logger.info('    Start processing for band: %s', band_name)
    mosaic_filename = os.path.join(workdir, tile_id,
                                   tile_id + '_' + rename + '.tif')
    if os.path.exists(mosaic_filename):
        logger.warning("Skip previously generated result %s", mosaic_filename)
        return mosaic_filename
    warp_cmd = ('gdalwarp -te {extents}'
                ' -co "compress=deflate" -co "zlevel=9"'
                ' -co "tiled=yes" -co "predictor=2"').format(extents=clip_extents)
    for stack in reversed(stacking):
        scene_name = util.ffind(workdir, stack['LANDSAT_PRODUCT_ID'],
                                '*' + band_name + '.tif')
        warp_cmd += ' ' + scene_name
    warp_cmd += ' ' + mosaic_filename
    util.execute_cmd(warp_cmd)
    logger.info('    End processing for %s as %s ', band_name, mosaic_filename)
    if not os.path.exists(mosaic_filename):
        logger.error('Processing failed to generate desired output: %s',
                     mosaic_filename)
    return mosaic_filename
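# For illustration, the command direct_clip() composes looks roughly like the
# hypothetical line below; in a gdalwarp mosaic, later inputs win where scenes
# overlap, which is why the stacking list is walked in reverse:
#
#   gdalwarp -te <xmin ymin xmax ymax> -co "compress=deflate" -co "zlevel=9" \
#       -co "tiled=yes" -co "predictor=2" scene2_B1.tif scene1_B1.tif tile_B1.tif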
def read_power_by_IP(self, ip):
    if ip in globals.METERS_IP_MAP:
        SN = globals.METERS_IP_MAP[ip][0]
        SER = globals.METERS_IP_MAP[ip][1]
        return globals.COM_SHARD[SER].read_power(SN)
    else:
        logger.error("IP %s record not found" % ip)
def saveUser(self, user):
    assert isinstance(user, User), "wrong type"
    sql = ("insert into User(userName,password,permission,wechatId,wechatName,"
           "registerTime,phoneNumber,emailAddress) values(%s,%s,%s,%s,%s,%s,%s,%s)")
    value = [user.userName, user.getPassword(), user.permission, user.wechatId,
             user.wechatName, user.registerTime, user.phoneNumber, user.emailAddress]
    cursor = self.db.cursor()
    try:
        result = cursor.execute(sql, value)
        self.db.commit()
        cursor.close()
        return result
    except Exception as e:
        self.db.rollback()
        cursor.close()
        if e.args[0] == 1062:  # MySQL duplicate-key error
            logger.warning(e.args[1])
            return 0
        logger.error("Failed:" + str(sql), exc_info=True)
        return -1
def publish(item, board, helper, channel, web):
    item_type = helper.item_type(item)
    post_process(item, board, helper, web)
    while True:
        try:
            channel.basic_publish(
                exchange='chan',
                routing_key="%s.%s.%s" % (chan, item_type, board),
                body=json.dumps(item, separators=(',', ':'),
                                ensure_ascii=False, sort_keys=True))
            if MONITORING:
                distance = datetime.utcnow() - datetime.utcfromtimestamp(
                    helper.item_mtime(item))
                monitoring.log([{
                    "measurement": chan,
                    "time": str(datetime.utcnow()),
                    "tags": {"board": board},
                    "fields": {"distance": distance.total_seconds()}
                }])
            break
        except Exception as e:
            logger.debug(traceback.format_exc())
            logger.error(str(e))
            channel = connect()  # reconnect and retry the publish
def callback():
    code = request.args.get('code')
    if not code:
        logger.error("missing 'code' parameter")
        return jsonify(Response.error(400, "missing 'code' parameter"))
    logger.info("[code] " + str(code))
    # exchange the code for an access_token
    token_url = urls.get_token_url(code)
    resp = Request.get(token_url, to_dict=False)
    logger.debug(resp)
    try:
        access_token = re.findall("access_token=(.*?)&expires_in", resp)[0]
        logger.info("[access_token] " + str(access_token))
    except IndexError:
        logger.error('failed to obtain access_token')
        return jsonify(Response.error(400, "failed to obtain access_token"))
    session['qq_access_token'] = access_token
    # use the access_token to fetch the openid
    openid_url = urls.get_openid_url(access_token)
    resp = Request.get(openid_url)
    logger.debug(resp)
    openid = resp.get('openid')
    logger.info("[openid] " + str(openid))
    session['openid'] = openid
    return redirect(url_for('get_user_info'))
def _parse_stream(self, bot):
    stream = JSONStream(self.flow_user_api_key)
    gen = stream.fetch([self.channel], active=True)
    for data in gen:
        process_message = (isinstance(data, dict)
                           and data.get('event') in ("message", "comment"))
        if process_message and ("user" in data and self.user != data["user"]):
            self.spoken = False
            bot_input = BotInput()
            # check membership before indexing: indexing data['content'] first
            # meant the missing-content fallback could never be reached
            if "content" not in data:
                continue
            if isinstance(data['content'], dict):
                bot_input.message = data["content"]['text']
            else:
                bot_input.message = data["content"]
            if "user" in data and int(data["user"]) > 0:
                try:
                    bot_input.nick = self.get_user_by_id(data["user"])["nick"]
                    self.user_id = data["user"]
                    if random.random() < (self.chattiness / 100):
                        logger.log("Randomly sending message to %s" % bot_input.nick)
                        self.private_message(
                            data["user"],
                            random.choice(self.responses["private_messages"]))
                except Exception as e:
                    logger.error(e)
                    self.say(bot.responses["stranger"])
            elif "external_name" in data:
                bot_input.nick = data["external_name"]
            else:
                bot_input.nick = "anonymous"
            bot_input.bot = bot
            self.user_nick = bot_input.nick
            marvin.process(bot_input, self)
def wrapper(*args, **kwargs):
    prog = ''
    comment = args[1]
    # find the program to run, skipping environment-variable assignments
    for arg in args[0]:
        if '=' not in str(arg):
            prog = str(arg)
            break
    logger.info('checking that program ' + prog + ' exists')
    if not shutil.which(prog):
        logger.error(prog + ' : program not found')
        sys.exit(prog)
    # run the command
    returncode, out_value, err_value = func(*args, **kwargs)
    # check whether the command-line command succeeded
    if returncode is None:
        logger.info('-' * 15 + ' ' + comment + ' still running ' + '-' * 15)
    elif returncode == 0:
        logger.info('-' * 15 + ' ' + comment + ' done ' + '-' * 15)
    else:
        logger.error('-' * 15 + ' ' + comment + ' failed ' + '-' * 15)
        sys.exit(1)
    # verify the backup from xtrabackup's own log output
    if prog in ['innobackupex', 'xtrabackup']:
        check_xtrabackup_completed(err_value.strip())
        logger.info('-' * 15 +
                    ' xtrabackup (innobackupex) backup verification succeeded ' +
                    '-' * 15)
    return returncode, out_value, err_value
def sendMessages(self, user, text):
    '''Push a text message to a user via the WeChat Work API.'''
    url = ("https://qyapi.weixin.qq.com/cgi-bin/message/send?access_token="
           + self.__accessToken)
    data = {
        "touser": user.wechatId,
        "msgtype": "text",
        "agentid": WECHAT_CONFIG['AgentId'],
        "text": {"content": text},
        "safe": 0
    }
    result = None
    try:
        result = requests.post(url, json.dumps(data))
        result = json.loads(result.text)
        if result['errcode'] != 0:
            # the token may have expired: refresh it and retry once
            self.getAccessToken()
            url = ("https://qyapi.weixin.qq.com/cgi-bin/message/send?access_token="
                   + self.__accessToken)
            result = requests.post(url, json.dumps(data))
            result = json.loads(result.text)
    except Exception as e:
        logger.error("Post wechat push failed:{}".format(e), exc_info=True)
    # result stays None when the request itself raised; guard for that
    if result is None or result['errcode'] != 0 or result['invaliduser'] != '':
        logger.error("Post wechat push failed.\n result :" + json.dumps(result)
                     + "\ndata: " + json.dumps(data))
        return -1
    return 0
def start():
    try:
        proxies_thread = threading.Thread(target=async_do)
        proxies_thread.setName("task-submitter")
        proxies_thread.start()
    except Exception as e:
        error("task submission error:", repr(e))
        start()  # retry; note this recursion grows the stack on repeated failures
def remove_backup_dir(path):
    """Delete the backup directory."""
    try:
        shutil.rmtree(path)
        logger.info('backup directory ' + path + ' deleted')
    except Exception:
        logger.error('failed to delete the backup directory')
        sys.exit(1)
def __check_args_length(must: int, argname: str, soft: bool = False) -> bool:
    if len(passed_args) < must:
        if not soft:
            logger.fatal('MISSING ARGUMENT: [%s]' % argname)
            raise Exception('manual interruption')
        logger.error('MISSING ARGUMENT: [%s]' % argname)
        return False  # annotated -> bool, so return explicitly instead of None
    return True
def __init__(self):
    try:
        self.__ini_config_file = BASE_DIR + "/conf/" + "config.ini"
        self.__conf_config_file = BASE_DIR + "/conf/" + "config.conf"
    except Exception as e:
        logger.error(e)
        logger.exception("Caught the following error:")
def start():
    try:
        proxies_thread = threading.Thread(target=async_do)
        proxies_thread.setName("tag-loader")
        proxies_thread.start()
    except Exception as e:
        error("pending-task fetch error:", repr(e))
        start()  # retry; note this recursion grows the stack on repeated failures
def run(self):
    try:
        self.go()
    except Exception as e:
        logger.error("Dayu upload failed, task id: {} error=>{}".format(self.id, e))
        self.msg = "unknown upload failure, check the upload log; error: {}".format(e)
        self.browser.close()
    return self.status, self.aid, self.vid, self.msg
def req_url(url):
    times = 0
    while True:
        times += 1
        proxies_ip = get_proxies_ip(url)
        # douban rate-limits direct access, so a proxy is mandatory; block
        # until one is available rather than risk getting the local ip banned
        if is_douban(url) and proxies_ip is None:
            WORKER_WAIT.wait()
            continue
        if is_xici(url):
            info("repeated xici proxy access, attempt ", times,
                 ", sleeping 10s before retrying")
            # throttle to avoid hitting the endpoint too fast
            if times > 0:
                time.sleep(10)
        headers = {
            'User-Agent': random.choice(USER_AGENTS),
            'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
            'Accept-Language': 'en-US,en;q=0.5',
            'Connection': 'keep-alive',
            'Accept-Encoding': 'gzip, deflate',
        }
        proxies = {}
        if proxies_ip is not None:
            agreement, location = split_proxies(proxies_ip)
            proxies[agreement] = location
        try:
            html = requests.get(url, proxies=proxies, timeout=5, headers=headers).text
            dom = etree.HTML(html)
            if html.strip() == "":  # strip() was missing its parentheses, so this never fired
                remove_proxies_ip(url, proxies_ip)
                continue
            # if the response looks wrong, switch proxies and refetch
            if result_has_error(url, dom, proxies_ip):
                error(url, ", bad response, page content:", html)
                remove_proxies_ip(url, proxies_ip)
                continue
            else:
                # response validated, so promote this proxy to the preferred list
                add_effective_proxies_ip(proxies_ip)
                return html
        except Exception:
            error(url, " request failed, switching proxy address.", proxies_ip)
            # drop the broken proxy address
            remove_proxies_ip(url, proxies_ip)
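# The empty-page check fixed in req_url() in miniature: referencing .strip
# without calling it yields the bound method object, which never equals "".
s = ""
print(s.strip == "")    # False -- compares the method object itself
print(s.strip() == "")  # True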
def send_start_timelapse(header: str, run_every: float, expire_at: str, data: dict):
    try:
        task = celery_app.send_task('cam_task.start_timelapse_task',
                                    args=[header, run_every, expire_at, data])
        return task.get()
    except Exception:
        logger.error(traceback.format_exc())
        raise
def remove_dir(dir):
    """Delete a directory."""
    try:
        shutil.rmtree(dir)
    except Exception as e:
        logger.error('failed to delete directory')
        logger.error('error message: ' + str(e))  # str() needed; str + Exception raises TypeError
    else:
        logger.info('directory ' + dir + ' deleted successfully')
def receive_result(task_id):
    try:
        res = celery_app.AsyncResult(task_id)
        data = res.get()
        res.forget()
        return data  # return the value, not the already-forgotten AsyncResult
    except Exception:
        logger.error(traceback.format_exc())
        raise
def _init_driver(self):
    '''Initialize the test stub.'''
    if AndroidSpyClient.server_opened(self._port):
        # the server is already up: just fill in the process fields
        logger.info('port %d opened' % self._port)
        self._process['name'] = self._process_name
        self._process['id'] = self._device.adb.get_pid(self._process_name)
        if self.hello() is not None:
            return
    timeout = 20
    time0 = time.time()
    proc_exist = False
    while time.time() - time0 < timeout:
        pid = self._device.adb.get_pid(self._process_name)
        if pid > 0:
            proc_exist = True
            self._process['name'] = self._process_name
            self._process['id'] = pid
            break
        time.sleep(1)
    if not proc_exist:
        raise RuntimeError('process %s did not appear within %d seconds'
                           % (self._process_name, timeout))
    timeout = 30
    try:
        if self._device.is_art():
            # on Android 5.0, injection tends to kill the process; wait for low cpu first
            self._wait_for_cpu_low(20, 10)
        time0 = time.time()
        while time.time() - time0 < timeout:
            ret = self._device.adb.run_shell_cmd(
                '%s/inject %s' % (AndroidDriver.qt4a_path, self._process_name),
                True, timeout=120, retry_count=1)
            logger.debug('inject result: %s' % ret)
            if 'Inject Success' in ret:
                break
            elif 'Operation not permitted' in ret:
                # the process may be in trace state
                pid = self._device.adb.get_pid(self._process_name)
                status = self._device.adb.get_process_status(pid)
                tracer_pid = int(status['TracerPid'])
                if tracer_pid > 0:
                    if int(status['PPid']) == tracer_pid:
                        # TRACEME-style anti-injection guard
                        raise Exception('the app uses anti-injection logic, injection failed')
                    logger.warning('TracerPid is %d' % tracer_pid)
                    self._device.adb.kill_process(tracer_pid)
            time.sleep(1)
    except RuntimeError as e:
        logger.error('%s\n%s' % (e, self._device.adb.run_shell_cmd('ps')))
        logger.info(self._device.adb.dump_stack(self._process_name))
        raise
def send_color(path, **kwargs):
    try:
        task = celery_app.send_task('cv_task.cv_color', args=[path], kwargs=kwargs)
        return task.id
    except Exception:
        logger.error(traceback.format_exc())
        raise
def ERROR(code, appcode, s):
    l = "%s %s %s" % (code, appcode, s)
    for ke in known_errors.iter('Error'):
        if ke.attrib['ErrorCode'] == code and ke.attrib['AppCode'] == appcode:
            logger.warning(l)
            logger.warning(" >> %s", ke.attrib['Description'])
            return
    logger.error(l)
def init(self, port, baud):
    self.COM_PORT = port
    self.chn = Channel()
    self.chn.open(port, int(baud))
    logger.info("Opened %s, baudrate is %s" % (port, baud))
    if not self.chn.isOpen():
        logger.error("Serial port %s open fail!" % port)
        return False
    return True
def calc_nodata_9999_lineage(stacking, band_name, clip_extents, tile_id,
                             rename, workdir):
    """Clip scenes which have data outside the lineage, apply -9999 fill."""
    logger.info('    Start processing for band: %s', band_name)
    mosaic_filename = os.path.join(workdir, tile_id,
                                   tile_id + '_' + rename + '.tif')
    if os.path.exists(mosaic_filename):
        logger.warning("Skip previously generated result %s", mosaic_filename)
        return mosaic_filename
    temp_clipped_names = list()
    temp_masked_names = list()
    for level, stack in reversed(list(enumerate(stacking, start=1))):
        scene_name = util.ffind(workdir, stack['LANDSAT_PRODUCT_ID'],
                                '*' + band_name + '.tif')
        temp_name1 = mosaic_filename.replace('.tif', '_temp%d' % level + '.tif')
        temp_warp_cmd = ('gdalwarp -te {extents}'
                         ' -dstnodata "-9999" -srcnodata "-9999" {0} {1}')
        util.execute_cmd(temp_warp_cmd.format(scene_name, temp_name1,
                                              extents=clip_extents))
        temp_clipped_names.append(temp_name1)
        lineg_name = util.ffind(workdir, tile_id, '*LINEAGEQA.tif')
        temp_name2 = mosaic_filename.replace('.tif', '_temp%dM' % level + '.tif')
        temp_calc_cmd = ('gdal_calc.py -A {0} -B {lineage} --outfile {1}'
                         ' --calc="(A*(B=={level}) + (-9999*(B!={level})))"'
                         ' --NoDataValue=-9999')
        util.execute_cmd(temp_calc_cmd.format(temp_name1, temp_name2,
                                              lineage=lineg_name, level=level))
        temp_masked_names.append(temp_name2)
    temp_name = mosaic_filename.replace('.tif', '_temp.tif')
    temp_warp_cmd = 'gdalwarp {} {}'.format(' '.join(temp_masked_names), temp_name)
    util.execute_cmd(temp_warp_cmd)
    util.remove(*temp_masked_names + temp_clipped_names)
    warp_cmd = ('gdalwarp -dstnodata "-9999" -srcnodata "-9999"'
                ' -co "compress=deflate" -co "zlevel=9" -co "tiled=yes"'
                ' -co "predictor=2" {} {}')
    util.execute_cmd(warp_cmd.format(temp_name, mosaic_filename))
    util.remove(temp_name)
    logger.info('    End processing for %s as %s ', band_name, mosaic_filename)
    if not os.path.exists(mosaic_filename):
        logger.error('Processing failed to generate desired output: %s',
                     mosaic_filename)
    return mosaic_filename
def ini_data(self, label, key, configFile=None):
    try:
        # assumes `from configparser import ConfigParser`: the bare
        # configparser module object itself is not callable
        config = ConfigParser()
        if configFile is None:
            configFile = self.__ini_config_file
        config.read(configFile)
        return config.get(label, key)
    except Exception as e:
        logger.error(e)
        logger.exception("Caught the following error:")
def run(self):
    client = ConnectFactory().getConnect("redis", self.config)
    for msg in self.consumer:
        kafkamsg = self._decodemsg(msg)
        try:
            logger.info("message handling(%s)" % kafkamsg)
            jsondata = json.loads(kafkamsg['rawdata'])
            ObjectFactory.fromjson(jsondata["message"]).execute(client)
        except Exception:
            # log the raw message: jsondata is unbound when json.loads itself fails
            logger.error("message execute error(%s)" % kafkamsg)
def md5(cls, strVar):
    try:
        # md5-hash the string and lowercase the hex digest
        md5_var = hashlib.md5()
        md5_var.update(strVar.encode("utf-8"))
        sign = md5_var.hexdigest().lower()
        return sign
    except Exception as e:
        logger.error(e)
        logger.exception("Caught the following error:")
def getLocationId(location_name, redis_server):
    keys = redis_server.hkeys('locations')
    location_id = None
    for key in keys:
        value = redis_server.hget('locations', key)
        if value == location_name:
            location_id = key
    if not location_id:
        print('can not find corresponding location id')
        logger.error("cannot find the location id")
        sys.exit(-1)
    else:
        print(location_id)
def repost_replies(account_name):
    bf = open('.blacklist_%s' % account_name, 'a+')
    bf.seek(0)  # 'a+' starts positioned at end-of-file; rewind before reading
    blacklist = bf.read().splitlines()
    bf.close()
    rp = open('.reposted_%s' % account_name, 'a+')
    rp.seek(0)
    reposted = rp.read().splitlines()
    account = settings.ACCOUNTS.get(account_name)
    try:
        logging.info('[%s] Getting last mentions offset' % account_name)
        bot = TwitterBot(settings.CONSUMER_KEY, settings.CONSUMER_SECRET,
                         account['key'], account['secret'])
        mentions = []
        try:
            mentions = bot.api.mentions()
            logging.info('[%s] Got %d mentions' % (account_name, len(mentions)))
        except Exception as e:
            logging.error('[%s] Failed to get mentions. %s' % (account_name, e))
        for mess in reversed(mentions):
            try:
                author = mess.author.screen_name
                if str(author) in blacklist:
                    logging.debug('[%s] Author %s blacklisted. Skipping.'
                                  % (account_name, str(author)))
                    continue
                if str(mess.id) in reposted:
                    logging.debug('[%s] Message #%s already reposted. Skipping.'
                                  % (account_name, str(mess.id)))
                    continue
                message = mess.text.split(' ')
                if message[0] != '@%s' % account_name:
                    continue  # not a "@reply"
                trigger = message[1]
                triggers = dict(account['triggers'])
                if trigger not in triggers:
                    logging.warning('[%s] Bad message format, sending DM to author'
                                    % account_name)
                    bot.dm(author, account['not_triggered'])
                else:
                    len_params = {'message': '', 'user': author}
                    mess_len = len(triggers[trigger] % len_params)
                    params = {'message': bot.trim_message(' '.join(message[2:]), mess_len),
                              'user': author}
                    message = triggers[trigger] % params
                    logging.info('[%s] Tweeting message %s' % (account_name, message))
                    bot.tweet(message)
                    rp.write('%s\n' % mess.id)
            except Exception as e:
                logging.error('%s' % e)
                continue
    finally:
        rp.close()  # make sure the reposted-ids file is flushed
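# The seek(0) calls added in repost_replies() in miniature: 'a+' opens
# positioned at end-of-file, so an immediate read() returns ''.
# (Uses a throwaway temp file for illustration.)
import os
import tempfile

path = os.path.join(tempfile.gettempdir(), 'aplus_demo.txt')
with open(path, 'w') as f:
    f.write('one\ntwo\n')
with open(path, 'a+') as f:
    print(repr(f.read()))          # '' -- already at EOF
    f.seek(0)
    print(f.read().splitlines())   # ['one', 'two']
os.remove(path)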
def request_with_cookies(self):
    try:
        # assumes `import http.cookiejar`; the bare http package has no CookieJar
        cookiejar = http.cookiejar.CookieJar()
        cookiejar.set_cookie(self.__cookie)
        opener = urllib.request.build_opener(
            urllib.request.HTTPCookieProcessor(cookiejar))
        if self.__data is None:
            request = urllib.request.Request(self.__url)
        else:
            request = urllib.request.Request(self.__url, self.__data)
        html = opener.open(request).read()
        return html
    except Exception as e:
        logger.error(e)
        logger.exception("Caught the following error:")
def oyjtDict(amount, orderDate):
    try:
        oyjt_data = dict()
        oyjt_data["orderID"] = "307900"
        oyjt_data["orderPoint"] = "3.104"
        oyjt_data["mallName"] = "欧亚卖场"
        oyjt_data["storeName"] = "欧亚卖场"
        oyjt_data["productName"] = "辉山小枕240ml"
        oyjt_data["quantity"] = "16"
        oyjt_data["unitPice"] = "1.94"  # (sic) key name kept as downstream consumers expect it
        oyjt_data["amount"] = amount
        oyjt_data["orderDate"] = orderDate
        return oyjt_data
    except Exception as e:
        logger.error(e)
        logger.exception("Caught the following error:")
def loadPlugin(self):
    '''
    @summary: Load a plugin module.
    @todo: Decide on the design: copy the selected module into the plugin
           directory, or add its path to the module search path
           (the former is simpler).
    '''
    filename = filedialog.askopenfilename(
        filetypes=[('Python Script', ('.py', '.pyc'))],
        initialdir=dirname(__file__))
    if filename != "":
        dist_path = join(self.plugin_path, basename(filename))
        if exists(dist_path):
            # TODO: confirm overwrite here
            logger.error("Already exists %s" % dist_path)
            return False
        shutil.copyfile(filename, dist_path)
        self.refreshList()
def conf_data(self, key, configFile=None):
    try:
        if configFile is None:
            configFile = self.__conf_config_file
        # read key=value lines into a dict; `with` guarantees the file is closed
        with open(configFile, 'r') as config_file:
            result = config_file.readlines()
        key_value_list = [line.strip('\n').split('=') for line in result]
        result_dict = dict(key_value_list)
        return result_dict[key]
    except Exception as e:
        logger.error(e)
        logger.exception("Caught the following error:")
def get_district_code():
    try:
        with open(DC_PATH) as f:
            data = f.read()
        district_list = data.split('\n')
        for node in district_list:
            # the indentation in the source file encodes the admin level
            if node[10:11] != ' ':
                state = node[10:].strip()
            if node[10:11] == ' ' and node[12:13] != ' ':
                city = node[12:].strip()
            if node[10:11] == ' ' and node[12:13] == ' ':
                district = node[14:].strip()
                code = node[0:6]
                code_list.append({"state": state, "city": city,
                                  "district": district, "code": code})
    except Exception as e:
        logger.error(e)
        logger.exception("Caught the following error:")
def ytdycDict(cardno, datetime_str_register, salesamt, datetime_str_buy):
    try:
        ytdyc_data = dict()
        ytdyc_data["card_no"] = cardno
        ytdyc_data["vipcardno"] = ""
        ytdyc_data["jointdate"] = datetime_str_register
        ytdyc_data["openid"] = ""
        ytdyc_data["points_flag"] = "0"
        ytdyc_data["score_type"] = "1"
        ytdyc_data["score"] = "13"
        ytdyc_data["time"] = datetime_str_buy
        ytdyc_data["salesamt"] = salesamt
        ytdyc_data["storecode"] = "1FD151"
        ytdyc_data["storename"] = "奢侈品眼镜"
        return ytdyc_data
    except Exception as e:
        logger.error(e)
        logger.exception("Caught the following error:")
def verify_archive_upload(youtube_id, filename):
    c_retries_allowed = 5
    c_retries = 0
    while c_retries < c_retries_allowed:
        try:
            request = urllib2.Request(
                "http://s3.us.archive.org/KA-converted-{0}/{1}".format(youtube_id,
                                                                       filename))
            request.get_method = lambda: "HEAD"
            response = urllib2.urlopen(request)
            return response.code == 200
        except urllib2.HTTPError as e:
            c_retries += 1
            if c_retries < c_retries_allowed:
                logger.error("Error during archive upload verification attempt %s,"
                             " trying again" % c_retries)
            else:
                logger.error("Error during archive upload verification final"
                             " attempt: %s" % e)
            time.sleep(10)
    return False  # all retries exhausted
def request(self):
    try:
        conn = client.HTTPConnection(self.__host, self.__port)
        if self.__method == "GET":
            # request the path with the urlencoded parameters appended,
            # instead of assigning them to an unused attribute
            path = self.__path + self.__parameters_urlencode_deal
            conn.request(self.__method, path)
        if self.__method == "POST":
            if self.__headers == {"Content-type": "application/json"}:
                conn.request(self.__method, self.__path, self.__jdata, self.__headers)
            if self.__headers == {"Content-type": "application/x-www-form-urlencoded"}:
                conn.request(self.__method, self.__path, self.__data, self.__headers)
        response = conn.getresponse()
        result_origin = response.read()
        try:
            # some endpoints answer in gb2312; fall back to the raw bytes
            result = result_origin.decode("gb2312").encode("utf8")
        except Exception:
            result = result_origin
        return result
    except Exception as e:
        logger.error(e)
        logger.exception("Caught the following error:")
def _parse_stream(self, bot):
    stream = JSONStream(self.token, self.room_id)
    gen = stream.fetch()
    for data in gen:
        process_message = isinstance(data, dict) and ("text" in data)
        if process_message and ("fromUser" in data):
            from_user = data["fromUser"]["username"]
            self.spoken = False
            bot_input = BotInput()
            bot_input.message = data["text"]
            try:
                bot_input.nick = from_user
                # skip the bot's own messages
                if from_user.lower().startswith(self.nick.lower()):
                    continue
                self.user_id = data["fromUser"]["id"]
            except Exception as e:
                logger.error(e)
                self.say(bot.responses["stranger"])
            bot_input.bot = bot
            self.user_nick = bot_input.nick
            marvin.process(bot_input, self)
def gennerator():
    try:
        global code_list
        code_list = []
        if not code_list:
            get_district_code()
        # region part; randrange avoids the off-by-one of randint(0, len)
        id = code_list[random.randrange(len(code_list))]['code']
        id = id + str(random.randint(1930, 2013))  # year part
        da = date.today() + timedelta(days=random.randint(1, 366))
        id = id + da.strftime('%m%d')  # month and day part
        id = id + str(random.randint(100, 300))  # sequence number, kept simple
        count = 0
        weight = [7, 9, 10, 5, 8, 4, 2, 1, 6, 3, 7, 9, 10, 5, 8, 4, 2]  # weights
        # remainder-to-check-digit map per GB 11643 (ISO 7064 MOD 11-2);
        # note remainder 8 maps to '4', not '5'
        check_code = {'0': '1', '1': '0', '2': 'X', '3': '9', '4': '8', '5': '7',
                      '6': '6', '7': '5', '8': '4', '9': '3', '10': '2'}
        for i in range(0, len(id)):
            count = count + int(id[i]) * weight[i]
        id = id + check_code[str(count % 11)]  # append the check digit
        return id
    except Exception as e:
        logger.error(e)
        logger.exception("Caught the following error:")
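# Worked example of the check digit gennerator() appends (GB 11643 /
# ISO 7064 MOD 11-2), using a hypothetical 17-digit body:
body = '11010519491231002'
weight = [7, 9, 10, 5, 8, 4, 2, 1, 6, 3, 7, 9, 10, 5, 8, 4, 2]
check_map = '10X98765432'  # remainder 0..10 -> check character
total = sum(int(d) * w for d, w in zip(body, weight))
print(body + check_map[total % 11])  # '11010519491231002X'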
def updateswitch(switch_ip, dms_location, redis_server):
    logger.info('start to update switch(%s)' % switch_ip)
    headers = {'content-type': 'application/json'}
    switch = {}
    switch['managementIp'] = switch_ip
    keys = redis_server.hkeys('locations')
    location_id = None
    for key in keys:
        value = redis_server.hget('locations', key)
        if value == dms_location:
            location_id = key
    if not location_id:
        print('can not find corresponding location id')
        logger.error("cannot find the location id")
        sys.exit(-1)
    switch['locationId'] = location_id
    switch_json = json.dumps(switch)
    logger.info('request:%s' % switch_json)
    r = requests.post('%s/switch' % dso_url, data=switch_json, headers=headers)
    if r.status_code == 201:
        print('notify dso to bind location and switch successfully.')
    else:
        print('fail to notify dso to bind location and switch:'
              ' status_code(%s), content(%s)' % (r.status_code, r.content))
def send_command(self, command):
    if importError:
        return None
    ssh_newkey = 'Are you sure you want to continue connecting'
    # prompt alternatives; the masked list is reconstructed from the index
    # checks below: entries 1-2 are password prompts, entry 3 a timeout
    login_choices = [ssh_newkey, 'Password:', 'password:', pexpect.TIMEOUT]
    try:
        ssh_cmd = 'ssh -o ConnectTimeout=10 {user}@{host}'.format(user=self.username,
                                                                  host=self.host)
        ssh = pexpect.spawn(ssh_cmd)
        i = ssh.expect(login_choices, timeout=12)
        if i == 0:
            ssh.sendline('yes')
            # Try again :)
            i = ssh.expect(login_choices)
        if i == 1 or i == 2:
            ssh.sendline(self.password)
        elif i == 3:
            logger.error("[%s] I either got key problems or connection timeout." % self.host)
            return None
        ssh.expect('>', timeout=60)
        # Ready to send cmd
        ssh.sendline(command)
        ssh.expect('</rpc-reply>', timeout=600)  # expect end of the XML
        xml = ssh.before  # take everything printed before last expect()
        ssh.sendline('exit')
    except pexpect.ExceptionPexpect as e:
        msg = str(e).splitlines()[0]
        logger.error('[{}] unable to send command - error: {}'.format(self.host, msg))
        return None
    xml += '</rpc-reply>'  # add the end element, as pexpect steals it
    # strip everything before the command echoed back by JunOS,
    # so only the XML reply remains
    xml = self._strip_before(xml, command)
    try:
        xmldoc = minidom.parseString(xml)
    except ExpatError:
        logger.error('Malformed XML input from %s.' % self.host)
        print(xml)
        return None
    return xmldoc
def get_host(self):
    try:
        return self.__host
    except Exception as e:
        logger.error(e)
        logger.exception("Caught the following error:")
def get_parameters_urlencode_deal(self):
    try:
        return self.__parameters_urlencode_deal
    except Exception as e:
        logger.error(e)
        logger.exception("Caught the following error:")
def get_path(self):
    try:
        return self.__path
    except Exception as e:
        logger.error(e)
        logger.exception("Caught the following error:")
def scrapeConversation(self, merge, offset, timestampOffset, chunkSize, limit,
                       isGroupConversation):
    """Retrieves conversation messages and stores them in a JSON file.

    If merge is specified, the new messages will be merged with the previous
    version of the conversation, if present.
    """
    if merge:
        if not os.path.exists(self._directory + "conversation.json"):
            logger.error("Conversation not present. Merge operation not possible")
            return
        with open(self._directory + "conversation.json") as conv:
            convMessages = json.load(conv)
        numMergedMsgs = 0
    if not os.path.exists(self._directory):
        os.makedirs(self._directory)
    logger.info("Starting scraping of conversation {}".format(self._convID))
    messages = []
    msgsData = ""
    timestamp = "" if timestampOffset == 0 else str(timestampOffset)
    while self.CONVERSATION_ENDMARK not in msgsData:
        requestChunkSize = chunkSize if limit <= 0 else min(chunkSize,
                                                            limit - len(messages))
        reqData = self.generateRequestData(offset, timestamp, requestChunkSize,
                                           isGroupConversation)
        logger.info("Retrieving messages " + str(offset) + "-"
                    + str(requestChunkSize + offset))
        msgsData = self.executeRequest(reqData)
        jsonData = json.loads(msgsData)
        if jsonData and ('payload' in jsonData) and jsonData['payload']:
            if ('actions' in jsonData['payload']) and jsonData['payload']['actions']:
                actions = jsonData['payload']['actions']
                # case when the last message already present in the conversation
                # is newer than the first one of the current retrieved chunk
                if merge and convMessages[-1]["timestamp"] > actions[0]["timestamp"]:
                    for i, action in enumerate(actions):
                        if convMessages[-1]["timestamp"] == actions[i]["timestamp"]:
                            numMergedMsgs = len(actions[i+1:-1]) + len(messages)
                            messages = convMessages + actions[i+1:-1] + messages
                            break
                    break
                # we retrieve one message twice, as the first one of the previous
                # chunk and as the last one of the new one; remove the duplicate,
                # but only once at least one chunk has already been retrieved
                if len(messages) == 0:
                    messages = actions
                else:
                    messages = actions[:-1] + messages
                # update timestamp
                timestamp = str(actions[0]["timestamp"])
            else:
                if 'errorSummary' in jsonData:
                    logger.error("Response error: " + jsonData['errorSummary'])
                else:
                    logger.error("Response error. No messages found")
                logger.error(msgsData)
                return
        else:
            logger.error("Response error. Empty data or payload")
            logger.error(msgsData)
            logger.info("Retrying in " + str(self.ERROR_WAIT) + " seconds")
            time.sleep(self.ERROR_WAIT)
            continue
        offset += chunkSize
        if limit != 0 and len(messages) >= limit:
            break
        time.sleep(self.REQUEST_WAIT)
    if merge:
        logger.info("Successfully merged {} new messages".format(numMergedMsgs))
        logger.info("Conversation total message count = {}".format(len(messages)))
    else:
        logger.info("Conversation scraped successfully. {} messages "
                    "retrieved".format(len(messages)))
    self.writeMessages(messages)
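# The chunk-overlap dedup in scrapeConversation() in miniature: each older
# chunk's newest entry duplicates the oldest entry already fetched, so it is
# dropped before prepending (values are hypothetical message timestamps).
newest_chunk = [4, 5, 6]   # first fetch (most recent messages)
older_chunk = [2, 3, 4]    # next fetch; 4 overlaps with the previous chunk
messages = newest_chunk
messages = older_chunk[:-1] + messages
print(messages)            # [2, 3, 4, 5, 6]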