def sync_dirty_attributes(queue, loop=True):
    """Drain up to MAX_SYNC_CNT_PER_LOOP dirty attributes from *queue* and
    persist each via its ``syncdb()`` method.

    When ``loop`` is True, reschedules itself on the Twisted reactor after
    SYNC_DB_INTERVAL seconds so syncing continues in the background.
    """
    _l = queue.qsize()
    if _l > 0:
        if loop:
            # Cap per-pass work so the reactor loop is not starved.
            _times = min(_l, MAX_SYNC_CNT_PER_LOOP)
        else:
            _times = _l
        i = 0
        while i < _times:
            i += 1
            try:
                attr = queue.get_nowait()
                attr.syncdb()
            except Queue.Empty:
                # Queue drained early (e.g. by a concurrent consumer).
                break
            except:
                # Best-effort: one failing attribute must not stop the sweep.
                pass
        log.info('End sync character to db, total: {0}, dirty attributes length: {1}'.format(
            _times, queue.qsize()
        ))
    if loop:
        # Schedule the next sweep even when the queue was empty this pass.
        reactor.callLater(SYNC_DB_INTERVAL, sync_dirty_attributes, queue)
    else:
        log.debug('End sync db, dirty attributes length {0}, loop:{1}'.format(
            queue.qsize(), loop))
def set_value(self, section, option, value):
    """Set one configuration option and persist it immediately.

    Thin wrapper around ``set`` + ``flush`` used only by the web UI.
    """
    self.set(section, option, value)
    self.flush()
    log.info('Configuration set request, [{}], {} : {}'.format(section, option, value))
def __broadcast(user_remain, func, args):
    """Send (func, args) to every user in *user_remain*, in batches.

    Pops at most MAX_BROADCAST_PER_LOOP users per pass; if users remain
    afterwards, the while/else clause reschedules the next batch one
    second later.  Users whose protocol/transport is gone are handed to
    g_UserMgr.del_zombie_user for cleanup.
    """
    # NOTE(review): log.error for routine entry tracing looks like debug
    # leftovers — consider demoting the level.
    log.error('================__broadcast user_remain:{0}, func:{1}, args: {2}.'.format(len(user_remain), func, args))
    if user_remain:
        i = 0
        while i < MAX_BROADCAST_PER_LOOP:
            i += 1
            _user = user_remain.pop( 0 )
            if _user:
                if hasattr(_user, 'p'):
                    if hasattr(_user.p, 'transport'):
                        if _user.p.transport:
                            _user.p.send(func, args)
                            log.warn('================uid:{0}, func:{1}, args:{2}'.format(_user.uid, func, args))
                        else:
                            # Transport already torn down: zombie.
                            log.warn('__broadcast. uid:{0}, unknown t:{1}.'.format(_user.uid, _user.p.transport))
                            g_UserMgr.del_zombie_user( _user.uid )
                    else:
                        log.warn('__broadcast. uid:{0}, the p has no transport attribute..'.format(_user.uid))
                        g_UserMgr.del_zombie_user( _user.uid )
                else:
                    log.warn('__broadcast. uid:{0}, the user has no p attribute..'.format(_user.uid))
                    g_UserMgr.del_zombie_user( _user.uid )
            else:
                log.info('__broadcast. Unknown user.')
            if not user_remain:
                # All users handled: exit (skipping the while/else reschedule).
                break
        else:
            # Batch limit hit with users still pending: continue next second.
            reactor.callLater(1, __broadcast, user_remain, func, args)
def syncdb(self):
    """Persist this attribute's dirty fields (or delete its row) to the DB.

    inlineCallbacks-style generator: ``yield`` drives the async DB calls.
    On success, ``clean()`` resets the dirty state; SQL errors are logged
    and the dirty state is left intact for a later retry.
    """
    if self.__dirty:
        # Snapshot the dirty field list so concurrent mutation can't skew us.
        _dirty_fields = self.__dirty_fields[:]
        if len(_dirty_fields) == 0 and False == self.__del:
            log.info('no dirty_fields! table name:{0}, attrib_id:{1}.'.format( self.table, self.__attrib_id ))
            # defer.returnValue raises internally; this exits the generator.
            raise defer.returnValue(None)
        _sql = ''
        try:
            if self.__del:
                # Row marked for deletion: remove it entirely.
                yield db.execute('DELETE FROM {0} WHERE id={1};'.format(self.table, self.__attrib_id))
            else:
                _sql, _v = self.__gen_update_value(_dirty_fields)
                if _v:
                    yield POOL.execute(_sql, _v)
                else:
                    log.warn('Update error. table: {0}, cid: {1}, sql: {2}, dirty: {3}.'.format(\
                        self.table, self.__attrib_id, _sql, self.__dirty_fields))
        except:
            log.exception('[ SQLERROR ]table:{0}, id:{1}, dirty:{2}, new:{3}, dirty_fields:{4}, sql:{5}'.format(
                self.table, self.__attrib_id, self.__dirty, self.__new, self.__dirty_fields, _sql))
        else:
            # Only clear the dirty flags after a successful write.
            self.clean()
def send(self, msg):
    """Multicast *msg* over the UDP feed socket with a sequence prefix.

    The message is framed as "%08d <msg>" using the running sequence
    number.  UDP is fire-and-forget (not receiver paced like TCP), so no
    pacing thread is needed.
    """
    framed = "%08d %s" % (self.sendSeq, msg)
    # NOTE: assert is stripped under -O; size check is best-effort.
    assert len(framed) < Feed.MAX_SIZE
    log.info(framed)
    self.sendSeq += 1
    self.sendSocket.sendto(framed, (Feed.MCAST_GRP, Feed.MCAST_PORT))
def __load_data__(self, infile):
    """Load spot coordinates via the Imaris XML importer (overrides the
    default loader)."""
    importer = ImarisXML(infile)
    self.data = importer.coordinates('Position')
    del importer
    log.info('Created %i spots from XML export.\n%s' % (len(self.data), str(self.data)))
def __init__(self, *args):
    """Create the SOM node grid; a trailing axis of length 2 holds each
    node's coordinate pair."""
    log.info("SOM()")
    grid_shape = list(args) + [2]
    self.nodes = np.zeros(grid_shape)
    self.reset()
def main():
    """Fetch a list of URLs concurrently and log each completion with its
    elapsed time, simulated sleep, and position in the original list."""
    urls = [
        'http://www.python.org',
        'https://stackoverflow.com/',
        'https://css-tricks.com/',
        'https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference',
        'https://dev.twitter.com/',
        'https://d3js.org/',
        'https://www.heroku.com/',
        'https://docs.pytest.org/en/latest/',
        'https://www.djangoproject.com/',
        'https://pudding.cool/',
        'https://caniuse.com/',
        'http://svgpocketguide.com/book/',
        'https://www.w3.org/TR/SVG/intro.html',
    ]
    pool = Pool()
    start = time.time()
    # imap_unordered yields (sleep, url) pairs as they finish.
    for delay, url in pool.imap_unordered(url_name, urls):
        position = urls.index(url)
        log.info("{}s (sleep: {}) (#{} in array) for {})"
                 .format(int(time.time() - start), delay, position, url))
    pool.close()
    pool.join()
def reconnectUser(self, p, uid):
    """Re-attach protocol *p* to the cached user *uid* after a temporary
    connection loss.

    Tears down the user's previous protocol (if any) before installing
    the new one.

    Returns:
        NO_ERROR on success, CONNECTION_LOSE when the uid is unknown.
    """
    user = self.all_users.get(uid, None)
    if user:
        # Detach and close the stale protocol before binding the new one.
        old_p = user.p
        user.p = None
        if old_p:
            old_p.lose_connect = True
            if hasattr(old_p, 'uid') and old_p.uid:
                old_p.uid = 0
            if old_p.transport:
                old_p.transport.loseConnection()
        user.p = p
        # BUG FIX: removed the original's no-op self-assignments
        # (user.uid = user.uid, machine_code, nickname).
        user.temp_lost = False
        user.logout_timestamp = 0
        log.info('Reconnect ok. uid: {0}, lose_connect: {1}.'.format( p.uid, p.lose_connect ))
        return NO_ERROR
    else:
        log.error('Can not find uid: {0}.'.format( uid ))
        return CONNECTION_LOSE
def _setConsoleGateway(self, newGateway, vswifName='vswif0', setConfFileOnly=True): '''Set the default network gateway for the Console OS. Arguments: vswifName: name of the vswif. ie, 'vswif0'. If the named vswif hasn't actually been created, ie, there's no vNic with that name, then setConfFileOnly should be set to True. Using an empty string results in no GATEWAYDEV being set in /etc/sysconfig/network setConfFileOnly: setting to False will result in vmkctl trying to bring up the new gateway, which results in a call to `/sbin/ip route replace ...` ''' # TODO: I am trusting here that it has been previously sanity-checked # perhaps I should be less trusting routeInfo = vmkctl.RoutingInfoImpl() oldGateway = self._getConsoleGateway() if oldGateway and oldGateway not in [self.DEFAULT_GATEWAY, newGateway]: log.info('Changing gateway from %s to %s' % (oldGateway, newGateway)) else: log.info('Setting gateway to %s' % newGateway) self._desiredGateway = newGateway if setConfFileOnly: self._vmkctlKnowsDesiredGateway = False vmkctlGateway = vmkctl.Ipv4Address(newGateway) routeInfo.SetConsoleDefaultGateway(vmkctlGateway, vswifName, setConfFileOnly) self._verifyConsoleGatewaySaved(newGateway)
def reconnectUser(self, p, cid, session_key):
    """Re-attach protocol *p* to cached client *cid*, verifying the
    session key first.

    Returns NO_ERROR on success, RECONNECT_FAIL on session-key mismatch,
    or CONNECTION_LOSE when the cid is unknown.
    """
    user = self.user_dic.get(cid, None)
    if user:
        if user.session_key != session_key:
            log.error('Session not match. old sk: {0}, new sk:{1}.'.format( user.session_key, session_key ))
            return RECONNECT_FAIL
        #if False == user.temp_lost:
        #    log.error('It is not temp lost client. cid:', user.cid)
        #    return CONNECTION_LOSE
        # check old protocol is valid or not.
        old_p = user.p
        user.p = None
        if old_p:
            # Close down the stale protocol so it cannot act for this cid.
            old_p.lose_connect = True
            if hasattr(old_p, 'cid') and old_p.cid:
                old_p.cid = 0
            if old_p.transport:
                old_p.transport.loseConnection()
        user.p = p
        user.temp_lost = False
        user.logout_timestamp = 0
        # Mirror the user's identity onto the new protocol object.
        p.cid = user.cid
        p.account = user.account
        p.session_key = user.session_key
        log.info('Reconnect ok. cid: {0}, lose_connect: {1}.'.format( p.cid, p.lose_connect ))
        return NO_ERROR
    else:
        log.error('Can not find cid: {0}.'.format( cid ))
        return CONNECTION_LOSE
def raid_boss(bot, update):
    """Conversation step: record the chosen raid boss, then ask for the
    gym location and advance to the LOCATION state."""
    sender = update.message.from_user
    log.info('Рейд босс: {} ({})'.format(update.message.text, sender.username))
    update.message.reply_text(
        'Босс: {}\nОтправьте местоположение стадиона с рейдом.'.format(update.message.text),
        reply_markup=ReplyKeyboardRemove())
    return LOCATION
def load_all_routes_dev(root, module):
    """Import every handler module found under ``root + module``.

    Skips hidden/underscore-prefixed entries and anything that is not a
    .py/.pyc/.pyo file.  Logs and re-raises the first import error.
    """
    _imported = []
    for f in listdir(root + module):
        if f.startswith('.') or f.startswith('_'):
            continue
        # BUG FIX: the original used f.find('.pyX') > 0, which matches the
        # substring anywhere in the name; use suffix checks instead.
        # Check .pyc/.pyo before .py since '.py' is their prefix.
        if f.endswith('.pyc'):
            _subfix = '.pyc'
        elif f.endswith('.pyo'):
            _subfix = '.pyo'
        elif f.endswith('.py'):
            _subfix = '.py'
        else:
            continue
        fname = f[:-len(_subfix)]
        if fname and fname not in _imported:
            _handlers_name = '%s.%s' % (module, fname)
            try:
                __import__(_handlers_name)
            except Exception as e:
                log.error('[ load_all_handlers ]Error when load handler: root:{0}, f:{1}, handler:{2}, Detail:{3}'.format(
                    root, f, _handlers_name, e))
                traceback.print_exc()
                # BUG FIX: the original had an unreachable ``break`` after
                # this raise; removed.
                raise e
            _imported.append(fname)
    log.info('All handler loaded: {0}'.format(_imported))
def gen_pshelper(info, tgt):
    """Write host-list text files for the PowerShell helper scripts.

    Creates (or reuses) directory *tgt*.  Emits one file per host, named
    after the host's *first* alias and containing its DNS hostname, plus
    one file per host group listing every member hostname.
    """
    if tgt is None:
        tgt = '/tmp/pshelper'
        log.warn("No output directory specified, trying fallback '%s'.", tgt)
    if not os.path.exists(tgt):
        try:
            os.makedirs(tgt)
        except Exception as err:
            sys.exit("ERROR creating output directory: %s" % err)
    # Per-host files keyed by primary alias.
    for dns_name, host_details in info.hosts.iteritems():
        primary_alias = host_details['aliases'][0]
        log.info("Host: %s (%s)", primary_alias, dns_name)
        list_to_file(tgt, primary_alias, [dns_name])
    # Per-group files listing all member hostnames.
    for group in info.groups.keys():
        log.info("Group: %s", group)
        list_to_file(tgt, group, info.groups[group])
def not_info(bot, update):
    """Conversation step: unexpected input in the raid-info state; echo it
    back and remain in INFO."""
    sender = update.message.from_user
    log.info('Затупил с описанием рейда: {} ({})'.format(update.message.text, sender.username))
    update.message.reply_text("Инфа о рейде: {}".format(update.message.text))
    return INFO
def remove_target(self, *location):
    """Remove a registered OSC target, silently ignoring unknown ones.

    *location* is resolved through _get_host_port, so it accepts the same
    argument forms as the other target methods.
    """
    location = self._get_host_port(*location)
    # Idiom fix: ``location not in`` rather than ``not location in``.
    if not location or location not in self.targets:
        return
    if verbose:
        log.info("OSC removing target %s:%s" % location)
    del self.targets[location]
def _computeEsxConfChecksum():
    """Return the hex MD5 digest of ESXCONF_FILE's contents."""
    m = md5.new()
    # BUG FIX: use a context manager so the file handle is closed promptly
    # (the original left the open() result unclosed).
    with open(ESXCONF_FILE) as f:
        m.update(f.read())
    retval = m.hexdigest()
    log.info("digest of initrd esx.conf -- %s" % retval)
    return retval
def reload_plugins(init=False):
    """(Re)load every plugin in ./plugins whose file changed since the
    last call (tracked via the module-level ``mtimes`` map).

    On a missing plugin during init, the process exits.
    """
    search_path = [os.path.join(os.getcwd(), 'plugins')]
    for rel_path in set(glob.glob(os.path.join("plugins", "*.py"))):
        abs_path = os.path.join(os.getcwd(), rel_path)
        mtime = os.stat(abs_path).st_mtime
        if mtime == mtimes.get(abs_path):
            continue  # unchanged since last load
        mtimes[abs_path] = mtime
        try:
            modinfo = imp.find_module(rel_path.split("/")[1].split(".")[0], search_path)
            module = imp.load_source(rel_path, modinfo[1])
        except ImportError as e:
            if str(e).startswith('No module named'):
                log.error('Failed to load plugin %r: the plugin could not be found.', rel_path)
            else:
                log.error('Failed to load plugin %r: import error %s', rel_path, str(e))
            if init:
                sys.exit(1)
        except BaseException as e:
            log.error(e)
        else:
            if hasattr(module, 'main'):
                # Run the plugin entry point against every connection.
                for server in utils.connections.values():
                    module.main(server)
                    log.debug('%r Calling main() function of plugin %r', server.netname, module)
            log.info("(Re)Loaded %s", abs_path)
def _parse_cells(self, ws_name):
    """Parse the cell-contents of a worksheet into a 2D array.

    After parsing the contents, they are added to the global map 'cells'
    using the worksheet name as the key.

    Parameters
    ----------
    ws_name : string
        The name of the worksheet to process.
    """
    rows = self._worksheet(ws_name).findall('.//{%s}Row' % self.namespace)
    cells = []
    for row in rows:
        content = []
        # check if this is a header row:
        style_att = '{%s}StyleID' % self.namespace
        if style_att in row.attrib:
            # we don't process the header row, so skip it
            continue
        for cell in row:
            # First child of a Cell element carries the value text.
            content.append(cell[0].text)
        log.debug('length of row: %i' % len(row))
        log.debug(content)
        cells.append(content)
    self.cells[ws_name] = cells
    log.debug("--- cells ---\n%s\n--- cells ---" % self.cells)
    # NOTE(review): len(self.cells) counts worksheets parsed so far, not
    # rows of this worksheet — confirm the intended meaning of this log.
    log.info("Parsed rows: %i" % len(self.cells))
def __init__(self, parent, id = wx.ID_ANY, size = wx.DefaultSize, style=wx.SUNKEN_BORDER|wx.NO_FULL_REPAINT_ON_RESIZE): NC.NavCanvas.__init__(self, parent, id=id, size=size, ProjectionFun=None,Debug=0,BackgroundColor = "WHITE") #self.Canvas = NC.NavCanvas self.RadioObjectArray = [] self.EdgeObjectArray = [] self.PixelPerMeter = 100 self.buffer = None self.selection = [] # an array to hold all the objects currently selected by the user using a rubber band, and/or shift-clicking, and/or ctrl-clicking log.info("Init RadioPanel") # Add the Canvas #self.Canvas = NC.NavCanvas(self, parent=parent, id=id, size=size,ProjectionFun=None,Debug=0,BackgroundColor = "WHITE",**kwargs).Canvas # initialize a double-buffer memory block #self._initBuffer() self.SetThemeEnabled(True) tb = self.ToolBar OptimizeButton = wx.Button(tb, label="Optimize Positions") tb.AddSeparator() tb.AddControl(OptimizeButton) OptimizeButton.Bind(wx.EVT_BUTTON, self.OnOptimizePositions) PauseButton = wx.Button(tb, label="Pause Packets") tb.AddSeparator() tb.AddControl(PauseButton) PauseButton.Bind(wx.EVT_BUTTON, self.OnPausePackets) tb.Realize() #self.Bind(wx.EVT_PAINT, self.OnPaint) #self.Bind(FC.EVT_MOTION, self.OnMove ) # shouldn't spawn double OnMove events for panel adn frame self.Bind(FC.EVT_LEFT_UP, self.OnLeftUp ) self.N = 0 #self.InitializeRadios() #self.NewEdges() self.GhostPoints = None self.MovingGhostPoints = None self.Moving = False self.AutoMoving = False
def closeConnections(conn):
    """Best-effort close of *conn*; the outcome is logged, never raised."""
    try:
        conn.close()
        log.info('Connection closed')
    except:
        log.info('Connection close failed')
def outPdHandler(meta, config):
    """Trigger a PagerDuty event for a matched message.

    *meta* is (_, message, service_alias, incident_key); the service key
    is looked up from config['pagerduty'][service_alias].  Delivery
    success/failure is logged with PagerDuty's response body.
    """
    message, service_alias, incident_key = meta[1:]
    log.info("Event Match: %s" % message)
    url = "https://events.pagerduty.com/generic/2010-04-15/create_event.json"
    payload = {
        "event_type": "trigger",
        "service_key": config['pagerduty'][service_alias],
        "description": "occam_alert",
        "incident_key": "",
        # The whole message travels as the alert details.
        "details": json.dumps(message),
    }
    if incident_key:
        # Use the provided key for both dedup key and description.
        payload['incident_key'] = payload['description'] = incident_key
    resp = requests.post(url, data=json.dumps(payload))
    if resp.status_code != 200:
        log.warn("Error sending to PagerDuty: %s" % resp.content.decode('utf-8'))
    else:
        log.info("Message sent to PagerDuty: %s" % resp.content.decode('utf-8'))
def reload_plugins(irc, init=False):
    """(Re)load every plugin in ./plugins whose file changed since the
    last call (tracked via the module-level ``mtimes`` map), wiring each
    into *irc* and the utils.plugins registry.

    When *init* is True, a missing plugin aborts the process.
    """
    plugins_folder = [os.path.join(os.getcwd(), 'plugins')]
    plugins = set(glob.glob(os.path.join("plugins", "*.py")))
    for plugin in plugins:
        _plugin = os.path.join(os.getcwd(), plugin)
        mtime = os.stat(_plugin).st_mtime
        if mtime != mtimes.get(_plugin):
            # File changed (or first sighting): remember and reload.
            mtimes[_plugin] = mtime
            try:
                moduleinfo = imp.find_module(plugin.split("/")[1].split(".")[0], plugins_folder)
                pl = imp.load_source(plugin, moduleinfo[1])
            except ImportError as e:
                if str(e).startswith('No module named'):
                    log.error("Failed to load plugin {}: the plugin could not be found.".format(plugin))
                else:
                    log.error("Failed to load plugin {}: import error {}".format(plugin, str(e)))
                if init:
                    sys.exit(1)
            except BaseException as e:
                log.error(e)
            else:
                if hasattr(pl, 'main'):
                    pl.main(irc)
                    log.debug("Calling main() function of plugin {}".format(pl))
                try:
                    # Re-register the plugin's commands and per-network state.
                    if pl.name in utils.plugins.keys():
                        del(utils.plugins[pl.name])
                    utils.plugins[pl.name] = pl.cmds
                    irc.state["plugins"][pl.name] = {}
                except AttributeError:
                    # Plugin defines no name/cmds; nothing to register.
                    pass
                log.info("(Re)Loaded {}".format(_plugin))
def hostActionRemoveVmdk(_context):
    """Delete the pre-existing console VMDK, if the user selected one."""
    existing = userchoices.getExistingVmdkLocation().get('vmdkLocation')
    if not existing:
        return
    log.info("Removing existing vmdk")
    devices.removeVmdkFile(existing)
def sync_dirty_attributes(queue, loop=True):
    """Generator (inlineCallbacks-style) that drains up to
    MAX_SYNC_CNT_PER_LOOP dirty attributes from *queue*, yielding each
    ``syncdb()`` deferred.

    When ``loop`` is True, reschedules itself on the Twisted reactor
    after SYNC_DB_INTERVAL seconds.
    """
    qsize = queue.qsize()
    if qsize > 0:
        log.info("sync data to db. dirty attrib length:%s." % qsize)
        if loop:
            # Cap per-pass work so the reactor loop is not starved.
            sync_cnt = min(qsize, MAX_SYNC_CNT_PER_LOOP)
        else:
            sync_cnt = qsize
        i = 0
        while i < sync_cnt:
            i += 1
            try:
                attrib = queue.get_nowait()
                yield attrib.syncdb()
            except Queue.Empty:
                # Queue drained early (e.g. by a concurrent consumer).
                break
            except:
                # Best-effort: one failing attribute must not stop the sweep.
                pass
    if loop:
        reactor.callLater(SYNC_DB_INTERVAL, sync_dirty_attributes, queue)
    else:
        log.debug('End sync db, dirty attributes length {0}, loop:{1}'.format(
            queue.qsize(), loop))
def _deploy(squadron_dir, new_dir, last_dir, commit_info, this_run_sum, last_run_sum, last_commit, dont_rollback, resources, force):
    """Apply *commit_info*, run its actions, then run tests.

    On test failure, rolls back by recursively deploying *last_commit*
    (with rollback disabled for that pass) unless *dont_rollback* is set,
    then re-raises the TestException either way.
    """
    log.info("Applying changes")
    log.debug("Changes: %s", commit_info)
    commit.commit(commit_info)
    paths_changed, new_paths = _get_hash_diff(last_run_sum, this_run_sum, force)
    log.debug("Paths changed: %s", paths_changed)
    log.debug("New paths: %s", new_paths)
    _run_actions(squadron_dir, new_dir, commit_info, resources, paths_changed, new_paths)
    # Now test
    try:
        _run_tests(squadron_dir, commit_info)
    except TestException:
        # Roll back if tests failed and we have somewhere to roll back to.
        if last_commit and not dont_rollback:
            log.error("Rolling back to %s because tests failed", last_commit)
            # Flip around the paths changed and new_paths
            _deploy(squadron_dir, last_dir, None, last_commit, last_run_sum, {}, None, dont_rollback, resources, False)
        raise
def cancel_raid(bot, update):
    """Abort the raid-creation conversation and clear the custom keyboard."""
    author = update.message.from_user
    log.info("Создание рейда отменено ({})".format(author.first_name))
    update.message.reply_text('Создание рейда отменено.',
                              reply_markup=ReplyKeyboardRemove())
    return ConversationHandler.END
def remove(self, ipAddress):
    """Remove *ipAddress* from the nameserver list and persist the change.

    Raises:
        ValueError: if the address is not currently configured (mirrors
        list.remove semantics).
    """
    dnsConfig = _getDnsConfigImpl()
    if ipAddress not in self:
        # BUG FIX: the original did ``return ValueError(...)``, which never
        # raised and silently did nothing.
        raise ValueError('NameServerCollection.remove(x): x not present')
    log.info('Removing nameserver %s' % ipAddress)
    dnsConfig.RemoveNameServer(vmkctl.Ipv4Address(ipAddress))
    self._save()
def learn(self, samples, epochs=10000, sigma=(10, 0.001), lrate=(0.5, 0.005)):
    """Train the SOM on *samples*.

    Args:
        samples: 2D array of training vectors (rows are samples).
        epochs: number of single-sample training iterations.
        sigma: (initial, final) neighborhood width, decayed geometrically.
        lrate: (initial, final) learning rate, decayed geometrically.
    """
    log.info("SOM.learn")
    learn_start = time.clock()
    sigma_i, sigma_f = sigma
    lrate_i, lrate_f = lrate
    for epoch in range(epochs):
        # adjust learning rate and neighborhood (geometric decay)
        t = epoch / float(epochs)
        lrate = lrate_i * (lrate_f / float(lrate_i))**t
        sigma = sigma_i * (sigma_f / float(sigma_i))**t
        # get random sample
        index = np.random.randint(0, samples.shape[0])
        data = samples[index]
        # get index of nearest node (minimum distance)
        D = ((self.nodes - data)**2).sum(axis=-1)
        winner = np.unravel_index(np.argmin(D), D.shape)
        # generate a Gaussian centered on winner
        G = self._gaussian(D.shape, winner, sigma)
        G = np.nan_to_num(G)
        # move nodes towards sample according to Gaussian
        delta = self.nodes - data
        # BUG FIX(readability): the inner loop reused ``i`` and shadowed the
        # epoch counter; renamed to ``dim``.
        for dim in range(self.nodes.shape[-1]):
            self.nodes[..., dim] -= lrate * G * delta[..., dim]
    learn_end = time.clock() - learn_start
    log.info("--> completed in %fs" % learn_end)
def __init__(self, name, size=5500, imagePath='', imageName='', physicalDeviceName=None, vmfsVolume=None):
    """Virtual-disk device: a console-OS vmdk hosted on a VMFS volume.

    size is in MB.  imageName defaults to DEFAULT_COS_IMAGE; imagePath
    defaults to a system-unique 'esxconsole' path when not given.
    """
    # XXX isinstance(str) is not py3k compliant.
    assert physicalDeviceName is None or isinstance(physicalDeviceName, str)
    DiskDev.__init__(self, name, size=size, deviceExists=False,
                     probePartitions=False, sectorSize=1, sizeUnit='MB')
    self.imagePath = imagePath
    if imageName:
        self.imageName = imageName
    else:
        self.imageName = DEFAULT_COS_IMAGE
    self.physicalDeviceName = physicalDeviceName
    self.vmfsVolume = vmfsVolume
    self.stable = True
    if not self.imagePath:
        # The default vmdk path includes the system UUID so it will be
        # unique on shared storage.
        self.imagePath = \
            fsset.vmfs3FileSystem.systemUniqueName('esxconsole')
        log.info("creating virtualdisk %s/%s" % (self.imagePath, self.imageName))
    else:
        # XXX - do we want to raise something here if ValueError is
        # raised?
        pass
def send_raw(self, raw):
    """Write *raw* bytes to the serial port, suppressing consecutive
    duplicates.

    Returns the payload written, "" for a suppressed duplicate, or None
    when the beacon is disabled.
    """
    if not self.enable:
        # log.error(f"Beacon is disabled")
        return
    is_repeat = self.last_msg_raw is not None and raw == self.last_msg_raw
    if is_repeat:
        log.info(f"Duplicated message")
        return ""
    # log.info(f"To serial: {signal.encode()}")
    self.ser.write(raw)
    self.last_msg_raw = raw
    return raw
def ntpQueryStart(server):
    """Open a UDP socket to *server* and send an NTP request.

    Stores the socket in the module-level ``_openSocket`` (closing any
    previous one first) and reports progress via task_progress.

    Raises:
        NTPError: on socket/resolution failures.
    (Python 2 ``except X, ex`` syntax.)
    """
    global _openSocket
    task_progress.taskStarted('ntp')
    carefullyCloseSocket()
    _openSocket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    task_progress.taskProgress('ntp')
    try:
        _openSocket.connect((server, NTP_PORT))
        _openSocket.send(NTP_ASK_MSG)
    except (socket.error, socket.gaierror), ex:
        log.info('Socket error sending to NTP server ' + str(ex))
        raise NTPError('Socket error ' + str(ex))
def Capture():
    """Grab the emulator client area and save it to photo/now.bmp."""
    try:
        a, x1, y1, x2, y2 = dm.GetClientRect(hwnd)
    except:
        log.info("截图时错误")
        print("请保持雷电模拟器前台")
        # BUG FIX: bail out here — on failure the coordinates below are
        # undefined, so the original continued into a NameError.
        return
    # print( x1, y1, x2, y2)
    # Laptop display scaled at 125%, so multiply coordinates by 1.25 (disabled):
    # x1, y1, x2, y2 = int(x1 * 1.25), int(y1 * 1.25), int(x2 * 1.25), int(y2 * 1.25)
    log.info("截图成功")
    dm.Capture(x1, y1, x2, y2, "photo/now.bmp")
def add_route(app, fn):
    """Register *fn* on the app's router using its @get/@post metadata.

    Plain functions are wrapped into coroutines first.

    Raises:
        ValueError: when the decorator attributes are missing.
    """
    method = getattr(fn, '__method__', None)
    path = getattr(fn, '__route__', None)
    if method is None or path is None:
        raise ValueError('@get or @post not defined in %s.' % str(fn))
    already_async = asyncio.iscoroutinefunction(fn) or inspect.isgeneratorfunction(fn)
    if not already_async:
        fn = asyncio.coroutine(fn)
    params = ', '.join(inspect.signature(fn).parameters.keys())
    log.info('add route %s %s => %s(%s)' % (method, path, fn.__name__, params))
    app.router.add_route(method, path, RequestHandler(app, fn))
def _get_script(self) -> Optional[bytes]:
    """Fetch the userscript from the first source that yields one.

    Tries the local devserver, then the local build, then upstream, and
    logs which source succeeded.

    Returns:
        The script bytes, or None if no source produced anything.
    """
    # BUG FIX: the original annotated this list with ``Function[bytes]``,
    # which is not a typing construct; use Callable (quoted so no extra
    # runtime import is required).
    methods: 'List[Tuple[str, Callable[[], Optional[bytes]]]]' = [
        ('local devserver', self._get_script_devserver),
        ('local build', self._get_script_build),
        ('upstream', self._get_script_upstream),
    ]
    for name, method in methods:
        script = method()
        if script:
            log.info(f'Got userscript from {name}.')
            return script
    return None
def get_fallow_transfer(cls, member) -> list:
    """Return [crop_name, subsidy, period] rows for the member's fallow
    transfers, resolved through the class-level lookup caches.

    NOTE(review): log.info is called with multiple positional args; this
    matches the project's custom logger (cf. the ``unpacking=`` keyword
    used elsewhere in this codebase), not stdlib logging — confirm.
    """
    fallow_trans = []
    trans_pk = cls.__MPFTPK.get(member['id'])
    if trans_pk:
        for i in trans_pk:
            trans = cls.__FALLOW_TRANSFPER.get(i)
            # Resolve crop id to its display name.
            crop = cls.__CROP[trans['crop']]['name']
            fallow_trans.append(
                [crop, str(trans['subsidy']), trans['period']])
    log.info('id=', member['id'], ', appid=', member['app_id'], ', fallow_transfer=', fallow_trans)
    return fallow_trans
def AllianceConnectionMade(self, p):
    """inlineCallbacks-style handler: register this game server ('gs')
    with the alliance server once the connection is up.

    Stops the reactor when the protocol object is missing; on a zero
    result code, records the protocol under the 'alli' server slot.
    """
    if p is None:
        reactor.stop()
    else:
        # Disable the idle timeout for this long-lived server link.
        p.setTimeout(None)
        res = yield p.call('registersrv', 'gs')
        log.info( 'registedAlliance: result: {0}, me: {1}, peer: {2}.'.format( res, p.transport.getHost(), p.transport.getPeer()))
        if res[0] == 0:
            protocol_manager.set_server('alli', p)
def __init__(self) -> None:
    """Resolve the three userscript sources (devserver, local build,
    upstream) from the webpack config and prime the upstream cache."""
    webpack_config = _load_webpack_config()
    # Derive the script filename from the upstream URL's path component.
    filename = Path(ParsedURL(*parse_url(webpack_config.upstream_url)).path.decode('utf-8')).name
    # NOTE(review): the literal '/(unknown)' path looks like a placeholder —
    # confirm the intended devserver path.
    self.dev_server_url = unparse_url('http', webpack_config.dev_server_host, webpack_config.dev_server_port, f'/(unknown)')
    self.build_path = webpack_config.build_dir / filename
    self.upstream_url = webpack_config.upstream_url
    log.info(f'Userscript sources (in order): {self.dev_server_url}, {self.build_path}, {self.upstream_url}')
    self.filter: flowfilter.TFilter = None
    self._upstream_response_cache: Optional[str] = None
    # Pre-expired timestamp so the first upstream fetch always happens.
    self._upstream_response_cache_timeout: datetime = datetime.now() - UPSTREAM_CACHE_TIMEOUT
def openfile(self, filename):
    """Open the RheaFlow config file for reading.

    Returns:
        The open stream on success.  Exits the process when the file is
        missing or cannot be opened.
    """
    if os.path.exists(filename):
        try:
            stream = open(filename, 'r')
            log.info('Opened RheaFlow config file:%s', filename)
        except IOError as e:
            # BUG FIX: the original raised Exception and then had an
            # unreachable sys.exit(e.errno); exit with the OS error code,
            # matching the missing-file branch below.
            log.error("Could not open %s", filename)
            sys.exit(e.errno)
    else:
        log.error('Could not find %s', filename)
        sys.exit(1)
    return stream
def get_centerxy(self, condition, xiangsi=0.9):
    """Locate this template image on screen; on a hit store its position
    in self.x/self.y and return 1, otherwise return 0.

    Args:
        condition: truthy when the search should run at all (returns
            None when falsy, matching the original behavior).
        xiangsi: similarity threshold passed to dm.FindPic.
    """
    if condition:
        # Renamed from ``list`` — the original shadowed the builtin.
        found = dm.FindPic(scereen[1], scereen[2], scereen[3], scereen[4],
                           self.path, "000000", xiangsi, 0)
        # found = (index, x, y); all non-negative means a match.
        if found[1] >= 0 and found[2] >= 0 and found[0] >= 0:
            # Matched region's position on screen.
            self.x, self.y = found[1], found[2]
            # 区域的半径
            # self.r = 0
            log.info("发现 %s" % self.name)
            return 1
        else:
            return 0
def send(self, signal):
    """Encode and write *signal* to the serial port, suppressing
    consecutive duplicates.

    Returns the signal sent, "" for a suppressed duplicate, or None when
    the beacon is disabled.
    """
    if not self.enable:
        # log.error(f"Beacon is disabled")
        return
    is_repeat = self.last_msg is not None and signal == self.last_msg
    if is_repeat:
        log.info(f"Duplicated message")
        return ""
    # log.info(f"To serial: {signal.encode()}")
    self.ser.write(signal.encode())
    self.last_msg = signal
    return signal
def main():
    """Wire up the Telegram bot: commands, the raid-creation conversation
    state machine, and fallback handlers, then start long polling."""
    log.info('Бот запущен.')
    updater = Updater(token=config.token)
    dp = updater.dispatcher
    dp.add_handler(CommandHandler('start', start))
    dp.add_handler(CommandHandler('off', off))
    dp.add_handler(CommandHandler('help', help))
    # Anchored regex matching any known boss name.
    all_boss_list = '^(' + regexp_all_boss() + ')$'
    # Raid-creation conversation handler
    raid_conv_handler = ConversationHandler(
        entry_points=[CommandHandler('raid', new_raid)],
        states={
            LEVEL: [RegexHandler('^(1️⃣)$', raid_1),
                    RegexHandler('^(2️⃣)$', raid_2),
                    RegexHandler('^(3️⃣)$', raid_3),
                    RegexHandler('^(4️⃣)$', raid_4),
                    RegexHandler('^(5️⃣)$', raid_5),
                    MessageHandler(Filters.all, not_raid_level)],
            BOSS: [RegexHandler(all_boss_list, raid_boss),
                   MessageHandler(Filters.all, not_boss)],
            LOCATION: [MessageHandler(Filters.location, location),
                       MessageHandler(Filters.all, not_location)],
            INFO: [MessageHandler(Filters.text, raid_info),
                   MessageHandler(Filters.all, not_info)],
            # NOTE(review): pattern only accepts hours 0-1 — confirm this is
            # a remaining-duration h:mm rather than a time of day.
            TIME: [RegexHandler('^([0-1]:[0-5]\d)$', raid_time),
                   MessageHandler(Filters.all, not_time)],
            CHECK: [RegexHandler('^(✅)$', raid_done),
                    RegexHandler('^(❌)$', cancel_raid),
                    MessageHandler(Filters.all, not_done)]
        },
        fallbacks=[CommandHandler('cancel', cancel_raid)]
    )
    dp.add_handler(raid_conv_handler)
    dp.add_handler(MessageHandler(Filters.text, other))
    dp.add_handler(MessageHandler(Filters.command, unknown))
    # Start the bot
    updater.start_polling()
    updater.idle()
def lex(self):
    """Return a Pygments token stream for self.code.

    Falls back to the plain 'text' lexer when the language is unset,
    'none', or unknown to Pygments.  (Python 2 source: uses ``unicode``
    and the ``<>`` operator.)
    """
    # Get lexer for language (use text as fallback)
    try:
        if self.language and unicode(self.language).lower() <> 'none':
            lexer = get_lexer_by_name(self.language)
        else:
            lexer = get_lexer_by_name('text')
    except ValueError:
        log.info("no pygments lexer for %s, using 'text'" \
            % self.language)
        # what happens if pygment isn't present ?
        lexer = get_lexer_by_name('text')
    return pygments.lex(self.code, lexer)
def show_local(filename='slides.md'):
    """Render a local markdown slide deck from the markdown/ directory.

    Theme and highlighter come from query args (defaults: monokai /
    remark).  On ApiNotFoundError the response body is logged and the
    view implicitly returns None.  (Python 2 except syntax.)
    """
    try:
        filename = 'markdown/' + filename
        log.info('Loading file: %s', filename)
        slides = _local_slides(filename)
        theme = request.args.get('theme', 'monokai')
        highlight = request.args.get('highlight', 'remark')
        return render_template('localshow.html', slides=slides, theme=theme, highlight=highlight)
    except ApiNotFoundError, e:
        log.error(e.response)
def _processLinkADRAns(self, device, command):
    """Handle a LinkADRAns MAC command from *device*.

    No state change is required here — a datarate change will be observed
    via the rxpk field — so receipt is simply logged.

    Args:
        device (Device): Sending device
        command (LinkADRAns): LinkADRAns object
    """
    log.info("Received LinkADRAns from device {devaddr}",
             devaddr=devaddrString(device.devaddr))
def append(self, ipAddress, deferSaving=False):
    """Add a nameserver, list.append-style; duplicates are ignored.

    Pass deferSaving=True when batching several appends — the caller is
    then responsible for saving afterwards.
    """
    # ipAddress is assumed to have been sanity-checked upstream.
    if ipAddress in self:
        return  # don't add duplicate name servers
    log.info('Adding nameserver %s' % ipAddress)
    _getDnsConfigImpl().AddNameServer(vmkctl.Ipv4Address(ipAddress))
    if not deferSaving:
        self._save()
def _exec(irc, source, args):
    """<code>

    Admin-only. Executes <code> in the current PyLink instance.
    \x02**WARNING: THIS CAN BE DANGEROUS IF USED IMPROPERLY!**\x02"""
    # Gate behind admin auth; oper status alone is insufficient.
    utils.checkAuthenticated(irc, source, allowOper=False)
    args = ' '.join(args)
    if not args.strip():
        irc.reply('No code entered!')
        return
    # Audit trail: record who ran what.
    log.info('(%s) Executing %r for %s', irc.name, args, utils.getHostmask(irc, source))
    # SECURITY: exec of operator-supplied code is intentional here (admin
    # debugging command), but remains arbitrary code execution by design.
    exec(args, globals(), locals())
def __iter__(self):
    """Parse the code string and yield (token-class, value) pairs."""
    try:
        token_stream = self.lex()
    except IOError:
        # Pygments unavailable: emit the raw code as one unclassified chunk.
        log.info("Pygments lexer not found, using fallback")
        # TODO: write message to INFO
        yield ('', self.code)
        return
    for ttype, value in self.join(token_stream):
        yield (_get_ttype_class(ttype), value)
def update_versions_all(target_folder, spring_boot_version):
    """Refresh managed external-dependency versions in every pom*.xml
    under *target_folder*, using the version map belonging to the given
    Spring Boot release."""
    version_map = {}
    # Read artifact versions from the managed dependency file.
    dependency_file = get_spring_boot_managed_external_dependencies_file(
        spring_boot_version)
    log.info('external_dependency_file=' + dependency_file)
    load_version_map_from_file(dependency_file, version_map)
    for root, _, files in os.walk(target_folder):
        for file_name in files:
            if file_name.startswith('pom') and file_name.endswith('.xml'):
                update_versions(version_map, root + os.sep + file_name)
def manage_execution(decision, default_court='STJ', year=None):
    """ decision should be file containing decision text File name is
    expected as: YYYYMMDD_CLASSE_NUMERO_ID, where the date refers to
    decision publishing.

    Submits the decision text through the pipeline and accumulates any
    extracted citations into the module-level map ``D`` (title -> cites),
    which is returned.

    NOTE(review): the ``year`` parameter is unused here — confirm intent.
    """
    with open(decision, 'r') as d:
        title = os.path.basename(d.name)
        # Title doubles as a unique pipeline id.
        pipeline_id = helpers.parse_title_for_uniqueness(title)
        output = submit(d, id_=pipeline_id, default_court=default_court)
        if output['i_cite']:
            D.update({title: output['i_cite']})
            log.info('precedentes extracted for %s' % title)
    return D
def _eval(irc, source, args):
    """<Python expression>

    Admin-only. Evaluates the given Python expression and returns the
    result.
    \x02**WARNING: THIS CAN BE DANGEROUS IF USED IMPROPERLY!**\x02"""
    # Gate behind admin auth; oper status alone is insufficient.
    utils.checkAuthenticated(irc, source, allowOper=False)
    args = ' '.join(args)
    if not args.strip():
        irc.reply('No code entered!')
        return
    # Audit trail: record who evaluated what.
    log.info('(%s) Evaluating %r for %s', irc.name, args, utils.getHostmask(irc, source))
    # SECURITY: eval of operator-supplied code is intentional here (admin
    # debugging command), but remains arbitrary code execution by design.
    irc.reply(eval(args))
def start(self):
    """Begin monitoring: scan all configured paths, then start the
    filesystem notifier and the periodic timer."""
    #content.cleanup()
    #content.buildup()
    for path in self.paths:
        # Called for its side effects; the original assigned the result to
        # an unused local, which has been removed.
        content.scan(path)
    self.notifier.start()
    self.running = True
    log.info('Monitor started.')
    self.timer.start()
def consumer_callback(self, channel, method, properties, body):
    """AMQP delivery handler: count and ack each message; on a "quit"
    message, record the stop time, log stats, and shut down consuming."""
    self.__number_of_msg += 1
    if self.__number_of_msg == 1:
        # Timestamp of the very first delivery.
        self.__first_msg_time = time.time()
    self.ack_msg(method.delivery_tag)
    if str(body) != "quit":
        return
    self.__quit_time = time.time()
    log.info(
        "Consumer: %s quit at %f, Number of Msg: %d" %
        (self.__queue_name, self.__quit_time, self.__number_of_msg))
    self.__channel.basic_cancel(consumer_tag=self.__queue_name)
    self.__channel.stop_consuming()
    self.__stop_consuming = True
def __search_dublicate(inn, date):
    """Return the INN column of session rows matching *inn* and *date*.

    SECURITY FIX: the original interpolated the values straight into the
    SQL string; use a parameterized query instead.
    """
    result_list = []
    conn = pyodbc.connect(config.db_connect)
    cursor = conn.cursor()
    cursor.execute(
        """SELECT Name,INN,IP,Date,Phone FROM ibank_delta.dbo.Sessions
           where inn = ? and date = ?""",
        (inn, date))
    for row in cursor.fetchall():
        result_list.append(row[1])
    conn.close()
    log.info(result_list)
    return result_list
async def auth(request):
    """aiohttp middleware body: resolve the logged-in user from the
    session cookie, attach it as ``request.__user__``, enforce admin
    access on /manage/ paths, then delegate to the next handler.

    NOTE(review): ``handler`` is not defined in this scope — presumably
    this is the inner function of a middleware factory whose closure
    provides it; confirm against the enclosing definition.
    """
    log.info('检查用户: %s %s' % (request.method, request.path))
    request.__user__ = None
    cookie_str = request.cookies.get(COOKIE_NAME)
    if cookie_str:
        user = await cookie2user(cookie_str)
        if user:
            log.info('设置当前用户: %s' % user.phone)
            request.__user__ = user
    # Admin area requires a logged-in admin user.
    if request.path.startswith('/manage/') and (
            request.__user__ is None or not request.__user__.admin):
        return web.HTTPFound('/login')
    return (await handler(request))
def mk_snap(self, image, snap, vm=None):
    """Create rbd snapshot ``image@snap`` (the snap name is escaped
    first).  *vm* is kept for interface compatibility.
    """
    snap = self.__esc(snap)
    Log.info('Creating snapshot %s@%s .. ' % (image, snap))
    # BUG FIX(simplification): the original duplicated this call in a
    # ``try/except Exception as e: raise e`` branch for vm != None, which
    # is a no-op wrapper — errors propagate identically either way.
    self('snap', 'create', '--snap', snap, image)
def extract_agrstat_official_info(key, url) -> None:
    """Register *key* and, once every expected keyword is registered,
    drive a browser over the paginated results at *url*, deleting found
    keywords from the pending dict until it is empty or the last page is
    reached.

    :param key: keyword
    :param url: URL to parse
    :return: None
    """
    kws_d[key] = ''
    # Only start scraping once all expected keywords have been collected.
    if len(kws_d) == agroff.len():
        creator = agroff()
        driver = get_web_driver()
        driver.get(url)
        while True:
            if len(kws_d) == 0:
                # All keywords found: done.
                driver.quit()
                break
            try:
                element, soup = get_html_element(agroff.tag('tr_row1'), agroff.tag('tr_row2'), page_source=driver.page_source, return_soup=True)
                kw_list = LAMBDA_DICT['kw_list'](element)
                for k in kw_list:
                    if k in kws_d.keys():
                        log.info('find ', k, ' at page ', creator.page, unpacking=False)
                        del kws_d[k]
                # Text of the last td tag: decides last page vs. more pages.
                flag = LAMBDA_DICT['specified_element_text'](soup(agroff.tag('td')), -1)
                if flag == '...':
                    if creator.page.endswith('0'):
                        # Every 10th page: click through to the next page set.
                        driver.find_element_by_xpath(creator.tag('more_page')).click()
                        creator.page = 1
                        continue
                else:
                    if creator.page == flag:
                        # Reached the final page.
                        driver.quit()
                        if len(kws_d) != 0:
                            err_log.warning('not found keyword: ', kws_d.keys(), unpacking=False)
                        print('Page end, ', creator.page, 'pages')
                        break
                # NOTE(review): resetting page to 1 here looks suspicious —
                # possibly an increment was intended; confirm with
                # agroff.page semantics.
                creator.page = 1
                driver.find_element_by_xpath(creator.tag('page').format(creator.page)).click()
                driver.implicitly_wait(3)
            except Exception:
                driver.quit()
                t, v, tr = sys.exc_info()
                err_log.error('error occurred.\t', t, '\t', v)
                break
def reload(self, topic_id):
    """Reload topic metadata from MySQL into self.topic_dict.

    Double-buffered: mutations go to a copy of the dict to narrow the
    data race window.  (Python 2 source: ``has_key`` and ``except X, e``.)
    """
    # Double buffering: work on a copy to reduce data-race exposure.
    topic_dict_copy = dict(self.topic_dict)
    try:
        # Topic id cannot be 0 ???
        topic_id = int(topic_id)
        if topic_id <= 0:
            topic_id = None
    except Exception as e:
        topic_id = None
    db_topic_res = []
    mysql_conn = get_mysql_conn(self.conf)
    sql = 'select `id`, `name`, `description`, `table_name`, `schema`, `utime`, `ctime`, `primary_keys` from topic'
    if topic_id is not None:
        sql = "%s where id = %d;" % (sql, topic_id)
    db_topic_res = mysql_fetch(mysql_conn, sql, return_dict=True)
    # We requested a topic id but db returned nothing, means this topic(s) is non-existent
    if topic_id and len(db_topic_res) == 0 and topic_dict_copy.has_key( topic_id):
        del topic_dict_copy[topic_id]
    change_topics = []
    for topic_info in db_topic_res:
        try:
            # Parse the Schema JSON into an object.
            try:
                topic_info['schema_obj'] = json.loads(topic_info['schema'])
            except Exception, e:
                topic_info['schema_obj'] = None
                log.warning(
                    'schema parse failed[%s], topic[%s] is not writable!' %
                    (str(e), topic_info['name'].encode('utf8')))
            # TODO: also store db_name in the mysql table so different topics
            # can live in different dbs.
            # topic_info['db_name'] = conf.MONGODB['default_db']
            # db_name = topic_info['db_name']
            table_name = topic_info['table_name']
            topic_info['collection'] = table_name
            # Register into the working copy.
            topic_dict_copy[topic_info['id']] = topic_info
            change_topics.append(topic_info['id'])
            log.info(
                'TopicManager loaded: topic.id[%s], topic.name[%s], table_name[%s] loaded!' %
                (topic_info['id'], topic_info['name'].encode('utf8'),
                 table_name.encode('utf8')))
        except Exception as e:
            log.info("failed reload schema {}".format(topic_info['id']))
            log.error(str(traceback.format_exc()))
def start_job(s):
    """Cancel any running job for this PR and queue a fresh build of its
    head commit, exporting the CI_* environment for build.sh.

    Returns *s* in all cases.
    """
    s.cancel_job()
    log.info("PR %s: queueing build of commit %s", s.url, s.head)
    env = {
        "CI_PULL_COMMIT": s.head,
        "CI_PULL_REPO": s.repo,
        "CI_PULL_BRANCH": s.branch,
        "CI_PULL_NR": str(s.nr),
        "CI_PULL_URL": s.url,
        "CI_PULL_TITLE": s.title,
        "CI_PULL_USER": s.user,
        "CI_BASE_REPO": s.base_repo,
        "CI_BASE_BRANCH": s.base_branch,
        "CI_BASE_COMMIT": s.base_commit,
        "CI_SCRIPTS_DIR": config.scripts_dir,
        "CI_PULL_LABELS": ";".join(sorted(list(s.labels))),
        "CI_BUILD_HTTP_ROOT": os.path.join(config.http_root, s.base_full_name, str(s.nr), s.head),
    }
    if s.mergeable:
        env["CI_MERGE_COMMIT"] = s.merge_commit
    for key, value in env.items():
        if not value:
            # NOTE(review): aborts queueing entirely on the first falsy env
            # value — confirm this is intended rather than skipping that var.
            log.warning("PR %s: env %s has NoneType!", s.url, key)
            return s
    s.current_job = Job(s.get_job_path(s.head),
                        os.path.join(config.scripts_dir, "build.sh"),
                        env, s.job_hook, s.head)
    s.jobs.append(s.current_job)
    queue.put(s.current_job)
    s.current_job.set_state(JobState.queued)
    return s
def delete_dependency_version(file_path):
    """Strip spring-managed version lines from a pom file, in place.

    Lines tagged with ';external_dependency} -->' are dropped unless the
    artifact is in IGNORED_ARTIFACTS or is not managed by spring.
    """
    log.info("delete dependency version in " + file_path)
    with open(file_path, 'r', encoding='utf-8') as pom_file:
        original_lines = pom_file.readlines()
    with open(file_path, 'w', encoding='utf-8') as new_pom_file:
        for line in original_lines:
            if ';external_dependency} -->' not in line:
                new_pom_file.write(line)
                continue
            artifact = line.split(";")[1]
            if artifact in IGNORED_ARTIFACTS:
                new_pom_file.write(line)
            elif artifact not in external_dependencies_managed_list():
                # listed in external-dependencies.txt but not managed by spring
                new_pom_file.write(line)