def build_vm_status(sn, uri="qemu:///system"):
    conn = None
    try:
        conn = libvirt.open(uri)
    except Exception as e:
        logger.exception(e)
def run(self):
    while True:
        if _error.is_set():
            break
        if _stop.is_set():
            logger.debug('Stop signal received by worker.')
            break
        try:
            entry = queue.get(False)
            task, id = (entry['task'], entry['id'])
            # Update current task
            current = entry
            logger.debug('Starting task %s.%s (%s)'
                         % (entry['task'].module, entry['task'].name,
                            entry['id'][:8]))
            entry['task']()
            logger.debug('Completed task (%s)' % entry['id'][:8])
        except Empty:
            break
        except Exception as e:
            _error.set()
            logger.exception(e)
        finally:
            current = None
def __get_alpha(self, alpha):
    # Transparency
    if alpha == "active_opacity":
        # For backwards compatibility
        alpha = "color5"
    for i in range(1, 9):
        if alpha in ("color%s" % i, "opacity%s" % i):
            if "color%s_alpha" % i in self.globals.colors:
                a = float(self.globals.colors["color%s_alpha" % i]) / 255
            else:
                logger.warning("Theme error: The theme has no"
                               " opacity option for color%s." % i)
                a = 1.0
            break
    else:
        try:
            a = float(alpha) / 100
            if a > 1.0 or a < 0:
                raise ValueError("opacity out of range")
        except (ValueError, TypeError):
            logger.exception("Theme error: The opacity attribute of a theme "
                             "command should be a number between \"0\" "
                             "and \"100\" or \"color1\" to \"color8\".")
            a = 1.0
    return a
def registerRequest(self, addr, msg):
    content = msg["content"]
    mode = content.get("mode", "sage")
    if mode in ("sage", "python"):
        self.waiting[msg["header"]["msg_id"]] = addr
        if self.kernel is None:
            # It is highly unlikely that we get a completion request before
            # the kernel is ready, so we are not going to handle it.
            logger.error("completer kernel is not available")
            return
        self.kernel.session.send(self.kernel.channels["shell"], msg)
        return
    match = Completer.name_pattern.search(
        content["line"][:content["cursor_pos"]])
    response = {
        "channel": "shell",
        "header": {
            "msg_id": str(uuid.uuid4()),
            "username": "",
            "session": self.kernel.id,
            "msg_type": "complete_reply"
        },
        "parent_header": msg["header"],
        "metadata": {},
        "content": {
            "matches": [t for t in tab_completion.get(mode, [])
                        if t.startswith(match.group())],
            "cursor_start": match.start(),
        },
    }
    addr.send("complete," + jsonapi.dumps(response))
def get(url, payload=None):
    payload = payload or {}
    logger.debug([url, payload])
    retries = MAX_RETRY
    res = None
    while retries > 0:
        try:
            cache = TwitchAPI.caching
            if cache:
                res = cache.get(url, payload)
                if res:
                    return res
            res = requests.get(url, params=payload,
                               headers=common_headers, verify=False)
            j = res.json()
            if cache:
                cache.set(url, payload, j)
            if "error" in j and j['error']:
                raise Exception(j.get("error"))
            return j
        except ValueError as e:
            logger.exception(e)
            if res:
                logger.warning(res.text)
            retries -= 1
            if retries <= 0:
                raise
            time.sleep(0.3)
        except Exception as e:
            logger.exception(e)
            raise
def get_entity_score(self, entity):
    """ Use the number of relations as a measure of importance. """
    entity = ensure_prefixed(entity)
    value = self.entity_scores.get(entity)
    if value:
        return value
    logger.debug("Entity %s not found in cache", entity)
    try:
        result = self.store.query("""
            prefix fb: <http://rdf.freebase.com/ns/>
            SELECT COUNT(*) WHERE {
                %s ?r ?o .
            }
        """ % entity)
        score = int(result[0][0])
    except timeout:
        logger.exception("Timeout attempting to get count for entity: %s",
                         entity)
        score = 0
    self.entity_scores[entity] = score
    return score
def run(self):
    assert self.storage is not None
    try:
        self.crawl()
    except Exception as e:
        logger.info('error when crawl: %s' % self.user)
        logger.exception(e)
def connect_names(self, query_entities, target_names):
    if len(query_entities) == 0:
        return []
    name_query_string = (','.join(['"%s"@en'] * len(target_names))
                         % tuple(target_names))
    logger.debug("Name query string: %r", name_query_string)
    try:
        all_entities = self.store.query("""
            prefix fb: <http://rdf.freebase.com/ns/>
            SELECT ?r1 WHERE {
                { ?s ?r1 ?e .
                  ?e fb:type.object.name ?n .
                  FILTER(?n IN (%s)) .
                  FILTER(?s IN (%s)) . }
                UNION
                { ?s ?r1 ?e .
                  ?e fb:common.topic.alias ?n .
                  FILTER(?n IN (%s)) .
                  FILTER(?s IN (%s)) . }
            }
        """ % (name_query_string, ','.join(query_entities),
               name_query_string, ','.join(query_entities)))
    except timeout:
        logger.exception("Timeout looking for simple connection")
        all_entities = []
    logger.debug("Performing complex search")
    logger.info("Found %d simple connections", len(all_entities))
    for target_name in target_names:
        try:
            entities = self.store.query("""
                prefix fb: <http://rdf.freebase.com/ns/>
                SELECT ?r1, ?r2 WHERE {
                    { ?s ?r1 ?o .
                      ?o ?r2 ?e .
                      ?e fb:type.object.name "%s"@en .
                      FILTER(?s IN (%s)) . }
                    UNION
                    { ?s ?r1 ?o .
                      ?o ?r2 ?e .
                      ?e fb:common.topic.alias "%s"@en .
                      FILTER(?s IN (%s)) . }
                }
            """ % (target_name, ','.join(query_entities),
                   target_name, ','.join(query_entities)))
            logger.debug("Search for complex connection: %r", entities)
            all_entities += entities
        except timeout:
            logger.exception("Timeout looking for complex connection "
                             "with target: %s", target_name)
    return set(all_entities)
def cancel_discovery(namespace):
    try:
        if namespace in browser:
            browser[namespace].cancel()
        if namespace in zconf:
            zconf[namespace].close()
    except Exception:
        logger.exception('Error cancelling discovery')
def start_discovery(namespace, callback=None):
    global listener, zconf, browser
    try:
        listener[namespace] = CastListener(namespace=namespace,
                                           callback=callback)
        zconf[namespace] = Zeroconf()
        browser[namespace] = ServiceBrowser(zconf[namespace], namespace,
                                            listener[namespace])
    except Exception:
        logger.exception('Error starting discovery')
def reset(self):
    try:
        bus = dbus.SessionBus()
        proxy = bus.get_object("net.launchpad.DockManager.Daemon",
                               "/net/launchpad/DockManager/Daemon")
        proxy.RestartAll(dbus_interface="net.launchpad.DockManager.Daemon")
    except Exception:
        logger.exception("Restarting DockManager Helpers failed.")
def _process_comment_text(self, comment_text):
    try:
        for comment_text_processor in all_comment_text_processor:
            comment_text = comment_text_processor(comment_text)
        return comment_text
    except Exception:
        logger.exception("Error processing comment_text: '%s'" % comment_text)
        return escape(comment_text)
def query(self, querystring, fields, is_target=True):
    try:
        handle = self._target if is_target else self._others
        cursor = handle.find(querystring, fields)
        for doc in cursor:
            yield doc
    except Exception:
        logger.exception('mongodb query exception!')
def add_API(API_data, organization):
    try:
        add_api_to_db(API_data, organization)
        response = Response(status=200)
    except Exception:
        logger.exception("Error occurred when adding an API")
        response = Response(status=500)
    return response
def start():
    sn = 'CAS1000000000'
    try:
        payload = build_host_hw(sn)
        logger.debug(json.dumps(payload))
        r = requests.post(constants.url_status, data=json.dumps(payload))
    except Exception as e:
        logger.exception(e)
def add_clicked_API(clicked_API, organization):
    try:
        add_search_query_to_db(clicked_API, organization)
        response = Response(status=200)
    except Exception:
        logger.exception("Error occurred when adding a clicked API")
        response = Response(status=500)
    return response
def _get_action(self):
    raw_action = send([self._project_name, 'nowa_sesja'])
    try:
        action = eval(raw_action)
    except SyntaxError:
        logger.exception("Answer from server not in json format: %s",
                         repr(raw_action))
        raise
    return action
def add_search_query(search_query, organization):
    try:
        add_search_query_to_db(search_query, organization)
        response = Response(status=200)
    except Exception:
        logger.exception("Error occurred when adding a search query")
        response = Response(status=500)
    return response
def update_application(application_data, organization):
    try:
        update_application_in_db(application_data, organization)
        response = Response(status=200)
    except Exception:
        logger.exception("Error occurred when updating an application")
        response = Response(status=500)
    return response
def add_application(application_data, organization):
    try:
        add_application_to_db(application_data, organization)
        response = Response(status=200)
    except Exception:
        logger.exception("Error occurred when adding an application")
        response = Response(status=500)
    return response
def inner(self):
    try:
        func(self)
    except Exception:
        logger.exception('Error!')
        if is_dev_server:
            raise
        else:
            self.response.write('Error occurred')
def handle(self):
    try:
        keep_alive = True
        while keep_alive:
            keep_alive = self.handle_http()
    except Exception:
        self.send_error(500, 'Internal Server Error')
        logger.exception('Fatal error in handle()')
    logger.info('Request handled, exiting')
def edit(id, db_session, data, username):
    logger.info(LogMsg.START, username)
    # TODO: the version of the passed data is never checked. Our records have
    # a version field to prevent conflicts when two different edit requests
    # arrive concurrently. See the KAVEH edit functions for how the version
    # field should be used.
    logger.debug(LogMsg.EDIT_REQUST, {'discuss_group_id': id, 'data': data})
    user = check_user(username, db_session)
    per_data = {}
    permissions, presses = get_user_permissions(username, db_session)
    if is_admin_member(user.person_id, id, db_session):
        per_data.update({Permissions.IS_OWNER.value: True})
    has_permission([Permissions.DISCUSSION_GROUP_PREMIUM],
                   permissions, None, per_data)
    logger.debug(LogMsg.PERMISSION_VERIFIED, username)
    if "id" in data:
        del data["id"]
    model_instance = db_session.query(DiscussionGroup).filter(
        DiscussionGroup.id == id).first()
    if model_instance:
        logger.debug(LogMsg.MODEL_GETTING)
    else:
        logger.debug(LogMsg.MODEL_GETTING_FAILED, {'discussion_group_id': id})
        raise Http_error(404, Message.NOT_FOUND)
    try:
        for key, value in data.items():
            # TODO: check that key is a valid attribute of the class
            setattr(model_instance, key, value)
        edit_basic_data(model_instance, username, data.get('tags'))
        if 'members' in data:
            members = data.get('members', [])
            members.append({'person_id': user.person_id, 'type': 'Admin'})
            delete_group_members(model_instance.id, db_session, username)
            logger.debug(LogMsg.DISCUSSION_GROUP_OLD_MEMBERS_DELETED)
            member_data = {'group_id': model_instance.id, 'members': members}
            add_disscussuion_members(member_data, db_session, username)
            logger.debug(LogMsg.DISCUSSION_GROUP_MEMBERS_ADDED, members)
        result = discuss_group_to_dict(model_instance, db_session)
        logger.debug(LogMsg.MODEL_ALTERED, result)
    except Exception:
        logger.exception(LogMsg.EDIT_FAILED)
        raise Http_error(500, Message.EDIT_FAILED)
    logger.info(LogMsg.END)
    return result
def clickfortag(self, para):
    try:
        para = "" if para is None else para
        args = para.split(" ")
        if len(args) < 2:
            return [Helper.HELP_NONE("at least one label and one url")]
        Faver.Tag(args)
    except Exception as e:
        logger.exception("[main] tag except:%s", str(e))
def run(self):
    while True:
        try:
            result = checker()
            self.queue.put(result)
        except Exception as e:
            logger.error("Error happened while checking instances.")
            logger.exception(e)
        time.sleep(60)
def connect(self):
    try:
        self._client = MongoClient('mongodb://{}:{}'.format(
            MongoConf.host, MongoConf.port), connect=False)
        self._target = self._client[self.db][MongoConf.target]
        self._others = self._client[self.db][MongoConf.others]
    except Exception:
        logger.exception('connecting to mongodb failed!')
def delete_application(payload, organization):
    try:
        app_id = payload["appid"]
        delete_application_from_db(app_id, organization)
        response = Response(status=200)
    except Exception:
        logger.exception("Error occurred when deleting an application")
        response = Response(status=500)
    return response
def tag(self, para):
    try:
        para = "" if para is None else para
        args = para.split(" ")
        if len(args) < 2:
            return [Helper.HELP_NONE("at least one label and one url")]
        return [Helper.ShowList("tag %s for url:%s"
                                % (" & ".join(args[:-1]), args[-1]),
                                "click to confirm", para, "clickfortag")]
    except Exception as e:
        logger.exception("[main] tag except:%s", str(e))
def get_domain(self, uri="qemu:///system"): try: conn = libvirt.open(uri) dom = conn.lookupByName(self.name) return dom conn.close() except Exception, e: logger.exception(e) return None
def _handle_truncate(self, text):
    fl = FbLexer()
    table_name = fl.extract_table_name(text)
    try:
        result, cursor = self._execute(text)
    except Exception as e:
        logger.exception(e)
    mm = MetaManager(table_name)
    mm.save_delete()
    print('truncate table finished')
def try_auto_resolve_captcha(self):
    try:
        self._try_auto_resolve_captcha()
    except Exception as e:
        msg = (f"ver {now_version} {self.name} automatic captcha resolution "
               f"failed with an uncaught exception; please report it in QQ "
               f"group {config().common.qq_group} or resolve it yourself. "
               f"Please handle the captcha manually.")
        logger.exception(color("fg_bold_red") + msg, exc_info=e)
        logger.warning(color("fg_bold_cyan") +
                       "If this error keeps occurring, check the netdisk to "
                       "see whether a newer version fixes it~")
        logger.warning(color("fg_bold_cyan") +
                       f"Link: {config().common.netdisk_link}")
def download_by_list(self, data):
    '''Downloads the given songs one by one (the parallel pool version is
    commented out below)'''
    try:
        return [self.download_tuple(song) for song in data]
        # with multiprocessing.Pool(processes=max(len(data) // 2, 2)) as pool:
        #     return pool.map(self.download_tuple, data)
    except urllib.error.URLError as exc:
        print('Can not fetch songs')
        logger.exception(exc)
        return []
def call_display_marks(self):
    try:
        cursor = self.connect._CRUD__db.cursor()
        result_arg = cursor.callproc('display_marks', [3])
        logger.info(result_arg[0])
    except Exception:
        logger.exception("Call Procedure Aborted")
def tracks(self):
    if not self._tracks:
        for i in self._link.get_tracks():
            try:
                self._tracks.append(LastSong(i))
            except Exception as exc:
                logger.error('Can not fetch track for album {}'.format(self.name))
                logger.exception(exc)
    return self._tracks
def delete_API(payload, organization):
    try:
        tenant = payload['tenant']
        api = payload['api_name']
        delete_API_from_db(tenant, api, organization)
        response = Response(status=200)
    except Exception:
        logger.exception("Error occurred when deleting an API")
        response = Response(status=500)
    return response
def download_as(self, title, artist='', album='', track=0):
    '''Download the song and set its tags to the given title, artist, album'''
    logger.debug('in download(' + repr((title, artist, album, track)) + ')')
    title = strip_unprintable(title.strip())
    artist = strip_unprintable(artist.strip()) if artist else ''
    album = strip_unprintable(album.strip()) if album else ''
    data = None
    providers = [vpleer.download]
    for download in providers:
        try:
            data = download(artist, title)
        except Exception as exc:
            logger.exception(exc)
        else:
            break
    else:
        return ''
    filename = str(track).zfill(2) + ' - ' + artist + ' - ' + title + '.mp3'
    logger.debug('Saving to {}'.format(os.path.abspath(filename)))
    with open(filename, 'wb') as file:
        file.write(data)
        file.flush()
    del data
    tag = open_tag(filename)
    if not artist:
        artist = tag.artist.strip()
        logger.info("Setting new song's artist to " + artist)
    elif tag.artist.strip() and tag.artist.strip() != artist:
        logger.info('Original artist was ' + tag.artist.strip())
    if not title:
        title = tag.title.strip()
        logger.info("Setting new song's title to " + title)
    elif tag.title.strip() and tag.title.strip() != title:
        logger.info('Original title was ' + tag.title.strip())
    if not album:
        album = tag.album.strip()
        logger.info("Setting new song's album to " + album)
    elif tag.album.strip() and tag.album.strip() != album:
        logger.info('Original album was ' + tag.album.strip())
    if not track:
        track = tag.track
        logger.info("Setting new song's track to " + str(track))
    elif tag.track != track:
        logger.info('Original track was ' + str(tag.track))
    tag._frames.clear()
    tag.title = title
    tag.artist = artist
    tag.album = album
    tag.track = track
    tag.write()
    return filename
def update_user_recommendations_db():
    """
    Process recommendations periodically at a 24-hour interval and store
    them in a db, so that the pre-processed recommendations can be retrieved
    whenever needed.
    """
    try:
        logger.info("Processing periodic user recommendations")
        process_user_info(SEARCH_DETAILS_VALID_TIME, MINIMUM_SEARCH_QUERIES)
        logger.info("Periodic user recommendations processed successfully")
    except Exception:
        logger.exception("Error when processing periodic user recommendations")
def post(self):
    try:
        args = self.parser.parse_args()
        data = args["data"]
        if data is not None:
            self.data['response']['code'] = 0
            self.data['response']['text'] = self.ad_mag.predict(data, 10)
    except Exception as e:
        logger.exception('ERROR', exc_info=e)
    # Return outside the try block: a return in a finally clause would
    # silently swallow even exceptions the handler above does not catch.
    return self.data
def stop(self): logger.debug("stopping kernel %s", self.id) if not self.alive: logger.exception("not alive already") return if self._on_stop: self._on_stop() self.stop_hb() for stream in self.channels.itervalues(): stream.close() self._dealer.stop_kernel(self.id)
def run(self):
    logger.info(f'Run {self.__class__.__name__}')
    urls = self.urls()
    for url in urls:
        if not self.post_exists(url):
            try:
                model = self.parse_post(url)
                self.save_post(model)
            except Exception as e:
                logger.exception(e)
def _reset_timeout(self, msg):
    if msg["header"]["msg_type"] == "kernel_timeout":
        try:
            timeout = float(msg["content"]["timeout"])
            if (not math.isnan(timeout)) and timeout >= 0:
                if timeout > self.application.km.max_kernel_timeout:
                    timeout = self.application.km.max_kernel_timeout
                self.kernel["timeout"] = timeout
        except Exception as e:
            logger.exception("blanket exception in _reset_timeout: %s", e)
        return False
def run(self):
    is_client_connected = False
    while (is_client_connected is not True) and getattr(self, "do_run", True):
        sleep(3)
        is_client_connected = self.hotspot.check()
    try:
        self.connect()
    except BaseException as e:
        logger.exception("Error when starting server: %s", e)
        self.stop()
def try_auto_resolve_captcha(self):
    try:
        self._try_auto_resolve_captcha()
    except Exception as e:
        msg = (f"ver {now_version} automatic captcha resolution failed with "
               "an uncaught exception; please report it in QQ group "
               "966403777 or resolve it yourself. Please handle the captcha "
               "manually.")
        logger.exception(color("fg_bold_red") + msg, exc_info=e)
        logger.warning(color("fg_bold_cyan") +
                       "If this error keeps occurring, check the netdisk to "
                       "see whether a newer version fixes it~")
        logger.warning(color("fg_bold_cyan") +
                       "Link: https://fzls.lanzous.com/s/djc-helper")
def run(self):
    assert self.storage is not None
    try:
        self.crawl()
        self.check_error()
        if not self.error:
            self._call_callbacks(self.success_callbacks)
    except Exception as e:
        self.check_error(force=True)
        logger.info('error when crawl: %s' % self.uid)
        logger.exception(e)
def get(self, name):
    e = self.applets[name]["exec"]
    iname, ext = os.path.splitext(os.path.split(e)[-1])
    path = os.path.join(self.applets[name]["dir"], e)
    try:
        applet = imp.load_source(iname, path)
    except Exception:
        message = "Error: Could not load applet from %s. " % path
        message += "Could not import the script."
        logger.exception(message)
        return
    return applet
def run(self):
    while True:
        try:
            resp = http_client(self.url)
            # Open outside the inner try so db is always bound when
            # the finally clause closes it.
            db = shelve.open('instances.dat', 'c')
            try:
                db['data'] = eval(resp)
            finally:
                db.close()
        except Exception as e:
            logger.exception(e)
        time.sleep(300)
def stream_filter(self):
    """ Start listening based on a list of persons' names. """
    # add names to stream filter
    track_list = [data.normalize(p['name']) for p in self.persons]
    logger.debug('track_list: %s', track_list)
    print('track_list: %s' % track_list)
    while True:
        try:
            self.stream.filter(track=track_list)
        except Exception as e:
            logger.exception(e)
            time.sleep(10)
def get_instance_config():
    """ Get the configuration list of instances for checking. """
    while True:
        try:
            db = shelve.open('instances.dat', 'c')
            try:
                instances_config = list(db.items())[0][1]
            finally:
                db.close()
            for i in instances_config:
                queue_moniting.put(i)
                queue_notification.put(i)
        except Exception as e:
            logger.exception(e)
        time.sleep(300)
def _on_zmq_reply(self, stream, msg_list):
    if stream.closed():
        return
    try:
        idents, msg_list = self.session.feed_identities(msg_list)
        msg = self.session.unserialize(msg_list)
        if all(f(msg) is not False for f in self.msg_from_kernel_callbacks):
            msg["channel"] = stream.channel
            self._output_message(msg)
    except Exception as e:
        logger.exception("blanket exception in _on_zmq_reply: %s", e)
    if stream.channel == "shell" and self.kill_kernel:
        self.channels["shell"].flush()
        self.kernel_died()
def execute(self, query):
    logger.debug("Executing query: %r", query)
    best_expressions = self.get_best_expressions(query)
    entities = self.connector.get_query_entities(query)
    for expression in best_expressions:
        try:
            result_ids = expression.apply(entities, self.connector.related)
        except Exception:
            logger.exception("Exception applying expression")
            result_ids = []
        result = set(self.connector.related.get_names(result)
                     for result in result_ids)
        logger.debug("Searching for best expression, expression: %r, result: %r",
                     expression, result)
        if len(result) > 0:
            return result
    return set()
def local(db='file', folder=None, uids=None):
    global give_ups
    # Avoid a mutable default argument: the function pops from uids.
    if uids is None:
        uids = []
    create = create_cookie_file()
    fetcher = CnFetcher(account, pwd, cookie_file if not create else None)
    if create:
        fetcher.login(cookie_filename=cookie_file)
    while give_ups > 0:
        while len(tokens) == 0:
            if give_ups > 0:
                pass
            else:
                return
        token = tokens.pop()
        cb = callback(token)
        if len(uids) == 0:
            give_ups = 0
        else:
            uid = uids.pop()
        try:
            crawler = UserCrawler(uid, is_uid=True, fetcher=fetcher,
                                  fetch_fans=False, callbacks=cb, span=False)
            uid = crawler.uid
            if db == 'file' and folder is not None:
                storage = FileStorage(uid, folder)
            elif db == 'mongo':
                storage = MongoStorage(uid)
            else:
                raise ValueError('db must be "file" or "mongo"; when it is '
                                 '"file", the folder parameter must be given.')
            if storage.crawled:
                storage.complete()
                cb()
                continue
            else:
                crawler.set_storage(storage)
                crawler.start()
        except Exception as e:
            cb()
            logger.exception(e)
def finish_request(self):
    try:
        # in case kernel has already been killed
        self.application.km.end_session(self.kernel_id)
    except Exception as e:
        logger.exception("blanket exception in finish_request: %s", e)
    retval = self.zmq_handler.streams
    self.zmq_handler.on_close()
    # if the timeout is calling finish_request, the success and other
    # attributes may not be set
    retval.update(success=getattr(self, 'success', 'abort'))
    if hasattr(self, 'execute_reply'):
        retval.update(execute_reply=self.execute_reply)
    if "Origin" in self.request.headers:
        self.set_header("Access-Control-Allow-Origin",
                        self.request.headers["Origin"])
        self.set_header("Access-Control-Allow-Credentials", "true")
    self.write(retval)
    self.finish()
def resolve(self, path, solve_type=2):
    for x in range(TRIES):
        try:
            if solve_type:
                result = self._client.decode(path, type=solve_type)
            else:
                result = self._client.decode(path, 60)
            if result:
                logger.info("result from deathbycaptcha {}".format(result))
                return result
            else:
                logger.warning("None response from deathbycaptcha")
        except deathbycaptcha.AccessDeniedException:
            logger.critical("deathbycaptcha.AccessDeniedException")
            self._deathbycaptcha_client = None
        except Exception:
            logger.exception("Error from deathbycaptcha")
        time.sleep(INTERVAL)
def local(uids=None):
    # Avoid a mutable default argument: the function pops from uids.
    if uids is None:
        uids = []
    fetcher = CnFetcher()
    fetcher.login()
    connection_error = False
    while len(uids) > 0 or connection_error:
        if not connection_error:
            uid = uids.pop()
        try:
            crawler = UserCrawler(uid, fetcher)
            crawler.run()
            connection_error = False
        except URLError as e:
            logger.exception(e)
            connection_error = True
            time.sleep(10)
def _replay_boundaries(self):
    payload = {
        "video_id": self.id,
        "start": 0,
    }
    res = API.get(RECHAT, payload)
    if "errors" not in res:
        raise Exception("Expected chat replay boundary error message.")
    msg = res.get("errors")[0].get("detail")
    try:
        mat = re.match(r'-?\d+ is not between (\d+) and (\d+)', msg)
        return tuple(map(int, [mat.group(1), mat.group(2)]))
    except Exception as e:
        logger.warning(msg)
        logger.exception(e)
        raise
def get_rx_tx(self, itface):
    try:
        cmd_rx = "cat /sys/class/net/%s/statistics/rx_bytes" % itface
        cmd_tx = "cat /sys/class/net/%s/statistics/tx_bytes" % itface
        data_rx_prev = subprocess.getoutput(cmd_rx)
        data_tx_prev = subprocess.getoutput(cmd_tx)
        time.sleep(1)
        data_rx_now = subprocess.getoutput(cmd_rx)
        data_tx_now = subprocess.getoutput(cmd_tx)
        # KiB transferred over the one-second sample window
        rx = (float(data_rx_now) - float(data_rx_prev)) / 1024
        tx = (float(data_tx_now) - float(data_tx_prev)) / 1024
        # The original computed rx/tx but never returned them;
        # return the formatted pair, matching the None on error.
        return "%.2f" % rx, "%.2f" % tx
    except Exception as e:
        logger.exception(e)
        return None
def __get_color(self, color):
    if color == "active_color":
        color = "color5"
    if color in ["color%s" % i for i in range(1, 9)]:
        color = self.globals.colors[color]
    if color == "icon_average":
        color = self.__get_average_color()
    else:
        try:
            if len(color) != 7:
                raise ValueError("The string has the wrong length")
            t = int(color[1:], 16)
        except (ValueError, TypeError):
            logger.exception("Theme error: the color attribute for a theme "
                             "command should be a six digit hex string, e.g. "
                             "\"#FFFFFF\", or a dockbarx color "
                             "(\"color1\"-\"color8\").")
            color = "#000000"
    return color