def doTest1(bones):
    try:
        fp = open(thePoseBoneFile)
    except:
        log.warning("Did not find %s", thePoseBoneFile)
        return
    readBones = []
    setBones = []
    for line in fp:
        words = line.split()
        if len(words) == 0:
            continue
        elif len(words) == 1:
            bone = bones[words[0]]
            readBones.append((bone, []))
        elif len(words) == 4:
            bone = bones[words[0]]
            coords = (float(words[1]), float(words[2]), float(words[3]))
            setBones.append((bone, coords))
    fp.close()
    for bone, coords in setBones:
        bone.setRotationIndex(1, coords[0], False)
        bone.setRotationIndex(2, coords[1], False)
        bone.setRotationIndex(3, coords[2], False)
        #bone.setRotation(coords)
        angles = bone.getRotation()
    return setBones + readBones
def eye(self, options=None):
    if self.myEye is None:
        self.myEye = Eye.Eye.getInstance()
    if options is not None and len(options) >= 2:
        width, height = options[:2]
    else:
        width = height = None
    self.send_response(200)
    self.send_header('Pragma', 'no-cache')
    self.send_header('Cache-Control', 'no-cache')
    self.send_header('Content-Encoding', 'identity')  # fixed typo: was 'identify'
    self.send_header('Content-Type', 'multipart/x-mixed-replace;boundary=jpgboundary')
    self.end_headers()
    try:
        while Brain.switch:
            stream = self.myEye.getStream(width, height)
            self.send_header('Content-type', 'image/jpeg')
            self.send_header('Content-length', str(len(stream)))
            self.end_headers()
            self.wfile.write(stream)
            self.wfile.write('--jpgboundary\r\n')
            self.send_response(200)
    except IOError as e:
        if hasattr(e, 'errno') and e.errno == 32:
            log.warning('Error: broken pipe')
            self.rfile.close()
            return
        else:
            raise e
def loadProxy(human, path, type="Clothes"):
    try:
        npzpath = os.path.splitext(path)[0] + '.mhpxy'
        try:
            if not os.path.isfile(npzpath):
                log.message('compiled proxy file missing: %s', npzpath)
                raise RuntimeError('compiled proxy file missing: %s' % npzpath)
            if os.path.isfile(path) and os.path.getmtime(path) > os.path.getmtime(npzpath):
                log.message('compiled proxy file out of date: %s', npzpath)
                raise RuntimeError('compiled file out of date: %s' % npzpath)
            proxy = loadBinaryProxy(npzpath, human, type)
        except Exception as e:
            showTrace = not isinstance(e, RuntimeError)
            log.warning("Problem loading binary proxy: %s", e, exc_info=showTrace)
            proxy = loadTextProxy(human, path, type)    # TODO perhaps proxy type should be stored in .mhclo file too
            if getpath.isSubPath(npzpath, getpath.getPath()):
                # Only write compiled binary proxies to user data path
                try:
                    saveBinaryProxy(proxy, npzpath)
                except StandardError:
                    log.notice('unable to save compiled proxy: %s', npzpath, exc_info=True)
            else:
                log.debug('Not writing compiled proxies to system paths (%s).', npzpath)
    except:
        log.error('Unable to load proxy file: %s', path, exc_info=True)
        return None
    return proxy
def upgrade(self, tran, obj):
    # call update method
    try:
        self.cmd(obj).update(tran, obj)
    except:
        log.warning("Cannot execute update method on", obj.oid)
    refObj = self.new(obj.type)
    new = refObj.__dict__.keys()
    old = obj.__dict__.keys()
    changed = 0
    # change attributes
    # remove old
    for attr in old:
        if attr in new:
            #if type(getattr(obj, attr)) != type(getattr(refObj, attr)):
            #@log.debug('IObject', 'Upgrade - chng', obj.oid, obj.type, attr, type(getattr(obj, attr)), 'to', type(getattr(refObj, attr)))
            # TODO - enable
            #setattr(obj, attr, getattr(refObj, attr))
            #changed = 1
            new.remove(attr)
        else:
            log.debug('IObject', 'Upgrade - del', obj.oid, obj.type, attr)
            delattr(obj, attr)
            changed = 1
    # set new
    for attr in new:
        log.debug('IObject', 'Upgrade - new', obj.oid, obj.type, attr)
        setattr(obj, attr, getattr(refObj, attr))
        changed = 1
def descendTree(self, structure, atoms, root=None):
    """
    Descend the tree in search of the functional group node that best
    matches the local structure around `atoms` in `structure`.

    If root=None then uses the first matching top node.

    Returns None if there is no matching root.
    """
    if root is None:
        for root in self.tree.top:
            if self.matchNodeToStructure(root, structure, atoms):
                break  # We've found a matching root
        else:  # didn't break - matched no top nodes
            return None
    elif not self.matchNodeToStructure(root, structure, atoms):
        return None

    next = []
    for child in self.tree.children[root]:
        if self.matchNodeToStructure(child, structure, atoms):
            next.append(child)

    if len(next) == 1:
        return self.descendTree(structure, atoms, next[0])
    elif len(next) == 0:
        return root
    else:
        #print structure.toAdjacencyList()
        #raise InvalidDatabaseException('For structure %s, a node %s with non-mutually-exclusive children %s was encountered in tree with top level nodes %s.' % (structure.getFormula(), root, next, self.tree.top))
        logging.warning('For %s, a node %s with overlapping children %s was encountered in tree with top level nodes %s.' % (structure, root, next, self.tree.top))
        return root
def get_roll_to(head, tail, normal):
    """
    Compute the roll angle for a bone to make the bone's local x axis align
    with the specified normal.
    """
    p1 = toZisUp3(head)
    p2 = toZisUp3(tail)
    xvec = normal
    pvec = matrix.normalize(p2 - p1)
    xy = np.dot(xvec, pvec)
    yvec = matrix.normalize(pvec - xy*xvec)
    zvec = matrix.normalize(np.cross(xvec, yvec))
    mat = np.asarray((xvec, yvec, zvec), dtype=np.float32)

    try:
        assertOrthogonal(mat)
    except Exception as e:
        log.warning("Calculated matrix is not orthogonal (%s)" % e)

    quat = tm.quaternion_from_matrix(mat)
    if abs(quat[0]) < 1e-4:
        return 0
    else:
        roll = math.pi - 2*math.atan(quat[2]/quat[0])
    if roll < -math.pi:
        roll += 2*math.pi
    elif roll > math.pi:
        roll -= 2*math.pi
    return roll
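# A minimal, self-contained sketch of the basis construction step used in
# get_roll_to above, with plain numpy standing in for the project-specific
# matrix.normalize helper (an assumption; the real helper may treat
# degenerate vectors differently).
import numpy as np

def orthonormal_basis(xvec, pvec):
    """Build a right-handed orthonormal basis from a reference x axis and a
    bone direction, as get_roll_to does before extracting the quaternion."""
    xvec = xvec / np.linalg.norm(xvec)
    pvec = pvec / np.linalg.norm(pvec)
    yvec = pvec - np.dot(xvec, pvec) * xvec   # remove the x component (Gram-Schmidt)
    yvec = yvec / np.linalg.norm(yvec)
    zvec = np.cross(xvec, yvec)               # unit length by construction
    return np.asarray((xvec, yvec, zvec), dtype=np.float32)

# Example: a bone leaning into the y axis, with x as the reference normal.
print(orthonormal_basis(np.array([1.0, 0.0, 0.0]), np.array([0.5, 1.0, 0.0])))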
def _autoGuessCoordinateSystem(self):
    """
    Guesses whether this BVH rig uses a Y-up or Z-up axis system, using the
    joint offsets of this rig (longest direction is expected to be the
    height).
    Requires joints of this BVH skeleton to be initialized.
    Returns False if no conversion is needed (BVH file uses Y-up
    coordinates), returns True if BVH uses Z-up coordinates and conversion
    is needed.
    Note that the coordinate system is expected to be right-handed.
    """
    ref_joint = None
    # TODO an alternative approach is to measure the length of all bones. For humanoids the bone length is always highest in the up direction
    ref_names = ['head', 'spine03', 'spine02', 'spine01', 'upperleg02.L', 'lowerleg02.L']
    while ref_joint is None and len(ref_names) != 0:
        joint_name = ref_names.pop()
        try:
            ref_joint = self.joints[joint_name]
        except:
            try:
                ref_joint = self.joints[joint_name[0].capitalize() + joint_name[1:]]
            except:
                ref_joint = None
        if ref_joint is not None and len(ref_joint.children) == 0:
            log.debug("Cannot use reference joint %s for determining axis system, it is an end-effector (has no children)" % ref_joint.name)
            ref_joint = None
    if ref_joint is None:
        log.warning("Could not auto guess axis system for BVH file %s because no known joint name is found. Using Y up as default axis orientation." % filepath)
    else:
        tail_joint = ref_joint.children[0]
        direction = tail_joint.position - ref_joint.position
        if abs(direction[1]) > abs(direction[2]):
            # Y-up
            return False
        else:
            # Z-up
            return True
def wa_msg_received(self, message):
    store_msg(message, self.log_file)
    lines = message.msg.strip().split("\n")  # split multiline messages
    info(" <<< WA %s" % message)
    if message.chan == self.wa_phone:  # private message
        if message.target is None:
            # directed to bot itself
            nick = self.contacts[message.get_nick()]
            irc_target = self.contacts[message.nick_full.split("@")[0]]
            for line in lines:
                irc_msg = "<%s> %s" % (nick, line)
                self.irc_i.send(self.owner_nick, irc_msg)
        else:
            # directed to someone
            try:
                phone = message.get_nick()
                source_nick = self.contacts[phone]
                for line in lines:
                    msg = "<%s> %s" % (source_nick, line)
                    self.irc_i.send(message.target, msg)
            except:
                error("Couldn't relay directed WA msg to IRC")
    else:  # group message
        for line in lines:
            try:
                msg = "<%s> %s" % (self.contacts[message.get_nick()], line)
            except:
                warning("Contact not recognized")
                msg = "<%s> %s" % (message.get_nick(), line)
            try:
                self.irc_i.send(self.contacts[message.chan], msg)
            except:
                warning("Channel %s not recognized" % (message.chan))
def loadNextPlugin(self):
    alreadyLoaded = len(self.modules)
    stillToLoad = len(self.pluginsToLoad)
    self.progress(0.4 + (float(alreadyLoaded) / float(alreadyLoaded + stillToLoad)) * 0.4)

    if not stillToLoad:
        return

    path = self.pluginsToLoad.pop()
    try:
        name, ext = os.path.splitext(os.path.basename(path))
        if name not in self.settings['excludePlugins']:
            log.message('Importing plugin %s', name)
            module = imp.load_source(name, path)
            self.modules[name] = module
            log.message('Imported plugin %s', name)
            log.message('Loading plugin %s', name)
            module.load(self)
            log.message('Loaded plugin %s', name)
            self.processEvents()
        else:
            self.modules[name] = None
    except Exception, e:
        log.warning('Could not load %s', name, exc_info=True)
def p_expression_namespace(self, p):
    'expression : NAME DOT NAME'
    try:
        p[0] = self.get_ns()[p[1]][p[3]]
    except (LookupError, AttributeError):
        log.warning("ACL: Undefined name '%s.%s'" % (p[1], p[3]))
        p[0] = False
def _select(self, config, ls):
    if len(ls) != 2:
        log.warning('invalid select statement')
    else:
        r = self.macros.set_read_map(ls[1])
        log.trace('config: %s: _select: %s %s %r' % \
                      (self.name, r, ls[1], self.macros.maps()))
def _loadUuidLookup(self):
    items = [(values[1], path) for (path, values) in self._proxyFileCache.items()]
    self._proxyFilePerUuid = dict()
    for (_uuid, path) in items:
        if _uuid in self._proxyFilePerUuid and self._proxyFilePerUuid[_uuid] != path:
            log.warning("WARNING: Duplicate UUID found for different proxy files in %s library (files %s and %s share uuid %s). Make sure that all proxy files in your data folders have unique UUIDs (unless they are exactly the same file). Else this may lead to unexpected behaviour.", self.proxyName, path, self._proxyFilePerUuid[_uuid], _uuid)
        self._proxyFilePerUuid[_uuid] = path
def convertgirls(importedgirls, basefolder=''):
    log.debug('Converting %s girls' % len(importedgirls))
    templates = {}
    contentfolder = stuff.joinpath(basefolder, 'content')
    loaded = template.loadalltemplates(contentfolder, templates)
    nonbases = template.findnonbasetemplates(loaded['girlbase'])
    girls = []
    for importedgirl in importedgirls:
        girl = {}
        for nonbase in nonbases:
            girl[nonbase['name']] = template.clone(nonbase)
        #sometraits = ['Big Boobs', 'Cool Person', 'Fast o*****s', 'Quick Learner', 'Cute', 'Strong', 'Adventurer', 'Good Kisser', 'Nymphomaniac', 'Fake o****m expert', 'Sexy Air', 'Great Figure', 'Great Arse', 'Optimist', 'Fleet of Foot', 'Tough', 'Charismatic', 'Charming', 'Long Legs', 'Cool Scars', 'Nerd', 'Aggressive', 'Assassin', 'Nervous', 'Elegant', 'M*******t', 'Meek', 'Merciless', 'Iron Will', 'Dependant', 'Eye Patch', 'Perky Nipples', 'Clumsy', 'Lolita', 'Puffy Nipples', 'Sadistic', 'Fearless', 'Psychic', 'Strong Magic', 'Small Boobs', 'Sterile', 'Construct', 'Strange Eyes', 'Slow Learner', 'Tsundere', 'MILF', 'Twisted', 'Slow o*****s', 'Not Human', 'Yandere', 'Lesbian', 'Abnormally Large Boobs', 'Mind F****d', 'Fragile', 'Pessimist', 'Incorporial', 'Broken Will']
        metastats = ['Name', 'Desc']
        basestats = ['Confidence', 'Constitution', 'Obedience', 'Charisma', 'Beauty', 'Libido', 'Spirit', 'Age', 'Intelligence', 'Agility']
        skillstats = ['Service', 'Magic', 'Combat']
        sexstats = ['NormalSex', 'Group', 'BDSM', 'Beastiality', 'Strip', 'Anal', 'Lesbian']
        tempstats = ['Health', 'PCHate', 'PCFear', 'PCLove', 'Tiredness', 'Happiness', 'Fame']
        ignoredstats = ['Status', 'Level', 'Exp', 'Gold', 'House', 'AskPrice', 'Mana', 'Traits']
        fullstats = sum([metastats, basestats, skillstats, sexstats, tempstats], [])
        for stat in fullstats:
            if stat in importedgirl:
                if not stat in girl:
                    log.warning('Could not find stat %s' % stat)
                    continue
                value = importedgirl[stat]
                if girl[stat]['valuetype'] == 'int':
                    value = int(value)
                girl[stat]['value'] = value
        girl['Traits'] = []
        if 'Traits' in importedgirl:
            girl['Traits'] = importedgirl['Traits']
        #stuff.dump('temp.yaml', girl)
        girls.append(girl)
    return girls
def _process_data(self, results, directive, info, data):
    new_data = []
    for l in results[1]:
        if l.startswith('%error'):
            l = self._expand(l)
            raise error.general('config error: %s' % (l[7:]))
        elif l.startswith('%warning'):
            l = self._expand(l)
            log.stderr('warning: %s' % (l[9:]))
            log.warning(l[9:])
        if not directive:
            l = self._expand(l)
            ls = self.tags.split(l, 1)
            log.trace('config: %s: _tag: %s %s' % (self.name, l, ls))
            if len(ls) > 1:
                info = ls[0].lower()
                if info[-1] == ':':
                    info = info[:-1]
                info_data = ls[1].strip()
            else:
                info_data = ls[0].strip()
            if info is not None:
                self._info_append(info, info_data)
            else:
                log.warning("invalid format: '%s'" % (info_data[:-1]))
        else:
            log.trace('config: %s: _data: %s %s' % (self.name, l, new_data))
            new_data.append(l)
    return (directive, info, data + new_data)
def getLongpoll(self, mode=2):
    if not self.longpoll_server:
        self.initLongpoll()
    url = 'https://{}?act=a_check&key={}&ts={}&wait=25&mode={}'.format(
        self.longpoll_server, self.longpoll_key, self.longpoll_ts, mode)
    try:
        json_string = urllib.request.urlopen(url, timeout=30).read()
    except urllib.error.HTTPError as e:
        log.warning('longpoll http error ' + str(e.code))
        return []
    except OSError as e:
        log.warning('longpoll failed ({})'.format(e))
        time.sleep(1)
        return []
    data_array = json.loads(json_string.decode('utf-8'))
    if self.logging:
        with open('inf.log', 'a') as f:
            print('[{}]\nlongpoll request: {}\nresponse: {}\n'.format(
                time.strftime(log.datetime_format, time.localtime()),
                url, json.dumps(data_array)), file=f)
    if 'ts' in data_array:
        self.longpoll_ts = data_array['ts']
    if 'updates' in data_array:
        return data_array['updates']
    elif data_array['failed'] != 1:
        self.initLongpoll()
        return []
    else:
        return self.getLongpoll(mode)
def findProxyMetadataByFilename(self, path):
    """
    Retrieve proxy metadata by canonical path from the metadata cache.
    Returns None or metadata in the form: (mtime, uuid, tags)
    """
    proxyId = getpath.canonicalPath(path)

    if self._filecache is None:
        # Init cache
        self.loadCache()
        self.updateFileCache(self.getSearchPaths(), self.getFileExtensions(), True)
    if self._proxyFilePerUuid is None:
        self._loadUuidLookup()

    if proxyId not in self._filecache:
        # Try again once more, but update the metadata cache first (lazy cache for performance reasons)
        self.updateFileCache(self.getSearchPaths(), self.getFileExtensions(), True)
        self._loadUuidLookup()
        if proxyId not in self._filecache:
            log.warning('Could not get metadata for proxy with filename %s. Does not exist in %s library.', proxyId, self.proxyName)
            return None
    metadata = self._filecache[proxyId]
    mtime = metadata[0]

    if mtime < os.path.getmtime(proxyId):
        # Queried file was updated, update stale cache
        self.updateFileCache(self.getSearchPaths(), self.getFileExtensions(), True)
        self._loadUuidLookup()
        metadata = self._filecache[proxyId]

    return metadata
def log(self, signal):
    # Prepare
    data_to_send = {'key': self.config['data']['key']}
    if isinstance(signal, dict):
        for alias in signal:
            try:
                data_to_send[self.mapping[alias].config['out']] = signal[alias]
            except KeyError:
                pass
    else:
        try:
            single_field = self.config['data']['out']
        except KeyError:
            single_field = 'field1'  # default field
        data_to_send[single_field] = signal
    # Send
    connection = http_client.HTTPConnection("api.thingspeak.com:80")
    connection.request(
        "POST", "/update", urlencode(data_to_send),
        {"Content-type": "application/x-www-form-urlencoded",
         "Accept": "text/plain"})
    # Response
    response = connection.getresponse()  # response.status, response.reason
    if response.status != 200:
        log.warning('%s cannot upload data, response: %d "%s"' % (self, response.status, response.reason))
def run(self):
    if self.qpkg_dir is None:
        error('Cannot find QNAP/changelog anywhere!')
        error('Are you in the source code tree?')
        return -1

    # read ~/.qdkrc
    qdkrc = QDKrc()
    cfg_user = qdkrc.config['user']

    control = ControlFile(self.qpkg_dir)
    changelog = ChangelogFile(self.qpkg_dir)
    kv = {'package_name': control.source['source']}
    if self._args.message is not None:
        kv['messages'] = self._args.message
    if self._args.version is not None:
        kv['version'] = self._args.version
    kv['author'] = cfg_user['name'] if self.author is None else self.author
    kv['email'] = cfg_user['email'] if self.email is None else self.email

    if len(kv['author']) == 0 or len(kv['email']) == 0:
        warning('Environment variable QPKG_NAME or QPKG_EMAIL is empty')
        info('QPKG_NAME: ' + kv['author'])
        info('QPKG_EMAIL: ' + kv['email'])
        yn = raw_input('Continue? (Y/n) ')
        if yn.lower() == 'n':
            return 0
        kv['author'] = 'noname'
        kv['email'] = '*****@*****.**'
    entry = changelog.format(**kv)
    editor = Editor()
    editor.insert_content(entry)
    editor.open(changelog.filename)
    return 0
def run(self):
    rate = Rate()
    nav = None

    # Wait for a command.
    while True:
        if (not nav or not self.slam_controller.empty()):
            command = self.slam_controller.get()

            if "run" in command.keys():
                if command["run"]:
                    log.info("Starting SLAM navigation...")
                    nav = slam.Slam(command["x"], command["y"], command["theta"])
                    # Notify anyone waiting that we're ready.
                    for pipe in self.pipes:
                        pipe.send({"ready": True})
                else:
                    log.info("Stopping SLAM navigation...")
                    nav = None
            else:
                if nav:
                    if "driving" in command.keys():
                        if command["driving"]:
                            log.debug("Got started driving hook.")
                            print "Started driving: " + str(command["position"])
                            nav.started_driving(command["position"], command["timestamp"])
                        else:
                            log.debug("Got stopped driving hook.")
                            print "Stopped driving: " + str(command["position"])
                            nav.stopped_driving(command["position"], command["timestamp"])
                    elif "displacement" in command.keys():
                        if command["displacement"]:
                            log.debug("Displacement requested.")
                            # Send our displacement.
                            pipe = getattr(self, command["program"])
                            pipe.send({"displacement": nav.get_displacement()})
                    elif "reset" in command.keys():
                        if command["reset"] == "position":
                            # Reset the robot position.
                            log.info("Resetting the robot's position.")
                            nav.reset_position()
                        elif command["reset"] == "bearing":
                            # Reset the robot bearing.
                            log.info("Resetting the robot's bearing.")
                            nav.reset_bearing()
                else:
                    log.warning("Not running command when SLAM isn't running.")

        if nav:
            # Run the Kalman filter periodically.
            rate.rate(0.5)
            print "Updating position..."
            nav.update_position()
            print "Done!"
def imagefind(path):
    log.debug('Finding images')
    images = {}
    if not stuff.folderexists(path):
        log.warning('Folder not found: %s' % path)
        return images
    IMAGE_TYPES = ['anal', 'bdsm', 'beast', 'bunny', 'combat', 'death', 'ecchi', 'group', 'les', 'maid', 'mast', 'nude', 'oral', 'preg', 'profile', 'sex', 'sing', 'strip', 'titty', 'wait']
    IMAGE_TYPES.extend(['preg' + imagetype for imagetype in IMAGE_TYPES])
    # Sort by length so the longest matching prefix wins below
    IMAGE_TYPES.sort(key=len, reverse=True)
    girlfolders = stuff.findfolders(path)
    templist = []
    for girlfolder in girlfolders:
        girlname = stuff.filename(girlfolder)
        images[girlname] = girlimages = {}
        imagefiles = stuff.findfiles('*.*', girlfolder)
        for imagefile in imagefiles:
            ignoredtypes = ['.db', '.txt']
            ext = stuff.fileext(imagefile)
            if ext in ignoredtypes:
                continue
            imagename = stuff.filename(imagefile, ext=False).lower()
            found = False
            for imagetype in IMAGE_TYPES:
                if imagename.startswith(imagetype):
                    if not imagetype in girlimages:
                        girlimages[imagetype] = []
                    girlimages[imagetype].append(imagefile)
                    found = True
                    break
            if not found:
                log.debug('Could not find image type for %s' % imagefile)
    return images
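# The sort-by-length in imagefind above is what makes the longest matching
# prefix win; a tiny self-contained illustration with made-up type names:
types = ['preg', 'sex', 'pregsex']
types.sort(key=len, reverse=True)
name = 'pregsex03'
match = next((t for t in types if name.startswith(t)), None)
print(match)  # 'pregsex' -- without the sort, 'preg' could match first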
def install(self):
    debug(self._label + "install files")
    src_install = pjoin(Settings.CONTROL_PATH, self._package["package"] + ".install")
    if not pexists(src_install):
        warning("Missing: " + src_install)
        return
    try:
        lineno = 0
        with open(src_install) as fin:
            for line in fin:
                lineno += 1
                src, dst = line.strip().split(" ", 1)
                dst = dst.strip()
                if dst.startswith("/"):
                    dst = "." + dst
                dst = pjoin(self._env["QPKG_DEST_DATA"], dst)
                if not pexists(dst):
                    makedirs(dst)
                src_files = glob(src)
                if not src_files:
                    raise FileSyntaxError(src_install, lineno, "`{}` not found".format(src))
                for fn in src_files:  # reuse the glob result computed above
                    try:
                        sp.check_call(["cp", "-a", fn, dst])
                    except sp.CalledProcessError as e:
                        warning("Error in copy files: {}".format(e))
                        return -1
    except ValueError:
        raise FileSyntaxError(src_install, lineno, line)
def downloadComicImage(self, imageUrl, prefix="", comicDir='./Comics/ComicName'):
    """Download the image at the specified url to a local directory.

    :param imageUrl: The url for the image
    :param prefix: The prefix used when saving the file to the local directory
    :param comicDir: The local directory to save the image to
    """
    # Open a stream to the image
    r = requests.get(imageUrl, stream=True)
    imageName = prefix + imageUrl.split('/')[-1]
    # Check if the stream was opened successfully
    if r.status_code == 200:
        # Create the local directory if it does not exist
        if not os.path.exists(comicDir):
            os.makedirs(comicDir)
        # join the path properly; plain concatenation dropped the separator
        localImageFile = os.path.join(comicDir, imageName)
        # Skip the file if it already exists
        if not os.path.isfile(localImageFile):
            # Write the chunks of the image data from the stream to the local file
            with open(localImageFile, 'wb') as newImage:
                for chunk in r.iter_content(1024):
                    newImage.write(chunk)
        else:
            log.warning("File {0} already exists. Skipping download.".format(localImageFile))
def munge_file(file, munges):
    """
    apply a list of munges to `file' and return a path to a file to read in
    """
    if not isinstance(munges, list):
        munges = map(operator.methodcaller("strip"), munges.split(","))

    if file is None:
        log.warning("Failed to munge file (was given None!)")
        return None

    # The first thing we do is make a copy of the input file.
    # This means we can safely delete any file that is used
    # as input or output.
    nomunge = munge.NoMunge()
    tmpfile = nomunge.perform(file)

    log.debug("input file: %s" % file.encode("utf-8"))
    log.debug("tmp copy: %s" % tmpfile)

    for m in munges:
        log.debug("performing %s" % (m))
        cls = munge.munge_classes[m]
        inst = cls()
        newfile = inst.perform(tmpfile)
        remove_file(tmpfile)
        tmpfile = newfile

    # tmpfile equals newfile after the loop and is also defined when the
    # munge list is empty (returning newfile there raised NameError)
    return tmpfile
def processIpRequest(self, line):
    allocator = self.allocator
    try:
        parts = line.split(" ", 2)
        if len(parts) != 2:
            error("got invalid line [%s]" % (line))
            self.transport.loseConnection()
            return
        domain, email_address = parts
        details = self.allocator.getIpAddress(domain, email_address)
        if details:
            info("Got %s for %s on %s" % (details, email_address, domain))
            self.transport.write("%s %s\n" % (details.getIpAddress(), details.getHeloHost()))
            self.email_address = email_address
            self.domain = domain
            self.ip_address = details
            self.state = STATE_IP_SENT
        else:
            warning("No available ip address for %s on %s" % (email_address, domain))
            self.transport.loseConnection()
            self.state = STATE_COMPLETED
    except Exception as e:
        exception(e)
        self.transport.loseConnection()  # fixed typo: was looseConnection()
def getMetadata(self, filename):
    """Retrieves the metadata of a specified file.
    Updates the cache if needed.
    """
    if self._filecache is None:
        # Init cache
        self.loadCache()
        self.updateFileCache(self.getSearchPaths(), self.getFileExtensions(), False)
    fileId = getpath.canonicalPath(filename)
    if fileId not in self._filecache._cache:
        # Lazily update cache
        self.updateFileCache(self.getSearchPaths() + [os.path.dirname(fileId)], self.getFileExtensions(), False)
    if fileId in self._filecache:
        metadata = self._filecache[fileId]
        if metadata is not None:
            mtime = metadata[0]
            metadata = metadata[1:]
            if mtime < os.path.getmtime(self.getMetadataFile(fileId)):
                # Queried file was updated, update stale cache
                self.updateFileCache(self.getSearchPaths() + [os.path.dirname(fileId)], self.getFileExtensions(), False)
                metadata = self._filecache[fileId]
                mtime = metadata[0]
                metadata = metadata[1:]
        return metadata
    else:
        log.warning('Could not get metadata for file %s. Does not exist in cache.', filename)
        return None
def save(self, path):
    log.debug('Saving scene file: %s', path)
    try:
        hfile = open(path, 'wb')
    except IOError as e:
        log.warning('Could not save %s: %s', path, e[1])
        return False
    except Exception as e:
        log.error('Failed to save scene file %s\nError: %s\n', path, repr(e), exc_info=True)
        return False
    else:
        try:
            pickle.dump(mhscene_version, hfile, protocol=2)
            self.environment.save(hfile)
            pickle.dump(len(self.lights), hfile, protocol=2)
            for light in self.lights:
                light.save(hfile)
        except Exception as e:
            log.error('Failed to save scene file %s\nError: %s\n', path, repr(e), exc_info=True)
            hfile.close()
            return False
    hfile.close()
    self.file.saved(path)
    return True
def kill_child(self, pid, sig=signal.SIGHUP):
    try:
        os.kill(pid, sig)
        return True
    except OSError:
        log.warning('ERROR: child pid %s does not exist', pid)
        return False
def irc_msg_received(self, message):
    store_msg(message, self.log_file)
    info(" <<< IRC %s" % message)
    if message.chan == self.irc_nick:
        if message.target is None:
            raise Exception("Target not specified. Please prefix your private messages with a nickname (e.g. 'person1: hello') or phone number (e.g. '+34555555373: hello')")
        wa_target = message.target
        if message.target not in self.contacts:
            try:
                wa_target = self.get_wa_id_from_name(self.contacts, message.target)  # get by nick
            except KeyError:
                if not wa_target.isdigit():
                    raise Exception("Whatsapp identifier '%s' not found in contact list, and does not look like a phone number" % message.target)
                warning("Phone number '%s' not found in contact list. Trying to send anyway..." % message.target)
        wa_target += "@s.whatsapp.net"
        msg = "<%s> %s" % (message.get_nick(), message.msg)
        self.wa_m.send(wa_target, msg)
    else:
        msg = "<%s> %s" % (message.get_nick(), message.msg)
        try:
            group = self.get_wa_id_from_name(self.contacts, message.chan)
            self.wa_m.send(group, msg)
        except Exception, e:
            error("Cannot send message to channel %s: %s" % (message.chan, e))
def protocol_send(self, addr, *args):
    sp = xmlrpclib.ServerProxy('http://' + addr)
    try:
        sp.HGWCollector.receiveData(*args)
    except Exception, e:
        log.warning('delivery failure: %s' % str(e))
        return False
def _undefine(self, config, ls):
    if len(ls) <= 1:
        log.warning("invalid macro definition")
    else:
        mn = self._label(ls[1])
        if mn in self.macros:
            del self.macros[mn]
def getTags(self, uuid=None, filename=None):
    """
    Get tags associated with proxies.
    When no uuid and filename are specified, returns all the tags found in
    this collection (all proxy files managed by this library).
    Specify a filename or uuid to get all tags belonging to that proxy file.
    Always returns a set of tags (so contains no duplicates), unless no
    proxy was found, in which case None is returned.
    An empty library (no proxies) or a library where no proxy file contains
    tags will always return an empty set.
    """
    if uuid and filename:
        raise RuntimeWarning("getTags: Specify either uuid or filename, not both!")
    if uuid:
        proxyFile = self.findProxyByUuid(uuid)
        if not proxyFile:
            log.warning('Could not get tags for proxy with UUID %s. Does not exist in %s library.', uuid, self.proxyName)
            return set()
        return filecache.MetadataCacher.getTags(self, proxyFile)  # was missing the return
    elif filename:
        return filecache.MetadataCacher.getTags(self, filename)
    else:
        return self.getAllTags()
def loadProxy(human, path, type="Clothes"):
    try:
        npzpath = os.path.splitext(path)[0] + '.mhpxy'
        asciipath = os.path.splitext(path)[0] + getAsciiFileExtension(type)
        try:
            if not os.path.isfile(npzpath):
                log.message('compiled proxy file missing: %s', npzpath)
                raise RuntimeError('compiled proxy file missing: %s' % npzpath)
            if os.path.isfile(asciipath) and os.path.getmtime(asciipath) > os.path.getmtime(npzpath):
                log.message('compiled proxy file out of date: %s', npzpath)
                raise RuntimeError('compiled file out of date: %s' % npzpath)
            proxy = loadBinaryProxy(npzpath, human, type)
        except Exception as e:
            showTrace = not isinstance(e, RuntimeError)
            log.warning("Problem loading binary proxy: %s", e, exc_info=showTrace)
            proxy = loadTextProxy(human, asciipath, type)    # TODO perhaps proxy type should be stored in .mhclo file too
            if getpath.isSubPath(npzpath, getpath.getPath()):
                # Only write compiled binary proxies to user data path
                try:
                    log.message('Compiling binary proxy file %s', npzpath)
                    saveBinaryProxy(proxy, npzpath)
                except Exception:
                    log.notice('unable to save compiled proxy: %s', npzpath, exc_info=True)
                    if os.path.isfile(npzpath):
                        # Remove file again, in case an empty file is left
                        try:
                            os.remove(npzpath)
                        except Exception as e:
                            log.warning("Could not remove empty file %s that was left behind (%s).", npzpath, e)
            else:
                log.debug('Not writing compiled proxies to system paths (%s).', npzpath)
    except:
        log.error('Unable to load proxy file: %s', path, exc_info=True)
        return None
    return proxy
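# Both loadProxy variants above hinge on the same mtime-based staleness test;
# a minimal standalone sketch of that idea (the file names are hypothetical):
import os

def compiled_is_stale(source_path, compiled_path):
    """True if the compiled file is missing or older than its source."""
    if not os.path.isfile(compiled_path):
        return True
    return os.path.isfile(source_path) and \
        os.path.getmtime(source_path) > os.path.getmtime(compiled_path)

print(compiled_is_stale('proxy.mhclo', 'proxy.mhpxy'))  # True when no compiled file exists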
def _list_plugins_from_nexus(nexusUrl, nexusRepo):
    if nexusRepo is None or nexusRepo == '':
        raise XmakeException('cannot list external plugins from nexus. Repository is not set')

    plugins = set()
    # http://nexus.wdf.sap.corp:8081/nexus/service/local/repositories/build.snapshots/content/com/sap/prd/xmake/buildplugins/
    url = '{}/nexus/service/local/repositories/{}/content/com/sap/prd/xmake/buildplugins'.format(nexusUrl, nexusRepo)
    try:
        # Download xml file in temporary directory
        with contextlib.closing(urllib.urlopen(url, proxies={})) as downloadedFile:
            xmlPluginContent = downloadedFile.read()
            log.debug('\texternal plugin list downloaded')

            # Parse xml file to get the list of external plugins
            root = ET.fromstring(xmlPluginContent)
            if root is None:
                raise Exception('cannot extract the plugin list from nexus xml file downloaded')

            data = root.find('data')
            for item in data.findall('content-item'):
                plugins.add(item.find('text').text)
            log.debug('\tplugins found: {}'.format(', '.join(plugins).upper()))
            return plugins
    except Exception as e:
        log.exception(e)
        log.warning('\tcannot list plugins from {}'.format(url))
        return set()
def _process_data(self, results, directive, info, data):
    log.trace('config: %s: %3d: _process_data: result=#%r# directive=#%s# info=#%r# data=#%r#' % \
                  (self.name, self.lc, results, directive, info, data))
    new_data = []
    for l in results[1]:
        if l.startswith('%error'):
            l = self._expand(l)
            raise error.general('config error: %s' % (l[7:]))
        elif l.startswith('%log'):
            l = self._expand(l)
            log.output(l[4:])
        elif l.startswith('%warning'):
            l = self._expand(l)
            log.warning(l[9:])
        if not directive:
            l = self._expand(l)
            ls = self.tags.split(l, 1)
            log.trace('config: %s: %3d: _tag: %s %s' % (self.name, self.lc, l, ls))
            if len(ls) > 1:
                info = ls[0].lower()
                if info[-1] == ':':
                    info = info[:-1]
                info_data = ls[1].strip()
            else:
                info_data = ls[0].strip()
            if info is not None:
                self._info_append(info, info_data)
            else:
                log.warning("invalid format: '%s'" % (info_data[:-1]))
        else:
            l = self._expand(l)
            log.trace('config: %s: %3d: _data: %s %s' % (self.name, self.lc, l, new_data))
            new_data.append(l)
    return (directive, info, data + new_data)
def createShader(file, type, defines=[], defineables=None):
    with io.open(file, 'r', encoding='utf-8') as f:
        source = f.read()

    if "#version" not in source:
        log.warning("The shader source in %s does not contain an explicit GLSL version declaration. This could cause problems with some compilers.", file)

    if defineables is not None:
        for d in Shader._getDefineables(source):
            if d not in defineables:
                defineables.append(d)

    if defines:
        # Add #define instructions for shader preprocessor to enable extra
        # shader features at compile time
        firstComments, code = Shader._splitVersionDeclaration(source)
        defineLines = "\n".join(["#define " + define for define in defines])
        source = "\n".join([firstComments, defineLines, code])

    shader = glCreateShader(type)
    glShaderSource(shader, source)
    glCompileShader(shader)

    if not glGetShaderiv(shader, GL_COMPILE_STATUS):
        logmsg = glGetShaderInfoLog(shader)
        log.error("Error compiling shader: %s", logmsg)
        return None

    return shader
async def on_message(self, message: discord.Message):
    self.message_count += 1
    if not self.active:
        return  # An external interface like the server can reenable this. (young-amateurs-rc/arbys modules/server.py)
    for func in self._message_handlers:
        try:
            await func(message)
        except Exception:
            log.warning("Ignoring exception in message coroutine (see stack trace below)", include_exception=True)
    is_cmd, this_prefix = prefix.check_bot_prefix(message.content, self.prefixes)
    if is_cmd:
        command = message.content[len(this_prefix):]
        known_cmd, run_by = prefix.check_command_prefix(command, list(self._command_lookup.keys()))
        if not known_cmd:
            # unknown command branch
            await message.channel.send(self.unknown_command)
            return
        await self._command_lookup[run_by](command, message)
def save(self, path=None):
    if path is None:
        if self.path is None:
            log.notice('Cannot save scene as it is not associated with any file. Please supply a path')
            return False
        else:
            path = self.path
    log.debug('Saving scene file: %s', path)
    try:
        hfile = open(path, 'wb')
    except IOError as e:
        log.warning('Could not save %s: %s', path, e[1])
        return False
    except Exception as e:
        log.error('Failed to save scene file %s\nError: %s\n', path, repr(e), exc_info=True)
        return False
    else:
        try:
            pickle.dump(mhscene_version, hfile, protocol=2)
            self.environment.save(hfile)
            pickle.dump(len(self.lights), hfile, protocol=2)
            for light in self.lights:
                light.save(hfile)
        except Exception as e:
            log.error('Failed to save scene file %s\nError: %s\n', path, repr(e), exc_info=True)
            hfile.close()
            return False
    hfile.close()
    self.path = path
    self.unsaved = False
    return True
def _hash_check(file_, absfile, macros, remove=True):
    failed = False
    hash = sources.get_hash(file_.lower(), macros)
    if hash is not None:
        hash = hash.split()
        if len(hash) != 2:
            raise error.internal('invalid hash format: %s' % (file_))
        try:
            hashlib_algorithms = hashlib.algorithms
        except:
            hashlib_algorithms = ['md5', 'sha1', 'sha224', 'sha256', 'sha384', 'sha512']
        if hash[0] not in hashlib_algorithms:
            raise error.general('invalid hash algorithm for %s: %s' % (file_, hash[0]))
        hasher = None
        _in = None
        try:
            hasher = hashlib.new(hash[0])
            _in = open(path.host(absfile), 'rb')
            hasher.update(_in.read())
        except IOError as err:
            log.notice('hash: %s: read error: %s' % (file_, str(err)))
            failed = True
        except:
            msg = 'hash: %s: error' % (file_)
            log.stderr(msg)
            log.notice(msg)
            if _in is not None:
                _in.close()
            raise
        if _in is not None:
            _in.close()
        log.output('checksums: %s: %s => %s' % (file_, hasher.hexdigest(), hash[1]))
        if hasher.hexdigest() != hash[1]:
            log.warning('checksum error: %s' % (file_))
            failed = True
        if failed and remove:
            log.warning('removing: %s' % (file_))
            if path.exists(absfile):
                try:
                    os.remove(path.host(absfile))
                except IOError as err:
                    raise error.general('hash: %s: remove: %s' % (absfile, str(err)))
                except:
                    raise error.general('hash: %s: remove error' % (file_))
        if hasher is not None:
            del hasher
    else:
        if version.released():
            raise error.general('%s: no hash found in released RSB' % (file_))
        log.warning('%s: no hash found' % (file_))
    return not failed
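# A standalone sketch of the verification idea in _hash_check, using only
# hashlib; the path and expected digest below are hypothetical placeholders.
import hashlib

def verify_checksum(path, algorithm, expected_hexdigest, chunk_size=65536):
    hasher = hashlib.new(algorithm)
    with open(path, 'rb') as f:
        for chunk in iter(lambda: f.read(chunk_size), b''):
            hasher.update(chunk)  # hash in chunks to bound memory use
    return hasher.hexdigest() == expected_hexdigest

# verify_checksum('some.tar.gz', 'sha256', 'abc123...')  # -> True/False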
def fixperms(self):
    debug(self._label + 'fixperms')
    data_root = self._env['QPKG_DEST_DATA']
    bin_path = ['bin', 'sbin', 'usr/bin', 'usr/sbin',
                'usr/local/bin', 'usr/local/sbin', 'etc/init.d']
    for root, dirs, files in walk(data_root):
        fixperm = False
        if root in [pjoin(data_root, d) for d in bin_path]:
            chmod(root, 0755)
            fixperm = True
        for f in files:
            if isfile(pjoin(root, f)):
                # check setuid/setgid bits permissions
                fstat = os.stat(pjoin(root, f))
                if fstat.st_mode & stat.S_ISUID:
                    warning('{} has setuid attribute'.format(pjoin(root[len(data_root):], f)))
                if fstat.st_mode & stat.S_ISGID:
                    warning('{} has setgid attribute'.format(pjoin(root[len(data_root):], f)))
                if fixperm:
                    chmod(pjoin(root, f), fstat.st_mode | 0755)  # octal 0755; the decimal 755 was a bug
def run(self):
    """Sender thread main loop.

    Runs continuously: receives and discards data coming from the client,
    and sends packets from data_queue to the client. When data_queue grows
    too long, the oldest packets are dropped.
    """
    log.info('sender thread %d: start, %s' % (self.sender_id, self.address))
    while self.running:
        try:
            # Drop old data. Queue.empty() only reports emptiness, so the
            # backlog is drained explicitly here (the original called
            # self.data_queue.empty(), which discarded nothing).
            while self.data_queue.qsize() > 10:
                self.data_queue.get_nowait()
                self.data_queue.task_done()
            # send data
            data = self.data_queue.get(timeout=1)
            try:
                self.client_socket.settimeout(5)
                self.client_socket.sendall(data)
                self.send_count += 1
                self.data_queue.task_done()
            except ValueError as e:
                log.warning('sender thread %d ValueError: %s' % (self.sender_id, e))
            # receive and discard useless data
            try:
                self.client_socket.settimeout(0.1)
                self.client_socket.recv(256)
            except socket.timeout:
                pass
        except queue.Empty:
            pass
        except Exception as e:
            log.error('sender thread %d error: %s' % (self.sender_id, e))
            self.running = False
    self.disconnect()
    log.info('sender thread %d: bye' % self.sender_id)
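# Why the drain fix above: queue.Queue.empty() only *reports* emptiness, it
# does not discard anything. A minimal demonstration of the explicit drain
# idiom used in the corrected loop:
import queue

q = queue.Queue()
for i in range(5):
    q.put(i)
while q.qsize() > 2:   # drop the oldest items until only 2 remain
    try:
        q.get_nowait()
        q.task_done()
    except queue.Empty:
        break
print(q.qsize())  # 2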
def start():
    try:
        while run:
            try:
                for dweet in dweepy.listen_for_dweets_from('315novusledwall'):
                    try:
                        dweet = json.dumps(dweet["content"])
                    except:
                        log.warning(file, "could not convert to JSON and extract contents")
                    try:
                        r = requests.post("http://localhost:321", data=dweet)
                    except:
                        log.warning(file, "Could not post to HTTPserver")
                    if (run == False):
                        log.warning(file, "breaking")
                        break
                    log.info(file, dweet)
            except:
                log.warning(file, "Failed to get dweet")
    except KeyboardInterrupt:
        log.warning(file, '\ngoogleAssistant: Interrupt detected')
def autoScaleBVH(self, bvh_file):
    """
    Auto scale BVH translations by comparing upper leg length
    """
    import numpy.linalg as la
    COMPARE_BONE = "upperleg02.L"
    if COMPARE_BONE not in bvh_file.joints:
        raise RuntimeError('Failed to auto scale BVH file %s, it does not contain a joint for "%s"' % (bvh_file.name, COMPARE_BONE))
    bvh_joint = bvh_file.joints[COMPARE_BONE]
    bone = self.human.getSkeleton().getBoneByReference(COMPARE_BONE)
    if bone is not None:
        joint_length = la.norm(bvh_joint.children[0].position - bvh_joint.position)
        scale_factor = bone.length / joint_length
        log.message("Scaling BVH file %s with factor %s" % (bvh_file.name, scale_factor))
        bvh_file.scale(scale_factor)
    else:
        log.warning("Could not find bone or bone reference with name %s in skeleton %s, cannot auto resize BVH file %s", COMPARE_BONE, self.human.getSkeleton().name, bvh_file.name)
def findProxyByUuid(self, uuid):
    """
    Find a proxy file in this library by UUID.
    Proxy files can only be found if they are in the file metadata cache.
    Returns the path of the proxy file if it is found, else returns None.
    The returned path is a canonical path name.
    """
    if self._proxyFileCache is None:
        self.loadProxyFileCache()
    if self._proxyFilePerUuid is None:
        self._loadUuidLookup()
    if uuid not in self._proxyFilePerUuid:
        # Try again once more, but update the proxy UUID lookup table first (lazy cache for performance reasons)
        self.updateProxyFileCache()
        self._loadUuidLookup()
        if uuid not in self._proxyFilePerUuid:
            log.warning('Could not find a proxy with UUID %s. Does not exist in %s library.', uuid, self.proxyName)
            return None
    return self._proxyFilePerUuid[uuid]
def loadShapeKeys(tmplName):
    tmpl = open(tmplName, "rU")
    if tmpl is None:
        log.warning("Cannot open template %s" % tmplName)
        return []
    targets = []
    for line in tmpl:
        lineSplit = line.split()
        if len(lineSplit) == 0:
            pass
        elif lineSplit[0] == 'ShapeKey':
            morph = {}
            targets.append((lineSplit[1], morph))
        elif lineSplit[0] == 'wv':
            v = int(lineSplit[1])
            x = float(lineSplit[2])
            y = float(lineSplit[3])
            z = float(lineSplit[4])
            morph[v] = [x, y, z]
    tmpl.close()
    return targets
def _get_watermark(_request, text: domain.Text, watermark: str, share: bool):
    referrer = _request.environ.get('HTTP_REFERER', "").lower()
    agent = _request.environ.get('HTTP_USER_AGENT', "").lower()
    log.debug("Referrer=%r Agent=%r", referrer, agent)

    if not text:
        log.debug("Watermark disabled (no text)")
        if watermark:
            return None, False
        else:
            return None, True

    if watermark == 'none':
        if share:
            log.debug("Watermark disabled (share=true)")
            return None, True
        for option in current_app.config['WATERMARK_OPTIONS']:
            for identity in (referrer, agent):
                if option and identity and option in identity:
                    log.debug(f"Watermark disabled ({option} in {identity})")
                    return None, True
        log.warning("Request does not support unmarked images")
        return None, False

    if watermark and watermark not in current_app.config['WATERMARK_OPTIONS']:
        log.warning("Unsupported custom watermark: %r", watermark)
        return watermark, False

    if watermark:
        log.debug("Using custom watermark: %r", watermark)
        return watermark, True

    default = current_app.config['WATERMARK_OPTIONS'][0]
    log.debug("Using default watermark: %r", default)
    return default, True
def metadata_from_file(relpath, basedir, tracknrandtitlere, postprocessors):
    """ create song metadata from given file with relative (to basedir) path
    relpath, applying the given list of postprocessors """

    path = os.path.normpath(os.path.join(basedir, relpath))
    if not os.access(path, os.R_OK):
        raise IOError("cannot read song")

    md = song_metadata()
    md.size = os.stat(path).st_size
    md.type = gettype(os.path.splitext(path)[1])
    read_path_metadata(md, relpath, tracknrandtitlere)

    try:
        metadatadecoder = getmetadatadecoder(md.type)
    except:
        raise RuntimeError("Support for %s songs not enabled" % md.type)

    try:
        log.debug("reading metadata for %r" % path)
        metadatadecoder(md, path)
        log.debug("metadata for %r read successfully" % path)
    except:
        log.warning("could not read metadata for %r" % path)
        log.debug_traceback()

    # strip leading and trailing whitespace
    if md.title:
        md.title = md.title.strip()
    if md.artist:
        md.artist = md.artist.strip()
    if md.album:
        md.album = md.album.strip()

    if md.length is None:
        log.warning("could not read length of song %r" % path)
        raise RuntimeError("could not read length of song %r" % path)

    for postprocessor_name in postprocessors:
        try:
            get_metadata_postprocessor(postprocessor_name)(md)
        except:
            log.warning("Postprocessing of song %r metadata with '%r' failed" % (path, postprocessor_name))
            log.debug_traceback()

    # set album_artist if not present
    if md.album_artist is None:
        if md.compilation:
            md.album_artist = VARIOUS
        else:
            md.album_artist = md.artist

    return md
def pivote(self, vitesse=None, angle=None, duree=0):
    if duree < 0:
        log.warning('pivote: the duration must be a positive number')
    if angle is None and vitesse is None:
        log.warning('pivote: a speed and/or an angle is required')
        return
    if angle is None and vitesse is not None:
        self.roule(vitesse, -vitesse, duree)
        return
    v_max = 15
    # normalize the angle to (-180, 180]
    while angle < -180:
        angle += 360
    while angle > 180:
        angle -= 360
    if vitesse is not None:
        if duree > 0:
            log.warning('pivote: if both the angle and the speed are given, the duration is redundant')
        v_max = abs(vitesse)
    theta0 = self.imu.get_orientation()
    erreur0 = angle
    seuil = 0.1
    while True:
        delta_theta = self.imu.get_orientation() - theta0
        erreur = angle - delta_theta
        v = -0.2 * erreur
        sign = 1 if v > 0 else -1
        v_abs = abs(v)
        if v_abs < 3:
            v_abs = 3
        if abs(erreur) > 5 or v_abs > v_max:
            v_abs = v_max
        v = sign * v_abs
        self.pivote(v)
        if (erreur0 < 0 and erreur > -seuil) or (erreur0 > 0 and erreur < seuil):
            break
        time.sleep(0.01)
    self.stop()
    erreur = angle - (self.imu.get_orientation() - theta0)
    if abs(erreur) > 5:
        log.warning("pivote: error: %0.1f°" % (erreur))
def normal_activity(components: dict) -> bool:
    from log import debugging, warning, repeat_message
    if components["button"]["object"].value() == 1 and not components["button"]["previousState"]:
        # Button pressed and previous state is false
        components["button"]["previousState"] = True  # Turn previous state to true for next run time
        if components["led"]["object"].value() == 0:  # Check if led is off (so it does not turn it on when it is on)
            components["led"]["object"].on()  # Turn led on
            debugging("Turned led on")
        if components["laser"]["object"].value() == 0:  # Check if laser is off
            components["laser"]["object"].on()  # Turn laser on
            warning("Turned laser on")
    elif components["button"]["object"].value() == 0 and components["button"]["previousState"]:
        # Button is not pressed and previous state is true
        components["button"]["previousState"] = False  # Turn previous state to false for next run time
        if components["led"]["object"].value() == 1:  # Check if led is on
            components["led"]["object"].off()  # Turn led off
            debugging("Turned led off")
        if components["laser"]["object"].value() == 1:  # Check if laser is on
            components["laser"]["object"].off()  # Turn laser off
            warning("Turned laser off")
    try:
        if components["lightSensor"]["object"].read() > components["lightSensor"]["thresholdSensitivity"] and \
                components["button"]["object"].value() == 1:  # Laser on sensor and button pressed
            repeat_message("Laser on light sensor", 10, "laser on light")
        elif components["button"]["object"].value() == 1:  # Laser not on sensor and button pressed
            warning("A robbery has been detected")
            robbery_activity(components)  # Execute the "there is a robbery" function
            return True  # Exit function with true
    except KeyError:  # Raised when the sensor is not calibrated
        warning("ThresholdSensitivity has not been calibrated")
    return False  # Exit function with false
def readVertexDefinitions(self):
    self.bodyparts = dict()  # List of all body part groups
    self.vertices = dict()   # Dict per vertgroup index, all vertex indices
    self.groups = dict()
    infile = open(DATA_PATH + "/vertgroup_mapping.txt", "r")
    lineCnt = 0
    for line in infile:
        lineCnt = lineCnt + 1
        line = line.strip()
        # Ignore comments and empty lines
        if(not line or line.startswith("#")):
            continue
        # Define bodypart vertex group
        if(line.startswith("vertgroup")):
            items = line.split()
            try:
                gIdx = int(items[1])
                gName = items[2]
                self.bodyparts[gIdx] = gName
                continue
            except:
                if WARNINGS:
                    log.warning("Warning: error at line "+str(lineCnt)+" of file "+ os.path.abspath(infile.name)+"!")
                continue
        # Parse vertex - vertgroups assignment
        try:
            items = line.split()
            vertIdx = int(items[0])
            if(len(items) == 1):
                if WARNINGS:
                    log.warning("Warning: vertex "+str(vertIdx)+" at line "+str(lineCnt)+" of file "+ os.path.abspath(infile.name)+" is not assigned to any vertex group!")
                continue
            self.groups[vertIdx] = list()
            # Assign vertex groups
            for i in range(1, len(items)):
                vGroupIdx = int(items[i])
                if(vGroupIdx in self.vertices):
                    vList = self.vertices[vGroupIdx]
                else:
                    vList = list()
                    self.vertices[int(vGroupIdx)] = vList
                #print "Adding "+str(vertIdx)+" to group "+str(vGroupIdx)
                vList.append(vertIdx)
                self.groups[vertIdx].append(vGroupIdx)
        except:
            if WARNINGS:
                log.warning("Warning: Parsing error at line "+str(lineCnt)+" of file "+ os.path.abspath(infile.name)+"!")
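# The layout of vertgroup_mapping.txt is not shown in the source; from the
# parser above one can infer lines of the following (hypothetical) shape:
#
#   # comments and blank lines are skipped
#   vertgroup 0 head      <- "vertgroup <index> <name>" declares a body part
#   vertgroup 1 torso
#   0 0                   <- "<vertex index> <group index> [<group index> ...]"
#   1 0 1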
def __init__(self):
    f = Figlet(font='big')
    print(f.renderText('Repository'))

    cfg.RUNCFG["verbose"] = args.verbose
    if args.verbose == 1:
        log.warning("Log verbosity is enabled.")
    elif args.verbose == 2:
        log.warning("HIGH verbosity is enabled!")
    else:
        log.warning("Only regular information will be shown.")

    self.__manager = AuctionRepo()
def compute_positions_PSO(rotation_start, rotation_step, num_angles, PSOCountsPerRotation):
    '''Computes several parameters describing the fly scan motion.
    Computes the spacing between points, ensuring it is an integer number of
    encoder counts. Uses this spacing to recalculate the end of the scan, if
    necessary. Computes the taxi distance at the beginning and end of the
    scan to allow the stage to accelerate to speed. Assigns the fly scan
    angular positions to theta[].
    '''
    overall_sense, user_direction = _compute_senses()
    # Get the distance needed for acceleration = 1/2 a t^2 = 1/2 * v * t
    motor_accl_time = 3  # float(self.epics_pvs['RotationAccelTime'].get()) # Acceleration time in s
    accel_dist = motor_accl_time / 2.0 * 100  # float(self.motor_speed)

    # Compute the actual delta to keep each interval an integer number of encoder counts
    encoder_multiply = PSOCountsPerRotation / 360.  # float(self.epics_pvs['PSOCountsPerRotation'].get()) / 360.
    raw_delta_encoder_counts = rotation_step * encoder_multiply
    delta_encoder_counts = round(raw_delta_encoder_counts)
    if abs(raw_delta_encoder_counts - delta_encoder_counts) > 1e-4:
        log.warning(' *** *** *** Requested scan would have used a non-integer number of encoder counts.')
        log.warning(' *** *** *** Calculated # of encoder counts per step = {0:9.4f}'.format(raw_delta_encoder_counts))
        log.warning(' *** *** *** Instead, using {0:d}'.format(delta_encoder_counts))
    # self.epics_pvs['PSOEncoderCountsPerStep'].put(delta_encoder_counts)
    # Change the rotation step Python variable and PV
    rotation_step = delta_encoder_counts / encoder_multiply
    # self.epics_pvs['RotationStep'].put(self.rotation_step)

    # Make taxi distance an integer number of measurement deltas >= accel distance
    # Add 1/2 of a delta to ensure that we are really up to speed.
    taxi_dist = (math.ceil(accel_dist / rotation_step) + 0.5) * rotation_step
    # self.epics_pvs['PSOStartTaxi'].put(self.rotation_start - taxi_dist * user_direction)
    # self.epics_pvs['PSOEndTaxi'].put(self.rotation_stop + taxi_dist * user_direction)

    # Where will the last point actually be?
    rotation_stop = (rotation_start + (num_angles - 1) * rotation_step * user_direction)

    # Assign the fly scan angular positions to theta[]
    theta = rotation_start + np.arange(num_angles) * rotation_step * user_direction
    print(theta)
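# A worked example of the encoder-count rounding in compute_positions_PSO
# (the numbers are illustrative only, not from any real beamline setup):
PSOCountsPerRotation = 20000
rotation_step = 0.12                              # requested step, in degrees
encoder_multiply = PSOCountsPerRotation / 360.0   # 55.555... counts per degree
raw = rotation_step * encoder_multiply            # 6.6667 counts -> not an integer
delta = round(raw)                                # 7 counts
adjusted_step = delta / encoder_multiply          # 0.126 degrees actually used
print(raw, delta, adjusted_step)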
def __init__(self):
    f = Figlet(font='big')
    print(f.renderText('Client'))

    cfg.RUNCFG["verbose"] = args.verbose
    if args.verbose == 1:
        log.warning("Log verbosity is enabled.")
    elif args.verbose == 2:
        log.warning("HIGH verbosity is enabled!")
    else:
        log.warning("Only regular information will be shown.")

    self.__client = AuctionClient(args.clientnumber)
    self.mainLoop()
def run(self):
    while self.running:
        try:
            event = self.queue.get(block=True, timeout=2)
            info(event)
            if 'start' in event:
                phone_number = event['start']
                if CallTask.stopped_phone_numbers.exist(phone_number):
                    CallTask.stopped_phone_numbers.remove(phone_number)
                self.queue.put_nowait({'call': phone_number})
            # call event
            elif 'call' in event:
                phone_number = event['call']
                warning("XXXXXXXXX, phone num: {}".format(phone_number))  # was "%s".format(...), which never substitutes
                warning(CallTask.stopped_phone_numbers.str())
                warning("XXXXXXXXX")
                if not CallTask.stopped_phone_numbers.exist(phone_number):
                    call = self.callmgr.call(self.__number_mgr.get(), phone_number)
                    debug('make a call, %s', str(call))
            # hangup event
            elif 'hangup' in event:
                call_sid = event['hangup']
                self.callmgr.hangup(call_sid)
            # twilio callback status event
            elif 'status' in event:
                status, call_sid, called = event['status']
                self.__handle_status(status, call_sid, called)
            elif 'stop' in event:
                """
                Stop-call event: add the number to the stopped-numbers set;
                the call event checks the set and ignores stopped numbers.
                """
                phone_number = event['stop']
                CallTask.stopped_phone_numbers.add(phone_number)
            self.queue.task_done()
        except queue.Empty:
            pass
        finally:
            pass
def addBoundMesh(self, mesh, vertexToBoneMapping):
    if mesh.name in self.getBoundMeshes():
        log.warning("Replacing bound mesh with same name %s" % mesh.name)
        m, _ = self.getBoundMesh(mesh.name)
        if m == mesh:
            log.warning("Attempt to add the same bound mesh %s twice" % mesh.name)
        self.removeBoundMesh(mesh.name)

    if vertexToBoneMapping and mesh.getVertexCount() != vertexToBoneMapping.vertexCount:
        log.warning('Vertex count of bound mesh %s (%s) and its vertex weights (%s) differ, this might cause errors when skinning.', mesh.name, mesh.getVertexCount(), vertexToBoneMapping.vertexCount)

    # allows multiple meshes (also to allow to animate one model consisting of multiple meshes)
    originalMeshCoords = np.zeros((mesh.getVertexCount(), 4), np.float32)
    originalMeshCoords[:, :3] = mesh.coord[:, :3]
    originalMeshCoords[:, 3] = 1.0
    self.__originalMeshCoords.append(originalMeshCoords)
    self.__vertexToBoneMaps.append(vertexToBoneMapping)
    self.__meshes.append(mesh)
def update(patt, db, num_jitters, encoding_model, max_size, out_size):
    encoder = faceencoder.FaceEncoder(encoding_model=encoding_model,
                                      num_jitters=num_jitters,
                                      align=True)
    # TODO: add skip encoding loading or something like it
    files_faces = list(db.get_all()[1])
    encodings, names, filenames = patt.encodings()
    for patt_fname, enc in zip(filenames, encodings):
        fname, box = get_from_db(files_faces, db, patt_fname)
        if fname is None:
            log.warning(f'Not found in db: {patt_fname}')
            continue
        log.debug(f'Found in db file: {fname} {box}')
        try:
            image = tools.read_image(fname, max_size)
        except Exception as ex:
            log.warning(f"Can't read image: {fname}: " + str(ex))
            continue
        try:
            encodings, landmarks = encoder.encode(image, (box,))
            if not tools.test_landmarks(landmarks[0]):
                log.warning(f'bad face detected in {patt_fname}')
                continue
            enc = {'box': box,
                   'encoding': encodings[0],
                   'frame': 0,
                   'landmarks': landmarks[0]}
            tools.save_face(patt_fname, image, enc, out_size, fname)
            log.info(f'Updated: {patt_fname}')
        except Exception as ex:
            log.exception(f'Failed: {patt_fname}')
def main():
    home = os.path.expanduser("~")
    logs_home = home + '/logs/'
    # make sure logs directory exists
    if not os.path.exists(logs_home):
        os.makedirs(logs_home)
    lfname = logs_home + 'viktor_' + datetime.strftime(datetime.now(), "%Y-%m-%d_%H:%M:%S") + '.log'
    log.setup_custom_logger(lfname)

    init_general_PVs(global_PVs, variableDict)
    try:
        while True:
            h5fname = global_PVs['HDF1_FullFileName_RBV'].get()
            h5fname_str = "".join([chr(item) for item in h5fname])
            temp = global_PVs['Temperature'].get()
            pressure = global_PVs['Voltage'].get() * 1500 / 4.8434  # to calibrate
            log.warning('Temperature: %4.4f C; Pressure: %4.4f psi: %s' % (temp, pressure, h5fname_str))
            time.sleep(5)
    except KeyboardInterrupt:
        log.warning('interrupted!')
        log.warning('Log information saved at: %s', lfname)
def execute_imports(build_cfg):
    '''performs the xmake IMPORT phase (imports are defined in
    <cfgdir>/import.ais and resolved using the Artifact Importer)'''
    mkdirs(build_cfg.import_dir())
    if not build_cfg.do_import():
        log.info("importing was skipped, because the according option '-i' was not set\n")
        return
    absent_import_scripts = filter(lambda (x): not is_existing_file(x), build_cfg.import_scripts())
    import_scripts = filter(lambda (x): is_existing_file(x), build_cfg.import_scripts())
    if len(import_scripts) == 0:
        log.info('no standard import')
    else:
        log.info('standard import scripts: ' + str(import_scripts))

    # add explicit import targets from build plugin
    tool_import_script = _create_tool_import_script(build_cfg)
    if tool_import_script is not None:
        log.info('adding tool import script ' + tool_import_script)
        import_scripts.insert(0, tool_import_script)

    if not len(absent_import_scripts) == 0:
        log.warning('importing was switched on, but the following import mapping scripts were not found:')
        log.warning(', '.join(build_cfg.import_scripts()))
    if len(import_scripts) == 0:
        return

    # run artifact importer
    log.info("performing import...")
    log.info('import scripts: ' + str(import_scripts))
    ai_args = prepare_ai_command(build_cfg,
                                 {'default': build_cfg.import_dir(),
                                  'tools': build_cfg.import_tools_dir()},
                                 build_cfg.import_repos(), '.tmp')

    if not build_cfg.suppress_variant_handling():
        def add_variant_coord(k):
            ai_args.extend(['-Dbuild' + k.capitalize() + '=' + vcoords[k]])  # why different from export script variables???
            ai_args.extend(['-Dbuild' + k + '=' + vcoords[k]])
        vcoords = build_cfg.variant_coords()
        if vcoords != None and len(vcoords) != 0:
            map(add_variant_coord, vcoords.keys())
        else:
            log.error("using variant coordinate system (" + build_cfg.variant_cosy_gav() + ") requires coordinates/variant options")
            raise XmakeException("using variant coordinate system (" + build_cfg.variant_cosy_gav() + ") requires coordinates/variant options")

    # add custom import config if present
    bs = build_cfg.build_script()
    for (name, value) in bs.import_roots().items():
        ai_args.extend(['-C', 'root.' + name + '=' + value])
    for (name, value) in bs.import_variables().items():
        ai_args.extend(['-D', name + '=' + value])

    assert_import_file(build_cfg)
    for script in import_scripts:
        execute_ai(build_cfg, ai_args, script, "")
    update_import_file(build_cfg, '.tmp')
    _setup_global_settings_xml(build_cfg)
async def on_member_remove(self, member: discord.Member):
    for func in self._member_remove_handlers:
        try:
            await func(member)
        except Exception:
            log.warning("Ignoring exception in member_leave coroutine (see stack trace below)", include_exception=True)
async def on_reaction_remove(self, reaction: discord.Reaction, source: Union[discord.User, discord.Member]):
    for func in self._reaction_remove_handlers:
        try:
            await func(reaction, source)
        except Exception:
            log.warning("Ignoring exception in reaction_remove coroutine (see stack trace below)", include_exception=True)
def loadBinaryProxy(path, human, type):
    log.debug("Loading binary proxy %s.", path)

    npzfile = np.load(path)
    #if type is None:
    #    proxyType = npzfile['proxyType'].tostring()
    #else:
    proxyType = type

    proxy = Proxy(path, proxyType, human)

    proxy.name = npzfile['name'].tostring()
    proxy.uuid = npzfile['uuid'].tostring()
    proxy.basemesh = npzfile['basemesh'].tostring()

    if 'description' in npzfile:
        proxy.description = npzfile['description'].tostring()
    if 'version' in npzfile:
        proxy.version = int(npzfile['version'])
    if 'lic_str' in npzfile and 'lic_idx' in npzfile:
        proxy.license.fromNumpyString(npzfile['lic_str'], npzfile['lic_idx'])

    proxy.tags = set(_unpackStringList(npzfile['tags_str'], npzfile['tags_idx']))

    if 'z_depth' in npzfile:
        proxy.z_depth = int(npzfile['z_depth'])
    if 'max_pole' in npzfile:
        proxy.max_pole = int(npzfile['max_pole'])
    if 'special_pose_str' in npzfile:
        special_poses = _unpackStringList(npzfile['special_pose_str'], npzfile['special_pose_idx'])
        for idx in range(0, len(special_poses), 2):
            proxy.special_pose[special_poses[idx]] = special_poses[idx+1]

    num_refverts = int(npzfile['num_refverts'])

    if num_refverts > 1:
        # 3 or 4 reference verts per proxy vert
        proxy.ref_vIdxs = npzfile['ref_vIdxs']
        proxy.weights = npzfile['weights']
        if 'offsets' in npzfile:
            proxy.offsets = npzfile['offsets']
        else:
            if proxy.new_fitting:
                proxy.offsets = None
            else:
                # the original referenced num_refs before assignment here;
                # derive it from the loaded index array instead
                num_refs = proxy.ref_vIdxs.shape[0]
                proxy.offsets = np.zeros((num_refs, 3), dtype=np.float32)
    else:
        # 1 refvert
        num_refs = npzfile['ref_vIdxs'].shape[0]
        proxy.ref_vIdxs = np.zeros((num_refs, 3), dtype=np.uint32)
        proxy.ref_vIdxs[:, 0] = npzfile['ref_vIdxs']
        proxy.offsets = np.zeros((num_refs, 3), dtype=np.float32)
        proxy.weights = np.zeros((num_refs, 3), dtype=np.float32)
        proxy.weights[:, 0] = npzfile['weights']

    if "deleteVerts" in npzfile:
        proxy.deleteVerts = npzfile['deleteVerts']

    # Reconstruct reverse vertex (and weights) mapping
    proxy._reloadReverseMapping()

    if proxy.new_fitting:
        # Create alias
        proxy.deltas = proxy.weights

    # TODO we could skip this for new-style proxies
    proxy.tmatrix.fromNumpyStruct(npzfile)

    proxy.uvLayers = {}
    for uvIdx, uvName in enumerate(_unpackStringList(npzfile['uvLayers_str'], npzfile['uvLayers_idx'])):
        proxy.uvLayers[uvIdx] = uvName

    proxy.material = material.Material(proxy.name)
    if 'material_file' in npzfile:
        proxy._material_file = npzfile['material_file'].tostring()
    if proxy.material_file:
        proxy.material.fromFile(proxy.material_file)

    proxy._obj_file = npzfile['obj_file'].tostring()

    if 'vertexBoneWeights_file' in npzfile:
        proxy._vertexBoneWeights_file = npzfile['vertexBoneWeights_file'].tostring()
        if proxy.vertexBoneWeights_file:
            from animation import VertexBoneWeights
            proxy.vertexBoneWeights = VertexBoneWeights.fromFile(proxy.vertexBoneWeights_file)

    if proxy.z_depth == -1:
        log.warning('Proxy file %s does not specify a Z depth. Using 50.', path)
        proxy.z_depth = 50

    return proxy