def show_faces(self):
    """Display the image with rectangles drawn around every detected
    face, eye and pair of glasses, scaled down to fit on screen.

    Blocks until a key is pressed (cv2.waitKey(0)).
    """
    trace(6, 'show faces on ' + self.name)
    img = self.im.copy()  # draw on a copy so self.im stays pristine
    # Draw a rectangle around the faces
    for (x, y, w, h) in self.faces:
        cv2.rectangle(img, (x, y), (x + w, y + h), (40, 255, 60), 5)
    ## and eyes
    for (x, y, w, h) in self.eyes:
        cv2.rectangle(img, (x, y), (x + w, y + h), (255, 40, 0), 3)
    ## and glasses
    for (x, y, w, h) in self.glasses:
        cv2.rectangle(img, (x, y), (x + w, y + h), (0, 40, 246), 3)
    #for (x, y, w, h) in self.left_eyes:
    #    cv2.rectangle(img, (x, y), (x + w, y + h), (245, 40, 90), 3)
    #for (x, y, w, h) in self.right_eyes:
    #    cv2.rectangle(img, (x, y), (x + w, y + h), (0, 40, 246), 3)
    height, width, depth = img.shape
    max_h = 800.0
    max_w = 1000.0
    if height > max_h or width > max_w:
        # NOTE(review): when BOTH dimensions exceed their limits only the
        # height ratio is applied; the scaled image may still be wider than
        # max_w — confirm this is acceptable.
        scale = max_h / height if height > max_h else max_w / width
        #nw,nh= int(img.shape[1]*scale), int(img.shape[0]*scale)
        nw, nh = int(width * scale), int(height * scale)
        trace(
            7,
            'Scaling image from {0} to {1}'.format((width, height), (nw, nh)))
        img = cv2.resize(img, (nw, nh))
    cv2.imshow("Faces found", img)
    cv2.waitKey(0)
def __init__(self, selectorkey, obj):
    """Locate the template image named after *selectorkey* in a fresh
    screenshot and record the best-match geometry and confidence.

    Sets self.x/self.y/self.w/self.h (match rectangle), self.conf
    (normalized correlation score) and self.center (rectangle center).
    """
    self.selectorkey = selectorkey
    self.obj = obj
    try:
        imgScreen = ImageGrab.grab()
        imgScreen.save('screenshot.png')
        image = cv2.imread('screenshot.png')
        template = cv2.imread('images\\' + self.selectorkey + '.png')
        result = cv2.matchTemplate(image, template, cv2.TM_CCOEFF_NORMED)
        max_val = cv2.minMaxLoc(result)[1]  # best correlation score
        # argmax over the score map gives (row, col) of the best match.
        pos = np.unravel_index(result.argmax(), result.shape)
        w2 = template.shape[1]
        h2 = template.shape[0]
        x1 = pos[1]  # col -> x
        y1 = pos[0]  # row -> y
        x2 = x1 + w2
        y2 = y1 + h2
        self.x = x1
        self.y = y1
        self.w = w2
        self.h = h2
        self.conf = max_val
        self.center = (int((x1 + x2) / 2), int((y1 + y2) / 2))
        trace('conf:%d on (%d,%d)(%d,%d)' % (max_val, x1, y1, x2, y2))
    finally:
        # delete screenshot
        # Bug fix: the old code checked for the file and then did nothing
        # (`pass`), leaking screenshot.png on disk after every match.
        if (os.path.isfile('screenshot.png')):
            os.remove('screenshot.png')
def forbidtoken(files, config_name):
    """Git-hook check: abort when any staged file matches the forbidden
    token predicate registered for *config_name* in the ``tr`` table.

    Args:
        files: iterable of staged-file objects (with .path, .contents,
            .fnmatch).
        config_name: key into ``tr`` selecting (predicate, message,
            default glob patterns).

    Returns:
        True when at least one file contained the forbidden token.
    """
    include_patterns = common.get_option('forbidtoken-hook.' + config_name,
                                         default=tr[config_name][2]).split()
    common.note('Checking for "' + config_name + '" tokens on ' +
                ', '.join(include_patterns) + ' files')
    abort = False
    token = tr[config_name][0]  # predicate: str -> truthy when forbidden
    # Enumerate lines (1-based) and yield the numbers of matching lines.
    line_iter = lambda x: enumerate(re.finditer(".*\n", x, re.MULTILINE), 1)
    line_match = lambda test, x: (n for n, m in line_iter(x) if test(m.group()))
    count = 0
    for f in files:
        if not any(f.fnmatch(p) for p in include_patterns):
            continue  # file not covered by the configured globs
        common.trace('Checking ' + str(f.path) + '...')
        content = f.contents
        if not common.binary(content) and token(content.decode()):
            if not abort:
                # Emit the generic warning only once, before the first hit.
                common.error(WARNING % (tr[config_name][1]))
            for n in line_match(token, content.decode()):
                common.error(FILEWARN % (f.path, n))
            abort = True
        count += 1
    if abort:
        common.error('Hook "' + config_name + '" failed.')
    common.note('%d file(s) checked.' % count)
    return abort
def filesize(files):
    """Git-hook check: flag staged files larger than the configured limit.

    The 'filesize-hook.type' option restricts the check to binary files
    unless it is "all" (the default). Returns True when the commit should
    be aborted.
    """
    limit = int(common.get_option('filesize-hook.max-size', default=1024**2))
    check_all_files = common.get_option('filesize-hook.type',
                                        "all").strip().lower() != "binary"
    offenders = []
    common.note('Checking files size...')
    checked = 0
    for entry in files:
        # Skip text files when only binaries are being policed.
        if not (check_all_files or common.binary(entry.contents)):
            continue
        common.trace('Checking ' + str(entry.path) + ' size...')
        checked += 1
        if entry.size > limit:
            offenders.append(entry)
    common.note('%d file(s) checked.' % checked)
    abort = False
    if offenders:
        common.error(WARNING % limit)
        for entry in offenders:
            common.error(FILEWARN % (entry.path, entry.size, limit))
        abort = True
    return abort
def _setSlideshowCollection(self):
    """Build the artist slideshow for everything currently playing.

    For each non-empty artist: resolve its cache directory, switch the
    skin slideshow to the cache, fetch biography/albums (first artist
    only), fetch images, then publish the cache back to the show dir.
    Aborts early when the player content changes mid-run.
    """
    common.debug("Starting slideshow collection")
    artistsArray = self.getArtistNames()
    artistsIndex = 0
    for artist in artistsArray:
        if self._isPlaybackChanged():
            common.debug(
                "Cancel slideshow collection due to the change of player content"
            )
            break
        if artist is None or artist == '':
            continue  # skip blank artist entries
        artistsIndex += 1
        common.debug("Collecting slideshow for artist [%s]" % artist)
        self.dir_cache = self._resdir(artist)
        common.trace("Cache directory for artist [%s]: %s" %
                     (artist, self.dir_cache))
        self._setSkinSlideshow(self.dir_show, self.dir_cache)
        if artistsIndex == 1:
            # Biography and album info are global skin properties, so they
            # are only collected for the primary (first) artist.
            self._setSkinArtistBiografy(artist)
            self._setSkinArtistAlbumInfo(artist)
        self._setSkinArtistImages(artist)
        self._cache2show()
        self._setSkinSlideshow(self.dir_cache, self.dir_show)
        common.sleep()
    common.debug('Ended slideshow collection')
def AddFeaturesToFeatureLayer(self, url, pathToFeatureClass):
    """Append the features of a local feature class to a hosted layer.

    Args:
        url: URL of the target feature layer.
        pathToFeatureClass: path to the source feature class.

    Returns:
        The result of FeatureLayer.addFeatures.

    Raises:
        ArcRestHelperError: wrapping any arcpy or generic failure.
    """
    fl = None
    try:
        fl = FeatureLayer(
            url=url,
            securityHandler=self._securityHandler)
        return fl.addFeatures(fc=pathToFeatureClass)
    except arcpy.ExecuteError:
        line, filename, synerror = Common.trace()
        raise ArcRestHelperError({
            "function": "AddFeaturesToFeatureLayer",
            "line": line,
            "filename": filename,
            "synerror": synerror,
            "arcpyError": arcpy.GetMessages(2),
        })
    except:
        line, filename, synerror = Common.trace()
        raise ArcRestHelperError({
            "function": "AddFeaturesToFeatureLayer",
            "line": line,
            "filename": filename,
            "synerror": synerror,
        })
    finally:
        # Module convention: drop the reference and force a GC pass.
        fl = None
        del fl
        gc.collect()
def WriteFile(data, filename):
    """Persist *data* to *filename*, returning True on success.

    Python-2 unicode payloads are encoded as UTF-8 and dicts are
    serialized to JSON before writing. The Kodi VFS is preferred; the
    plain filesystem is the fallback.
    """
    # Normalise the payload to bytes/str suitable for a binary write.
    if type(data).__name__ == 'unicode':
        data = data.encode('utf-8')
    elif isinstance(data, dict):
        data = json.dumps(data)
    try:
        handle = xbmcvfs.File(filename, 'wb')
    except:
        handle = open(filename, 'wb')
    try:
        handle.write(data)
        handle.close()
        common.trace('Successfully wrote data to file: %s' % filename,
                     "utilities")
        return True
    except IOError as e:
        common.error(
            'Unable to write data to [%s] file: %s' % (filename, str(e)),
            "utilities")
        return False
    except Exception as e:
        common.error(
            'Unknown error while writing data to [%s] file: %s' %
            (filename, str(e)), "utilities")
        return False
def GetLayerFromFeatureServiceByURL(self, url, layerName="", returnURLOnly=False):
    """Resolve a layer/table by name from the feature service at *url*.

    Args:
        url: URL of the feature service.
        layerName: name of the layer or table to look up.
        returnURLOnly: when True, return the item's URL instead of the
            object.

    Returns:
        Whatever GetLayerFromFeatureService returns (object, URL or None).

    Raises:
        ArcRestHelperError: wrapping any arcpy or generic failure.
    """
    fs = None
    try:
        fs = FeatureService(
            url=url,
            securityHandler=self._securityHandler)
        # Delegate the actual name lookup.
        return self.GetLayerFromFeatureService(fs=fs, layerName=layerName,
                                               returnURLOnly=returnURLOnly)
    except arcpy.ExecuteError:
        line, filename, synerror = Common.trace()
        raise ArcRestHelperError({
            "function": "GetLayerFromFeatureServiceByURL",
            "line": line,
            "filename": filename,
            "synerror": synerror,
            "arcpyError": arcpy.GetMessages(2),
        })
    except:
        line, filename, synerror = Common.trace()
        raise ArcRestHelperError({
            "function": "GetLayerFromFeatureServiceByURL",
            "line": line,
            "filename": filename,
            "synerror": synerror,
        })
    finally:
        # Module convention: drop the reference and force a GC pass.
        fs = None
        del fs
        gc.collect()
def DeleteFeaturesFromFeatureLayer(self, url, sql):
    """Delete the features matching *sql* from the layer at *url*.

    Args:
        url: URL of the feature layer.
        sql: WHERE clause selecting the features to delete.

    Returns:
        The result of FeatureLayer.deleteFeatures.

    Raises:
        ArcRestHelperError: wrapping any arcpy or generic failure.
    """
    fl = None
    try:
        fl = FeatureLayer(
            url=url,
            securityHandler=self._securityHandler,)
        return fl.deleteFeatures(where=sql)
    except arcpy.ExecuteError:
        line, filename, synerror = Common.trace()
        raise ArcRestHelperError({
            "function": "DeleteFeaturesFromFeatureLayer",
            "line": line,
            "filename": filename,
            "synerror": synerror,
            "arcpyError": arcpy.GetMessages(2),
        })
    except:
        line, filename, synerror = Common.trace()
        raise ArcRestHelperError({
            "function": "DeleteFeaturesFromFeatureLayer",
            "line": line,
            "filename": filename,
            "synerror": synerror,
        })
    finally:
        # Module convention: drop the reference and force a GC pass.
        fl = None
        del fl
        gc.collect()
def __init__(self, parent, imageName, im):
    """Run the full detection pipeline on one image.

    Args:
        parent: the checker object holding the loaded cascades and
            detection settings.
        imageName: label used in trace output.
        im: BGR image as loaded by cv2.
    """
    trace(8, 'creating Check from ' + imageName)
    self.parent = parent
    self.name = imageName
    self.im = im
    # Cascades operate on grayscale input.
    self.gray = cv2.cvtColor(self.im, cv2.COLOR_BGR2GRAY)
    # trace(8, im)
    # Detect faces in the image
    self.detect_faces()
    self.eyes = []
    if len(self.faces) > 0:
        self.glasses = []
        # Eyes are only searched inside detected face regions.
        self.detect_eyes_in_faces()
    # search for glasses everywhere!
    self.detect_glasses()
    # TODO, for glasses, detect eyes!!
    self.detect_right_left_eyes()
    #self.right_eyes = self.parent.righteye.detectMultiScale(
    #    self.gray,
    #    scaleFactor=1.12,
    #    minNeighbors=5,
    #    minSize=(30, 30)
    #)
    #trace(4, e)
    trace(7, 'detect done for ' + self.name)
def setup(self):
    """Reload the addon configuration and rebuild the provider registry.

    Clears the cached providers, re-instantiates them from the current
    settings and resets the pending-reload flag.
    """
    self._configurations()
    common.trace(
        'Updating content providers list (instances and their configuration)'
    )
    self.PROVIDERS.clear()
    self._providers()
    # The registry is now in sync with the configuration.
    self._reload = False
def detect_glasses(self):
    """Run the glasses cascade over the whole grayscale image and store
    the resulting rectangles in self.glasses."""
    hits = self.parent.glasses_casc.detectMultiScale(
        self.gray,
        scaleFactor=1.12,
        minNeighbors=5,
        minSize=(30, 30))
    self.glasses = hits
    trace(
        6,
        'glasses-detecting in {0}, whole-image found {2} glasses: {3} \n{4} '
        .format(self.name, 'dummy', len(self.glasses), type(self.glasses),
                self.glasses))
def click(self, pos):
    """Tap the device screen at *pos* (x, y) via ``adb shell input tap``."""
    trace('performing click @[%s, %s]' % (pos[0], pos[1]))
    cmd = ['adb', 'shell', 'input', 'tap']
    cmd.extend(map(str, pos))
    # NOTE(review): shell=True combined with a list argument behaves
    # differently per platform (on Windows the list is joined into one
    # command line) — confirm the intended target platform before changing.
    subprocess.call(cmd, stdout=subprocess.PIPE, cwd=configs['adb'],
                    shell=True)
def addFile(self, filename):
    """Register *filename* in the backup file list, normalising its
    encoding to unicode first (UTF-8, ISO-8859-2 fallback)."""
    # NOTE(review): on Python 3 a str has no .decode(); this assumes the
    # caller passes bytes (or the code runs under Python 2) — confirm.
    try:
        filename = filename.decode('UTF-8')
    except UnicodeDecodeError:
        filename = filename.decode('ISO-8859-2')
    # write the full remote path name of this file
    common.trace("Add file: " + filename, "FileManager")
    self.fileArray.append(filename)
def load_cascade(filename):
    """Load a cascade classifier from *filename*, logging the outcome.

    Re-raises whatever the loader throws after logging the failure.
    """
    try:
        cascade = cv2.CascadeClassifier(filename)
        trace(7, 'successfully loaded cascade from ' + filename)
        return cascade
    except:
        # NOTE: cv2 may not raise on a missing file — presumably the trace
        # levels are relied upon for diagnosing that case; verify.
        trace(1, 'Failed to load "' + filename + '"')
        raise
def GetLayerFromFeatureService(self, fs, layerName="", returnURLOnly=False):
    """Find a layer, sub-layer or table named *layerName* in *fs*.

    Args:
        fs: a FeatureService object.
        layerName: name of the layer/sub-layer/table to look up.
        returnURLOnly: when True, return the item's URL instead of the
            object.

    Returns:
        The matching layer/sub-layer/table (or its URL), or None when
        nothing matches.

    Raises:
        ArcRestHelperError: wrapping any arcpy or generic failure.
    """
    layers = None
    table = None
    layer = None
    sublayer = None
    try:
        layers = fs.layers
        for layer in layers:
            if layer.name == layerName:
                if returnURLOnly:
                    return fs.url + '/' + str(layer.id)
                else:
                    return layer
            elif not layer.subLayers is None:
                for sublayer in layer.subLayers:
                    if sublayer == layerName:
                        return sublayer
        for table in fs.tables:
            if table.name == layerName:
                if returnURLOnly:
                    # Bug fix: the URL was previously built from
                    # ``layer.id`` (the last layer iterated above) instead
                    # of the matching table's own id.
                    return fs.url + '/' + str(table.id)
                else:
                    return table
        return None
    except arcpy.ExecuteError:
        line, filename, synerror = Common.trace()
        raise ArcRestHelperError({
            "function": "GetLayerFromFeatureService",
            "line": line,
            "filename": filename,
            "synerror": synerror,
            "arcpyError": arcpy.GetMessages(2),
        })
    except:
        line, filename, synerror = Common.trace()
        raise ArcRestHelperError({
            "function": "GetLayerFromFeatureService",
            "line": line,
            "filename": filename,
            "synerror": synerror,
        })
    finally:
        # Module convention: drop the references and force a GC pass.
        layers = None
        table = None
        layer = None
        sublayer = None
        del layers
        del table
        del layer
        del sublayer
        gc.collect()
def parse_find_desc(div: ET.ElementTree) -> str:
    """Extract an episode/audio description near *div* by trying a series
    of known page layouts in order; recurse one level up for
    episode-list containers. Returns the stripped text or None.

    NOTE(review): the 4th and 5th attempts reference
    ``audio_audiobox_body``, which is only bound if the 2nd attempt's
    first lookup succeeded; if it raised before the assignment, those
    attempts raise an uncaught NameError — confirm whether that path can
    occur in practice.
    """
    parent_div = div.getparent()
    parent_html = ET.tostring(parent_div, pretty_print="True")
    common.trace(8, 'parent ', parent_html)
    # Layout 1: audio-episode-body with a *preamble paragraph.
    try:
        audio_episode_body = XmlHandler.find_element_attribute(
            parent_div, 'div', 'class', "audio-episode-body")
        p = XmlHandler.find_element_attribute(audio_episode_body, 'p',
                                              'class', "*preamble")
        if p:
            return p.text_content().strip()
    except AttributeError:
        pass
    # Layout 2: audio-box-body with a preamble paragraph.
    try:
        audio_audiobox_body = XmlHandler.find_element_attribute(
            parent_div, 'div', 'class', "audio-box-body")
        p = XmlHandler.find_element_attribute(audio_audiobox_body, 'p',
                                              'class', "preamble")
        return p.text_content().strip()
    except AttributeError:
        pass
    # Layout 3: nested episode body/content/body chain under *div* itself.
    try:
        episode_body = XmlHandler.find_element_attribute(
            div, 'div', 'class', "episode*-body")
        episode__content = XmlHandler.find_element_attribute(
            episode_body, 'div', 'class', "*episode__content")
        episode__body = XmlHandler.find_element_attribute(
            episode__content, 'div', 'class', "*episode__body")
        if episode__body is not None:
            # Prefer the first paragraph; fall back to the whole body text.
            p = XmlHandler.find_first_child(episode__body, 'p')
            el = p if p is not None else episode__body
            return el.text_content().strip()
    except AttributeError:
        pass
    # Layout 4: episode-list item description (text* fallback).
    try:
        ep_desc = XmlHandler.find_element_attribute(
            parent_div, 'div', 'class', "episode-list*item*description*")
        desc = ep_desc.text_content().strip()
        if len(desc) > 0:
            return desc
        p = XmlHandler.find_element_attribute(audio_audiobox_body, 'p',
                                              'class', "text*")
        return p.text_content().strip()
    except AttributeError:
        pass
    # Layout 5: latest-episode preamble (text* fallback).
    try:
        ep_desc = XmlHandler.find_element_attribute(
            parent_div, 'div', 'class', 'latest-episode__preamble ltr')
        desc = ep_desc.text_content().strip()
        if len(desc) > 0:
            return desc
        p = XmlHandler.find_element_attribute(audio_audiobox_body, 'p',
                                              'class', "text*")
        return p.text_content().strip()
    except AttributeError:
        pass
    # Episode-list containers: retry one level further up the tree.
    if parent_div.tag == 'div' and parent_div.attrib['class'] == "episode-list-item__content":
        return parse_find_desc(parent_div.getparent())
    return None
def transform(self, atom_thing):
    """Apply the loaded XSLT transformer to *atom_thing* and return the
    transformed document."""
    trace(8, 'transforming atom_thing: ', str(type(atom_thing)))
    trace(8, 'transforming atom_thing: ', dir(atom_thing))
    #if True or isinstance(atom_thing, ET._Element):
    #    atom_thing = atom_thing.getroottree()
    #    trace(8, 'transfroming atom_thing: ', str(type(atom_thing)))
    #    atom_thing = atom_thing.getroot()
    result = self.transformer(atom_thing)
    return result
def typetext(self, txt):
    """Type *txt* on the device via ``adb shell input text``.

    Not support Unicode currently.
    """
    trace('performing typing')
    # 'adb shell input text' cannot take literal spaces; '%s' is its
    # escape sequence for a space character.
    txt = txt.replace(' ', '%s')
    cmd = ['adb', 'shell', 'input', 'text']
    cmd.append(txt)
    subprocess.call(cmd, stdout=subprocess.PIPE, cwd=configs['adb'],
                    shell=True)
def _parse(self, content):
    """Parse a raw weather payload into a dict.

    Strips <br> tags and "NA" markers before JSON-decoding; returns an
    empty dict when *content* is None or unparseable.
    """
    if content is None:
        return {}
    common.trace('Parsing content: %s' % content)
    try:
        cleaned = content.replace('<br>', ' ').replace('"NA"', '""')
        return json.loads(cleaned)
    except BaseException as be:
        common.error('Failed to parse weather data: %s' % be)
        return {}
def _setSkinArtistImages(self, artist):
    """Collect image URLs for *artist* from every provider, download the
    ones missing from the cache and refresh the skin slideshow.

    Bails out between steps when playback changes, the addon
    configuration is updated, or Kodi requests shutdown.
    """
    common.debug("Collecting images for artist: %s" % str(artist))
    images = []
    params = {}
    kontor = 0  # number of images successfully downloaded so far
    params['lang'] = self.__BIOLANGUAGE
    params['artist'] = artist
    params['infodir'] = self.dir_cache
    params['exclusionsfile'] = os.path.join(self.dir_cache,
                                            "_exclusions.nfo")
    for key in self.PROVIDERS.keys():
        if self._isPlaybackChanged():
            common.debug(
                "Cancel collecting images due to the change of player content"
            )
            return
        common.debug('Identifying images by provider: [%s]' % key)
        params['getall'] = common.setting(key + "_all")
        params['clientapikey'] = common.setting(key + "_apikey")
        content = self.PROVIDERS[key].getImageList(params)
        if content is not None and len(content) > 0:
            images.extend(content)
        if self._reload:
            self._reload = False
            common.debug(
                "Cancel collecting images due to the addon configuration update"
            )
            break
    common.trace("Downloading images for artist [%s]" % str(artist))
    _, cachefiles = xbmcvfs.listdir(self.dir_cache)
    for url in images:
        if self._isPlaybackChanged():
            common.debug(
                "Cancel downloading images due to the change of player content"
            )
            break
        common.trace('Checking image URL: %s' % url)
        # Cache file name = hash of the URL + the URL's image extension.
        cachepath = utilities.ItemHashWithPath(
            url, self.dir_cache) + utilities.ImageType(url)
        if os.path.split(
                cachepath
        )[1] not in cachefiles and not xbmc.abortRequested and not self._isPlaybackChanged(
        ):
            common.trace('Downloading image file: %s' % cachepath)
            urldata = common.urlcall(url, output='binary',
                                     certver=self.__SSLCHECK)
            success = utilities.WriteFile(urldata,
                                          cachepath) if urldata else False
            # Discard suspiciously small downloads (< 999 bytes — most
            # likely an error page rather than an image).
            if success and xbmcvfs.Stat(cachepath).st_size() < 999:
                utilities.DeleteFile(cachepath)
            elif success:
                kontor += 1
                # Refresh the slideshow after the first image when the
                # cache held no pictures yet, then after every 5 downloads.
                if (kontor % 5 == 0) or (kontor == 1 and len([
                        f for f in cachefiles
                        if os.path.splitext(f)[1] != ".nfo"
                ]) == 0):
                    self._setSkinSlideshow(None, self.dir_cache)
    common.trace("Images setup is done")
def load_cascades(self):
    """Load every cascade classifier used by the checker: glasses, the
    dedicated right/left eye cascades and the set of face cascades
    (as (filename, classifier) pairs in self.face_cascades)."""
    # Create the haar cascade
    #self.face_frontal_casc = EyeChecker.load_cascade(casc_file_frontal)
    #self.face_frontal_impr_casc = EyeChecker.load_cascade(casc_file_frontal_impr)
    #self.face_profile_lbp_casc = EyeChecker.load_cascade(casc_file_profile_lbp)
    self.glasses_casc = EyeChecker.load_cascade(casc_file_glasses)
    self.righteye = EyeChecker.load_cascade(casc_file_righteye)
    self.lefteye = EyeChecker.load_cascade(casc_file_lefteye)
    l = lambda filename: (filename, EyeChecker.load_cascade(filename))
    # Bug fix: on Python 3 ``map`` returns a one-shot iterator, so
    # self.face_cascades would be exhausted after the first detection
    # pass. Materialise it so it can be iterated for every image.
    self.face_cascades = list(map(l, casc_face_files))
    #[('frontal', self.face_frontal_casc), ('frontal_impr', self.face_frontal_impr_casc), ('profile', self.face_profile_lbp_casc)]
    trace(6, 'Cascades loaded')
def detect_right_left_eyes(self):
    """Detect right and left eyes over the whole grayscale image using the
    dedicated per-eye cascades; results land in self.right_eyes and
    self.left_eyes."""
    self.right_eyes = self.parent.righteye.detectMultiScale(
        self.gray,
        scaleFactor=1.12,
        minNeighbors=5,
        minSize=(30, 30))
    # NOTE: format arg {1} ('dummy') is an unused placeholder.
    trace(
        6,
        'right-eye-detecting in {0}, whole-image found {2} eyes: {3} \n{4} '
        .format(self.name, 'dummy', len(self.right_eyes),
                type(self.right_eyes), self.right_eyes))
    self.left_eyes = self.parent.lefteye.detectMultiScale(
        self.gray,
        scaleFactor=1.12,
        minNeighbors=5,
        minSize=(30, 30))
    trace(
        6,
        'left-eye-detecting in {0}, whole-image found {2} eyes: {3} \n{4} '
        .format(self.name, 'dummy', len(self.left_eyes),
                type(self.left_eyes), self.left_eyes))
def exportToXML(self, outFolder=None, outName=None):
    """ Exports a metadata file (.xml) to a save location and a given
    name. To get the save changes, call the save() before running the
    function.
    Example:
    >>> fc = r"c:\temp\scratch.gdb\states"
    >>> pw = Paperwork(dataset=fc)
    >>> val = pw.convert()
    >>> val['metadata']['dataIdInfo']['searchKeys'] = {}
    >>> val['metadata']['dataIdInfo']['searchKeys']['keywords'] = ['states', 'USA']
    >>> pw.save(d=val)
    >>> print pw.exportToXML(r"c:\temp\mymetadata", "system_shell.xml")
    Inputs:
       outFolder - optional - is the value provided is not given, then
        systems temp folder will be used.
       outName - optional - is the name of the xml file. This can be
        provided, or created by the system. The file create will be
        randomly generated.
    Output:
       path to xml file
    """
    try:
        if outFolder is None:
            outFolder = tempfile.gettempdir()
        elif os.path.isdir(outFolder) == False:
            # Requested folder does not exist yet: create it.
            # (Bug fix: the old branch structure contained an unreachable
            # 'else: outFolder = tempdir' arm.)
            os.makedirs(outFolder)
        if not outName is None:
            if outName.lower().endswith('.xml'):
                fullPath = os.path.join(outFolder, outName)
            else:
                fullPath = os.path.join(outFolder, outName + ".xml")
        else:
            from uuid import uuid4
            # Bug fix: uuid4().get_hex() is Python-2 only; the .hex
            # attribute works on both Python 2 and 3.
            fullPath = os.path.join(outFolder, uuid4().hex + ".xml")
        d = self.convert()
        res = self._dictionary_to_metadata(d)
        # The context manager closes/flushes the file; no manual close.
        with open(fullPath, 'wb') as writer:
            writer.write(res)
        return fullPath
    except:
        line, filename, synerror = trace()
        raise HermesErrorHandler({
            "function": "exportToXML",
            "line": line,
            "filename": filename,
            "synerror": synerror,
            "arc": synerror
        })
def setSyncMethod(self, method="ALWAYS"):
    """ Automatically updates an ArcGIS item's metadata with the current
    properties of the item. For example, if the metadata describes the
    item as having one projection but the item's projection has changed
    since the last automatic update, the old projection information in
    the metadata will be replaced with the new projection information.
    By default, metadata is automatically updated when anyone who has
    write access to the ArcGIS item views its metadata. Metadata can
    also be synchronized by running this tool. The option to turn off
    synchronization when you view metadata doesn't affect how this tool
    operates.
    Inputs:
       method - The type of synchronization that will take place.
         ALWAYS - Properties of the source item are always added to or
          updated in its metadata. Metadata will be created if it
          doesn't already exist. This is the default.
         ACCESSED - Properties of the source item are added to or
          updated in its metadata when it is accessed. Metadata will be
          created if it doesn't already exist.
         CREATED - Metadata will be created and properties of the source
          item will be added to it if the item doesn't already have
          metadata.
         NOT_CREATED - Properties of the source item are added to or
          updated in existing metadata.
         OVERWRITE - The same as "ALWAYS" except all information that
          can be recorded automatically in the metadata will be
          recorded. Any properties typed in by a person will be replaced
          with the item's actual properties.
         SELECTIVE - The same as "OVERWRITE" except the title and the
          content type will not be overwritten with default values for
          the item. Used when metadata is upgraded to the ArcGIS 10.x
          metadata format.
    Output:
       dataset path
    """
    try:
        methods = ["ALWAYS", "CREATED", "NOT_CREATED",
                   "OVERWRITE", "SELECTIVE", "ACCESSED"]
        # Validation is case-insensitive, but the original casing is what
        # gets passed through to the geoprocessing tool.
        if method.upper() in methods:
            arcpy.SynchronizeMetadata_conversion(source=self._dataset,
                                                 synctype=method)
            return self.dataset
        else:
            raise Exception("Invalid method type: %s" % method)
    except:
        line, filename, synerror = trace()
        raise HermesErrorHandler({
            "function": "setSyncMethod",
            "line": line,
            "filename": filename,
            "synerror": synerror,
            "arc": str(arcpy.GetMessages(2))
        })
def backupFiles(self, fileList, source, dest):
    """Write every entry of *fileList* from *source* into *dest*.

    Entries prefixed with "-" are treated as directories to create;
    everything else is copied. Returns False when at least one copy
    failed, True otherwise.
    """
    result = True
    common.debug(
        "Writing files to '" + dest.RootPath + "', Source is '" +
        source.RootPath + "'", "SystemRecovery")
    for aFile in fileList:
        common.trace('Writing file: ' + aFile, "SystemRecovery")
        if aFile.startswith("-"):
            # Directory marker: strip the source root (plus one extra char,
            # presumably the '-' marker) and create it under dest.
            # NOTE(review): the '+ 1' offset here differs from the copy
            # branch below — confirm both offsets are correct.
            dest.mkdir(dest.RootPath + aFile[len(source.RootPath) + 1:])
        else:
            # copy using normal method
            wroteFile = dest.put(
                aFile, dest.RootPath + aFile[len(source.RootPath):])
            # if result is still true but this file failed
            if not wroteFile and result:
                result = False
    return result
def __init__(self, eobj):
    """Parse an accessibility-event object's string representation into
    eventtype / packagename / classname / text / contentdescription
    fields."""
    self.obj = eobj
    line = str(eobj)
    trace(line)
    # Capture the five fields out of the event's repr.
    g = re.match(
        '.*EventType: (.+?);.*PackageName:(.+?);.*ClassName:(.+?);.*Text: \\[(.*?)\\];.*ContentDescription:(.*?).*;',
        str(self.obj))
    # Fields are sliced from the original string via the match spans
    # (equivalent to g.group(i)).
    self.eventtype = line[g.regs[1][0]:g.regs[1][1]]
    self.packagename = line[g.regs[2][0]:g.regs[2][1]]
    self.classname = line[g.regs[3][0]:g.regs[3][1]]
    self.text = line[g.regs[4][0]:g.regs[4][1]]
    if (self.text.startswith('[\\x')):
        # Text arrived as escaped hex (e.g. "[\xE4..."): strip the escape
        # markers and decode the raw bytes as UTF-8.
        # NOTE(review): group 4 excludes the surrounding brackets, so a
        # leading '[' in self.text looks unreachable — confirm this branch
        # ever fires with real events.
        self.text = self.text.replace('\\x', '')
        self.text = bytes.fromhex(self.text).decode('utf-8')
    self.contentdescription = line[g.regs[5][0]:g.regs[5][1]]
    print(self.eventtype, self.packagename, self.classname, self.text,
          self.contentdescription)
def GetFeatureService(self, itemId, returnURLOnly=False):
    """Fetch the Feature Service behind a portal item id.

    Args:
        itemId: id of the portal item.
        returnURLOnly: when True, return the service URL instead of a
            FeatureService object.

    Returns:
        A FeatureService (or its URL), or None when the item is not a
        Feature Service or the security handler is invalid.

    Raises:
        ArcRestHelperError: wrapping any arcpy or generic failure.
    """
    admin = None
    item = None
    try:
        admin = arcrest.manageorg.Administration(
            securityHandler=self._securityHandler)
        # Idiom fix: test validity directly instead of `== False`.
        if not self._securityHandler.valid:
            self._valid = self._securityHandler.valid
            self._message = self._securityHandler.message
            return None
        item = admin.content.item(itemId=itemId)
        if item.itemType == "Feature Service":
            if returnURLOnly:
                return item.url
            else:
                return FeatureService(
                    url=item.url,
                    securityHandler=self._securityHandler)
        return None
    except arcpy.ExecuteError:
        line, filename, synerror = Common.trace()
        raise ArcRestHelperError({
            "function": "GetFeatureService",
            "line": line,
            "filename": filename,
            "synerror": synerror,
            "arcpyError": arcpy.GetMessages(2),
        })
    except:
        line, filename, synerror = Common.trace()
        raise ArcRestHelperError({
            "function": "GetFeatureService",
            "line": line,
            "filename": filename,
            "synerror": synerror,
        })
    finally:
        # Module convention: drop the references and force a GC pass.
        admin = None
        item = None
        del item
        del admin
        gc.collect()
def getImageList(self, params):
    """Query theaudiodb for an artist and collect all fanart/thumb image
    URLs; also enriches *params* with mbid/fullname/alias/location when
    present in the response.

    Returns a list of image URLs (with exclusions filtered out), or []
    when nothing was found.
    """
    common.trace(
        "Starting to search images using parameters: %s" % str(params),
        "theaudiodb")
    images = []
    self._setFilepaths(params)
    url, url_params = self._getUrlDetails(params, self.URL_ARTISTSEARCH)
    if url:
        json_data = self._getData(self.ARTISTFILEPATH, self.CACHEFILEPATH,
                                  url, url_params)
        if json_data:
            content = json_data.get('artists')
            if content is not None:
                if "strMusicBrainzID" in content[0]:
                    params["mbid"] = content[0].get("strMusicBrainzID")
                if "strArtistFanart" in content[0]:
                    image = content[0].get('strArtistFanart')
                    if image:
                        images.append(image)
                if "strArtistFanart2" in content[0]:
                    image = content[0].get('strArtistFanart2')
                    if image:
                        images.append(image)
                if "strArtistFanart3" in content[0]:
                    image = content[0].get('strArtistFanart3')
                    if image:
                        images.append(image)
                if "strArtistThumb" in content[0]:
                    # Bug fix: this branch previously fetched
                    # 'strArtistWideThumb' (duplicating the branch below),
                    # so the plain thumb was never collected.
                    image = content[0].get('strArtistThumb')
                    if image:
                        images.append(image)
                if "strArtistWideThumb" in content[0]:
                    image = content[0].get('strArtistWideThumb')
                    if image:
                        images.append(image)
                if "strArtistClearart" in content[0]:
                    image = content[0].get('strArtistClearart')
                    if image:
                        images.append(image)
                if "strArtistAlternate" in content[0] and not common.isempty(content[0].get('strArtistAlternate')):
                    params['fullname'] = content[0].get('strArtistAlternate')
                if "strArtist" in content[0] and not common.isempty(content[0].get('strArtist')):
                    params['alias'] = content[0].get('strArtist')
                if "strCountryCode" in content[0] and not common.isempty(content[0].get('strCountryCode')):
                    params['location'] = content[0].get('strCountryCode')
    if not images:
        return []
    else:
        return self._delExclusions(images, params.get('exclusionsfile', ''))
def DeleteFile(filename):
    """Delete *filename* via the Kodi VFS.

    Returns True on success, False when the file does not exist or the
    delete failed.
    """
    if xbmcvfs.exists(filename):
        try:
            xbmcvfs.delete(filename)
            common.trace('Deleting file: %s' % filename, "utilities")
            return True
        except IOError:
            common.error('Unable to delete file: %s' % filename,
                         "utilities")
            return False
        except Exception as e:
            common.error(
                'Unknown error while attempting to delete [%s] file path: %s'
                % (filename, e), "utilities")
            # Bug fix: a stray trailing comma made this `return False,`
            # i.e. the 1-tuple (False,) — which is truthy to callers.
            return False
    else:
        common.trace('File does not exist: %s' % filename, "utilities")
        return False
def _setSkinArtistAlbumInfo(self, artist):
    """Collect album info for *artist* from the providers (keeping the
    largest result set) and publish up to 10 albums as skin properties
    (SlideshowAddon.N.AlbumName/Thumb/Year/Genre + AlbumCount)."""
    common.debug("Collecting album information for artist: %s" %
                 str(artist))
    albums = []
    params = {}
    params['infodir'] = self.dir_cache
    params['lang'] = self.__BIOLANGUAGE
    params['artist'] = artist
    for key in self.PROVIDERS.keys():
        if self._isPlaybackChanged():
            common.debug(
                "Cancel collecting album information due to the change of player content"
            )
            break
        common.debug('Collecting album information from provider: [%s]' %
                     key)
        params['getall'] = common.setting(key + "_all")
        params['clientapikey'] = common.setting(key + "_apikey")
        content = self.PROVIDERS[key].getAlbumList(params)
        # Keep the provider that returned the most albums so far.
        if content is not None and len(content) > len(albums):
            common.debug(
                'Stored album information from provider [%s], found up to %d albums'
                % (key, min(10, len(content))))
            albums = content
        if self._reload:
            self._reload = False
            common.debug(
                "Cancel collecting album information due to the addon configuration update"
            )
            break
    index = 0
    # Each album item is (name, thumb, year, genre); only 10 slots exist.
    for item in albums:
        index += 1
        self.setSkinProperty("SlideshowAddon.%d.AlbumName" % index, item[0])
        self.setSkinProperty("SlideshowAddon.%d.AlbumThumb" % index, item[1])
        self.setSkinProperty("SlideshowAddon.%d.AlbumYear" % index, item[2])
        self.setSkinProperty("SlideshowAddon.%d.AlbumGenre" % index, item[3])
        if index >= 10:
            break
    self.setSkinProperty("SlideshowAddon.AlbumCount", str(index))
    common.trace("Album information setup is done")
def check_xml(files):
    """Git-hook check: validate syntax and configuration of staged
    .xml/.xsd files. Returns True when the commit should be aborted."""
    abort = False
    for entry in files:
        if not entry.path.lower().endswith(('.xml', '.xsd')):
            continue
        data = entry.contents
        common.trace('Checking ' + str(entry.path) + ' syntax...')
        try:
            tree = xml_parser(data.decode())
            # Well-formed: additionally validate the configuration content.
            msg = check_configurations(tree)
            if msg:
                common.error('XML parsing error in ' + entry.path +
                             ' :\n' + msg)
                abort = True
        except ET.ParseError as err:
            common.error('XML parsing error in ' + entry.path + ' :\n' +
                         err.msg + '\n')
            abort = True
    return abort
def dump(self):
    """Dump the current device UI layout with uiautomator and pull the
    resulting window_dump.xml into the working directory.

    Guarded by a class-level flag plus lock so only one dump runs at a
    time; concurrent callers return immediately without dumping.
    NOTE(review): the check-then-set of isDumping is not atomic — two
    threads could both pass the check before either sets the flag.
    """
    if (not DroidAdapter.isDumping):
        DroidAdapter.isDumping = True
        DroidAdapter.__locker.acquire()
        try:
            trace('dumping layout')
            p = subprocess.Popen(
                [self.adbPath + '\\adb', 'shell', 'uiautomator', 'dump'],
                shell=True)
            p.wait()
            p = subprocess.Popen([
                self.adbPath + '\\adb', 'pull', '/sdcard/window_dump.xml',
                '.'
            ], shell=True, cwd='.')
            p.wait()
        finally:
            DroidAdapter.__locker.release()
            DroidAdapter.isDumping = False
def _rotateBackups(self):
    """Prune old backups so at most 'backup_rotation' of them remain.

    Zip backups are removed as files, everything else as directories.
    """
    total_backups = common.setting('backup_rotation')
    # NOTE(review): assumes common.setting returns an int here (the `> 0`
    # and arithmetic below would misbehave on a string) — confirm.
    if total_backups > 0:
        # get a list of valid backup folders
        dirs = self.listBackups()
        if len(dirs) > total_backups:
            # remove backups to equal total wanted
            remove_num = 0
            # update the progress bar if it is available
            # NOTE(review): entries removed from the FRONT of `dirs` —
            # presumably listBackups() returns oldest-first; verify.
            while remove_num < (len(dirs) - total_backups):
                common.trace("Removing backup " + dirs[remove_num][0],
                             "SystemRecovery")
                if dirs[remove_num][0].split('.')[-1] == 'zip':
                    # this is a file, remove it that way
                    self.remoteFS.rmfile(self.remoteBasePath +
                                         dirs[remove_num][0])
                else:
                    self.remoteFS.rmdir(self.remoteBasePath +
                                        dirs[remove_num][0] + "/")
                remove_num += 1
def CheckPath(path, create=True):
    """Return True when *path* exists, optionally creating it when it
    does not (create=True, the default)."""
    common.trace('Checking %s' % path, "utilities")
    if xbmcvfs.exists(path):
        common.trace('Found: %s' % path, "utilities")
        return True
    # Path is missing: create it on demand or report the absence.
    if create:
        common.trace('Not found and try to create it: %s' % path,
                     "utilities")
        xbmcvfs.mkdirs(path)
        return True
    common.trace('Not found: %s' % path, "utilities")
    return False
def detect_faces(self):
    """Run every configured face cascade over the grayscale image and
    accumulate all hits in self.faces.

    minSize is a quarter of the image width, so only large faces are
    considered.
    """
    faces_per_casc = {}  # per-cascade hits (kept locally only)
    self.faces = []
    minSize = int(self.gray.shape[1] / 4)
    settings = {}
    settings.update(self.parent.detect_settings)
    settings['minSize'] = (minSize, minSize)
    trace(
        5, 'Starting face-detection, settings ' +
        pprint.pformat(settings, indent=3, compact=True))
    for name, casc in self.parent.face_cascades:
        faces = casc.detectMultiScale(self.gray, **settings)
        if len(faces) == 0:
            trace(
                6, 'detecting {0}/{1} found NO faces'.format(
                    self.name, name, len(faces)))
        else:
            trace(
                6, 'detecting {0}/{1} found {2} faces\n{3}'.format(
                    self.name, name, len(faces), faces))
        faces_per_casc[name] = faces
        # All cascades contribute to the combined face list (duplicates
        # from overlapping cascades are not merged).
        self.faces.extend(faces)
def detect_eyes_in_faces(self):
    """For each detected face, search the face region for eyes (via the
    glasses cascade) and append the hits — translated back to
    whole-image coordinates — to self.eyes."""
    for i in range(0, len(self.faces)):
        face = self.faces[i]
        (x, y, w, h) = face
        trace(
            8, 'detecting {0} face {1} shape {2}'.format(
                self.name, i, face))
        roi_gray = self.gray[y:y + h, x:x + w]  # face region, grayscale
        roi_color = self.im[y:y + h, x:x + w]
        eyes = self.parent.glasses_casc.detectMultiScale(
            roi_gray,
            scaleFactor=1.12,
            minNeighbors=5,
            minSize=(30, 30))
        trace(
            6,
            'glasses-detecting in {0}, face {1} found {2} eyes: {3} \n{4} '
            .format(self.name, i, len(eyes), type(eyes), eyes))
        if len(eyes) > 0:
            try:
                for (ex, ey, ew, eh) in eyes.tolist():
                    # Eye coords are relative to the face ROI; shift by
                    # the face origin to get image coordinates.
                    e2 = (ex + x, ey + y, ew, eh)
                    self.eyes.append(e2)
            except:
                trace(1, 'Failed to lambda on \n' + str(eyes))
                raise
def _setSkinArtistBiografy(self, artist):
    """Collect a biography for *artist* from the providers (keeping the
    longest one) and publish it as the SlideshowAddon.Biography skin
    property."""
    common.debug("Collecting biography for artist: %s" % str(artist))
    biography = ''
    params = {}
    params['infodir'] = self.dir_cache
    params['lang'] = self.__BIOLANGUAGE
    params['artist'] = artist
    for key in self.PROVIDERS.keys():
        if self._isPlaybackChanged():
            common.debug(
                "Cancel collecting biography due to the change of player content"
            )
            break
        common.trace('Collecting biography from provider: [%s]' % key)
        params['getall'] = common.setting(key + "_all")
        params['clientapikey'] = common.setting(key + "_apikey")
        content = self.PROVIDERS[key].getBiography(params)
        # Keep the longest non-empty biography found so far.
        if content is not None and content and len(content) > len(
                biography):
            common.trace('Stored new biography from provider [%s]' % key)
            biography = content
        if self._reload:
            self._reload = False
            common.debug(
                "Cancel collecting biography due to the addon configuration update"
            )
            break
    self.setSkinProperty("SlideshowAddon.Biography", biography)
    common.trace("Biography setup is done")
def ReadFile(filename):
    """Read and return the contents of *filename* (Kodi VFS preferred,
    plain filesystem fallback), or None when the file is missing or
    unreadable."""
    if xbmcvfs.exists(filename):
        try:
            thefile = xbmcvfs.File(filename, 'r')
        except:
            thefile = open(filename, 'r')
        try:
            try:
                data = thefile.read()
            finally:
                # Bug fix: close the handle even when read() fails so the
                # descriptor is not leaked on the error paths below.
                thefile.close()
        except IOError:
            common.error('Unable to read data from file: %s' % filename,
                         "utilities")
            return None
        except Exception as e:
            common.error(
                'Unknown error while reading data from [%s] file: %s' %
                (filename, str(e)), "utilities")
            return None
        return data
    else:
        common.trace('File does not exist: %s' % filename, "utilities")
        return None
def __init__(self, bGetFromNet):
    """Load the XSL stylesheet — from the web when *bGetFromNet* is
    truthy, otherwise from local disk — and build the transformer."""
    source = self.xsl_http_url if bGetFromNet else self.xsl_file_path
    trace(6, 'Loading xsl from ', source)
    self.xslt = ET.parse(source)
    self.transformer = ET.XSLT(self.xslt)
    trace(8, 'xsl loaded and transformer created: ', self.transformer)
def save_location(self):
    """returns the location where the xml file is saved"""
    try:
        # Lazily default the workspace to the system temp directory.
        if self._temp_workspace is None:
            self._temp_workspace = tempfile.gettempdir()
        return self._temp_workspace
    except:
        line, filename, synerror = trace()
        raise HermesErrorHandler({
            "function": "save_location",
            "line": line,
            "filename": filename,
            "synerror": synerror,
            "arc": str(arcpy.GetMessages(2))
        })
def xmlfile(self):
    """gets the temporary xml file path"""
    try:
        # Create the scratch metadata file on first access.
        if self._temp_xml_file is None:
            self._setup()
        return self._temp_xml_file
    except:
        line, filename, synerror = trace()
        raise HermesErrorHandler({
            "function": "xmlfile",
            "line": line,
            "filename": filename,
            "synerror": synerror,
            "arc": str(arcpy.GetMessages(2))
        })
def datasetProperties(self):
    """ returns a collection of common dataset properties including
    information about the workspace.  The return object is a
    dictionary {} """
    try:
        validationWorkspace = os.path.dirname(self._dataset)
        desc = arcpy.Describe(self._dataset)
        descWrksp = arcpy.Describe(desc.path)  # the containing workspace
        # ParseTableName yields "database, owner, table"; "(null)" parts
        # are normalised to empty strings.
        database, owner, tableName = [i.strip() if i.strip() != "(null)" else "" \
                                      for i in arcpy.ParseTableName(desc.name, validationWorkspace).split(",")]
        # Each property may be absent depending on the dataset/workspace
        # type, so guard every access with hasattr.
        datasetType = desc.datasetType if hasattr(desc, "datasetType") else ""
        workspaceFactoryProgID = descWrksp.workspaceFactoryProgID if hasattr(descWrksp, "workspaceFactoryProgID") else ""
        workspaceType = descWrksp.workspaceType if hasattr(descWrksp, "workspaceType") else ""
        connectionString = descWrksp.connectionString if hasattr(descWrksp, "connectionString") else ""
        alias = desc.aliasName if hasattr(desc, "aliasName") else ""
        dataType = descWrksp.dataType if hasattr(descWrksp, "dataType") else ""
        return {
            "owner": owner,
            "tableName": tableName,
            "alias": alias,
            "database": database,
            "dataType": dataType,
            "datasetType": datasetType,
            "workspace": {
                "type": descWrksp.dataType,
                "path": desc.path,
                "connectionString": connectionString,
                "workspaceType": workspaceType,
                "workspaceFactoryProgID": workspaceFactoryProgID
            }
        }
    except:
        line, filename, synerror = trace()
        raise HermesErrorHandler({
            "function": "datasetProperties",
            "line": line,
            "filename": filename,
            "synerror": synerror,
            "arc": str(arcpy.GetMessages(2))
        })
def convert(self):
    """Convert the dataset's metadata XML document to a dictionary.

    Returns:
        dict produced by _metadata_to_dictionary().
    Raises:
        HermesErrorHandler
    """
    try:
        if self._temp_xml_file is None:
            self._setup()
        # FIX: the original called open(...).read() and never closed the
        # handle; a context manager releases it deterministically.
        with open(self._temp_xml_file, 'rb') as reader:
            tree = ET.XML(text=reader.read())
        return self._metadata_to_dictionary(tree)
    except:
        line, filename, synerror = trace()
        raise HermesErrorHandler(
            {
                "function": "convert",
                "line": line,
                "filename": filename,
                "synerror": synerror,
                "arc" : str(arcpy.GetMessages(2))
            }
        )
def save(self, d=None):
    """Commit metadata changes from a dictionary back to the dataset.

    Inputs:
        d - optional - dictionary to convert to metadata XML and apply to
            the dataset. When None, the dictionary from convert() is used.
    Returns:
        True on success.
    Raises:
        HermesErrorHandler - on any failure, or when d is not a dictionary.
    """
    try:
        if d is None:
            d = self.convert()
        # raising here is caught below and wrapped, matching the original
        # control flow (the original's trailing `return False` was unreachable)
        if not isinstance(d, dict):
            raise Exception("Input must be of type dictionary")
        res = self._dictionary_to_metadata(d)
        # the context manager flushes and closes the file; the original's
        # manual flush()/close()/del were redundant
        with open(self._temp_xml_file, 'wb') as writer:
            writer.write(res)
        arcpy.MetadataImporter_conversion(self._temp_xml_file, self._dataset)
        if os.path.isfile(self._temp_xml_file):
            os.remove(self._temp_xml_file)
        # reset cached state so the next access rebuilds from the dataset
        self._temp_xml_file = None
        self._temp_workspace = None
        self._xmlText = None
        return True
    except:
        line, filename, synerror = trace()
        raise HermesErrorHandler(
            {
                "function": "save",
                "line": line,
                "filename": filename,
                "synerror": synerror,
                # FIX: report the arcpy messages like the sibling methods do;
                # the original duplicated synerror under "arc"
                "arc" : str(arcpy.GetMessages(2))
            }
        )
def _setup(self):
    """Create a blank temporary metadata file and populate it from the dataset.

    Side effects:
        Sets self._temp_xml_file to the new file's path.
    Raises:
        HermesErrorHandler
    """
    try:
        # NOTE(review): save_location is used here as an attribute; confirm it
        # is decorated as a @property, otherwise mkstemp receives a bound method.
        fd, filepath = tempfile.mkstemp(".xml",
                                        dir=self.save_location,
                                        text=True)
        self._temp_xml_file = filepath
        # seed the file with an empty metadata root; the with-block closes the
        # descriptor (the original's explicit close() and `del fd` were redundant)
        with os.fdopen(fd, "w") as f:
            f.write("<metadata />")
        arcpy.MetadataImporter_conversion(self._dataset, filepath)
    except:
        line, filename, synerror = trace()
        raise HermesErrorHandler(
            {
                "function": "_setup",
                "line": line,
                "filename": filename,
                "synerror": synerror,
                "arc" : str(arcpy.GetMessages(2))
            }
        )
def removeUserData(self,users=None): admin = None portal = None user = None adminusercontent = None userFolder = None userContent = None userItem = None folderContent = None try: admin = arcrest.manageorg.Administration(securityHandler=self._securityHandler) portal = admin.portals(portalId='self') if users is None: users = portal.users(start=1, num=100) if users: for user in users['users']: print user['username'] adminusercontent = admin.content.usercontent(username=user['username']) userContent = admin.content.getUserContent(username=user['username']) for userItem in userContent['items']: print adminusercontent.deleteItems(items=userItem['id']) if 'folders' in userContent: for userFolder in userContent['folders']: folderContent = admin.content.getUserContent(username=user['username'],folderId=userFolder['id']) for userItem in folderContent['items']: print adminusercontent.deleteItems(items=userItem['id']) print adminusercontent.deleteFolder(folderId=userItem['id']) except arcpy.ExecuteError: line, filename, synerror = Common.trace() raise ArcRestHelperError({ "function": "DeleteFeaturesFromFeatureLayer", "line": line, "filename": filename, "synerror": synerror, "arcpyError": arcpy.GetMessages(2), } ) except: line, filename, synerror = Common.trace() raise ArcRestHelperError({ "function": "DeleteFeaturesFromFeatureLayer", "line": line, "filename": filename, "synerror": synerror, } ) finally: admin = None portal = None user = None adminusercontent = None userFolder = None userContent = None userItem = None folderContent = None del admin del portal del user del adminusercontent del userFolder del userContent del userItem del folderContent gc.collect()
return get_root(element_thing.getroottree()) return element_thing class TestSrFetch(unittest.TestCase): pass if __name__ == '__main__': for a in sys.argv: if a.find('unittest') >= 0: sys.exit(unittest.main()) parser = argparse.ArgumentParser(description='My favorite argparser.') parser.add_argument('-l', '--tracelevel', help='Verbosity level 1 is important like error, 9 is unneeded debuginfo', default=4, type=int) parser.add_argument('--avsnitt', help='avsnitt', default=None, type=int, required=False) parser.add_argument('--progid', help='progid', default=None, type=int, required=False) parser.add_argument('--artikel', help='artikel', default=None, type=int, required=False) parser.add_argument('--url', help='use url rather than deduce from progid', default=None, required=False) r = parser.parse_args(None) common.tracelevel = r.tracelevel parser = SrProgramPageParser(common.tracelevel) parser.url = r.url episodes = parser.episodes() common.trace(3, 'SrProgramPageParser: result: ' , sorted(episodes, key=lambda ep: ep['avsnitt'])) common.trace(5, 'newest episode ', parser.timestamp)
def trace(self, level, *args):
    """Forward a trace message to common.trace, tagged with this parser's name."""
    tag = 'SrProgramPageParser: '
    common.trace(level, tag, args)
return get_root(element_thing.getroottree()) return element_thing class TestSrFetch(unittest.TestCase): pass #args 4430 12 if __name__ == '__main__': print(sys.argv) do_proxy = False for a in sys.argv: if a.find('unittest') >= 0: common.trace(4, 'Running sr_feed-unitttests') sys.exit(unittest.main(argv=['v'])) parser = argparse.ArgumentParser(description='My favorite argparser.') parser.add_argument('-l', '--tracelevel', help='Verbosity level 1 is important like error, 9 is unneeded debuginfo', default=4, type=int) parser.add_argument('--avsnitt', help='avsnitt', default=None, type=int, required=False) parser.add_argument('--progid', help='progid', default=4430, type=int, required=False) parser.add_argument('--artikel', help='artikel', default=None, type=int, required=False) parser.add_argument('--feed', help='Full feed url', default=None, required=False) parser.add_argument('--url', help='Full feed url', default=None, required=False) parser.add_argument('--source', help="Should parse rss or html. rss/html", default='rss', required=False) parser.add_argument('--format', help="rss/atom", default='rss', required=False) parser.add_argument('--proxy', help="if urls should to proxy data", default=False, required=False) r = parser.parse_args(None)
def trace(self, level, *args):
    """Forward a trace message to common.trace, tagged with the feed's name."""
    tag = 'SrFeed: '
    common.trace(level, tag, args)
def log(self, level, *args):
    """Forward a log message to common.trace, prefixed with this app's name."""
    app = self.app_name
    common.trace(level, app, ': ', args)
def transform(self, page_dict_list, title, timestamp, description=None,
              logo_url=None, lang=None, copyright=None):
    """Build an RSS 2.0 element tree from a list of episode dictionaries.

    Inputs:
        page_dict_list - list of episode dicts with keys 'avsnitt', 'title',
                         'timestamp' and optionally 'description'
        title          - channel title (also used as image title)
        timestamp      - channel lastBuildDate/pubDate string (may be falsy)
        description    - optional channel description
        logo_url       - optional channel image url/link
        lang, copyright - optional channel metadata
    Returns:
        the <rss> root element.
    Raises:
        re-raises AttributeError from enclosure construction after logging.
    """
    # (removed unused Atom namespace plumbing and the unused getfirst/get
    # helpers from the original - the generated document is plain RSS 2.0)
    rss_root = ET.Element('rss', version='2.0')
    rss_channel = ET.SubElement(rss_root, 'channel')
    rss_title = ET.SubElement(rss_channel, 'title')
    rss_title.text = title
    if description:
        ET.SubElement(rss_channel, 'description').text = description
    if timestamp:
        ET.SubElement(rss_channel, 'lastBuildDate').text = timestamp
        ET.SubElement(rss_channel, 'pubDate').text = timestamp
    if logo_url:
        rss_image = ET.SubElement(rss_channel, 'image')
        ET.SubElement(rss_image, 'url').text = logo_url
        ET.SubElement(rss_image, 'title').text = title
        ET.SubElement(rss_image, 'link').text = logo_url
    if not lang is None:
        ET.SubElement(rss_channel, 'language').text = lang
    if not copyright is None:
        ET.SubElement(rss_channel, 'copyright').text = copyright
    for episode_dict in page_dict_list:
        rss_item = ET.SubElement(rss_channel, 'item')
        avsnitt_id = episode_dict['avsnitt']
        # episode id as a non-permalink guid
        guid = ET.SubElement(rss_item, 'guid')
        guid.set('isPermaLink', 'false')
        guid.text = avsnitt_id
        ET.SubElement(rss_item, 'title').text = episode_dict['title']
        timestamp = episode_dict['timestamp']
        ET.SubElement(rss_item, 'pubDate').text = email.utils.format_datetime(timestamp)
        ET.SubElement(rss_item, 'description').text = episode_dict.get('description', '')
        href_link = ET.SubElement(rss_item, 'link', type="text/html")
        href_link.text = self.text_url_formater(episode_dict['avsnitt'])
        trace(7, 'text href ', ET.tostring(href_link, pretty_print=True))
        try:
            media_url = self.media_url_formater(episode_dict['avsnitt'])
            filename, file_ext = os.path.splitext(os.path.basename(urllib.parse.urlparse(media_url).path))
            if file_ext is None or file_ext == '':
                file_ext = 'm4a'  # default extension when the url has none
            enclosure_link = ET.SubElement(rss_item, 'enclosure',
                                           type='audio/' + file_ext.strip('.'),
                                           url=media_url)
            trace(7, 'page2rss enclosure ', ET.tostring(enclosure_link, pretty_print=True))
            ET.SubElement(rss_item, 'link').text = media_url
        except AttributeError as e:
            # FIX: the original logged the undefined name `atom_enclosure`,
            # which raised NameError and masked the real error.
            trace(1, 'failed to build enclosure for avsnitt "', avsnitt_id, '"')
            raise
    return rss_root
def trace(level, *args):
    """Module-level trace helper: forwards to common.trace with an 'sr_helpers' tag."""
    tag = 'sr_helpers: '
    common.trace(level, tag, args)
def parse_find_title(root: ET.ElementTree) -> str:
    """Extract an episode title from an SR page fragment.

    Tries a series of known page layouts (newest first) and returns the first
    title found, or None when no layout matches.
    """
    # Sample from 2018-09-22
    # <div class="audio-heading__title">
    #   <a href="/avsnitt/1153306" data-clickable-content="link" class="heading">Luftens dag!</a>
    #   <div class="audio-heading__meta">...</div>
    # </div>
    try:
        episode_body = XmlHandler.find_element_attribute(root, 'div', 'class', "audio-heading__title")
        episode_a_href = XmlHandler.find_element_attribute(episode_body, 'a', 'class', "heading")
        return episode_a_href.text_content().strip()
    except AttributeError:
        pass
    # <div class="audio-box-title">
    try:
        audio_box_title = XmlHandler.find_element_attribute(root, 'div', 'class', "audio-box-title")
        title_span = XmlHandler.find_element_attribute(audio_box_title, 'span', 'class', "responsive-audio-box-title")
        return title_span.text_content().strip()
    except AttributeError:
        pass
    # <div class="audio-episode-title audio-info">
    try:
        audio_episode_title = XmlHandler.find_element_attribute(root, 'div', 'class', "audio-episode-title audio-info")
        title_span = XmlHandler.find_element_attribute(audio_episode_title, 'span', 'class', "header2")
        return title_span.text_content().strip()
    except AttributeError:
        pass
    # <div class="latest-episode__playimage">
    try:
        episode_body = XmlHandler.find_element_attribute(root, 'div', 'class', "episode*-body")
        episode__content = XmlHandler.find_element_attribute(episode_body, 'div', 'class', "*episode__content")
        title_span = XmlHandler.find_element_attribute(episode__content, 'span', 'class', "screen-reader-description")
        return title_span.text_content().strip()
    except AttributeError:
        pass
    try:
        episode_body = XmlHandler.find_element_attribute(root, 'div', 'class', "episode-list__item__title")
        episode_a_href = XmlHandler.find_element_attribute(episode_body, 'a', 'class', "heading heading--small")
        return episode_a_href.text_content().strip()
    except AttributeError:
        pass
    # <svg class="play-icon-play-pause"> (which is inside an a-href tag):
    # walk up the ancestors until the enclosing <a> is found.
    tag = XmlHandler.find_element_attribute(root, 'svg', 'class', "play-icon-play-pause")
    while tag is not None:
        if tag.tag == 'a':
            return tag.attrib.get('aria-label')
        # FIX: the original never advanced `tag`, looping forever whenever the
        # svg was found; also `while tag:` relied on element truthiness.
        tag = tag.getparent()
    common.trace(8, 'Failed to find a title in div \n', ET.tostring(root, pretty_print=True))
    return None
def my_fnmatch(val, match):
    """fnmatch wrapper that logs and returns False on a TypeError
    (e.g. non-string inputs) instead of raising."""
    try:
        matched = fnmatch(val, match)
    except TypeError as ex:
        trace(1, "fnmatch failed \n val:", str(val), "\n match: ", match, "\n Exception ", ex)
        return False
    return matched
# Interpolation phase of the CG line search (continuation of CG's inner loop;
# the enclosing def appears elsewhere in this file). Refines the step w3 inside
# the bracket [w2, w4] until the Wolfe-style conditions hold or the per-search
# evaluation budget M runs out.
while (abs(d3) > -SIG * d0 or f3 > f0 + w3 * RHO * d0) and M > 0:
    # tighten the bracket: the trial point replaces whichever endpoint it invalidates
    if d3 > 0 or f3 > f0 + w3 * RHO * d0:
        w4, f4, d4 = w3, f3, d3
    else:
        w2, f2, d2 = w3, f3, d3
    try:
        if f4 > f0:
            # quadratic interpolation from (w2, f2, d2) and f4
            w3 = w2 - (0.5 * d2 * (w4 - w2) ** 2) / (f4 - f2 - d2 * (w4 - w2))
        else:
            # cubic interpolation using both function values and slopes
            A = 6 * (f2 - f4) / (w4 - w2) + 3 * (d4 + d2)
            B = 3 * (f4 - f2) - (2 * d2 + d4) * (w4 - w2)
            w3 = w2 + (sqrt(B * B - A * d2 * (w4 - w2) ** 2) - B) / A
    except Exception, e:
        # numerical failure (e.g. negative discriminant / zero denominator):
        # mark w3 invalid so the bisection fallback below kicks in
        print >> sys.stderr, 'Exception = %s' % e
        trace()
        w3 = float('NaN')
    if isnan(w3) or isinf(w3):
        w3 = (w2 + w4) / 2  # bisect when interpolation failed
    # keep w3 strictly inside the bracket, at least INT away from either end
    w3 = max(min(w3, w4 - INT * (w4 - w2)), w2 + INT * (w4 - w2))
    f3, df3 = f(w + w3 * s, **argc)
    fval.append(f3)
    if f3 < F0:
        # remember the best point seen so far
        w0, F0, dF0 = w + w3 * s, f3, df3
    M -= 1
    I += 1
    print >> sys.stderr, 'Iter = %4.4i Cost = %lf' % (I, f3)
    d3 = float(df3.T * s)  # directional derivative along the search direction
# accept the step only when both conditions are satisfied
if abs(d3) < -SIG * d0 and f3 < f0 + w3 * RHO * d0:
    w, f0 = w + w3 * s, f3
def removeUserGroups(self,users=None): admin = None userCommunity = None portal = None groupAdmin = None user = None userCommData = None group = None try: admin = arcrest.manageorg.Administration(securityHandler=self._securityHandler) userCommunity = admin.community portal = admin.portals(portalId='self') if users is None: users = portal.users(start=1, num=100) groupAdmin = userCommunity.groups if users: for user in users['users']: print "Loading groups for user: %s" % user['username'] userCommData = userCommunity.getUserCommunity(username=user['username']) if 'groups' in userCommData: if len(userCommData['groups']) == 0: print "No Groups Found" else: for group in userCommData['groups']: if group['owner'] == user['username']: print groupAdmin.deleteGroup(groupID=group['id']) else: print "No Groups Found" except arcpy.ExecuteError: line, filename, synerror = Common.trace() raise ArcRestHelperError({ "function": "DeleteFeaturesFromFeatureLayer", "line": line, "filename": filename, "synerror": synerror, "arcpyError": arcpy.GetMessages(2), } ) except: line, filename, synerror = Common.trace() raise ArcRestHelperError({ "function": "DeleteFeaturesFromFeatureLayer", "line": line, "filename": filename, "synerror": synerror, } ) finally: admin = None userCommunity = None portal = None groupAdmin = None user = None userCommData = None group = None del admin del userCommunity del portal del groupAdmin del user del userCommData del group gc.collect()
def CG(f, w, max_fc, **argc):
    ''' conjugate gradient routine for optimization

    f      - callable returning (cost, gradient) for a weight vector
             (gradient is matrix-like: .T and * are used on it)
    w      - initial weight vector
    max_fc - maximum total number of function evaluations
    argc   - extra keyword arguments forwarded to f

    NOTE(review): this function's body continues elsewhere in this file
    (the interpolation phase of the line search); the visible part covers
    setup and the extrapolation phase.
    '''
    # line-search tuning constants
    INT = 0.1        # keep new points at least this fraction inside the bracket
    EXT = 3.0        # extrapolate at most this factor beyond the current step
    MAX = 20         # max function evaluations per line search
    RATIO = 10       # NOTE(review): unused in the visible portion of the code
    SIG = 0.1        # curvature condition constant
    RHO = SIG / 2    # sufficient-decrease condition constant
    red = 1.0
    # if length > 0: S = 'Linesearch'
    # else: S = 'Function evaluation'
    I = J = 1
    LS_failed = False
    f0, df0 = f(w, **argc)  # initial cost and gradient
    fval = [f0]             # history of cost values across evaluations
    print >> sys.stderr, 'Iter = %4.4i Cost = %lf' % (I, f0)
    s = -df0                # initial search direction: steepest descent
    d0 = float(-s.T * s)    # slope along s (negative by construction)
    w3 = red / (1.0 - d0)   # initial step size
    while I < max_fc:
        w0, F0, dF0 = w, f0, df0     # remember the best point so far
        M = min(MAX, max_fc - I)     # evaluation budget for this line search
        while True:  # extrapolation phase of the line search
            w2, f2, d2, f3, df3 = 0, f0, d0, f0, df0
            success = False
            while not success and M > 0:
                try:
                    M -= 1
                    I += 1
                    f3, df3 = f(w + w3 * s, **argc)
                    fval.append(f3)
                    print >> sys.stderr, 'Iter = %4.4i Cost = %lf' % (I, f3)
                    # treat non-finite cost or gradient as a failed evaluation
                    if isnan(f3) or isinf(f3) or np.any(np.isnan(df3) + np.isinf(df3)):
                        raise NameError, ('error')
                    success = True
                except Exception, e:
                    # evaluation failed: bisect toward w2 and retry
                    print >> sys.stderr, 'Exception = %s' % e
                    trace()
                    w3 = (w2 + w3) / 2.0
            if f3 < F0:
                w0, F0, dF0 = w + w3 * s, f3, df3  # new best point
            d3 = float(df3.T * s)  # slope at the trial point
            # stop extrapolating when the slope or cost condition trips,
            # or the evaluation budget is spent
            if d3 > SIG * d0 or f3 > f0 + w3 * RHO * d0 or M == 0:
                break
            # shift the bracket points: previous <- current <- trial
            w1, f1, d1 = w2, f2, d2
            w2, f2, d2 = w3, f3, d3
            # cubic extrapolation coefficients from two points and two slopes
            A = 6 * (f1 - f2) + 3 * (d2 + d1) * (w2 - w1)
            B = 3 * (f2 - f1) - (2 * d1 + d2) * (w2 - w1)
            # if not isinstance(w3, float) or isnan(w3) or isinf(w3) or w3 < 0: w3 = w2 * EXT
            try:
                w3 = w1 - d1 * (w2 - w1) ** 2 / (B + sqrt(B * B - A * d1 * (w2 - w1)))  # add sth
            except Exception, e:
                # numerical problem in the extrapolation: take the maximum step
                print >> sys.stderr, 'Exception = %s' % e
                trace()
                w3 = w2 * EXT
                continue
            # clamp the new step into the acceptable range
            if w3 < 0:
                w3 = w2 * EXT
            elif w3 > w2 * EXT:
                w3 = w2 * EXT
            elif w3 < w2 + INT * (w2 - w1):
                w3 = w2 + INT * (w2 - w1)
def trace(self, lvl, *args):
    """Forward a trace message to common.trace, tagged with this class's name."""
    tag = 'RssFromFiles: '
    common.trace(lvl, tag, args)