def main(opts):
    # set up our channel
    conn_factory = ConnectionFactory()
    conn_factory.setUri(config['RABBITMQ_URI'])
    conn = conn_factory.newConnection()
    channel = conn.createChannel()
    channel.queueDeclare(opts.queue_name, False, False, False, None)
    # deliver only one message at a time before requiring an acknowledgement
    channel.basicQos(1)

    workers = [PdfExtractor(channel, opts) for i in xrange(opts.workers)]

    log.info("creating pool with %d threads" % opts.workers)
    tpool = Executors.newFixedThreadPool(opts.workers)

    log.info("executing threads")
    futures = tpool.invokeAll(workers)

    log.info("shutting down thread pool")
    tpool.shutdown()
    try:
        if not tpool.awaitTermination(50, TimeUnit.SECONDS):
            log.info("thread pool not shutting down; trying again")
            tpool.shutdownNow()
            if not tpool.awaitTermination(50, TimeUnit.SECONDS):
                log.error("Pool did not terminate")
    except InterruptedException:
        log.info("exception during thread pool shutdown; trying again")
        tpool.shutdownNow()
        Thread.currentThread().interrupt()

def run_stitch(filePath):
    basePath, baseName = os.path.split(filePath)
    imagePrefix = os.path.splitext(baseName)[0]
    resizeName = basePath + "/" + baseName + "_tiles/resized/"
    checkName = basePath + "/" + baseName + "_tiles/resized/" + imagePrefix + "_seq/"
    activeName = basePath + "/" + baseName + "_tiles"
    IJ.log("Stitching " + filePath + "...")
    if not os.path.isdir(checkName):
        thread = Thread.currentThread()
        originalThread = thread.getName()
        thread.setName("Run$_stitch_process")
        options = ("choose=" + resizeName +
                   " select=" + activeName + "/tile_info.txt" +
                   " image_prefix=" + imagePrefix)
        Macro.setOptions(Thread.currentThread(), options)
        try:
            IJ.run("Stitch wrapper", options)
            IJ.log("succeeded")
            returnVal = 0
        except:
            IJ.log("failed")
            os.rmdir(checkName)
            returnVal = 1
        thread.setName(originalThread)
        Macro.setOptions(thread, None)
    else:
        IJ.log("skipped")
        returnVal = 2
    return returnVal

def setUpClass(self):
    rg = ResourceGroupCi()
    self.resource_group = rg.resourceName
    self.client = AzureClient.new_instance(rg.subscription)

    webjob_ci = TriggeredWebJobCi()
    webjob_ci.container = rg
    webjob_ci.file = File(Thread.currentThread().getContextClassLoader()
                          .getResource("webjob.zip").toURI())

    continuous_webjob_ci = ContinuousWebJobCi()
    continuous_webjob_ci.container = rg
    continuous_webjob_ci.file = File(Thread.currentThread().getContextClassLoader()
                                     .getResource("continuous_webjob.zip").toURI())

    webapp_ci = WebAppCi()
    webapp_ci.appSettings = {}
    webapp_ci.customConnectionStrings = {}
    webapp_ci.sqlServerConnectionStrings = {}
    webapp_ci.sqlDatabaseConnectionStrings = {}

    self.client.create_resource_group(rg.resourceName, rg.resourceLocation)
    define_app_service_plan.create_or_update(AppServicePlanCi(), rg)
    define_web_app.create_or_update(webapp_ci, rg)
    self.client.wait_for_kudu_services(webjob_ci.appName)

    self.webjob = webjob_ci.webJobName
    self.site_name = webjob_ci.appName
    self.webjob_ci = webjob_ci
    self.continuous_webjob_ci = continuous_webjob_ci
    self.continuous_webjob = continuous_webjob_ci.webJobName

def tick(self, framerate=0):
    """
    Call once per program cycle, returns ms since last call.
    An optional framerate will add pause to limit rate.
    """
    while self._repaint_sync.get():
        try:
            self._thread.sleep(1)
        except InterruptedException:
            Thread.currentThread().interrupt()
            break
    self._time = System.nanoTime() // 1000000
    if framerate:
        time_pause = (1000 // framerate) - (self._time - self._time_init)
        if time_pause > 0:
            try:
                self._thread.sleep(time_pause)
            except InterruptedException:
                Thread.currentThread().interrupt()
            self._time = System.nanoTime() // 1000000
    if self._pos:
        self._pos -= 1
    else:
        self._pos = 9
    self._time_diff[self._pos] = self._time - self._time_init
    self._time_init = self._time
    return self._time_diff[self._pos]

def __init__(self):
    """ Initialize tests """
    Logger.getLogger("net.spy.memcached").setLevel(Level.DEBUG)
    self.clients = []

    if USE_GLOBAL_CLIENT:
        # use global client
        self.client = global_client
    else:
        cfb = ConnectionFactoryBuilder()
        self.client = ArcusClient.createArcusClient(arcus_cloud, service_code, cfb)
        print 'Wait for per-thread client to be connected to Arcus cloud (%d seconds)' % DEFAULT_CONNECTION_WAIT
        Thread.currentThread().sleep(DEFAULT_CONNECTION_WAIT * 1000)

    self.flush_counter = 0
    self.tests = []

    # insert operations
    self.tests.append(Test(1, "KeyValue").wrap(self.KeyValue))
    self.tests.append(Test(2, "Collection_Btree").wrap(self.Collection_Btree))
    self.tests.append(Test(3, "Collection_Set").wrap(self.Collection_Set))
    self.tests.append(Test(4, "Collection_List").wrap(self.Collection_List))

def run(self):
    while True:
        connection = httplib.HTTPConnection("sleepingbeauty.herokuapp.com")
        connection.request("GET", "/rough_movements/last_time.txt")
        response = connection.getresponse()
        if response.status == 200:
            self.arduino.send_rough_data(int(response.read()))
        JThread.currentThread().sleep(ROUGH_TIME)

def run(self):
    Thread.currentThread().setPriority(Thread.MAX_PRIORITY)
    firstTime = 1
    while not self.stop_event.isSet():
        if not firstTime:
            sys.stdout.write("\010" + self.maskChar)
        firstTime = 0
        Thread.currentThread().sleep(1)

def add_url_post9(self, url):
    # In Java 9+ the system class loader no longer extends URLClassLoader, so the
    # trick of adding URLs to it no longer works for dynamic class loading. Instead
    # we define our own class loader and install it as the context class loader of
    # the current thread.
    global sysloader
    if not sysloader:
        sysloader = Java9ClassLoader()
        Thread.currentThread().setContextClassLoader(sysloader)
    return self.add_url_with_url_classloader(sysloader, url)

def flip(self):
    """
    Repaint display.
    """
    self._rect_list = self._surface_rect
    try:
        SwingUtilities.invokeAndWait(self)
    except InterruptedException:
        Thread.currentThread().interrupt()

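# Hedged sketch, not part of the original snippet: SwingUtilities.invokeAndWait(self)
# above implies the enclosing display object implements java.lang.Runnable, with a
# run() that does the actual painting on the Swing event dispatch thread. A minimal
# shape of such a class, with illustrative names (Repainter, component), might be:
from java.lang import Runnable

class Repainter(Runnable):
    def __init__(self, component):
        self.component = component

    def run(self):
        # executed on the event dispatch thread by invokeAndWait
        self.component.repaint()
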
def setUp(self):
    self.orig_context = Thread.currentThread().contextClassLoader

    class AbstractLoader(ClassLoader):
        def __init__(self):
            ClassLoader.__init__(self)
            c = self.super__defineClass("ContextAbstract", CONTEXT_ABSTRACT, 0,
                                        len(CONTEXT_ABSTRACT), ClassLoader.protectionDomain)
            self.super__resolveClass(c)

    Thread.currentThread().contextClassLoader = AbstractLoader()

def run(self):
    me = Thread.currentThread()
    me.setPriority(Thread.MIN_PRIORITY)
    while self.thread == Thread.currentThread():
        try:
            Thread.sleep(1)
        except InterruptedException:
            return
        self._draw()

def shutdown_and_await_termination(pool, timeout):
    pool.shutdown()
    try:
        if not pool.awaitTermination(timeout, TimeUnit.SECONDS):
            pool.shutdownNow()
            if not pool.awaitTermination(timeout, TimeUnit.SECONDS):
                print >> sys.stderr, "Pool did not terminate"
    except InterruptedException, ex:
        pool.shutdownNow()
        Thread.currentThread().interrupt()

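# Hedged usage sketch for shutdown_and_await_termination above: submit a few no-op
# Runnables to a fixed pool and then drain it. The pool size (2), task count (4) and
# 30-second timeout are illustrative values, not taken from the original code.
from java.lang import Runnable
from java.util.concurrent import Executors

class Noop(Runnable):
    def run(self):
        pass

def demo_shutdown():
    pool = Executors.newFixedThreadPool(2)
    for _ in range(4):
        pool.submit(Noop())
    shutdown_and_await_termination(pool, 30)
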
def delay(self, time):
    """
    **pyj2d.time.delay**

    Pause for given time (in ms). Return ms paused.
    """
    start = System.nanoTime() // 1000000
    try:
        Thread.sleep(time)
    except InterruptedException:
        Thread.currentThread().interrupt()
    return (System.nanoTime() // 1000000) - start

def _fix_classloader_problems(self):
    # Get path to the jython jar
    jython_jar = None
    for path in sys.path:
        if '.jar' in path and 'jython' in path.lower():
            jython_jar = path[:path.index('.jar') + 4]
    if jython_jar is None:
        raise Exception("Could not locate jython jar in path!")
    classloader = URLClassLoader(
        [URL("file://" + jython_jar)],
        JavaThread.currentThread().getContextClassLoader())
    JavaThread.currentThread().setContextClassLoader(classloader)

def run(self):
    while self._initialized:
        channel_active = [self._channels[id] for id in self._channel_pool
                          if self._channels[id]._active]
        if not channel_active:
            try:
                self._thread.sleep(1)
            except InterruptedException:
                Thread.currentThread().interrupt()
                self.quit()
            continue
        if len(channel_active) > 1:
            for channel in channel_active:
                try:
                    data, data_len, lvol, rvol = channel._get()
                except AttributeError:
                    continue
                self._mixer.setAudioData(data, data_len, lvol, rvol)
            data_len = self._mixer.getAudioData(self._byteArray)
            if data_len > 0:
                try:
                    self._mixer.write(self._byteArray, 0, data_len)
                except IllegalArgumentException:
                    nonIntegralByte = data_len % self._audio_format.getFrameSize()
                    if nonIntegralByte:
                        data_len -= nonIntegralByte
                        try:
                            self._mixer.write(self._byteArray, 0, data_len)
                        except (IllegalArgumentException, LineUnavailableException):
                            pass
                except LineUnavailableException:
                    pass
        else:
            try:
                data, data_len, lvol, rvol = channel_active[0]._get()
            except AttributeError:
                data_len = 0
            if data_len > 0:
                if lvol < 1.0 or rvol < 1.0:
                    data = self._mixer.processVolume(data, data_len, lvol, rvol)
                try:
                    self._mixer.write(data, 0, data_len)
                except IllegalArgumentException:
                    nonIntegralByte = data_len % self._audio_format.getFrameSize()
                    if nonIntegralByte:
                        data_len -= nonIntegralByte
                        try:
                            self._mixer.write(data, 0, data_len)
                        except (IllegalArgumentException, LineUnavailableException):
                            pass
                except LineUnavailableException:
                    pass
    self._quit()

def run(self):
    if not self.arduino:
        self.connect_arduino()
    JThread.currentThread().sleep(START_TIME)
    self.serial_write("Time" + self.get_time() + "a")
    threads = [
        JThread(RoughMovementThread(self)),
        JThread(ReallyRoughMovementThread(self)),
        JThread(LightSwitchThread(self)),
        JThread(ArduinoListenerThread(self)),
        JThread(BluetoothWaitThread(self)),
    ]
    for thread in threads:
        thread.start()

def update(self, rect_list=None):
    """
    Repaint display.
    Optional rect or rect list to specify regions to repaint.
    """
    if isinstance(rect_list, list):
        self._rect_list = rect_list
    elif rect_list:
        self._rect_list = [rect_list]
    else:
        self._rect_list = self._surface_rect
    try:
        SwingUtilities.invokeAndWait(self)
    except InterruptedException:
        Thread.currentThread().interrupt()

def run(self):
    while True:
        if self.arduino.block_light_switch:
            JThread.currentThread().sleep(LIGHT_POWER_TIME)
            continue
        connection = httplib.HTTPConnection("sleepingbeauty.herokuapp.com")
        connection.request("GET", "/light_power/last.txt")
        response = connection.getresponse()
        if response.status == 200:
            data = [int(x) for x in response.read().split(',')]
            if len(data) == 1:
                data.append(int(time.time() * 1000))
            print "weblight ", data
            self.arduino.switch_arduino_lights(*data)
        JThread.currentThread().sleep(LIGHT_POWER_TIME)

def setClassLoaderAndCheck(self, orig_jar, prefix, compile_path=''):
    # Create a new jar and compile prefer_compiled into it
    orig_jar = test_support.findfile(orig_jar)
    jar = os.path.join(self.temp_dir, os.path.basename(orig_jar))
    shutil.copy(orig_jar, jar)
    code = os.path.join(self.temp_dir, 'prefer_compiled.py')
    fp = open(code, 'w')
    fp.write('compiled = True')
    fp.close()
    py_compile.compile(code)
    zip = zipfile.ZipFile(jar, 'a')
    zip.write(os.path.join(self.temp_dir, 'prefer_compiled$py.class'),
              os.path.join(compile_path, 'jar_pkg', 'prefer_compiled$py.class'))
    zip.close()
    Thread.currentThread().contextClassLoader = test_support.make_jar_classloader(jar)
    import flat_in_jar
    self.assertEquals(flat_in_jar.value, 7)
    import jar_pkg
    self.assertEquals(prefix + '/jar_pkg/__init__.py', jar_pkg.__file__)
    from jar_pkg import prefer_compiled
    self.assertEquals(prefix + '/jar_pkg/prefer_compiled$py.class', prefer_compiled.__file__)
    self.assert_(prefer_compiled.compiled)
    self.assertRaises(NameError, __import__, 'flat_bad')
    self.assertRaises(NameError, __import__, 'jar_pkg.bad')

def test_path_in_pyclasspath(self):
    jar = self.prepareJar('classimport_Lib.jar')
    compiled = self.compileToJar(jar, 'Lib')
    Thread.currentThread().contextClassLoader = test_support.make_jar_classloader(jar)
    with test_support.DirsOnSysPath():
        sys.path = ['__pyclasspath__/Lib']
        self.checkImports('__pyclasspath__/Lib', compiled)

def tearDown(self):
    Thread.currentThread().contextClassLoader = self.orig_context
    sys.path = self.orig_path
    shutil.rmtree(self.temp_dir)
    for module in sys.modules.keys():
        if module not in self.modules:
            del sys.modules[module]

def setUp(self):
    super(PyLuceneThreadTestCase, self).setUp()

    self.classLoader = Thread.currentThread().getContextClassLoader()

    writer = self.getWriter(analyzer=StandardAnalyzer())

    doc1 = Document()
    doc2 = Document()
    doc3 = Document()
    doc4 = Document()
    doc1.add(Field("field", "one", TextField.TYPE_STORED))
    doc2.add(Field("field", "two", TextField.TYPE_STORED))
    doc3.add(Field("field", "three", TextField.TYPE_STORED))
    doc4.add(Field("field", "one", TextField.TYPE_STORED))

    writer.addDocument(doc1)
    writer.addDocument(doc2)
    writer.addDocument(doc3)
    writer.addDocument(doc4)
    writer.commit()
    writer.close()

    self.testData = [('one', 2), ('two', 1), ('three', 1), ('five', 0)] * 500
    self.lock = threading.Lock()
    self.totalQueries = 0

def call(self):
    t = Thread.currentThread()
    if t.isInterrupted() or not t.isAlive():
        return None
    t0 = System.nanoTime()
    r = self.fn(*self.args, **self.kwargs)
    # return the result along with the elapsed wall-clock time in milliseconds
    return r, (System.nanoTime() - t0) / 1000000.0

def init():
    cl = IJ.getClassLoader()
    if cl is not None:
        Thread.currentThread().setContextClassLoader(cl)
    print "init", cl
    ns = RT.var("clojure.core", "*ns*")
    warn_on_reflection = RT.var("clojure.core", "*warn-on-reflection*")
    unchecked_math = RT.var("clojure.core", "*unchecked-math*")
    compile_path = RT.var("clojure.core", "*compile-path*")
    # pushThreadBindings takes a single Associative of Var -> value,
    # so the bindings are wrapped in RT.map here
    Var.pushThreadBindings(RT.map(ns, ns.get(),
                                  warn_on_reflection, warn_on_reflection.get(),
                                  unchecked_math, unchecked_math.get(),
                                  compile_path, "classes"))
    in_ns = RT.var("clojure.core", "in-ns")
    refer = RT.var("clojure.core", "refer")
    in_ns.invoke(Symbol.intern("user"))
    refer.invoke(Symbol.intern("clojure.core"))

def test_pkgutil_get_data(self):
    # Test loader.get_data used via pkgutil
    jar = self.prepareJar('classimport.jar')
    name = self.addResourceToJar(jar)
    Thread.currentThread().contextClassLoader = test_support.make_jar_classloader(jar)
    data = pkgutil.get_data('jar_pkg', name)
    self.assertIsInstance(data, bytes)
    self.assertEqual(data, self.RESOURCE_DATA)

def test_loader_is_package(self):
    jar = self.prepareJar('classimport.jar')
    Thread.currentThread().contextClassLoader = test_support.make_jar_classloader(jar)
    mod_name = 'flat_in_jar'
    loader = pkgutil.get_loader(mod_name)
    self.assertFalse(loader.is_package(mod_name))
    self.assertTrue(loader.is_package('jar_pkg'))
    self.assertFalse(loader.is_package('jar_pkg.prefer_compiled'))

def wait_until_interrupted(cv):
    name = threading.currentThread().getName()
    with cv:
        while not JThread.currentThread().isInterrupted():
            try:
                cv.wait()
            except InterruptedException, e:
                break

def read_jar_stream(self, template_path):
    reader = BufferedReader(InputStreamReader(
        Thread.currentThread().getContextClassLoader().getResourceAsStream(template_path)))
    line = reader.readLine()
    sb = StringBuilder()
    while line is not None:
        sb.append(line).append("\n")
        line = reader.readLine()
    return sb.toString()

def wait_until_interrupted(cv):
    with cv:
        while not Thread.currentThread().isInterrupted():
            try:
                # this condition variable is never notified, so will only
                # succeed if interrupted
                cv.wait()
            except InterruptedException as e:
                break

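# Hedged usage sketch for wait_until_interrupted above: a worker publishes its Java
# thread handle, parks on the condition, and the caller interrupts it to wake it up.
# All names here (demo_interrupt, handle, ready, worker) are illustrative.
import threading
from java.lang import Thread, InterruptedException

def demo_interrupt():
    cv = threading.Condition()
    handle = []
    ready = threading.Event()

    def worker():
        handle.append(Thread.currentThread())
        ready.set()
        try:
            wait_until_interrupted(cv)
        except InterruptedException:
            pass   # the interrupt may also land while (re)acquiring the lock

    t = threading.Thread(target=worker)
    t.start()
    ready.wait()
    handle[0].interrupt()   # cv.wait() raises InterruptedException and the loop exits
    t.join()
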
def run(self):
    # If we have a threading trace, it won't have been set on this thread,
    # which was created by Java. So set it here.
    if hasattr(threading, "_trace_hook") and threading._trace_hook:
        sys.settrace(threading._trace_hook)
    # Eclipse uses a different class loader; set Jython's class loader
    # to use the same one, or things won't work.
    sys.classLoader = Thread.currentThread().getContextClassLoader()
    self.method()

def register(metacls, key, default=None, interdict=False):
    if default is not None:
        assert isinstance(default, list)
        metacls.record[key] = default
    else:
        metacls.record[key] = []
    if interdict:
        metacls._interdiction_threads[key] = Thread.currentThread()

def tearDown(self):
    Thread.currentThread().contextClassLoader = self.orig_context
    sys.path = self.orig_path
    try:
        del sys.modules['flat_in_jar']
        del sys.modules['jar_pkg']
        del sys.modules['jar_pkg.prefer_compiled']
    except KeyError:
        pass

def tearDown(self):
    Thread.currentThread().contextClassLoader = self.orig_context
    for module in sys.modules.keys():
        if module not in self.modules:
            del sys.modules[module]
    try:
        shutil.rmtree(self.temp_dir)
    except OSError:
        # On Windows at least we cannot delete the open JAR
        pass

def __init__(self, ddsImpl="com.prismtech.cafe.core.ServiceEnvironmentImpl"):
    JSystem.setProperty(ServiceEnvironment.IMPLEMENTATION_CLASS_NAME_PROPERTY, ddsImpl)
    self.env = ServiceEnvironment.createInstance(
        JThread.currentThread().getContextClassLoader())
    self.pf = self.env.getSPI().getPolicyFactory()
    self.dpf = DomainParticipantFactory.getInstance(self.env)
    self.defaultDP = self.dpf.createParticipant(0)
    self.defaultP = self.defaultDP.createPublisher()
    self.defaultS = self.defaultDP.createSubscriber()
    self.builtInTopicType = "dython.bit.KDython"

def test_importer_get_source(self):
    # Test loader.get_source used via pkgutil.get_importer
    jar = self.prepareJar('classimport.jar')
    Thread.currentThread().contextClassLoader = test_support.make_jar_classloader(jar)
    importer = pkgutil.get_importer('__pyclasspath__/')
    # In package
    mod = 'jar_pkg.prefer_compiled'
    source = importer.get_source(mod)
    self.assertIsInstance(source, bytes)
    self.assertEqual(source, 'compiled = False\n')

def getResource(path, classloader=None):
    """
    Loads a resource from anywhere on the classpath.

    :param path: path to the resource (separate elements with '/')
    :param classloader: class loader to use for loading the resource
    :rtype: :class:`java.net.URL`
    """
    classloader = classloader or Thread.currentThread().contextClassLoader
    return classloader.getResource(path)

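# Hedged usage sketch for getResource above: look up a classpath resource and report
# its location. The function name and the resource path "config/app.properties" are
# hypothetical, chosen only for illustration; getResource returns a java.net.URL,
# or None when the resource is missing.
def print_resource_location():
    url = getResource("config/app.properties")
    if url is not None:
        print url.toExternalForm()
    else:
        print "resource not found on classpath"
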
def load_classpath_resource(resource):
    """
    Loads a classpath resource and returns its contents as a string.

    :param resource: path of the resource to find on the classpath
    :return: string
    """
    url = Thread.currentThread().contextClassLoader.getResource(resource)
    if url is None:
        raise Exception("Resource [%s] not found on classpath." % resource)
    return Resources.toString(url, Charset.defaultCharset())

def getResourceAsStream(path, classloader=None):
    """
    Opens a stream to a resource anywhere on the classpath.

    :param path: path to the resource (separate elements with '/')
    :param classloader: class loader to use for loading the resource
    :rtype: :class:`java.io.InputStream`
    """
    classloader = classloader or Thread.currentThread().contextClassLoader
    return classloader.getResourceAsStream(path)

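# Hedged sketch building on getResourceAsStream above: read a small text resource
# into a Python string, mirroring the BufferedReader loop used elsewhere in this
# file. The helper name and the example path are assumptions for illustration.
from java.io import BufferedReader, InputStreamReader

def read_classpath_text(path):
    stream = getResourceAsStream(path)
    if stream is None:
        raise Exception("Resource [%s] not found on classpath." % path)
    reader = BufferedReader(InputStreamReader(stream))
    lines = []
    line = reader.readLine()
    while line is not None:
        lines.append(line)
        line = reader.readLine()
    reader.close()
    return "\n".join(lines)
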
def test_loader_get_data(self):
    # Test loader.get_data used via pkgutil.get_loader
    jar = self.prepareJar('classimport.jar')
    name = self.addResourceToJar(jar)
    Thread.currentThread().contextClassLoader = test_support.make_jar_classloader(jar)
    loader = pkgutil.get_loader('jar_pkg')
    # path is a resource path (not a file system path using os.path.sep)
    path = 'jar_pkg/' + name
    data = loader.get_data(path)
    self.assertIsInstance(data, bytes)
    self.assertEqual(data, self.RESOURCE_DATA)

def getRootThreadGroup():
    global rootThreadGroup
    if rootThreadGroup is not None:
        return rootThreadGroup
    tg = Thread.currentThread().getThreadGroup()
    ptg = tg.getParent()
    while ptg is not None:
        tg = ptg
        ptg = tg.getParent()
    # cache the result so the walk up the group hierarchy happens only once
    rootThreadGroup = tg
    return tg

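# Hedged usage sketch for getRootThreadGroup above: enumerate all live threads from
# the root group. The 2x over-allocation follows the usual ThreadGroup.enumerate()
# idiom; the helper name is illustrative.
from java.lang import Thread
from jarray import zeros

def list_all_threads():
    root = getRootThreadGroup()
    size = root.activeCount() * 2 or 16   # activeCount() is only an estimate
    array = zeros(size, Thread)
    count = root.enumerate(array, True)   # recurse into subgroups
    return [array[i] for i in range(count)]
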
def parse_error_file(self, parseTask):
    """Extract errors from GPX file."""
    checks = parseTask.checks  # List of features
    checksWithoutSubs = [int(c.name) // 10 for c in checks if c.name[-1] == "0"]
    rootElement = parseTask.extractRootElement()
    listOfFeatures = rootElement.getElementsByTagName("wpt")
    featuresNumber = listOfFeatures.getLength()
    # print "Total number of features: ", featuresNumber
    for i in range(featuresNumber):
        if Thread.currentThread().isInterrupted():
            return False
        featureNode = listOfFeatures.item(i)
        # errorId
        schemaNode = featureNode.getElementsByTagName("schema")
        schema = str(schemaNode.item(0).getFirstChild().getNodeValue())
        errorIdNode = featureNode.getElementsByTagName("id")
        errorId = schema + " " + str(errorIdNode.item(0).getFirstChild().getNodeValue())
        # desc
        descNode = featureNode.getElementsByTagName("desc")
        desc = descNode.item(0).getFirstChild().getNodeValue()
        # comment
        commentNode = featureNode.getElementsByTagName("comment")
        if commentNode.getLength() != 0:
            comment = commentNode.item(0).getFirstChild().getNodeValue()
            other = [comment]
            desc += "<br>Comment - %s" % comment
        else:
            other = []
        # osmObject
        osmObjectNode = featureNode.getElementsByTagName("object_type")
        osmObject = str(osmObjectNode.item(0).getFirstChild().getNodeValue())
        # osmId
        osmIdNode = featureNode.getElementsByTagName("object_id")
        osmId = osmObject[0] + str(osmIdNode.item(0).getFirstChild().getNodeValue())
        # errorType
        errorTypeNode = featureNode.getElementsByTagName("error_type")
        errorType = str(errorTypeNode.item(0).getFirstChild().getNodeValue())
        # geo
        lat = float(featureNode.getAttribute("lat"))
        lon = float(featureNode.getAttribute("lon"))
        bbox = parseTask.build_bbox(lat, lon)
        # Append to errors
        if errorType in parseTask.errors:
            parseTask.errors[errorType].append((osmId, (lat, lon), bbox, errorId, desc, other))
        # check if it is a subtype
        elif int(errorType) // 10 in checksWithoutSubs:
            et = str(int(errorType) // 10 * 10)
            parseTask.errors[et].append((osmId, (lat, lon), bbox, errorId, desc, other))
    return True