# Module-level imports this method relies on:
import logging
from concurrent.futures import ThreadPoolExecutor

from requests_futures.sessions import FuturesSession

log = logging.getLogger(__name__)


def get_all_issues(self, project_key, total, max_results):
    """Fetch all project issues."""
    log.debug("%s issues to fetch" % total)
    # Set up a session for concurrent fetching
    s = FuturesSession(executor=ThreadPoolExecutor(max_workers=4))
    s.auth = (self.username, self.password)
    s.params = {
        'jql': "project=%s" % project_key,
        'fields': 'summary,description,issuetype,status,resolution',
        'maxResults': max_results
    }
    s.headers = {'Content-Type': 'application/json'}

    def parse_json_cb(sess, resp):
        # Runs in a worker thread: attach the parsed issues to the response.
        # Build a list (not a lazy map object) so pages can be concatenated
        # with `+` below.
        resp.data = [{
            'key': item['key'],
            'summary': item['fields']['summary'],
            'description': item['fields']['description'],
            'type': item['fields']['issuetype']['name'],
            'status': item['fields']['status']['name'],
            'resolved': bool(item['fields']['resolution'])
        } for item in resp.json()['issues']]

    def get_issues(start_at=0):
        # Fire the request for this page, then recurse to fire the next
        # page *before* blocking on the result, so all pages are fetched
        # concurrently and concatenated on the way back up.
        future = s.get("%s/search" % self.url,
                       params={'startAt': start_at},
                       background_callback=parse_json_cb)
        next_at = start_at + max_results
        log.debug("... %s/%s" % (min(next_at, total), total))
        if next_at < total:
            data = get_issues(next_at)
            return future.result().data + data
        return future.result().data

    return get_issues()
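# A minimal usage sketch, not from the original source: JIRA's search API
# reports the total hit count on every response, so a caller might issue a
# cheap zero-result probe to learn `total` before paging. `client` and its
# attributes are assumptions standing in for whatever object owns
# get_all_issues above.
import requests


def fetch_project_issues(client, project_key, max_results=50):
    # Synchronous probe: maxResults=0 returns metadata (including 'total')
    # without fetching any issue bodies.
    probe = requests.get("%s/search" % client.url,
                         auth=(client.username, client.password),
                         params={'jql': "project=%s" % project_key,
                                 'maxResults': 0})
    total = probe.json()['total']
    return client.get_all_issues(project_key, total, max_results)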
def run(self):
    settings = QSettings()
    pref_target_path = settings.value(Settings.SETTINGS_SAVE_PATH,
                                      Settings.DEFAULT_TARGET_PATH, type=str)
    pref_max_pool_cnt = settings.value(Settings.SETTINGS_MAX_POOL_CNT,
                                       Settings.DEFAULT_MAX_POOL, type=int)
    gallery_save_path = pref_target_path + '/' + self.gallery.path
    if not os.path.exists(gallery_save_path):
        os.makedirs(gallery_save_path)

    # Cloudflare authorization: wait until the challenge page clears
    self.state.emit('Authorize..')
    Logger.LOGGER.info("Wait for Cloudflare Authorization..")
    self.driver.get(URL_HIYOBI)
    while "Just a moment..." in self.driver.page_source:
        sleep(0.5)  # poll instead of busy-spinning the CPU
    user_agent = self.driver.execute_script("return navigator.userAgent;")
    try:
        # Carry the Cloudflare clearance cookies over to the requests session
        cookie_value = '__cfduid=' + self.driver.get_cookie('__cfduid')['value'] + \
                       '; cf_clearance=' + self.driver.get_cookie('cf_clearance')['value']
        headers = {'User-Agent': user_agent}
        cookies = {'session_id': cookie_value}
    except TypeError:
        # get_cookie() returned None; proceed without cookies
        Logger.LOGGER.warning("Not applying cookies to requests")
        headers = None
        cookies = None

    # Fetch image data from the gallery page
    self.state.emit('Fetch..')
    Logger.LOGGER.info("Connect to Gallery page..")
    self.driver.get(self.gallery.url)
    sleep(1)
    soup = BeautifulSoup(self.driver.page_source, "html.parser")

    # Start multi-threaded download
    Logger.LOGGER.info("Download Start..")
    img_urls = soup.find_all('div', class_="img-url")
    self.total_cnt = len(img_urls)
    session = FuturesSession(max_workers=pref_max_pool_cnt)
    if headers is not None:
        session.headers = headers
    if cookies is not None:
        session.cookies = cookies
    responses = {}
    # Fire all requests first, then block on each result in turn
    for url_path in img_urls:
        url = READER_URL + url_path.text
        name = url.split('/')[-1]
        responses[name] = session.get(url)
    for filename in responses:
        self.response_to_file(response=responses[filename].result(),
                              name=filename, path=gallery_save_path)
    session.close()

    # Compress downloaded files into a zip archive
    self.state.emit('Compressing..')
    if self.gallery.original != "":
        zip_path = (pref_target_path + '/' + self.gallery.type + '/' +
                    self.gallery.original + '/' + self.gallery.path + '.zip')
    else:
        zip_path = (pref_target_path + '/' + self.gallery.type + '/' +
                    self.gallery.path + '.zip')
    try:
        if not os.path.exists(zip_path[:zip_path.rfind('/')]):
            os.makedirs(zip_path[:zip_path.rfind('/')])
        FileUtil.make_zip(gallery_save_path, zip_path)
        shutil.rmtree(gallery_save_path)
    except Exception:
        print(traceback.format_exc())
        Logger.LOGGER.error("Compressing Process Error... pass")

    # Save to Firebase
    # TODO Enable next line on Build
    FirebaseClient.fbclient.insert_data(self.gallery)
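# `response_to_file` is called above but not shown. A hypothetical sketch of
# what it might do, assuming it just streams the completed response body to
# disk; the signature matches the call site, but the body is an assumption:
def response_to_file(self, response, name, path):
    if response.status_code == 200:
        # Write the downloaded image bytes under its original filename
        with open(os.path.join(path, name), 'wb') as f:
            f.write(response.content)
    else:
        Logger.LOGGER.warning("Download failed: %s (HTTP %s)"
                              % (name, response.status_code))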
# Build the series to simulate: from the user-supplied template if one was
# provided, otherwise fall back to three default dimensions.
if templateData:
    for definition in templateData:
        currentSeriesNames.append(definition["name"])
        currentSeries.append(ValueTemplate(definition["name"],
                                           definition["min"],
                                           definition["max"]))
else:
    currentSeries.append(ValueTemplate("temperature", 10, 40))
    currentSeries.append(ValueTemplate("pressure", 950, 1100))
    currentSeries.append(ValueTemplate("humidity", 20, 90))

m = Measurement(unide.process.local_now(), dimensions=currentSeriesNames)

# Mount a retry-capable HTTP adapter on a FuturesSession for async posting
a = requests.adapters.HTTPAdapter(max_retries=maxRetries)
session = FuturesSession()
session.mount('http://', a)
session.headers = {
    "Content-Type": "application/json",
    "Authorization": authHeader
}


def bg_cb(sess, resp):
    # Parse the JSON, storing the result on the response object
    resp.data = resp.json()
    print(resp)


while True:
    lastMeasurement = datetime.datetime.utcnow()
    newMetrics = dict()
    for val in currentSeries:
        # `r` is presumably the random module (import random as r)
        newMetrics[val.name] = r.randint(val.min, val.max)
    m.add_sample(unide.process.local_now(), **newMetrics)
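# The excerpt ends before the measurement is actually sent, and `bg_cb` is
# defined but never attached above. A hedged sketch of how the POST would
# typically look with the old requests-futures callback API used in these
# snippets; PPMP_URL and `payload` (the PPMP-serialized form of `m`) are
# assumptions, not names from the original:
future = session.post(PPMP_URL, data=payload, background_callback=bg_cb)
response = future.result()  # response.data now holds the JSON parsed in bg_cb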