def process(conn, cursor, current, message, data):
    """Persist a sensor_ht 'report' reading and broadcast it to web clients.

    :param conn: DB connection used to commit the insert
    :param cursor: DB cursor used to execute the insert
    :param current: cached last-known reading for this sensor (mutated in place)
    :param message: raw gateway message; must have model 'sensor_ht'
    :param data: decoded payload, may contain 'temperature' and/or 'humidity'
    :return: True when a row was inserted and an update pushed, else False
    """
    if message.get('model') != 'sensor_ht':
        return False
    # Only 'report' messages with an existing cached reading are persisted.
    # NOTE(review): in the original, a first report without a cache set
    # `current = data` locally but still returned False — that assignment
    # had no observable effect, so it is intentionally not reproduced.
    if message.get('cmd') != 'report' or not current:
        return False
    # Merge whichever fields this report carries into the cached reading.
    for field in ('temperature', 'humidity'):
        if field in data:
            current[field] = data[field]
    query = "INSERT INTO ht(sid, temperature, humidity, dt) VALUES (%s, %s, %s, %s)"
    cursor.execute(
        query,
        (message['sid'], current['temperature'], current['humidity'],
         datetime.now().isoformat()),
    )
    conn.commit()
    UpdatesHandler.send_updates({
        'device': 'sensor_ht',
        'sid': message['sid'],
        'temperature': format_value(current['temperature']),
        'humidity': format_value(current['humidity']),
    })
    return True
def generate_gcode(self):
    """Endlessly yield G-code line pairs: a G1 move/extrude command
    followed by an M104 hotend-temperature command.
    """
    while True:
        # NOTE: this aliases (and mutates) the axes' coords dict by
        # inserting the extruder position under key "e", as the
        # original code did.
        coords = self._axes.coords
        coords["e"] = self._extruder.extruder
        words = [key.upper() + format_value(val) for key, val in coords.items()]
        yield "G1".ljust(5, " ") + " ".join(words) + "\n"
        yield "M104".ljust(5, " ") + "S" + format_value(self._temperature.target_temp, 3) + "\n"
def dump_comments_for_page(page_id):
    """Fetch all comments for *page_id* and write them to
    comments/<page_id>.json under the data directory.

    Pages without comments produce no file at all.
    """
    comments = old_confluence_api.getComments(page_id)
    if not comments:
        return
    out_path = os.path.join(utils.DATA_DIR, 'comments', page_id + '.json')
    with open(out_path, 'w') as out:
        out.write(json.dumps(utils.format_value(comments)))
def get_attribute_value(self, index):
    """
    Wrapper around format_value to resolve the actual value of an
    attribute in a tag.

    :param index: index of the current attribute
    :return: formatted value
    """
    value_type = self.axml.get_attribute_value_type(index)
    value_data = self.axml.get_attribute_value_data(index)
    # The callback lets format_value lazily pull the string form of the
    # attribute only when it needs it.
    return format_value(value_type, value_data,
                        lambda _: self.axml.getAttributeValue(index))
def dump_attachments_for_page(page_id):
    """Dump a page's attachment metadata and raw contents to disk.

    Writes attachments/<page_id>.json plus one binary file per
    attachment under attachments/<page_id>_contents/<attachment_id>.
    Pages without attachments produce no output.
    """
    attachments = old_confluence_api.getAttachments(page_id)
    if not attachments:
        return
    base_dir = os.path.join(utils.DATA_DIR, 'attachments')
    with open(os.path.join(base_dir, page_id + '.json'), 'w') as meta_file:
        meta_file.write(json.dumps(utils.format_value(attachments)))
    contents_dir = os.path.join(base_dir, page_id + '_contents')
    if not os.path.exists(contents_dir):
        os.mkdir(contents_dir)
    for attachment in attachments:
        with open(os.path.join(contents_dir, attachment['id']), 'wb') as content_file:
            # '0' selects the attachment version, per the old API.
            content_file.write(
                old_confluence_api.getAttachmentData(page_id, attachment['fileName'], '0').data)
def dump_attachments_for_page(page_id):
    """Write attachment metadata (JSON) and raw attachment bytes for one page.

    Produces attachments/<page_id>.json and a directory
    attachments/<page_id>_contents/ holding one file per attachment id.
    No-op when the page has no attachments.
    """
    attachments = old_confluence_api.getAttachments(page_id)
    if len(attachments) == 0:
        return
    meta_path = os.path.join(utils.DATA_DIR, "attachments", page_id + ".json")
    with open(meta_path, "w") as meta:
        meta.write(json.dumps(utils.format_value(attachments)))
    contents = os.path.join(utils.DATA_DIR, "attachments", page_id + "_contents")
    if not os.path.exists(contents):
        os.mkdir(contents)
    for item in attachments:
        with open(os.path.join(contents, item["id"]), "wb") as out:
            out.write(old_confluence_api.getAttachmentData(page_id, item["fileName"], "0").data)
def get_lines(self, file):
    """Yield normalized G-code lines from *file*.

    Strips ';' comments, rewrites G1 moves with padded opcodes and
    alphabetically sorted axis words (E moved to the end, F dropped),
    rewrites G92 values, passes M104/M109 through unchanged, and drops
    every other line. Progress is reported through self._progress.
    """
    source = file.readlines()
    self._progress.config(maximum=len(source))
    # Last-known axis values; axes missing from a G1 keep their old value.
    values = {axis: "0000000" for axis in ("X", "Y", "Z", "E")}
    for index, raw in enumerate(source):
        self._progress.config(value=index)
        line = raw.split(";", 1)[0] if ";" in raw else raw
        if line.startswith("G1 "):
            for word in line[3:].split():
                values[word[0]] = format_value(word[1:])
            values.pop("F", None)  # feed rate is never re-emitted
            items = sorted(values.items())
            if "E" in values:
                # E sorts first; move the extruder word to the end.
                items.append(items.pop(0))
            line = "G1".ljust(5, " ") + " ".join(axis + val for axis, val in items)
        elif line.startswith(("M104", "M109")):
            pass  # temperature commands pass through verbatim
        elif line.startswith("G92"):
            _, word = line.split()
            line = "G92".ljust(5, " ") + f"{word[0]}{format_value(word[1:])}"
        else:
            continue
        yield line + "\n"
def get_response(self, request):
    """Return an HttpResponse object for the given HttpRequest.

    Pipeline: resolve the view by Host header, run request/view
    middleware, call the view, run exception/404/403 fallbacks, run
    response middleware, and finally convert webx ``*Result`` wrapper
    objects into concrete ``HttpResponse`` instances.
    """
    _adjust_request(request)
    start = time.time()  # wall-clock start; logged in ms at the end
    try:
        # Setup default url resolver for this thread, this code is outside
        # the try/except so we don't get a spurious "unbound local
        # variable" exception in the event an exception is raised before
        # resolver is set
        request.user_id = None  # fix for japa
        urlconf = ROOT_URLCONF
        urls = __import__(urlconf)
        #urlpatterns = urls.urlpatterns
        host_url_patterns_map = urls.host_url_patterns_map
        #urlresolvers.set_urlconf(urlconf)
        #resolver = urlresolvers.RegexURLResolver(r'^/', urlconf)
        # One resolver per virtual host; unknown/missing Host headers fall
        # back to the 'default' entry.
        resolver_map = {
            k: RegexURLResolver(v) for k, v in host_url_patterns_map.items()
        }
        resolver = resolver_map.get(request.META.get('HTTP_HOST'),
                                    resolver_map['default'])
        try:
            response = None
            # Resolve the view before running middleware so the view path
            # is available on the request.
            resolver_match = resolver.resolve(request.path_info)
            callback_name, callback, callback_args, callback_kwargs = resolver_match
            request._view_path = callback_name
            # Apply request middleware; first non-None short-circuits.
            for middleware_method in self._request_middleware:
                response = middleware_method(request)
                if response:
                    _adjust_response(response)
                    break
            if response is None:
                #if hasattr(request, 'urlconf'):
                    # Reset url resolver with a custom urlconf.
                    #urlconf = request.urlconf
                    #urlresolvers.set_urlconf(urlconf)
                    #resolver = urlresolvers.RegexURLResolver(r'^/', urlconf)
                #resolver = RegexURLResolver(r'^/', urlconf)
                #request.resolver_match = resolver_match
                # Apply view middleware; first non-None short-circuits.
                for middleware_method in self._view_middleware:
                    response = middleware_method(request, callback,
                                                 callback_args, callback_kwargs)
                    if response:
                        _adjust_response(response)
                        break
            if response is None:
                try:
                    response = callback(request, *callback_args, **callback_kwargs)
                    _adjust_response(response)
                except Exception as e:
                    # If the view raised an exception, run it through exception
                    # middleware, and if the exception middleware returns a
                    # response, use that. Otherwise, reraise the exception.
                    for middleware_method in self._exception_middleware:
                        response = middleware_method(request, e)
                        if response:
                            _adjust_response(response)
                            break
                    if response is None:
                        raise
            # Complain if the view returned None (a common error).
            if response is None:
                if isinstance(callback, types.FunctionType):
                    # FBV
                    view_name = callback.__name__
                else:
                    # CBV
                    view_name = callback.__class__.__name__ + '.__call__'
                raise ValueError("The view %s.%s didn't return an HttpResponse object." % (callback.__module__, view_name))
            # If the response supports deferred rendering, apply template
            # response middleware and the render the response
            if hasattr(response, 'render') and callable(response.render):
                for middleware_method in self._template_response_middleware:
                    response = middleware_method(request, response)
                    _adjust_response(response)
                response = response.render()
        except http.Http404 as e:
            logger.warning('Not Found: %s', request.path,
                           extra={'status_code': 404, 'request': request})
            if DEBUG:
                # NOTE(review): in DEBUG mode this leaves response as None;
                # the later Result-conversion will then fail — presumably the
                # technical 404 below was meant to be enabled. TODO confirm.
                pass
                #response = debug.technical_404_response(request, e)
            else:
                try:
                    callback, param_dict = resolver.resolve404()
                    response = callback(request, **param_dict)
                    _adjust_response(response)
                except:
                    signals.got_request_exception.send(sender=self.__class__, request=request)
                    response = self.handle_uncaught_exception(request, resolver, sys.exc_info())
        except webxHttp404 as e:
            logger.warning('Not Found: %s', request.path,
                           extra={'status_code': 404, 'request': request})
            # Both branches render the project's '/404/' route; any failure
            # while doing so degrades to the uncaught-exception handler.
            if DEBUG:
                #response = debug.technical_404_response(request, e)
                try:
                    resolver_match = resolver.resolve('/404/')
                    callback_name, callback, callback_args, callback_kwargs = resolver_match
                    response = callback(request, *callback_args, **callback_kwargs)
                    _adjust_response(response)
                except:
                    signals.got_request_exception.send(sender=self.__class__, request=request)
                    response = self.handle_uncaught_exception(request, resolver, sys.exc_info())
                    _adjust_response(response)
            else:
                try:
                    resolver_match = resolver.resolve('/404/')
                    callback_name, callback, callback_args, callback_kwargs = resolver_match
                    response = callback(request, *callback_args, **callback_kwargs)
                    _adjust_response(response)
                except:
                    signals.got_request_exception.send(sender=self.__class__, request=request)
                    response = self.handle_uncaught_exception(request, resolver, sys.exc_info())
                    _adjust_response(response)
        except exceptions.PermissionDenied:
            logger.warning('Forbidden (Permission denied): %s', request.path,
                           extra={'status_code': 403, 'request': request})
            try:
                callback, param_dict = resolver.resolve403()
                response = callback(request, **param_dict)
                _adjust_response(response)
            except:
                signals.got_request_exception.send(sender=self.__class__, request=request)
                response = self.handle_uncaught_exception(request, resolver, sys.exc_info())
                _adjust_response(response)
        except SystemExit:
            # Allow sys.exit() to actually exit. See tickets #1023 and #4701
            raise
        except:
            # Handle everything else, including SuspiciousOperation, etc.
            # Get the exception info now, in case another exception is thrown later.
            #signals.got_request_exception.send(sender=self.__class__, request=request)
            response = self.handle_uncaught_exception(request, resolver, sys.exc_info())
            _adjust_response(response)
    finally:
        # Reset URLconf for this thread on the way out for complete
        # isolation of request.urlconf
        #urlresolvers.set_urlconf(None)
        pass
    try:
        # Apply response middleware, regardless of the response
        # NOTE(review): middleware return values are discarded here, so
        # response middleware can only mutate the response in place —
        # presumably deliberate; confirm before re-enabling the
        # commented reassignment.
        for middleware_method in self._response_middleware:
            #response = middleware_method(request, response)
            middleware_method(request, response)
        #response = self.apply_response_fixes(request, response)
    except:
        # Any exception should be gathered and handled
        signals.got_request_exception.send(sender=self.__class__, request=request)
        response = self.handle_uncaught_exception(request, resolver, sys.exc_info())
        _adjust_response(response)
    # Convert webx Result wrapper objects into concrete HttpResponses.
    # _response keeps the original wrapper for cookie handling below.
    _response = response
    if isinstance(_response, SimpleResult):
        response = HttpResponse(
            _response.get_content(),
            content_type=(_response.get_content_type()),
            status=(_response.get_status())
        )
        headers = _response.get_headers()
        if headers:
            for k, v in headers:
                response[k] = v
    elif isinstance(response, JsonResult):
        data = json.dumps(format_value(response.context))
        response_kwargs = {'content_type': 'application/json'}
        response = HttpResponse(data, **response_kwargs)
    elif isinstance(response, TemplateResult):
        from webx import tiny; t=tiny.Tiny()
        data = t.render(response.template, response.context, request)
        response_kwargs = {'content_type': 'text/html'}
        response = HttpResponse(data, **response_kwargs)
    elif isinstance(response, RedirectResult):
        target = response.target
        response = HttpResponse("", status=302)
        response['Location'] = target
    elif isinstance(response, CSVFileResult):
        rows = response.rows
        name = response.name
        response_kwargs = {'content_type': 'text/csv'}
        response = HttpResponse("", **response_kwargs)
        response['Content-Disposition'] = 'attachment; filename="%s.csv"' % (name)
        writer = csv.writer(response, delimiter='\t')
        for row in rows:
            writer.writerow(row)
    elif isinstance(response, HttpResponseServerError):
        pass
    else:
        raise NotImplementedError()
    if hasattr(request, '_weby_cookies'):
        # fix for japa session
        for cookie in request._weby_cookies:
            response.set_cookie(**cookie)
    if hasattr(_response, '_weby_cookies'):
        # fix for japa session
        for cookie in _response._weby_cookies:
            response.set_cookie(**cookie)
    if hasattr(request.response, 'status'):
        # fix for japa status
        response.status_code = request.response.status
    logger.info('This request take %f ms' %((time.time() - start) * 1000))
    return response
def get_new_tx_info(hash_str, raw_cache, block_cache):
    """Scrape blockchain.com for a (possibly unconfirmed) transaction and
    cache the parsed fields.

    Confirmed transactions go into *block_cache*, unconfirmed ones into
    *raw_cache*, keyed by the tx hash. Parsing is tightly coupled to the
    blockchain.com HTML table layout; any scrape/parse failure is caught
    and printed, leaving the caches untouched.
    """
    url = 'https://www.blockchain.com/btc/tx/' + hash_str
    r = requests.get(url)
    soup = BeautifulSoup(r.content, features='lxml')
    button = soup.find(id=re.compile("tx-*"))
    try:
        # The page shows an "Unconfirmed Transaction!" button until mined.
        is_confirmed = button.find_all('button')[0].string != 'Unconfirmed Transaction!'
        row = soup.find_all("tr")
        size = row[3].find_all("td")[1].string.split("(")[0]
        # print('size = ' + size)
        receive_time = row[5].find_all("td")[1].string.lstrip()
        # print(receive_time)
        receive_time = utils.date_to_timestamp(receive_time)
        total_input_index = 8
        block_time = 0  # 0 = not yet mined
        if is_confirmed:
            # Confirmed pages carry extra block rows, which shift the
            # totals further down the table.
            block_time = row[6].find_all("td")[1].text.split('(')[1].lstrip().split(' +')[0]
            block_time = utils.date_to_timestamp(block_time)
            # print(blockinfo)
            total_input_index = 10
        if row.__len__() == 16 or row.__len__() == 18:
            # assumes these row counts mean one extra table row precedes the
            # totals (e.g. lock time) — TODO confirm against the page layout
            total_input_index = total_input_index + 1
        total_input = row[total_input_index].find_all("td")[1].find("span").string.split(' ')[0]
        total_input = utils.format_value(total_input)
        # print(total_input)
        total_output = row[total_input_index + 1].find_all("td")[1].find("span").string.split(' ')[0]
        total_output = utils.format_value(total_output)
        # print(total_output)
        fees = row[total_input_index + 2].find_all("td")[1].find("span").string.split(' ')[0]
        fees = utils.format_value(fees)
        # print(fees)
        feerate = row[total_input_index + 3].find_all("td")[1].string.lstrip().rstrip().split(' ')[0]
        feerate = utils.format_value(feerate)
        # print(feerate)
        tx = dict()
        tx['hash'] = hash_str
        tx['size'] = size
        tx['receive_time'] = float(receive_time)
        tx['block_time'] = float(block_time)
        tx['total_input'] = float(total_input)
        tx['total_output'] = float(total_output)
        tx['fees'] = float(fees)
        tx['fee_rate'] = float(feerate)
        if is_confirmed:
            block_cache.add_item(tx['hash'], tx, True)
        else:
            raw_cache.add_item(tx['hash'], tx, True)
    except Exception as e:
        print(str(e))
# Record the winning API's URL for this swap, then plot per-API results.
urls.add(best_swap_url)
rows = sorted(rows, key=lambda r: r[0])  # sort by api so legend/hue order is stable
sns.scatterplot(
    x='fill size',
    y='bps',
    hue='api',
    data=pd.DataFrame(
        rows,
        columns=['api', 'bps', 'fill size'],
    ),
)
# Append each API's share of rows (as a percentage) to its legend label;
# texts[0] is the legend title, so it is skipped.
for t in plt.gca().get_legend().texts[1:]:
    url = t.get_text()
    t.set_text('%s - %d%%' % (url, sum(1 for r in rows if r[0] == url) * 100 / len(rows)))
plt.yscale('log')
# plt.xscale('log')
plt.gca().yaxis.set_major_formatter(
    ticker.FuncFormatter(lambda y, pos: f'{int(y)}'))
plt.gca().xaxis.set_major_formatter(
    ticker.FuncFormatter(lambda x, pos: format_value(x)))
metric_type = 'adjusted realized' if args.adjusted else 'realized'
swap_type = 'buys' if args.buys else 'sells' if args.sells else 'swaps'
plt.title(
    f'A-B {metric_type} fill win rate ({len(rows)}/{len(data)} unequal {swap_type})'
)
plt.show()
def get_block_tx_info(hash_str):
    """Scrape blockchain.com for a confirmed transaction's details.

    Retries the HTTP GET until it succeeds, then parses the page's table
    rows. Returns a dict of parsed fields on success; on any parse
    failure prints the hash and error and returns None implicitly.
    Parsing is tightly coupled to the blockchain.com HTML table layout.
    """
    url = 'https://www.blockchain.com/btc/tx/' + hash_str
    # Retry network failures forever; only a successful GET breaks out.
    while True:
        try:
            r = requests.get(url)
            break
        except Exception as e:
            print(e)
    soup = BeautifulSoup(r.content, features='lxml')
    try:
        row = soup.find_all("tr")
        size = row[3].find_all("td")[1].string.split("(")[0]
        # print('size = ' + size)
        weight = row[4].find_all("td")[1].string
        # print(weight)
        receive_time = row[5].find_all("td")[1].string.lstrip()
        # print(receive_time)
        receive_time = utils.date_to_timestamp(receive_time)
        lock_time = -1  # sentinel: page had no lock-time row
        block_time_index = 6
        total_input_index = 10
        if row.__len__() == 18:
            # assumes 18 rows means an extra lock-time row is present,
            # shifting everything below it down by one — TODO confirm
            lock_time = row[6].find_all('td')[1].string.split('\n')[2].lstrip()
            block_time_index = block_time_index + 1
            total_input_index = total_input_index + 1
        block_time = row[block_time_index].find_all("td")[1].text.split('(')[1].lstrip().split(' +')[0]
        block_time = utils.date_to_timestamp(block_time)
        confirmations = row[block_time_index + 1].find_all("td")[1].string
        total_input = row[total_input_index].find_all("td")[1].find("span").string.split(' ')[0]
        total_input = utils.format_value(total_input)
        # print(total_input)
        total_output = row[total_input_index + 1].find_all("td")[1].find("span").string.split(' ')[0]
        total_output = utils.format_value(total_output)
        # print(total_output)
        fees = row[total_input_index + 2].find_all("td")[1].find("span").string.split(' ')[0]
        fees = utils.format_value(fees)
        # print(fees)
        fee_rate = row[total_input_index + 3].find_all("td")[1].string.lstrip().rstrip().split(' ')[0]
        fee_rate = utils.format_value(fee_rate)
        # print(fee_rate)
        fee_wrate = row[total_input_index + 4].find_all("td")[1].string.lstrip().rstrip().split(' ')[0]
        fee_wrate = utils.format_value(fee_wrate)
        transacted = row[total_input_index + 5].find_all("td")[1].text.lstrip().rstrip().split(' ')[0]
        transacted = utils.format_value(transacted)
        tx = dict()
        tx['hash'] = hash_str
        tx['size'] = int(size)
        tx['weight'] = int(weight)
        tx['lock_time'] = int(lock_time)
        tx['confirmations'] = int(confirmations)
        tx['receive_time'] = float(receive_time)
        tx['block_time'] = float(block_time)
        tx['total_input'] = float(total_input)
        tx['total_output'] = float(total_output)
        tx['fees'] = float(fees)
        tx['fee_rate'] = float(fee_rate)
        tx['fee_wrate'] = float(fee_wrate)
        tx['transacted'] = float(transacted)
        return tx
    except Exception as e:
        print(hash_str)
        print(str(e))
def dump_page(page_id):
    """Fetch a single Confluence page and persist it as pages/<page_id>.json."""
    page = old_confluence_api.getPage(page_id)
    target = os.path.join(utils.DATA_DIR, "pages", page_id + ".json")
    with open(target, "w") as out:
        out.write(json.dumps(utils.format_value(page)))
def dump_page_list():
    """Write the complete 'duitang' space page listing to PAGES_JSON_FILE_PATH."""
    # Fetching all pages takes a very long time.
    pages = old_confluence_api.getPages("duitang")
    with open(utils.PAGES_JSON_FILE_PATH, "w") as out:
        out.write(json.dumps(utils.format_value(pages)))
def format_value(self):
    """Render this value via the module-level format_value, resolving
    string references through the parent's main string pool."""
    resolve_string = self.parent.string_pool_main.get_string
    return format_value(self.data_type, self.data, resolve_string)
def get(self):
    """Render the dashboard page: per-sensor current readings, 25-point
    history charts, magnet sensor states, gateway LED status, background
    images and Yeelight bulbs.
    """
    # NOTE(review): imported inside the handler, presumably to avoid an
    # import cycle at module load — confirm.
    from plugins import gateway_led, yeelight
    cursor = get_cursor()
    # Latest reading per sensor: DISTINCT ON keeps the newest row per sid.
    cursor.execute('SELECT DISTINCT ON (sid) sid, temperature, humidity FROM ht ORDER BY sid, dt DESC')
    sensors_current = []
    sensors_data = {}
    for sid, temperature, humidity in cursor.fetchall():
        sensors_current.append({
            'sid': sid,
            'name': config.SENSORS.get(sid, {}).get('name', sid),
            'temperature': format_value(temperature, split=True),
            #'{:0.2f}'.format(temperature/100.0),
            'humidity': format_value(humidity, split=True)
            #'{:0.2f}'.format(humidity/100.0)
        })
        # Chart-style dataset skeleton for this sensor's history chart.
        sensors_data[sid] = {
            'labels': [],
            'datasets': [
                {
                    'label': 'Temperature',
                    'data': [],
                    'borderColor': '#bf3d3d'
                },
                {
                    'label': 'Humidity',
                    'data': [],
                    'borderColor': '#b7bce5'
                }
            ]
        }
        # Last 25 readings for this sensor, reversed into chronological order.
        # NOTE(review): the inner loop variables shadow the outer
        # sid/temperature/humidity; harmless since the query is filtered
        # to the same sid, but worth renaming.
        cursor.execute('SELECT sid, temperature, humidity, dt FROM ht WHERE sid = %s ORDER BY dt DESC LIMIT 25', (sid,))
        for sid, temperature, humidity, dt in reversed(cursor.fetchall()):
            sensors_data[sid]['labels'].append(dt.isoformat())
            sensors_data[sid]['datasets'][0]['data'].append(format_value(temperature))
            sensors_data[sid]['datasets'][1]['data'].append(format_value(humidity))
    # Background image candidates: only .jpg files under static/img/bg.
    bg_images = map(lambda x: '/static/img/bg/%s' % x,
                    os.listdir(os.path.join(os.path.dirname(__file__), "static", "img", "bg")))
    bg_images = list(filter(lambda x: x.endswith('.jpg'), bg_images))
    brightness, color, status = gateway_led.get_status()
    # Brightness is parsed from hex then divided by 100 — presumably a
    # percentage expressed as a hex string; verify against the gateway API.
    brightness = int(brightness, 16) / 100
    magnets = []
    for sid, sensor in config.SENSORS.items():
        if sensor.get('device') == 'magnet':
            magnet_status = get_store().get('magnet_{}'.format(sid))
            magnets.append({
                'sid': sid,
                'name': sensor.get('name'),
                # Store values are bytes; default to 'open' when missing.
                'status': magnet_status.decode() if magnet_status else 'open',
            })
    self.render(
        "templates/index.html",
        sensors=config.SENSORS,
        sensors_current=sensors_current,
        sensors_data=sensors_data,
        magnets=magnets,
        bg_images=bg_images,
        gateway_led={
            'brightness': hex(int(brightness*100))[2:],
            'color': color,
            'status': status
        },
        bulbs=yeelight.get_devices(),
        notifications=Notifications.list()
    )
def dump_comments_for_page(page_id):
    """Serialize every comment on *page_id* into comments/<page_id>.json.

    Writes nothing at all when the page has no comments.
    """
    comments = old_confluence_api.getComments(page_id)
    if len(comments) == 0:
        return
    target = os.path.join(utils.DATA_DIR, 'comments', '{}.json'.format(page_id))
    with open(target, 'w') as comments_file:
        comments_file.write(json.dumps(utils.format_value(comments)))
def dump_page(page_id):
    """Fetch one page from the old Confluence API and save it under
    pages/<page_id>.json in the data directory."""
    page = old_confluence_api.getPage(page_id)
    destination = os.path.join(utils.DATA_DIR, 'pages', '{}.json'.format(page_id))
    with open(destination, 'w') as out:
        out.write(json.dumps(utils.format_value(page)))
def dump_page_list():
    """Dump the full page listing of the 'duitang' space to the pages JSON file."""
    # This remote call can take a very long time.
    pages = old_confluence_api.getPages('duitang')
    with open(utils.PAGES_JSON_FILE_PATH, 'w') as pages_file:
        pages_file.write(json.dumps(utils.format_value(pages)))