def unsubscribe(self, chan): """ Sends the 'unsubscribe' command for <chan> """ if isinstance(chan, int): chan = self._getChannelName(str(chan)) # account chan? if chan == 'account': # sending commands to 'account' requires a key, secret and nonce if not self.key or not self.secret: raise PoloniexError( "self.key and self.secret needed for 'account' channel") self.channels[chan]['sub'] = False payload = {'nonce': self.nonce} payload_encoded = _urlencode(payload) sign = _new(self.secret.encode('utf-8'), payload_encoded.encode('utf-8'), _sha512) self.socket.send( _dumps({ 'command': 'unsubscribe', 'channel': self.channels[chan]['id'], 'sign': sign.hexdigest(), 'key': self.key, 'payload': payload_encoded })) else: self.channels[chan]['sub'] = False self.socket.send( _dumps({ 'command': 'unsubscribe', 'channel': self.channels[chan]['id'] }))
def unsubscribe(self, chan): """ Sends the 'unsubscribe' command for <chan> """ # account chan? if chan in ['1000', 1000]: # sending commands to 'account' requires a key, secret and nonce if not self.key or not self.secret: raise PoloniexError( "self.key and self.secret needed for 'account' channel") payload = {'nonce': self.nonce} payload_encoded = _urlencode(payload) sign = _new(self.secret.encode('utf-8'), payload_encoded.encode('utf-8'), _sha512) self.socket.send( _dumps({ 'command': 'unsubscribe', 'channel': chan, 'sign': sign.hexdigest(), 'key': self.key, 'payload': payload_encoded })) else: self.socket.send( _dumps({ 'command': 'unsubscribe', 'channel': chan }))
def dumps(data, round_digits=False, precision=2):
    if round_digits:
        # Save the encoder hooks so they can be restored afterwards.
        # Note: newer json modules no longer honour FLOAT_REPR (it was
        # removed in Python 3.6), so this hack only works on older
        # interpreters.
        old_FLOAT_REPR = encoder.FLOAT_REPR
        old_c_make_encoder = encoder.c_make_encoder

        def float_to_str(f):
            return '%.*f' % (precision, f)

        encoder.FLOAT_REPR = float_to_str
        encoder.c_make_encoder = None  # force the pure-Python encoder
        try:
            return _dumps(data, cls=JsonPlusEncoder)
        finally:
            # clean ups
            encoder.FLOAT_REPR = old_FLOAT_REPR
            encoder.c_make_encoder = old_c_make_encoder
    return _dumps(data, cls=JsonPlusEncoder)
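# Usage sketch for the rounding wrapper above, assuming JsonPlusEncoder
# subclasses json.JSONEncoder and an interpreter where the FLOAT_REPR
# hook still applies:
# >>> dumps({'price': 1.23456, 'qty': 2}, round_digits=True, precision=2)
# '{"price": 1.23, "qty": 2}'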
def json_response(data: dict, status=200, headers=None) -> _Response:
    dump = _dumps(data)
    resp = _Response(dump,
                     status=status,
                     headers=headers,
                     content_type="application/json")
    return resp
def dumps(o: object) -> str:
    """
    Format object to JSON string without whitespace

    :param o: The input object
    :return: JSON string with whitespace removed
    """
    return _dumps(o, separators=(',', ':'))
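# For illustration: the (',', ':') separators drop the spaces that
# json.dumps inserts by default.
# >>> dumps({'a': 1, 'b': [1, 2]})
# '{"a":1,"b":[1,2]}'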
def _close_writable(self): """ Close the object in write mode. """ for segment in self._write_futures: segment["etag"] = segment["etag"].result() with _handle_client_exception(): self._client.put_object( self._container, self._object_name, _dumps(self._write_futures), query_string="multipart-manifest=put", )
def _close_writable(self): """ Close the object in write mode. """ # Wait segments upload completion for segment in self._write_futures: segment['etag'] = segment['etag'].result() # Upload manifest file with _handle_client_exception(): self._client.put_object(self._container, self._object_name, _dumps(self._write_futures), query_string='multipart-manifest=put')
def write_json(filename, content):
    '''
    Writes json files

    :param filename: The full path to the json file
    :param content: The content to dump
    :returns: The size written
    '''
    j = _dumps(content, indent=4, sort_keys=True)
    if j:
        return write_file(filename, j)
def __init__(self, x, y, zoom, geometry=None, properties=None):
    self.x = x
    self.y = y
    self.zoom = zoom
    if properties is not None:
        # Python 2 only: `unicode` does not exist on Python 3
        self.extra_properties = unicode(_dumps(properties))
    if geometry is None:
        geometry = self.to_polygon()
        multipolygon = MultiPolygon([geometry])
        geometry = ST_Transform(shape.from_shape(multipolygon, 3857), 4326)
    self.geometry = geometry
    self.states.append(TaskState())
    self.locks.append(TaskLock())
def dumps(*args, **kwargs): """ Agent's implementation of :func:`json.dumps` or :func:`pyfarm.master.utility.jsonify` """ indent = None if config["agent_pretty_json"]: indent = 2 if len(args) == 1 and not isinstance(args[0], (dict, UserDict)): obj = args[0] else: obj = dict(*args, **kwargs) return _dumps(obj, default=default_json_encoder, indent=indent)
def __init__(self, x, y, zoom, geometry=None, properties=None):
    self.x = x
    self.y = y
    self.zoom = zoom
    if properties is not None:
        self.extra_properties = _dumps(properties)
    if geometry is None:
        geometry = self.to_polygon()
        multipolygon = MultiPolygon([geometry])
        geometry = ST_Transform(shape.from_shape(multipolygon, 3857), 4326)
    self.geometry = geometry
    self.states.append(TaskState())
    self.locks.append(TaskLock())
def to_json(self):
    # Map the `alcance` (scope) level to its assignment payload:
    # 2 -> red_salud, 3 -> municipio, 4 -> centro_salud
    f_red = lambda: dict(red_salud=dict(id_red=self.red_salud.id_red,
                                        nombre=self.red_salud.nombre))
    f_mup = lambda: dict(municipio=dict(id_mup=self.municipio.id_mup,
                                        nombre=self.municipio.nombre))
    f_cen = lambda: dict(centro_salud=dict(id_cen=self.centro_salud.id_cen,
                                           nombre=self.centro_salud.nombre))
    f_asignaciones = lambda key: (f_red() if key == 2 else
                                  f_mup() if key == 3 else
                                  f_cen() if key == 4 else {})
    f_alcance = lambda key: (dict(alcance=self.alcance, **f_asignaciones(key))
                             if key >= 1 else {})
    return _dumps(
        dict(persona=self.persona.__str__(),
             id=self.persona.id_per,
             rol=self.rol,
             login=self.login,
             **f_alcance(int(self.alcance or '0'))))
def json_pretty_print(_data):
    print(_dumps(_data, sort_keys=True, indent=4, ensure_ascii=False))
def dumps(obj, *args, **kw):
    """Returns bytes, not strings like json.dumps."""
    # *args and **kw are accepted but ignored
    return _dumps(obj).encode('utf-8')
def raw_data(self):
    return _dumps(self.__data, indent=4, sort_keys=True)
def _args_to_json_file(save_path, kwargs):
    # Persist the filtered, valid data as a JSON file. `default` adds
    # serialization support for date types, and `indent` makes the dump
    # human-readable. (`encoding` is a Python 2-only json.dumps argument.)
    with open(save_path, 'w+') as json_data:
        json_data.write(_dumps(kwargs,
                               encoding='UTF-8',
                               default=JsonHandle.__default,
                               indent=4))
    time.sleep(0.1)
def dumps(*args, **kwargs):
    return _dumps(*args, **kwargs, cls=NumpyEncoder)
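# NumpyEncoder itself is not shown above; a minimal sketch of such an
# encoder, assuming the usual numpy scalar/array conversions:
import numpy as np
from json import JSONEncoder

class NumpyEncoder(JSONEncoder):
    """Converts numpy scalars and arrays to plain Python types."""

    def default(self, obj):
        if isinstance(obj, np.integer):
            return int(obj)
        if isinstance(obj, np.floating):
            return float(obj)
        if isinstance(obj, np.ndarray):
            return obj.tolist()
        return super().default(obj)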
def _on_open(self, ws):
    # 1002 is the Poloniex websocket ticker channel
    self._ws.send(_dumps({'command': 'subscribe', 'channel': 1002}))
def dumps(obj):
    return _dumps(obj, default=serializer)
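# The `serializer` default hook is only referenced above; a hypothetical
# sketch covering the common non-JSON types:
from datetime import date, datetime
from decimal import Decimal

def serializer(obj):
    # Fallback for values json cannot encode natively
    if isinstance(obj, (date, datetime)):
        return obj.isoformat()
    if isinstance(obj, Decimal):
        return float(obj)
    raise TypeError('%r is not JSON serializable' % obj)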
def listenForRequests(self):
    """ Listening for requests """
    # Listen for incoming connections.
    # Retry binding the socket until we find a valid, unused port.
    while True:
        try:
            self.sock.bind(tuple(self.server_address))
            break
        except socket.error:
            self.server_address[1] += 1
    self.sock.listen(5)
    while not self.hastofinish:
        try:
            connection, client_address = self.sock.accept()
            with connection:
                # print('connection from %s' % (client_address[0]))
                data = _loads(connection.recv(self.MAX_LENGTH).decode())
                if isinstance(data, str):
                    response = {}
                    response['actual_time'] = str(
                        str_datetime(
                            self.call_inner_function('current_time_function')))
                    if data == 'progress':
                        response['input_filepath'] = self.const.WORKLOAD_FILEPATH
                        response['progress'] = (
                            _path.getsize(self.const.SCHEDULING_OUTPUT)
                            / _path.getsize(self.const.WORKLOAD_FILEPATH))
                        response['time'] = clock() - self.const.start_simulation_time
                    elif data == 'usage':
                        response['simulation_status'] = self.call_inner_function(
                            'simulated_status_function')
                        response['usage'] = self.call_inner_function('usage_function')
                    elif data == 'all':
                        response['input_filepath'] = self.const.input_filepath
                        response['progress'] = (
                            _path.getsize(self.const.sched_output_filepath)
                            / _path.getsize(self.const.input_filepath))
                        response['time'] = clock() - self.const.start_simulation_time
                        response['simulation_status'] = self.call_inner_function(
                            'simulated_status_function')
                        response['usage'] = self.call_inner_function('usage_function')
                    connection.sendall(_dumps(response).encode())
        except socket.timeout:
            pass
    self.sock.close()
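# A hypothetical client for the listener above (host, port and command are
# illustrative): the server expects a JSON-encoded string and replies with
# a JSON object.
import json
import socket

def query_status(host='localhost', port=9000, command='progress'):
    with socket.create_connection((host, port), timeout=5) as sock:
        sock.sendall(json.dumps(command).encode())
        return json.loads(sock.recv(65536).decode())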
def dumps(result):
    return _dumps(result, default=default)
def serialize(self, indent=2, sort_keys=True):
    return _dumps(self, indent=indent, sort_keys=sort_keys)
def dumps(obj):
    response.content_type = 'application/json'
    return _dumps(obj, ensure_ascii=False)
def dumps(s):
    return compress(_dumps(s))
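# A matching inverse, assuming `compress`/`decompress` come from zlib;
# a sketch, not the original pairing:
from json import loads as _loads
from zlib import decompress

def loads(blob):
    # json.loads accepts bytes on Python 3.6+
    return _loads(decompress(blob))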
def anomaly_mainentry():
    outputPayload = {}
    # initialize the string field for incremental updates to this field
    outputPayload['status_detail_text'] = ''
    try:
        logger = _getLogger(__name__)
        logger.info('in anomaly_mainentry')
        app.logger.info('in anomaly_mainentry via app logger')
        requiredParameters = ['version', 'input_data', 'anomaly_engine',
                              'business_id', 'group_id', 'encoding',
                              'record_separator', 'field_separator']
        parametersPresent = True
        if _request.method == 'POST':
            for elem in requiredParameters:
                if _request.form.get(elem) is None:
                    parametersPresent = False
                    outputPayload['status_code'] = 422
                    outputPayload['version'] = '1'
                    if outputPayload.get('status_text') is None:
                        outputPayload['status_text'] = 'Missing param:%s' % (elem)
                    else:
                        outputPayload['status_text'] += ', Missing param:%s' % (elem)
            if parametersPresent:
                if _request.form.get('version') == '1':
                    logger.debug('after version check, before byte conversion')
                    logger.debug('%s' % (str(_request.headers)))
                    raw_data = bytes()
                    try:
                        if int(_request.form.get('isFilePath')) and _request.form.get('input_data'):
                            file_path = _request.form['input_data']
                            all_files = sorted(_glob.glob(file_path))
                            for file in all_files:
                                raw_data += _readData(file)
                        elif not int(_request.form.get('isFilePath')) and _request.form.get('input_data'):
                            raw_data = _request.form['input_data'].encode()
                        else:
                            logger.error('No file_path or data parameters are present')
                            outputPayload['status_code'] = 501
                            outputPayload['status_text'] = 'ERROR'
                            outputPayload['version'] = '1'
                            outputPayload['status_detail_text'] += 'Error in mainProcessEngine no file_path or data present'
                            raise Exception("No Data Present")
                    except Exception as e:
                        logger.error('Caught exception %s in data access' % (str(e)))
                        outputPayload['status_code'] = 501
                        outputPayload['status_text'] = 'ERROR'
                        outputPayload['version'] = '1'
                        outputPayload['status_detail_text'] += 'Error in mainProcessEngine %s' % (str(e))
                        raise e

                    logger.debug('have byteData, calling mainProcessEngine')
                    # acquire a copy of global routing service info
                    try:
                        specificRoutingTable = _loads(_request.form['specificRoutingTable'])
                        # specificRoutingTable['bID'] = _request.form['business_id']
                        # specificRoutingTable['gID'] = _request.form['group_id']
                        # specificRoutingTable['dbID'] = _request.form['database']
                        # process overrides
                        # for elem in specificRoutingTable:
                        #     newElem = _request.form.get(elem)
                        #     if newElem is not None:
                        #         # then this override is present, put into dictionary
                        #         logger.info('overriding k,v %s,%s with %s'
                        #                     % (elem, specificRoutingTable[elem], newElem))
                        #         specificRoutingTable[elem] = newElem
                    except KeyError as e:
                        logger.error('Key Error accessing globalRoutingService %s' % (str(e)))
                        raise e
                    try:
                        logger.info('Routing info:%s' % (str(specificRoutingTable)))
                        df = _mainProcessEngine(raw_data,
                                                encoding=_request.form['encoding'],
                                                recordSeparator=_request.form['record_separator'],
                                                fieldSeparator=_request.form['field_separator'],
                                                metaInformation=specificRoutingTable)
                        # call the grafana dashboard creation
                        logger.info('after main process engine')
                    except Exception as e:
                        logger.error('caught exception %s' % (str(e)))
                        outputPayload['status_code'] = 501
                        outputPayload['status_text'] = 'ERROR'
                        outputPayload['version'] = '1'
                        outputPayload['status_detail_text'] += 'Error in mainProcessEngine %s' % (str(e))
                        raise e

                    logger.info('prepare output payload')
                    outputPayload['status_code'] = 200
                    outputPayload['status_text'] = 'OK'
                    outputPayload['version'] = '1'
                    outputPayload['data'] = df.to_csv(None, header=True, index=False,
                                                      sep=_request.form['field_separator'])
                else:
                    # invalid version
                    outputPayload['status_code'] = 422
                    outputPayload['status_text'] = 'Invalid API version'
                    outputPayload['version'] = '1'
                    outputPayload['status_detail_text'] += 'currently version 1 is supported'
    except Exception as e:
        logger.error('caught exception %s' % (str(e)))
        outputPayload['status_code'] = 501
        outputPayload['status_text'] = 'ERROR'
        outputPayload['version'] = '1'
        outputPayload['status_detail_text'] += ':::%s' % (str(e))
    logger.info('leaving anomaly_mainentry')
    return _dumps(outputPayload)
def dumps(d):
    return _dumps(d, indent=4, cls=JSONEncoder)
def dumps(o):
    return _dumps(o, ensure_ascii=False)
def ToJSON(self) -> str:
    from json import dumps as _dumps
    return _dumps(self.ToDict())
def to_json(obj: object):
    return _dumps(obj, cls=_JsonEncoder)
def json_dumps(cls, doc, *args, **kwarg):
    from json import dumps as _dumps
    return _dumps(doc, default=cls.BSONEncoder().default)
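# BSONEncoder is only referenced above; a hypothetical sketch of such an
# encoder for the usual BSON-specific types (bson.ObjectId ships with
# pymongo):
from datetime import datetime
from json import JSONEncoder
from bson import ObjectId

class BSONEncoder(JSONEncoder):
    """Maps BSON-specific types onto JSON-friendly ones."""

    def default(self, obj):
        if isinstance(obj, ObjectId):
            return str(obj)
        if isinstance(obj, datetime):
            return obj.isoformat()
        return super().default(obj)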
def dumps(d):
    json_string = _dumps(d, indent=4, cls=JSONEncoder)
    return json_string
def dumps(o, *args, **kwargs):
    return _dumps(o, *args, cls=EnhancedJSONEncoder, **kwargs)
def dumps(obj, cls=None, **kwargs):
    if not cls:
        cls = DjangoJSONEncoder
    return _dumps(obj, cls=cls, **kwargs)
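# DjangoJSONEncoder lives in django.core.serializers.json and already
# handles datetime, Decimal and UUID values; a usage sketch:
# >>> from datetime import datetime
# >>> dumps({'when': datetime(2020, 1, 1)})
# '{"when": "2020-01-01T00:00:00"}'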
def push_dictionary(self, marker: dict):
    "json encodes a dictionary before pushing it with `~.push_marker`"
    self._client.push_marker(_dumps(marker))
def jsonify(obj):
    return _dumps(obj, default=jsonify_special)
def ToJSON(self):
    from json import dumps as _dumps
    dictRepr = self.ToDict()
    return _dumps(dictRepr)