def _send_request(
    self,
    method,
    url_path,
    query_string_args=None,
    body_deserialization=None,
    ):
    """Send an API request and return the deserialized response body.

    The ``auditId`` query-string argument is always added from the
    connection's change source. When *body_deserialization* is truthy it is
    JSON-serialized and sent with a JSON content-type header; otherwise no
    body and no content-type header are sent.

    :param method: HTTP method name (e.g. ``'GET'``)
    :param url_path: path appended to the connection's API URL
    :param query_string_args: optional mapping of query-string arguments
    :param body_deserialization: optional JSON-serializable request body
    :return: the deserialized response body
    """
    url = self._API_URL + url_path

    params = dict(query_string_args or {}, auditId=self._change_source)

    if body_deserialization:
        request_headers = {'content-type': 'application/json'}
        request_body_serialization = json_serialize(body_deserialization)
    else:
        request_headers = {}
        request_body_serialization = None

    response = self._session.request(
        method,
        url,
        params=params,
        auth=self._authentication_handler,
        data=request_body_serialization,
        headers=request_headers,
        )

    return self._deserialize_response_body(response)
def _check_request_sender(
    http_method_name,
    request_sender_name,
    include_request_body,
    ):
    """Exercise a connection request-sender and verify the prepared request.

    Sends a stub request through the named sender method of a mock
    connection and asserts on the HTTP method, headers, URL path and body
    of the single request that was prepared.
    """
    mock_connection = _MockPortalConnection()

    expected_body = {'foo': 'bar'} if include_request_body else None

    sender = getattr(mock_connection, request_sender_name)
    sender_kwargs = \
        {'body_deserialization': expected_body} if include_request_body else {}
    sender(_STUB_URL_PATH, **sender_kwargs)

    eq_(1, len(mock_connection.prepared_requests))
    request = mock_connection.prepared_requests[0]

    eq_(http_method_name, request.method)

    if include_request_body:
        assert_in('content-type', request.headers)
        assert_equal('application/json', request.headers['content-type'])

    eq_(_STUB_URL_PATH, _get_path_from_api_url(request.url))

    if include_request_body:
        eq_(json_serialize(expected_body), request.body)
    else:
        assert_false(request.body)
def _check_request_sender(
        http_method_name, request_sender_name, include_request_body):
    """Drive a request-sender method and validate the request it prepares.

    Asserts exactly one request was prepared, with the expected HTTP
    method, JSON content-type header (when a body is sent), URL path and
    serialized body.
    """
    connection = _MockPortalConnection()

    if include_request_body:
        payload = {'foo': 'bar'}
        sender_kwargs = {'body_deserialization': payload}
    else:
        payload = None
        sender_kwargs = {}

    getattr(connection, request_sender_name)(_STUB_URL_PATH, **sender_kwargs)

    eq_(1, len(connection.prepared_requests))
    prepared = connection.prepared_requests[0]

    eq_(http_method_name, prepared.method)

    if include_request_body:
        assert_in('content-type', prepared.headers)
        assert_equal('application/json', prepared.headers['content-type'])

    eq_(_STUB_URL_PATH, _get_path_from_api_url(prepared.url))

    if include_request_body:
        eq_(json_serialize(payload), prepared.body)
    else:
        assert_false(prepared.body)
def _get_property_value(property_name, contact_properties):
    """Return the named contact property coerced to a unicode string.

    Booleans are JSON-serialized first (yielding ``'true'``/``'false'``)
    and dates are converted to millisecond timestamps, so both survive the
    final unicode() coercion in a wire-friendly form.
    """
    value = contact_properties[property_name]

    if isinstance(value, bool):
        value = json_serialize(value)
    elif isinstance(value, date):
        value = convert_date_to_timestamp_in_milliseconds(value)

    return unicode(value)
def _get_property_value(property_name, contact_properties):
    """Return the named contact property coerced to a text string.

    Booleans become their JSON representation (``'true'``/``'false'``) and
    dates become millisecond timestamps before the text_type() coercion.
    """
    raw_value = contact_properties[property_name]

    if isinstance(raw_value, bool):
        coerced = json_serialize(raw_value)
    elif isinstance(raw_value, date):
        coerced = convert_date_to_timestamp_in_milliseconds(raw_value)
    else:
        coerced = raw_value

    return text_type(coerced)
def _invoke(label, argument_dict, is_map=False):
    """Low-level invocation of a published function, with map ability.

    Each value in *argument_dict* must be a non-empty iterable; non-file
    items are JSON-serialized. When *is_map* is False every argument list
    must hold exactly one item. When mapping, all lists longer than one
    item must share the same length (the map count), bounded by the
    connection's map job limit.

    :param label: label of the published function to invoke
    :param argument_dict: mapping of argument name -> iterable of values
    :param is_map: whether to fan out as a map job
    :return: an xrange of job ids for maps, otherwise a single job id
    :raises TypeError: if a value is not iterable or not JSON-encodable
    :raises ValueError: for empty or inconsistently sized argument lists
    """
    # TODO: We actually have to json_serialize this.. somehows
    conn = _getcloudnetconnection()
    map_limit = conn.map_job_limit

    map_count = 1

    extracted_argdict = {}
    for name, arglist in argument_dict.items():
        extracted = []
        if not hasattr(arglist, '__iter__'):
            raise TypeError('%s must map to an iterable' % name)
        argiter = iter(arglist)
        iterated = False
        for arg in argiter:
            iterated = True
            if not hasattr(arg, 'read'):  # encode anything but file objects
                try:
                    arg = json_serialize(arg)
                except (TypeError, UnicodeDecodeError):
                    raise TypeError('%s is not json encodable' % name)
            extracted.append(arg)
        if not iterated:
            raise ValueError('%s cannot be bound to an empty list' % name)

        extracted_len = len(extracted)
        if map_count == 1:
            if not is_map and extracted_len > 1:
                raise ValueError('%s can only have 1 item when allow_map is False' % name)
            # BUG FIX: the format string was passed to ValueError alongside
            # its arguments (logging-style) instead of being %-formatted.
            if extracted_len > map_limit:
                raise ValueError('%s has %s items. Maximum is %s'
                                 % (name, extracted_len, map_limit))
            map_count = extracted_len
        elif extracted_len > 1 and extracted_len != map_count:
            raise ValueError('%s has %s items. Expected %s to match with other arguments'
                             % (name, extracted_len, map_count))

        extracted_argdict[name] = extracted

    if is_map:
        resp = conn.send_request(_invoke_map_query % label, extracted_argdict)
        jid_res = resp['jids']
        rstart, rend = jid_res.split('-')
        return xrange(int(rstart), int(rend))
    else:
        resp = conn.send_request(_invoke_query % label, extracted_argdict)
        return resp['jid']
def __call__(self, request):
    """Build the stub response, adding the configured content type and body.

    Delegates response construction to the parent class, then sets a
    ``Content-Type`` header (with UTF-8 charset) when one is configured and
    attaches a JSON-serialized body unless the status code is 204.
    """
    response = super(_ResponseMaker, self).__call__(request)

    if self._content_type:
        response.headers['Content-Type'] = \
            '{}; charset=UTF-8'.format(self._content_type)

    has_body = self._body_deserialization is not None
    if has_body and self._status_code != 204:
        serialized_body = json_serialize(self._body_deserialization)
        response._content = bytes(serialized_body, 'UTF-8')

    return response
def rest_publish(command, label, return_file=None, ignore_exit_status=False, **kwargs):
    """Publish shell *command* to PiCloud so it can be invoked through the
    PiCloud REST API.

    The published function will be managed in the future by a unique
    (URL encoded) *label*.

    Returns url of published function. See cloud.rest.publish

    See cloud.shell.execute for description other arguments
    See cloud.rest.publish for description of **kwargs

    :raises ValueError: if *label* is missing
    :raises TypeError: if *label* contains non-URI or non-ASCII characters
    """
    if not label:
        raise ValueError('label must be provided')
    m = re.match(r'^[A-Z0-9a-z_+-.]+$', label)
    if not m:
        raise TypeError('Label can only consist of valid URI characters (alphanumeric or from set(_+-.$)')
    try:
        label = label.decode('ascii').encode('ascii')
    except (UnicodeDecodeError, UnicodeEncodeError):  # should not be possible
        raise TypeError('label must be an ASCII string')

    cloud, params = _get_cloud_and_params(
        command, kwargs,
        ignore=['_label', '_depends_on', '_depends_on_errors'])

    # shell argspecs are dictionaries
    cmd_params = template.extract_vars(command)
    argspec = {'prms': cmd_params, 'cmd': command}
    argspec_serialized = json_serialize(argspec)
    if len(argspec_serialized) >= 255:  # won't fit in db - clear command
        # BUG FIX: the dict key is 'cmd'; deleting 'command' raised KeyError.
        del argspec['cmd']
        argspec_serialized = json_serialize(argspec)
        if len(argspec_serialized) >= 255:  # commands too large; cannot type check
            argspec_serialized = json_serialize({})
    params['argspec'] = argspec_serialized

    return _low_level_publish(
        _wrap_execute_program(command, return_file, ignore_exit_status),
        label, 'raw', 'actiondct', params,
        func_desc='command invoked in shell')['uri']
def __call__(self, request):
    """Create a stub response reflecting the configured status and body.

    Sets a ``Content-Type`` header (with UTF-8 charset) when a content
    type is configured, and a JSON-serialized body unless the status code
    is 204 or no body deserialization was supplied.
    """
    response = RequestsResponse()
    response.status_code = self._status_code
    response.reason = 'Reason'

    if self._content_type:
        response.headers['Content-Type'] = \
            '{}; charset=UTF-8'.format(self._content_type)

    should_set_body = (
        self._status_code != 204 and self._body_deserialization is not None)
    if should_set_body:
        response._content = json_serialize(self._body_deserialization)

    return response
def __call__(self, request):
    """Create a stub response with the configured status, headers and body.

    The body, when present and the status is not 204, is the UTF-8
    encoding of the JSON-serialized body deserialization.
    """
    stub_response = RequestsResponse()
    stub_response.status_code = self._status_code
    stub_response.reason = 'Reason'

    if self._content_type:
        header_value = '{}; charset=UTF-8'.format(self._content_type)
        stub_response.headers['Content-Type'] = header_value

    if self._status_code != 204 and self._body_deserialization is not None:
        body_text = json_serialize(self._body_deserialization)
        stub_response._content = bytes(body_text, 'UTF-8')

    return stub_response
def __deliver_message(self, message):
    ''' The processing step to deliver new messages to the clients

    The serialized payload is built once and shared by every client
    queue, so the caller's message reference is not deep-copied.

    :param message: The message to process
    '''
    client_count = len(ShellProcessor.connections)
    if client_count == 0:
        return  # no clients connected; skip serialization entirely

    _logger.debug("delivering message to %d clients" % client_count)
    payload = json_serialize(message) + "\r\n"
    for client_queue in ShellProcessor.connections:
        client_queue.put_nowait(payload)
def _send_request(
    self,
    method,
    url,
    query_string_args=None,
    body_deserialization=None,
    ):
    """Send an HTTP request, validate the response and return it.

    Relative URLs get the connection's API prefix; absolute ones (already
    starting with the API URL) pass through unchanged. A truthy
    *body_deserialization* is JSON-serialized and sent with a JSON
    content-type header. The response is checked for success and for a
    deserializable body before being returned.

    :param method: HTTP method name
    :param url: absolute URL or path relative to the API URL
    :param query_string_args: optional mapping of query-string arguments
    :param body_deserialization: optional JSON-serializable request body
    :return: the validated requests response object
    """
    if not url.startswith(self._api_url):
        url = self._api_url + url

    query_string_args = query_string_args or {}

    if body_deserialization:
        request_headers = {'content-type': 'application/json'}
        request_body_serialization = json_serialize(body_deserialization)
    else:
        request_headers = {}
        request_body_serialization = None

    response = self._session.request(
        method,
        url,
        params=query_string_args,
        auth=self._authentication_handler,
        data=request_body_serialization,
        headers=request_headers,
        timeout=self._timeout,
        )

    self._require_successful_response(response)
    self._require_deserializable_response_body(response)

    return response
def _json_serialize_to_boolean(value): value_boolean = bool(value) value_serialized = json_serialize(value_boolean) return value_serialized
def create_tinfoil_index(index_to_write: dict, out_path: Path,
                         compression_flag: int,
                         rsa_pub_key_path: Path = None,
                         vm_path: Path = None):
    """Serialize *index_to_write* into a Tinfoil index file at *out_path*.

    The JSON payload (optionally prefixed with a VM blob) is compressed
    per *compression_flag*, padded to the AES block size, optionally
    AES-ECB encrypted with a random session key that is itself RSA-OAEP
    encrypted, and written with the Tinfoil header.

    :param index_to_write: index payload; must be JSON-serializable
    :param out_path: destination file path (parent dirs are created)
    :param compression_flag: a CompressionFlag value
    :param rsa_pub_key_path: optional PEM public key; enables encryption
    :param vm_path: optional VM blob embedded before the JSON payload
    :raises NotImplementedError: for unsupported compression flags
    """
    to_compress_buffer = b""

    # Optional VM blob: magic + little-endian length + raw bytes.
    if vm_path is not None and vm_path.is_file():
        to_compress_buffer += b"\x13\x37\xB0\x0B"
        with open(vm_path, "rb") as vm_stream:
            vm_buffer = vm_stream.read()
        to_compress_buffer += len(vm_buffer).to_bytes(4, "little")
        to_compress_buffer += vm_buffer

    to_compress_buffer += bytes(json_serialize(index_to_write).encode())

    to_write_buffer = b""
    session_key = b""

    if compression_flag == CompressionFlag.ZSTD_COMPRESSION:
        to_write_buffer += ZstdCompressor(
            level=22).compress(to_compress_buffer)
    elif compression_flag == CompressionFlag.ZLIB_COMPRESSION:
        to_write_buffer += zlib_compress(to_compress_buffer, 9)
    elif compression_flag == CompressionFlag.NO_COMPRESSION:
        to_write_buffer += to_compress_buffer
    else:
        raise NotImplementedError(
            "Compression method supplied is not implemented yet.")

    data_size = len(to_write_buffer)

    # Pad to the AES block size. NOTE(review): when data_size is already a
    # multiple of 0x10 this appends a full extra block — presumably
    # tolerated because readers only consume data_size bytes; confirm
    # against the Tinfoil format before changing.
    to_write_buffer += b"\x00" * (0x10 - (data_size % 0x10))

    if rsa_pub_key_path is not None and rsa_pub_key_path.is_file():
        # SECURITY FIX: generate the AES session key with a CSPRNG.
        # random.randint is not suitable for cryptographic key material.
        from secrets import token_bytes

        def rand_aes_key_generator() -> bytes:
            return token_bytes(0x10)

        # read_text() closes the key file (the old open(...).read() leaked
        # the handle).
        rsa_pub_key = import_rsa_key(rsa_pub_key_path.read_text())
        rand_aes_key = rand_aes_key_generator()
        pkcs1_oaep_ctx = new_pkcs1_oaep_ctx(
            rsa_pub_key, hashAlgo=SHA256, label=b"")
        aes_ctx = new_aes_ctx(rand_aes_key, MODE_ECB)
        session_key += pkcs1_oaep_ctx.encrypt(rand_aes_key)
        to_write_buffer = aes_ctx.encrypt(to_write_buffer)
        flag = compression_flag | EncryptionFlag.ENCRYPT
    else:
        # No encryption: session key slot is zero-filled.
        session_key += b"\x00" * 0x100
        flag = compression_flag | EncryptionFlag.NO_ENCRYPT

    Path(out_path.parent).mkdir(parents=True, exist_ok=True)

    # Header layout: magic, 1-byte flag, 0x100-byte session key,
    # 8-byte little-endian data size, then the (padded) payload.
    with open(out_path, "wb") as out_stream:
        out_stream.write(b"TINFOIL")
        out_stream.write(flag.to_bytes(1, byteorder="little"))
        out_stream.write(session_key)
        out_stream.write(data_size.to_bytes(8, "little"))
        out_stream.write(to_write_buffer)
def publish(func, label, out_encoding='json', **kwargs):
    """
    Publish *func* (a callable) to PiCloud so it can be invoked through the
    PiCloud REST API

    The published function will be managed in the future by a unique (URL
    encoded) *label*.

    *out_encoding* specifies the format that the return value should be in
    when retrieving the result via the REST API. Valid values are "json"
    for a JSON-encoded object and "raw", where the return value must be an
    str (but can contain any characters).

    The return value is the URL which can be HTTP POSTed to to invoke
    *func*. See http://docs.picloud.com/rest.html for information about
    PiCloud's REST API

    Certain special *kwargs* associated with cloud.call can be attached to
    the periodic jobs:

    * _cores: Set number of cores your job will utilize. See
      http://docs.picloud.com/primer.html#choose-a-core-type/ In addition
      to having access to more CPU cores, the amount of RAM available will
      grow linearly. Possible values for ``_cores`` depend on what
      ``_type`` you choose:

      * c1: 1
      * c2: 1, 2, 4, 8
      * f2: 1, 2, 4, 8, 16
      * m1: 1, 2, 4, 8
      * s1: 1

    * _env: A string specifying a custom environment you wish to run your
      job within. See environments overview at
      http://docs.picloud.com/environment.html

    * _fast_serialization: This keyword can be used to speed up
      serialization, at the cost of some functionality. This affects the
      serialization of the spawned jobs' return value. The stored function
      will always be serialized by the enhanced serializer, with debugging
      features. Possible values keyword are:

      0. default -- use cloud module's enhanced serialization and
         debugging info
      1. no debug -- Disable all debugging features for result
      2. use cPickle -- Use python's fast serializer, possibly causing
         PicklingErrors

    * _max_runtime: Specify the maximum amount of time (in integer
      minutes) jobs can run. If the job runs beyond this time, it will be
      killed.

    * _os_env_vars: List of operating system environment variables that
      should be copied to PiCloud from your system. Alternatively a
      dictionary mapping the environment variables to the desired values.

    * _priority: A positive integer denoting the job's priority. PiCloud
      tries to run jobs with lower priority numbers before jobs with
      higher priority numbers.

    * _profile: Set this to True to enable profiling of your code.
      Profiling information is valuable for debugging, but may slow down
      your jobs.

    * _restartable: In the very rare event of hardware failure, this flag
      indicates that a spawned job can be restarted if the failure
      happened in the middle of it. By default, this is true. This should
      be unset if the function has external state (e.g. it modifies a
      database entry)

    * _type: Choose the type of core to use, specified as a string:

      * c1: 1 compute unit, 300 MB ram, low I/O (default)
      * c2: 2.5 compute units, 800 MB ram, medium I/O
      * f2: 5.5 compute units, 3.75 GB ram, high I/O, hyperthreaded core
      * m1: 3.25 compute units, 8 GB ram, high I/O
      * s1: Up to 2 compute units (variable), 300 MB ram, low I/O,
        1 IP per core

      See http://www.picloud.com/pricing/ for pricing information

    * _vol: A string or list of strings specifying a volume(s) you wish
      your jobs to have access to.
    """
    if not callable(func):
        raise TypeError('cloud.rest.publish first argument (%s) is not callable'
                        % (str(func)))

    m = re.match(r'^[A-Z0-9a-z_+-.]+$', label)
    if not m:
        raise TypeError('Label can only consist of valid URI characters (alphanumeric or from set(_+-.$)')

    # ASCII label:
    try:
        label = label.decode('ascii').encode('ascii')
    except (UnicodeDecodeError, UnicodeEncodeError):  # should not be possible
        raise TypeError('label must be an ASCII string')

    try:
        docstring = '' if (func.__doc__ is None) else func.__doc__
        func_desc = (docstring).encode('utf8')
    except (UnicodeDecodeError, UnicodeEncodeError):
        raise TypeError('function docstring must be an UTF8 compatible unicode string')

    if not isinstance(out_encoding, str):
        raise TypeError('out_encoding must be an ASCII string')

    cloud = _getcloud()
    params = cloud._getJobParameters(
        func, kwargs, ignore=['_label', '_depends_on', '_depends_on_errors'])

    # argument specification for error checking and visibility
    argspec = getargspec(func)
    argspec_serialized = json_serialize(argspec, default=str)
    if len(argspec_serialized) >= 255:  # won't fit in db - clear defaults
        # BUG FIX: getargspec returns an immutable ArgSpec namedtuple, so
        # `argspec[4] = {}` raised TypeError (and index 4 is out of range;
        # `defaults` is the fourth field). Rebuild it with empty defaults.
        argspec = argspec._replace(defaults={})
        argspec_serialized = json_serialize(argspec, default=str)
    params['argspec'] = argspec_serialized

    resp = _low_level_publish(func, label, out_encoding, 'raw', params,
                              func_desc)

    return resp['uri']