def testDecodePacketWithTooBigPacket(self):
    try:
        self.packet.DecodePacket(six.b('\x00\x00\x24\x00') +
                                 (0x2400 - 4) * six.b('X'))
    except packet.PacketError as e:
        self.failUnless('too long' in str(e))
    else:
        self.fail()
def do_request(self, req, **kwargs):
    self.cookiejar.clear()

    if req.environ['REQUEST_METHOD'] != 'OPTIONS':
        # Making sure endpoint handles OPTIONS method properly
        self.options(req.environ['PATH_INFO'])

    res = super(TestApp, self).do_request(req, **kwargs)

    if res.headers.get('Warning', None):
        raise ResponseValidationError('Endpoint produced invalid response. Make sure the '
                                      'response matches OpenAPI scheme for the endpoint.')

    if not kwargs.get('expect_errors', None):
        try:
            body = res.body
        except AssertionError as e:
            if 'Iterator read after closed' in six.text_type(e):
                body = b''
            else:
                raise e

        if six.b(SUPER_SECRET_PARAMETER) in body or \
                six.b(ANOTHER_SUPER_SECRET_PARAMETER) in body:
            raise ResponseLeakError('Endpoint response contains secret parameter. '
                                    'Find the leak.')

    if 'Access-Control-Allow-Origin' not in res.headers:
        raise ResponseValidationError('Response missing a required CORS header')

    return res
def encode_to(object, stream):
    '''
    encode_to(object, stream)

    Encodes the object into the stream ``stream``

    Parameters
    ----------
    object : Any object
    stream : file-like object
    '''
    if object is None:
        return
    prefix = six.b('P')
    write = lambda f, a: pickle.dump(f, a, protocol=pickle.HIGHEST_PROTOCOL)
    try:
        import numpy as np
        if type(object) == np.ndarray:
            prefix = six.b('N')
            write = (lambda f, a: np.save(a, f))
    except ImportError:
        pass
    stream = compress_stream(stream)
    stream.write(prefix)
    write(object, stream)
    stream.flush()
def test_bytes_tokens(self):
    bytes_token = BytesToken(unhexlify(six.b('01')))
    self.assertEqual(bytes_token.value, six.b('\x01'))
    self.assertEqual(str(bytes_token), "<BytesToken: %s>" % bytes_token.value)
    self.assertEqual(bytes_token.hash_fn('123'), '123')
    self.assertEqual(bytes_token.hash_fn(123), 123)
    self.assertEqual(bytes_token.hash_fn(str(cassandra.metadata.MAX_LONG)),
                     str(cassandra.metadata.MAX_LONG))
def readline(self):
    """Read one entire line from the file.

    A trailing newline character is kept in the string (but may be absent
    when a file ends with an incomplete line).
    """
    bits = [self._buffer if self._buffer_pos == self.tell() else b("")]
    indx = bits[-1].find(self._newline)
    if indx == -1:
        # Read chunks until first newline is found or entire file is read.
        while indx == -1:
            bit = self.read(self.buffer_size)
            bits.append(bit)
            if not bit:
                break
            indx = bit.find(self._newline)
        if indx == -1:
            return b("").join(bits)
    indx += len(self._newline)
    extra = bits[-1][indx:]
    bits[-1] = bits[-1][:indx]
    self._buffer = extra
    self._buffer_pos = self.tell()
    return b("").join(bits)
def test_rollup_datapoint(self):
    self.prepare_response("GET", "/ws/DataStream/test", GET_TEST_DATA_STREAM)
    example_json = {
        "id": "07d77854-0557-11e4-ab44-fa163e7ebc6b",
        "timestamp": "1404683207981",
        "timestampISO": "2014-07-06T21:46:47.981Z",
        "serverTimestamp": "1404683207981",
        "serverTimestampISO": "2014-07-06T21:46:47.981Z",
        "data": "0.0",
        "description": "Test",
        "quality": "20",
        "location": "1.0,2.0,3.0"
    }
    stream = self._get_stream("test", with_cached_data=True)
    dp = DataPoint.from_rollup_json(stream, example_json)
    self.assertEqual(dp.get_data(), 0.0)
    orig_dt = dp.get_timestamp()
    dt_wo_ms = datetime.datetime(year=orig_dt.year, month=orig_dt.month,
                                 day=orig_dt.day, hour=orig_dt.hour,
                                 minute=orig_dt.minute, second=orig_dt.second,
                                 tzinfo=orig_dt.tzinfo)
    self.assertEqual(six.b(dt_wo_ms.isoformat()),
                     six.b('2014-07-06T21:46:47+00:00'))
def decode_from(stream):
    '''
    object = decode_from(stream)

    Decodes the object from the stream ``stream``

    Parameters
    ----------
    stream : file-like object

    Returns
    -------
    object : decoded object
    '''
    stream = decompress_stream(stream)
    prefix = stream.read(1)
    if not prefix:
        return None
    elif prefix == six.b('P'):
        return pickle.load(stream)
    elif prefix == six.b('N'):
        import numpy as np
        return np.load(stream)
    else:
        raise IOError("jug.backend.decode_from: unknown prefix '%s'" % prefix)
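# A hedged round-trip sketch for encode_to/decode_from, assuming the
# module's compress_stream/decompress_stream helpers wrap any file-like
# object (this mirrors how jug's own encode/decode wrappers use BytesIO).
def _encode_decode_roundtrip_sketch():
    import io
    buf = io.BytesIO()
    encode_to({'answer': 42}, buf)      # pickled under the b'P' prefix
    buf.seek(0)
    assert decode_from(buf) == {'answer': 42}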
def __iter__(self):
    """
    Iterate over serialized chunks.
    """
    messages = []
    buf = 0
    sent = 0

    # for all registration ids
    for idx, token in enumerate(self.tokens):
        tok = binascii.unhexlify(token)
        # |COMMAND|FRAME-LEN|{token}|{payload}|{id:4}|{expiry:4}|{priority:1}
        # 5 items, each with a 3-byte prefix, then each item's length
        frame_len = 3 * 5 + len(tok) + len(self.payload) + 4 + 4 + 1
        fmt = ">BIBH{0}sBH{1}sBHIBHIBHB".format(len(tok), len(self.payload))
        message = pack(fmt, self.VERSION, frame_len,
                       1, len(tok), tok,
                       2, len(self.payload), self.payload,
                       3, 4, idx,
                       4, 4, self.expiry,
                       5, 1, self.priority)
        messages.append(message)
        buf += len(message)
        if buf >= self.packet_size:
            chunk = six.b("").join(messages)
            buf = 0
            prev_sent = sent
            sent += len(messages)
            messages = []
            yield prev_sent, chunk

    # last small chunk
    if messages:
        yield sent, six.b("").join(messages)
def testUtf8Validator(self):
    state = validate_utf8(six.b('\xf0\x90\x80\x80'))
    self.assertEqual(state, True)
    state = validate_utf8(six.b('\xce\xba\xe1\xbd\xb9\xcf\x83\xce\xbc\xce\xb5\xed\xa0\x80edited'))
    self.assertEqual(state, False)
    state = validate_utf8(six.b(''))
    self.assertEqual(state, True)
def readline(self, size=-1):
    """Read a line from the file, or at most <size> bytes."""
    bits = []
    indx = -1
    sizeSoFar = 0
    while indx == -1:
        nextBit = self.read(self._bufsize)
        bits.append(nextBit)
        sizeSoFar += len(nextBit)
        if not nextBit:
            break
        if size > 0 and sizeSoFar >= size:
            break
        indx = nextBit.find(b("\n"))
    # If not found, return whole string up to <size> length.
    # Any leftovers are pushed onto front of buffer.
    if indx == -1:
        data = b("").join(bits)
        if size > 0 and sizeSoFar > size:
            extra = data[size:]
            data = data[:size]
            self._rbuffer = extra + self._rbuffer
        return data
    # If found, push leftovers onto front of buffer.
    # Add one to preserve the newline in the return value.
    indx += 1
    extra = bits[-1][indx:]
    bits[-1] = bits[-1][:indx]
    self._rbuffer = extra + self._rbuffer
    return b("").join(bits)
def PwDecrypt(self, password):
    """Unobfuscate a RADIUS password.

    RADIUS hides passwords in packets by using an algorithm based on the
    MD5 hash of the packet authenticator and RADIUS secret. This function
    reverses the obfuscation process.

    :param password: obfuscated form of password
    :type password: binary string
    :return: plaintext password
    :rtype: unicode string
    """
    buf = password
    pw = six.b('')

    last = self.authenticator
    while buf:
        hash = md5_constructor(self.secret + last).digest()
        if six.PY3:
            for i in range(16):
                pw += bytes((hash[i] ^ buf[i],))
        else:
            for i in range(16):
                pw += chr(ord(hash[i]) ^ ord(buf[i]))
        (last, buf) = (buf[:16], buf[16:])

    while pw.endswith(six.b('\x00')):
        pw = pw[:-1]

    return pw.decode('utf-8')
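# For context, a hedged sketch of the forward operation PwDecrypt reverses
# (RFC 2865 section 5.2): the plaintext is null-padded to a multiple of 16
# bytes and each 16-byte block is XORed with MD5(secret + previous block),
# seeded with the request authenticator. The function name and standalone
# form are illustrative; it is not part of the class above (Python 3 only).
import hashlib

def obfuscate_password_sketch(password, secret, authenticator):
    buf = password.encode('utf-8')
    if len(buf) % 16:
        buf += b'\x00' * (16 - len(buf) % 16)
    result, last = b'', authenticator
    while buf:
        block, buf = buf[:16], buf[16:]
        digest = hashlib.md5(secret + last).digest()
        xored = bytes(d ^ c for d, c in zip(digest, block))
        result += xored
        last = xored
    return result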
def _get_login_info(self):
    """Get login IP, username and password from config file."""
    logininfo = {}
    filename = self.configuration.manila_huawei_conf_file
    tree = ET.parse(filename)
    root = tree.getroot()
    RestURL = root.findtext('Storage/RestURL')
    logininfo['RestURL'] = RestURL.strip()

    # Prefix !$$$ means encoded already.
    prefix_name = '!$$$'
    need_encode = False
    for key in ['UserName', 'UserPassword']:
        node = root.find('Storage/%s' % key)
        if node.text.find(prefix_name) > -1:
            logininfo[key] = base64.b64decode(six.b(node.text[4:]))
        else:
            logininfo[key] = node.text
            node.text = prefix_name + six.text_type(
                base64.b64encode(six.b(node.text)))
            need_encode = True
    if need_encode:
        self._change_file_mode(filename)
        try:
            tree.write(filename, 'UTF-8')
        except Exception as err:
            err_msg = (_('File write error %s.') % err)
            LOG.error(err_msg)
            raise exception.InvalidShare(reason=err_msg)
    return logininfo
def test_boto3_list_object_versions():
    s3 = boto3.client('s3', region_name='us-east-1')
    bucket_name = 'mybucket'
    key = 'key-with-versions'
    s3.create_bucket(Bucket=bucket_name)
    s3.put_bucket_versioning(
        Bucket=bucket_name,
        VersioningConfiguration={
            'Status': 'Enabled'
        }
    )
    items = (six.b('v1'), six.b('v2'))
    for body in items:
        s3.put_object(
            Bucket=bucket_name,
            Key=key,
            Body=body
        )
    response = s3.list_object_versions(
        Bucket=bucket_name
    )
    # Two object versions should be returned
    len(response['Versions']).should.equal(2)
    keys = set([item['Key'] for item in response['Versions']])
    keys.should.equal({key})
    # Test latest object version is returned
    response = s3.get_object(Bucket=bucket_name, Key=key)
    response['Body'].read().should.equal(items[-1])
def test_make_f77_block_padded():
    """F77-unformatted block with padding"""
    f77_block_padded = make_f77_block(six.b('asdf'), 10)
    assert_equal(
        f77_block_padded,
        six.b('\x0a\x00\x00\x00asdf\x00\x00\x00\x00\x00\x00\x0a\x00\x00\x00')
    )
def test_return_nesteddict(self):
    r = self.call('returntypes/getnesteddict',
                  _rt={wsme.types.bytes: NestedOuter})
    self.assertEquals(r, {
        b('a'): {'inner': {'aint': 0}},
        b('b'): {'inner': {'aint': 0}}
    })
def test_operation_request_and_reply(self):
    xsd_content = '<xsd:element name="Data" type="xsd:string"/>'
    web_service_URL = "Great minds think alike"
    xsd_target_namespace = "omicron psi"
    wsdl = testutils.wsdl(xsd_content, operation_name="pi",
                          xsd_target_namespace=xsd_target_namespace,
                          input="Data", output="Data",
                          web_service_URL=web_service_URL)
    test_input_data = "Riff-raff"
    test_output_data = "La-di-da-da-da"
    store = MockDocumentStore(wsdl=wsdl)
    transport = MockTransport(send_data=b("""\
<?xml version="1.0"?>
<env:Envelope xmlns:env="http://schemas.xmlsoap.org/soap/envelope/">
  <env:Body>
    <Data xmlns="%s">%s</Data>
  </env:Body>
</env:Envelope>""" % (xsd_target_namespace, test_output_data)))
    client = suds.client.Client("suds://wsdl", documentStore=store,
                                cache=None, transport=transport)
    assert transport.mock_log == []
    reply = client.service.pi(test_input_data)
    assert len(transport.mock_log) == 1
    assert transport.mock_log[0][0] == "send"
    assert transport.mock_log[0][1][0] == web_service_URL
    request_message = transport.mock_log[0][1][1]
    assert b(xsd_target_namespace) in request_message
    assert b(test_input_data) in request_message
    assert reply == test_output_data
def test_make_f77_block():
    """Plain F77-unformatted block"""
    f77_block = make_f77_block(six.b('asdf'))
    assert_equal(
        f77_block,
        six.b('\x04\x00\x00\x00asdf\x04\x00\x00\x00')
    )
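# The make_f77_block under test is not shown here; a minimal sketch that is
# consistent with the two expected byte strings above (a Fortran-77
# unformatted record is <length><payload><length>, with the length as a
# little-endian uint32 and the payload null-padded up to an optional
# minimum) would be:
import struct

def make_f77_block_sketch(data, min_length=None):
    if min_length is not None and len(data) < min_length:
        data = data + b'\x00' * (min_length - len(data))
    length = struct.pack('<I', len(data))
    return length + data + length

assert make_f77_block_sketch(b'asdf') == b'\x04\x00\x00\x00asdf\x04\x00\x00\x00'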
def chunk_stream():
    """Generate predictable-but-randomy binary content."""
    r = random.Random(0)
    randint = r.randint
    int2byte = six.int2byte
    for _i in xrange(num_chunks):
        c = b("").join(int2byte(randint(0, 255))
                       for _j in xrange(chunk_size // 8))
        yield c * 8

f = self.fs.open("bigfile", "wb")
try:
    for chunk in chunk_stream():
        f.write(chunk)
finally:
    f.close()
chunks = chunk_stream()
f = self.fs.open("bigfile", "rb")
try:
    try:
        while True:
            if chunks.next() != f.read(chunk_size):
                assert False, "bigfile was corrupted"
    except StopIteration:
        if f.read() != b(""):
            assert False, "bigfile was corrupted"
finally:
    f.close()
def test_avoid_external_XSD_fetching(self):
    # Prepare document content.
    xsd_target_namespace = "balancana"
    wsdl = testutils.wsdl("""\
      <xsd:import schemaLocation="suds://imported_xsd"/>
      <xsd:include schemaLocation="suds://included_xsd"/>""",
        xsd_target_namespace=xsd_target_namespace)
    external_xsd_format = """\
<?xml version='1.0' encoding='UTF-8'?>
<schema xmlns="http://www.w3.org/2001/XMLSchema">
  <element name="external%d" type="string"/>
</schema>"""
    external_xsd1 = b(external_xsd_format % (1,))
    external_xsd2 = b(external_xsd_format % (2,))

    # Add to cache.
    cache = MockCache()
    store1 = MockDocumentStore(wsdl=wsdl, imported_xsd=external_xsd1,
                               included_xsd=external_xsd2)
    c1 = suds.client.Client("suds://wsdl", cachingpolicy=1, cache=cache,
                            documentStore=store1, transport=MockTransport())
    assert store1.mock_log == ["suds://wsdl", "suds://imported_xsd",
                               "suds://included_xsd"]
    assert len(cache.mock_data) == 1
    wsdl_object_id, wsdl_object = next(iteritems(cache.mock_data))
    assert wsdl_object.__class__ is suds.wsdl.Definitions

    # Reuse from cache.
    cache.mock_log = []
    store2 = MockDocumentStore(wsdl=wsdl)
    c2 = suds.client.Client("suds://wsdl", cachingpolicy=1, cache=cache,
                            documentStore=store2, transport=MockTransport())
    assert cache.mock_log == [("get", [wsdl_object_id])]
    assert store2.mock_log == []
def test_rename(self):
    check = self.check
    # test renaming a file in the same directory
    self.fs.setcontents("foo.txt", b("Hello, World!"))
    self.assert_(check("foo.txt"))
    self.fs.rename("foo.txt", "bar.txt")
    self.assert_(check("bar.txt"))
    self.assert_(not check("foo.txt"))
    # test renaming a directory in the same directory
    self.fs.makedir("dir_a")
    self.fs.setcontents("dir_a/test.txt", b("testerific"))
    self.assert_(check("dir_a"))
    self.fs.rename("dir_a", "dir_b")
    self.assert_(check("dir_b"))
    self.assert_(check("dir_b/test.txt"))
    self.assert_(not check("dir_a/test.txt"))
    self.assert_(not check("dir_a"))
    # test renaming a file into a different directory
    self.fs.makedir("dir_a")
    self.fs.rename("dir_b/test.txt", "dir_a/test.txt")
    self.assert_(not check("dir_b/test.txt"))
    self.assert_(check("dir_a/test.txt"))
    # test renaming a file into a non-existent directory
    self.assertRaises(ParentDirectoryMissingError,
                      self.fs.rename,
                      "dir_a/test.txt", "nonexistent/test.txt")
def test_write_past_end_of_file(self):
    if self.fs.getmeta('file.read_and_write', True):
        with self.fs.open("write_at_end", "wb") as f:
            f.seek(25)
            f.write(b("EOF"))
        with self.fs.open("write_at_end", "rb") as f:
            self.assertEquals(f.read(), b("\x00") * 25 + b("EOF"))
def get_body_content(self):
    """
    Returns content of BODY element for this HTML document. Content will be
    of type 'str' (Python 2) or 'bytes' (Python 3).

    :Returns:
      Returns content of this document.
    """
    content = self.get_content()

    try:
        html_tree = parse_html_string(self.content)
    except:
        return ''

    html_root = html_tree.getroottree()

    if len(html_root.find('body')) != 0:
        body = html_tree.find('body')
        tree_str = etree.tostring(body, pretty_print=True, encoding='utf-8',
                                  xml_declaration=False)
        # this is so stupid
        if tree_str.startswith(six.b('<body>')):
            n = tree_str.rindex(six.b('</body>'))
            return tree_str[7:n]
        return tree_str
    return ''
def test_watch_readfile(self):
    self.setupWatchers()
    self.fs.setcontents("hello", b("hello world"))
    self.assertEventOccurred(CREATED, "/hello")
    self.clearCapturedEvents()
    old_atime = self.fs.getinfo("hello").get("accessed_time")
    self.assertEquals(self.fs.getcontents("hello"), b("hello world"))
    if not isinstance(self.watchfs, PollingWatchableFS):
        # Help it along by updating the atime.
        # TODO: why is this necessary?
        if self.fs.hassyspath("hello"):
            syspath = self.fs.getsyspath("hello")
            mtime = os.stat(syspath).st_mtime
            atime = int(time.time())
            os.utime(self.fs.getsyspath("hello"), (atime, mtime))
        self.assertEventOccurred(ACCESSED, "/hello")
    elif old_atime is not None:
        # Some filesystems don't update atime synchronously, or only
        # update it if it's too old, or don't update it at all!
        # Try to force the issue, wait for it to change, but eventually
        # give up and bail out.
        for i in xrange(10):
            if self.fs.getinfo("hello").get("accessed_time") != old_atime:
                if not self.checkEventOccurred(MODIFIED, "/hello"):
                    self.assertEventOccurred(ACCESSED, "/hello")
                break
            time.sleep(0.2)
            if self.fs.hassyspath("hello"):
                syspath = self.fs.getsyspath("hello")
                mtime = os.stat(syspath).st_mtime
                atime = int(time.time())
                os.utime(self.fs.getsyspath("hello"), (atime, mtime))
def format(self):
    """
    format this object to string(byte array) to send data to server.
    """
    if any(x not in (0, 1) for x in [self.fin, self.rsv1, self.rsv2, self.rsv3]):
        raise ValueError("not 0 or 1")
    if self.opcode not in ABNF.OPCODES:
        raise ValueError("Invalid OPCODE")
    length = len(self.data)
    if length >= ABNF.LENGTH_63:
        raise ValueError("data is too long")

    frame_header = chr(self.fin << 7
                       | self.rsv1 << 6 | self.rsv2 << 5 | self.rsv3 << 4
                       | self.opcode)
    if length < ABNF.LENGTH_7:
        frame_header += chr(self.mask << 7 | length)
        frame_header = six.b(frame_header)
    elif length < ABNF.LENGTH_16:
        frame_header += chr(self.mask << 7 | 0x7E)
        frame_header = six.b(frame_header)
        frame_header += struct.pack("!H", length)
    else:
        frame_header += chr(self.mask << 7 | 0x7F)
        frame_header = six.b(frame_header)
        frame_header += struct.pack("!Q", length)

    if not self.mask:
        return frame_header + self.data
    else:
        mask_key = self.get_mask_key(4)
        return frame_header + self._get_masked(mask_key)
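# A quick sanity sketch of the header layout format() produces for a short
# unmasked text frame (fin=1, rsv*=0, opcode=0x1, 5-byte payload) per
# RFC 6455: byte 0 packs FIN/RSV/opcode, byte 1 packs MASK and the length.
def _abnf_header_sketch():
    frame = b'\x81\x05' + b'Hello'   # what such a frame would serialize to
    assert frame[0:1] == b'\x81'     # FIN set, opcode = text (0x1)
    assert frame[1:2] == b'\x05'     # MASK clear, payload length 5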
def test_watch_writefile(self):
    self.setupWatchers()
    self.fs.setcontents("hello", b("hello world"))
    self.assertEventOccurred(CREATED, "/hello")
    self.clearCapturedEvents()
    self.fs.setcontents("hello", b("hello again world"))
    self.assertEventOccurred(MODIFIED, "/hello")
def encode_multiparts(fields):
    """
    Breaks up the multipart_encoded content into first and last part, to be
    able to "insert" the file content itself in-between

    :param fields: dict() fields to encode
    :return: (header_body, close_body, content_type)
    """
    (data, content_type) = requests.packages.urllib3.filepost.encode_multipart_formdata(fields)
    #logging.debug(data)

    header_body = BytesIO()

    # Remove closing boundary
    lines = data.split("\r\n")
    boundary = lines[0]
    lines = lines[0:len(lines) - 2]
    header_body.write(b("\r\n".join(lines) + "\r\n"))

    # Add file data part except data
    header_body.write(b('%s\r\n' % boundary))
    header_body.write(b('Content-Disposition: form-data; name="userfile_0"; filename="fake-name"\r\n'))
    header_body.write(b('Content-Type: application/octet-stream\r\n\r\n'))

    closing_boundary = b('\r\n%s--\r\n' % (boundary))

    return (header_body.getvalue(), closing_boundary, content_type)
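# A hedged usage sketch: stream an upload by sandwiching the raw file bytes
# between the two precomputed multipart parts. The field name and file path
# here are illustrative only.
def _upload_body_sketch(path):
    header, closing, content_type = encode_multiparts({'token': 'abc123'})
    with open(path, 'rb') as fh:
        body = header + fh.read() + closing
    return body, content_type  # POST body, with the returned content_type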
def _read_mathp(self, data, n):
    """MATHP(4506,45,374) - Record 11"""
    nmaterials = 0
    s1 = Struct(b(self._endian + 'i7f3i23fi'))
    s2 = Struct(b(self._endian + '8i'))
    n2 = len(data)
    while n < n2:
        edata = data[n:n + 140]
        n += 140
        out1 = s1.unpack(edata)
        (mid, a10, a01, d1, rho, alpha, tref, ge, sf, na, nd, kp,
         a20, a11, a02, d2,
         a30, a21, a12, a03, d3,
         a40, a31, a22, a13, a04, d4,
         a50, a41, a32, a23, a14, a05, d5,
         continue_flag) = out1
        data_in = [out1]

        if continue_flag:
            edata = data[n:n + 32]  # 8*4
            n += 32
            out2 = s2.unpack(edata)
            (tab1, tab2, tab3, tab4, x1, x2, x3, tab5) = out2
            data_in.append(out2)
        mat = MATHP.add_op2_data(data_in)
        self.add_op2_material(mat)
        nmaterials += 1
    self.card_count['MATHP'] = nmaterials
    return n
def test_egain_on_buffer_size(self, *args):
    # get a connection that's already fully started
    c = self.test_successful_connection()

    header = six.b('\x00\x00\x00\x00') + int32_pack(20000)
    responses = [
        header + (six.b('a') * (4096 - len(header))),
        six.b('a') * 4096,
        socket_error(errno.EAGAIN),
        six.b('a') * 100,
        socket_error(errno.EAGAIN)]

    def side_effect(*args):
        response = responses.pop(0)
        if isinstance(response, socket_error):
            raise response
        else:
            return response

    c._socket.recv.side_effect = side_effect
    c.handle_read(None, 0)
    self.assertEqual(c._total_reqd_bytes, 20000 + len(header))
    # the EAGAIN prevents it from reading the last 100 bytes
    c._iobuf.seek(0, os.SEEK_END)
    pos = c._iobuf.tell()
    self.assertEqual(pos, 4096 + 4096)

    # now tell it to read the last 100 bytes
    c.handle_read(None, 0)
    c._iobuf.seek(0, os.SEEK_END)
    pos = c._iobuf.tell()
    self.assertEqual(pos, 4096 + 4096 + 100)
def testTFSummaryTensor(self):
    """Verify processing of tf.summary.tensor."""
    event_sink = _EventGenerator(self, zero_out_timestamps=True)
    writer = SummaryToEventTransformer(event_sink)
    with self.test_session() as sess:
        summary_lib.tensor_summary('scalar', constant_op.constant(1.0))
        summary_lib.tensor_summary('vector', constant_op.constant(
            [1.0, 2.0, 3.0]))
        summary_lib.tensor_summary('string',
                                   constant_op.constant(six.b('foobar')))
        merged = summary_lib.merge_all()
        summ = sess.run(merged)
        writer.add_summary(summ, 0)

    accumulator = ea.EventAccumulator(event_sink)
    accumulator.Reload()

    self.assertTagsEqual(accumulator.Tags(), {
        ea.TENSORS: ['scalar', 'vector', 'string'],
    })

    scalar_proto = accumulator.Tensors('scalar')[0].tensor_proto
    scalar = tensor_util.MakeNdarray(scalar_proto)
    vector_proto = accumulator.Tensors('vector')[0].tensor_proto
    vector = tensor_util.MakeNdarray(vector_proto)
    string_proto = accumulator.Tensors('string')[0].tensor_proto
    string = tensor_util.MakeNdarray(string_proto)

    self.assertTrue(np.array_equal(scalar, 1.0))
    self.assertTrue(np.array_equal(vector, [1.0, 2.0, 3.0]))
    self.assertTrue(np.array_equal(string, six.b('foobar')))
def test_walk(self):
    self.fs.setcontents('a.txt', b('hello'))
    self.fs.setcontents('b.txt', b('world'))
    self.fs.makeopendir('foo').setcontents('c', b('123'))
    sorted_walk = sorted([(d, sorted(fs)) for (d, fs) in self.fs.walk()])
    self.assertEquals(sorted_walk,
                      [("/", ["a.txt", "b.txt"]),
                       ("/foo", ["c"])])
    # When searching breadth-first, shallow entries come first
    found_a = False
    for _, files in self.fs.walk(search="breadth"):
        if "a.txt" in files:
            found_a = True
        if "c" in files:
            break
    assert found_a, "breadth search order was wrong"
    # When searching depth-first, deep entries come first
    found_c = False
    for _, files in self.fs.walk(search="depth"):
        if "c" in files:
            found_c = True
        if "a.txt" in files:
            break
    assert found_c, "depth search order was wrong: " + \
        str(list(self.fs.walk(search="depth")))
def make_auth_null():
    return six.b('')
VALUE_UNKNOWN = 'UNKNOWN'

CATEGORY_LIST = [
    CATEGORY_PC,
    CATEGORY_SMARTPHONE,
    CATEGORY_MOBILEPHONE,
    CATEGORY_CRAWLER,
    CATEGORY_APPLIANCE,
    CATEGORY_MISC,
    VALUE_UNKNOWN
]

ATTRIBUTE_LIST = [
    ATTRIBUTE_NAME,
    ATTRIBUTE_CATEGORY,
    ATTRIBUTE_OS,
    ATTRIBUTE_VENDOR,
    ATTRIBUTE_VERSION,
    ATTRIBUTE_OS_VERSION
]

DATASET = {}

def _init():
%s

_init()

def get(label):
    return DATASET[label]
""" % dynamic_lines

fp = open(py_file, 'wb')
try:
    fp.write(six.b(module_text))
finally:
    fp.close()
def doS4U(self, tgt, cipher, oldSessionKey, sessionKey, nthash, aesKey, kdcHost):
    decodedTGT = decoder.decode(tgt, asn1Spec=AS_REP())[0]
    # Extract the ticket from the TGT
    ticket = Ticket()
    ticket.from_asn1(decodedTGT['ticket'])

    apReq = AP_REQ()
    apReq['pvno'] = 5
    apReq['msg-type'] = int(constants.ApplicationTagNumbers.AP_REQ.value)

    opts = list()
    apReq['ap-options'] = constants.encodeFlags(opts)
    seq_set(apReq, 'ticket', ticket.to_asn1)

    authenticator = Authenticator()
    authenticator['authenticator-vno'] = 5
    authenticator['crealm'] = str(decodedTGT['crealm'])

    clientName = Principal()
    clientName.from_asn1(decodedTGT, 'crealm', 'cname')

    seq_set(authenticator, 'cname', clientName.components_to_asn1)

    now = datetime.datetime.utcnow()
    authenticator['cusec'] = now.microsecond
    authenticator['ctime'] = KerberosTime.to_asn1(now)

    if logging.getLogger().level == logging.DEBUG:
        logging.debug('AUTHENTICATOR')
        print(authenticator.prettyPrint())
        print('\n')

    encodedAuthenticator = encoder.encode(authenticator)

    # Key Usage 7
    # TGS-REQ PA-TGS-REQ padata AP-REQ Authenticator (includes
    # TGS authenticator subkey), encrypted with the TGS session
    # key (Section 5.5.1)
    encryptedEncodedAuthenticator = cipher.encrypt(sessionKey, 7,
                                                   encodedAuthenticator, None)

    apReq['authenticator'] = noValue
    apReq['authenticator']['etype'] = cipher.enctype
    apReq['authenticator']['cipher'] = encryptedEncodedAuthenticator

    encodedApReq = encoder.encode(apReq)

    tgsReq = TGS_REQ()
    tgsReq['pvno'] = 5
    tgsReq['msg-type'] = int(constants.ApplicationTagNumbers.TGS_REQ.value)

    tgsReq['padata'] = noValue
    tgsReq['padata'][0] = noValue
    tgsReq['padata'][0]['padata-type'] = int(
        constants.PreAuthenticationDataTypes.PA_TGS_REQ.value)
    tgsReq['padata'][0]['padata-value'] = encodedApReq

    # In the S4U2self KRB_TGS_REQ/KRB_TGS_REP protocol extension, a service
    # requests a service ticket to itself on behalf of a user. The user is
    # identified to the KDC by the user's name and realm.
    clientName = Principal(
        self.__options.impersonate,
        type=constants.PrincipalNameType.NT_PRINCIPAL.value)

    S4UByteArray = struct.pack(
        '<I', constants.PrincipalNameType.NT_PRINCIPAL.value)
    S4UByteArray += b(self.__options.impersonate) + b(
        self.__domain) + b'Kerberos'

    if logging.getLogger().level == logging.DEBUG:
        logging.debug('S4UByteArray')
        hexdump(S4UByteArray)

    # Finally cksum is computed by calling the KERB_CHECKSUM_HMAC_MD5 hash
    # with the following three parameters: the session key of the TGT of
    # the service performing the S4U2Self request, the message type value
    # of 17, and the byte array S4UByteArray.
    checkSum = _HMACMD5.checksum(sessionKey, 17, S4UByteArray)

    if logging.getLogger().level == logging.DEBUG:
        logging.debug('CheckSum')
        hexdump(checkSum)

    paForUserEnc = PA_FOR_USER_ENC()
    seq_set(paForUserEnc, 'userName', clientName.components_to_asn1)
    paForUserEnc['userRealm'] = self.__domain
    paForUserEnc['cksum'] = noValue
    paForUserEnc['cksum']['cksumtype'] = int(
        constants.ChecksumTypes.hmac_md5.value)
    paForUserEnc['cksum']['checksum'] = checkSum
    paForUserEnc['auth-package'] = 'Kerberos'

    if logging.getLogger().level == logging.DEBUG:
        logging.debug('PA_FOR_USER_ENC')
        print(paForUserEnc.prettyPrint())

    encodedPaForUserEnc = encoder.encode(paForUserEnc)

    tgsReq['padata'][1] = noValue
    tgsReq['padata'][1]['padata-type'] = int(
        constants.PreAuthenticationDataTypes.PA_FOR_USER.value)
    tgsReq['padata'][1]['padata-value'] = encodedPaForUserEnc

    reqBody = seq_set(tgsReq, 'req-body')

    opts = list()
    opts.append(constants.KDCOptions.forwardable.value)
    opts.append(constants.KDCOptions.renewable.value)
    opts.append(constants.KDCOptions.canonicalize.value)

    reqBody['kdc-options'] = constants.encodeFlags(opts)

    serverName = Principal(
        self.__user, type=constants.PrincipalNameType.NT_UNKNOWN.value)

    seq_set(reqBody, 'sname', serverName.components_to_asn1)
    reqBody['realm'] = str(decodedTGT['crealm'])

    now = datetime.datetime.utcnow() + datetime.timedelta(days=1)

    reqBody['till'] = KerberosTime.to_asn1(now)
    reqBody['nonce'] = random.getrandbits(31)
    seq_set_iter(reqBody, 'etype',
                 (int(cipher.enctype),
                  int(constants.EncryptionTypes.rc4_hmac.value)))

    if logging.getLogger().level == logging.DEBUG:
        logging.debug('Final TGS')
        print(tgsReq.prettyPrint())

    logging.info('\tRequesting S4U2self')
    message = encoder.encode(tgsReq)

    r = sendReceive(message, self.__domain, kdcHost)

    tgs = decoder.decode(r, asn1Spec=TGS_REP())[0]

    if logging.getLogger().level == logging.DEBUG:
        logging.debug('TGS_REP')
        print(tgs.prettyPrint())

    if self.__force_forwardable:
        # Convert hashes to binary form, just in case we're receiving strings
        if isinstance(nthash, str):
            try:
                nthash = unhexlify(nthash)
            except TypeError:
                pass
        if isinstance(aesKey, str):
            try:
                aesKey = unhexlify(aesKey)
            except TypeError:
                pass

        # Compute NTHash and AESKey if they're not provided in arguments
        if self.__password != '' and self.__domain != '' and self.__user != '':
            if not nthash:
                nthash = compute_nthash(self.__password)
                if logging.getLogger().level == logging.DEBUG:
                    logging.debug('NTHash')
                    print(hexlify(nthash).decode())
            if not aesKey:
                salt = self.__domain.upper() + self.__user
                aesKey = _AES256CTS.string_to_key(self.__password, salt,
                                                  params=None).contents
                if logging.getLogger().level == logging.DEBUG:
                    logging.debug('AESKey')
                    print(hexlify(aesKey).decode())

        # Get the encrypted ticket returned in the TGS. It's encrypted with
        # one of our keys
        cipherText = tgs['ticket']['enc-part']['cipher']

        # Check which cipher was used to encrypt the ticket. It's not always
        # the same. This determines which of our keys we should use for
        # decryption/re-encryption
        newCipher = _enctype_table[int(tgs['ticket']['enc-part']['etype'])]
        if newCipher.enctype == Enctype.RC4:
            key = Key(newCipher.enctype, nthash)
        else:
            key = Key(newCipher.enctype, aesKey)

        # Decrypt and decode the ticket
        # Key Usage 2
        # AS-REP Ticket and TGS-REP Ticket (includes tgs session key or
        # application session key), encrypted with the service key
        # (section 5.4.2)
        plainText = newCipher.decrypt(key, 2, cipherText)
        encTicketPart = decoder.decode(plainText, asn1Spec=EncTicketPart())[0]

        # Print the flags in the ticket before modification
        logging.debug('\tService ticket from S4U2self flags: ' +
                      str(encTicketPart['flags']))
        logging.debug('\tService ticket from S4U2self is' +
                      ('' if (encTicketPart['flags'][TicketFlags.forwardable.value] == 1)
                       else ' not') + ' forwardable')

        # Customize flags: the forwardable flag is the only one that really
        # matters
        logging.info('\tForcing the service ticket to be forwardable')
        # convert to string of bits
        flagBits = encTicketPart['flags'].asBinary()
        # Set the forwardable flag. Awkward binary string insertion
        flagBits = (flagBits[:TicketFlags.forwardable.value] + '1' +
                    flagBits[TicketFlags.forwardable.value + 1:])
        # Overwrite the value with the new bits
        encTicketPart['flags'] = encTicketPart['flags'].clone(value=flagBits)

        # Update flags
        logging.debug('\tService ticket flags after modification: ' +
                      str(encTicketPart['flags']))
        logging.debug('\tService ticket now is' +
                      ('' if (encTicketPart['flags'][TicketFlags.forwardable.value] == 1)
                       else ' not') + ' forwardable')

        # Re-encode and re-encrypt the ticket
        # Again, Key Usage 2
        encodedEncTicketPart = encoder.encode(encTicketPart)
        cipherText = newCipher.encrypt(key, 2, encodedEncTicketPart, None)

        # put it back in the TGS
        tgs['ticket']['enc-part']['cipher'] = cipherText

    ############################################################################
    # Up until here was all the S4U2Self stuff. Now let's start with S4U2Proxy
    # So here I have a ST for me.. I now want a ST for another service

    # Extract the ticket from the TGT
    ticketTGT = Ticket()
    ticketTGT.from_asn1(decodedTGT['ticket'])

    # Get the service ticket
    ticket = Ticket()
    ticket.from_asn1(tgs['ticket'])

    apReq = AP_REQ()
    apReq['pvno'] = 5
    apReq['msg-type'] = int(constants.ApplicationTagNumbers.AP_REQ.value)

    opts = list()
    apReq['ap-options'] = constants.encodeFlags(opts)
    seq_set(apReq, 'ticket', ticketTGT.to_asn1)

    authenticator = Authenticator()
    authenticator['authenticator-vno'] = 5
    authenticator['crealm'] = str(decodedTGT['crealm'])

    clientName = Principal()
    clientName.from_asn1(decodedTGT, 'crealm', 'cname')

    seq_set(authenticator, 'cname', clientName.components_to_asn1)

    now = datetime.datetime.utcnow()
    authenticator['cusec'] = now.microsecond
    authenticator['ctime'] = KerberosTime.to_asn1(now)

    encodedAuthenticator = encoder.encode(authenticator)

    # Key Usage 7
    # TGS-REQ PA-TGS-REQ padata AP-REQ Authenticator (includes
    # TGS authenticator subkey), encrypted with the TGS session
    # key (Section 5.5.1)
    encryptedEncodedAuthenticator = cipher.encrypt(sessionKey, 7,
                                                   encodedAuthenticator, None)

    apReq['authenticator'] = noValue
    apReq['authenticator']['etype'] = cipher.enctype
    apReq['authenticator']['cipher'] = encryptedEncodedAuthenticator

    encodedApReq = encoder.encode(apReq)

    tgsReq = TGS_REQ()
    tgsReq['pvno'] = 5
    tgsReq['msg-type'] = int(constants.ApplicationTagNumbers.TGS_REQ.value)
    tgsReq['padata'] = noValue
    tgsReq['padata'][0] = noValue
    tgsReq['padata'][0]['padata-type'] = int(
        constants.PreAuthenticationDataTypes.PA_TGS_REQ.value)
    tgsReq['padata'][0]['padata-value'] = encodedApReq

    # Add resource-based constrained delegation support
    paPacOptions = PA_PAC_OPTIONS()
    paPacOptions['flags'] = constants.encodeFlags(
        (constants.PAPacOptions.resource_based_constrained_delegation.value,))

    tgsReq['padata'][1] = noValue
    tgsReq['padata'][1]['padata-type'] = \
        constants.PreAuthenticationDataTypes.PA_PAC_OPTIONS.value
    tgsReq['padata'][1]['padata-value'] = encoder.encode(paPacOptions)

    reqBody = seq_set(tgsReq, 'req-body')

    opts = list()
    # This specifies we're doing S4U
    opts.append(constants.KDCOptions.cname_in_addl_tkt.value)
    opts.append(constants.KDCOptions.canonicalize.value)
    opts.append(constants.KDCOptions.forwardable.value)
    opts.append(constants.KDCOptions.renewable.value)

    reqBody['kdc-options'] = constants.encodeFlags(opts)
    service2 = Principal(
        self.__options.spn,
        type=constants.PrincipalNameType.NT_SRV_INST.value)
    seq_set(reqBody, 'sname', service2.components_to_asn1)
    reqBody['realm'] = self.__domain

    myTicket = ticket.to_asn1(TicketAsn1())
    seq_set_iter(reqBody, 'additional-tickets', (myTicket,))

    now = datetime.datetime.utcnow() + datetime.timedelta(days=1)

    reqBody['till'] = KerberosTime.to_asn1(now)
    reqBody['nonce'] = random.getrandbits(31)
    seq_set_iter(reqBody, 'etype',
                 (int(constants.EncryptionTypes.rc4_hmac.value),
                  int(constants.EncryptionTypes.des3_cbc_sha1_kd.value),
                  int(constants.EncryptionTypes.des_cbc_md5.value),
                  int(cipher.enctype)))
    message = encoder.encode(tgsReq)

    logging.info('\tRequesting S4U2Proxy')
    r = sendReceive(message, self.__domain, kdcHost)

    tgs = decoder.decode(r, asn1Spec=TGS_REP())[0]

    cipherText = tgs['enc-part']['cipher']

    # Key Usage 8
    # TGS-REP encrypted part (includes application session
    # key), encrypted with the TGS session key (Section 5.4.2)
    plainText = cipher.decrypt(sessionKey, 8, cipherText)

    encTGSRepPart = decoder.decode(plainText, asn1Spec=EncTGSRepPart())[0]

    newSessionKey = Key(encTGSRepPart['key']['keytype'],
                        encTGSRepPart['key']['keyvalue'])

    # Creating new cipher based on received keytype
    cipher = _enctype_table[encTGSRepPart['key']['keytype']]

    return r, cipher, sessionKey, newSessionKey
def __call__(self, environ, start_response):
    # Per-request state, modified by replacement_start_response() and used
    # when an error is being reported.
    state = {}

    def replacement_start_response(status, headers, exc_info=None):
        """Overrides the default response to make errors parsable."""
        try:
            status_code = int(status.split(' ')[0])
            state['status_code'] = status_code
        except (ValueError, TypeError):  # pragma: nocover
            raise Exception(
                _('ErrorDocumentMiddleware received an invalid '
                  'status %s') % status)
        else:
            if (state['status_code'] // 100) not in (2, 3):
                # Remove some headers so we can replace them later
                # when we have the full error message and can
                # compute the length.
                headers = [(h, v) for (h, v) in headers
                           if h not in ('Content-Length', 'Content-Type')]
            # Save the headers in case we need to modify them.
            state['headers'] = headers
            return start_response(status, headers, exc_info)

    app_iter = self.app(environ, replacement_start_response)

    if (state['status_code'] // 100) not in (2, 3):
        errs = []
        for err_str in app_iter:
            err = {}
            try:
                err = json.loads(err_str.decode('utf-8'))
            except ValueError:
                pass

            if 'title' in err and 'description' in err:
                title = err['title']
                desc = err['description']
            else:
                title = ''
                desc = ''

            error_code = err['faultstring'].lower() \
                if 'faultstring' in err else ''
            # 'container' is the service-name. The general form of the
            # code is service-name.error-code.
            code = '.'.join(['container', error_code])

            errs.append({
                'request_id': '',
                'code': code,
                'status': state['status_code'],
                'title': title,
                'detail': desc,
                'links': []
            })

        body = [six.b(json.dumps({'errors': errs}))]
        state['headers'].append(('Content-Type', 'application/json'))
        state['headers'].append(('Content-Length', str(len(body[0]))))
    else:
        body = app_iter
    return body
def _handle_hybi_request(self, environ):
    if 'eventlet.input' in environ:
        sock = environ['eventlet.input'].get_socket()
    elif 'gunicorn.socket' in environ:
        sock = environ['gunicorn.socket']
    else:
        raise Exception(
            'No eventlet.input or gunicorn.socket present in environ.')

    hybi_version = environ['HTTP_SEC_WEBSOCKET_VERSION']
    if hybi_version not in ('8', '13', ):
        raise BadRequest(status='426 Upgrade Required',
                         headers=[('Sec-WebSocket-Version', '8, 13')])
    self.protocol_version = int(hybi_version)
    if 'HTTP_SEC_WEBSOCKET_KEY' not in environ:
        # That's bad.
        raise BadRequest()
    origin = environ.get(
        'HTTP_ORIGIN',
        (environ.get('HTTP_SEC_WEBSOCKET_ORIGIN', '')
         if self.protocol_version <= 8 else ''))
    if self.origin_checker is not None:
        if not self.origin_checker(environ.get('HTTP_HOST'), origin):
            raise BadRequest(status='403 Forbidden')
    protocols = environ.get('HTTP_SEC_WEBSOCKET_PROTOCOL', None)
    negotiated_protocol = None
    if protocols:
        for p in (i.strip() for i in protocols.split(',')):
            if p in self.supported_protocols:
                negotiated_protocol = p
                break

    key = environ['HTTP_SEC_WEBSOCKET_KEY']
    response = base64.b64encode(sha1(six.b(key) + PROTOCOL_GUID).digest())
    handshake_reply = [b"HTTP/1.1 101 Switching Protocols",
                       b"Upgrade: websocket",
                       b"Connection: Upgrade",
                       b"Sec-WebSocket-Accept: " + response]
    if negotiated_protocol:
        handshake_reply.append(b"Sec-WebSocket-Protocol: " +
                               six.b(negotiated_protocol))

    parsed_extensions = {}
    extensions = self._parse_extension_header(
        environ.get("HTTP_SEC_WEBSOCKET_EXTENSIONS"))

    deflate = self._negotiate_permessage_deflate(extensions)
    if deflate is not None:
        parsed_extensions["permessage-deflate"] = deflate

    formatted_ext = self._format_extension_header(parsed_extensions)
    if formatted_ext is not None:
        handshake_reply.append(b"Sec-WebSocket-Extensions: " + formatted_ext)

    sock.sendall(b'\r\n'.join(handshake_reply) + b'\r\n\r\n')

    return RFC6455WebSocket(sock, environ, self.protocol_version,
                            protocol=negotiated_protocol,
                            extensions=parsed_extensions)
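# A sketch of the RFC 6455 accept-key computation used above, assuming
# PROTOCOL_GUID in the surrounding module is the standard handshake GUID.
# The key/answer pair below is the worked example from RFC 6455 section 1.3.
def _accept_key_sketch(sec_websocket_key):
    guid = b'258EAFA5-E914-47DA-95CA-C5AB0DC85B11'
    return base64.b64encode(sha1(six.b(sec_websocket_key) + guid).digest())

assert (_accept_key_sketch('dGhlIHNhbXBsZSBub25jZQ==') ==
        b's3pPLMBiTxaQ9kYGzzhZRbK+xOo=')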
def compile_with_cache(source, options=(), arch=None, cache_dir=None,
                       extra_source=None):
    # NVRTC does not use extra_source. extra_source is used for cache key.
    global _empty_file_preprocess_cache
    if cache_dir is None:
        cache_dir = get_cache_dir()
    if arch is None:
        arch = _get_arch()

    options += ('-ftz=true',)

    env = (arch, options, _get_nvrtc_version())
    base = _empty_file_preprocess_cache.get(env, None)
    if base is None:
        # This checks the NVRTC compiler's internal version
        base = _preprocess('', options, arch)
        _empty_file_preprocess_cache[env] = base
    key_src = '%s %s %s %s' % (env, base, source, extra_source)

    key_src = key_src.encode('utf-8')
    name = '%s_2.cubin' % hashlib.md5(key_src).hexdigest()

    if not os.path.isdir(cache_dir):
        try:
            os.makedirs(cache_dir)
        except OSError:
            if not os.path.isdir(cache_dir):
                raise

    mod = function.Module()
    # To handle conflicts in concurrent situations, we adopt a lock-free
    # method to avoid performance degradation.
    path = os.path.join(cache_dir, name)
    if os.path.exists(path):
        with open(path, 'rb') as file:
            data = file.read()
        if len(data) >= 32:
            hash = data[:32]
            cubin = data[32:]
            cubin_hash = six.b(hashlib.md5(cubin).hexdigest())
            if hash == cubin_hash:
                mod.load(cubin)
                return mod

    ptx = compile_using_nvrtc(source, options, arch)
    ls = function.LinkState()
    ls.add_ptr_data(ptx, six.u('cupy.ptx'))
    cubin = ls.complete()
    cubin_hash = six.b(hashlib.md5(cubin).hexdigest())

    # shutil.move is not an atomic operation, so it could result in a
    # corrupted file. We detect it by prepending an md5 hash to each cache
    # file. If the file is corrupted, it will be ignored next time it is read.
    with tempfile.NamedTemporaryFile(dir=cache_dir, delete=False) as tf:
        tf.write(cubin_hash)
        tf.write(cubin)
        temp_path = tf.name
    shutil.move(temp_path, path)

    # Save .cu source file along with .cubin
    if _get_bool_env_variable('CUPY_CACHE_SAVE_CUDA_SOURCE', False):
        with open(path + '.cu', 'w') as f:
            f.write(source)

    mod.load(cubin)
    return mod
def multipart_encode(self, params, files, boundary=None, buffer=None):
    if six.PY3:
        boundary = boundary or b(
            '--------------------%s---' % random.random())
        buffer = buffer or b('')
        for key, value in params:
            buffer += b('--%s\r\n' % boundary)
            buffer += b('Content-Disposition: form-data; name="%s"' % key)
            buffer += b('\r\n\r\n' + value + '\r\n')
        for key, fd in files:
            file_size = os.fstat(fd.fileno())[stat.ST_SIZE]
            filename = fd.name.split('/')[-1]
            contenttype = mimetypes.guess_type(
                filename)[0] or 'application/octet-stream'
            buffer += b('--%s\r\n' % boundary)
            buffer += b('Content-Disposition: form-data; ')
            buffer += b('name="%s"; filename="%s"\r\n' % (key, filename))
            buffer += b('Content-Type: %s\r\n' % contenttype)
            fd.seek(0)
            buffer += b('\r\n') + fd.read() + b('\r\n')
        buffer += b('--%s--\r\n\r\n' % boundary)
    else:
        boundary = boundary or '--------------------%s---' % random.random()
        buffer = buffer or ''
        for key, value in params:
            buffer += '--%s\r\n' % boundary
            buffer += 'Content-Disposition: form-data; name="%s"' % key
            buffer += '\r\n\r\n' + value + '\r\n'
        for key, fd in files:
            file_size = os.fstat(fd.fileno())[stat.ST_SIZE]
            filename = fd.name.split('/')[-1]
            contenttype = mimetypes.guess_type(
                filename)[0] or 'application/octet-stream'
            buffer += '--%s\r\n' % boundary
            buffer += 'Content-Disposition: form-data; '
            buffer += 'name="%s"; filename="%s"\r\n' % (key, filename)
            buffer += 'Content-Type: %s\r\n' % contenttype
            fd.seek(0)
            buffer += '\r\n' + fd.read() + '\r\n'
        buffer += '--%s--\r\n\r\n' % boundary
    return boundary, buffer
def delete_snapshot(self, name):
    self._call(COHO1_DELETE_SNAPSHOT,
               [(six.b(name), self.packer.pack_string)])
def memoizemethod_noargs(method):
    """Decorator to cache the result of a method (without arguments) using a
    weak reference to its object
    """
    cache = weakref.WeakKeyDictionary()

    @wraps(method)
    def new_method(self, *args, **kwargs):
        if self not in cache:
            cache[self] = method(self, *args, **kwargs)
        return cache[self]
    return new_method


_BINARYCHARS = {six.b(chr(i)) for i in range(32)} - {b"\0", b"\t", b"\n", b"\r"}
_BINARYCHARS |= {ord(ch) for ch in _BINARYCHARS}


@deprecated("scrapy.utils.python.binary_is_text")
def isbinarytext(text):
    """
    This function is deprecated.

    Please use scrapy.utils.python.binary_is_text, which was created to be
    more clear about the function's behavior: it is behaving inverted
    to this one.
    """
    return not binary_is_text(text)


def binary_is_text(data):
    """
    Returns `True` if the given ``data`` argument (a ``bytes`` object)
    does not contain unprintable control characters.
    """
    # (body completion sketch, consistent with the docstring and with the
    # _BINARYCHARS set above, which holds both byte and int forms)
    return all(c not in _BINARYCHARS for c in data)
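# A minimal usage sketch for memoizemethod_noargs above: the first call on
# an instance runs the method, later calls return the cached result. The
# _Page class is illustrative, not part of the library.
class _Page(object):
    @memoizemethod_noargs
    def parsed(self):
        return {"ok": True}   # computed only once per instance

_page = _Page()
assert _page.parsed() is _page.parsed()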
def create_volume_from_snapshot(self, src, dst):
    self._call(COHO1_CREATE_VOLUME_FROM_SNAPSHOT,
               [(six.b(src), self.packer.pack_string),
                (six.b(dst), self.packer.pack_string)])
def test_dict_to_tf_example(self):
    image_file_name = '2012_12.jpg'
    image_data = np.random.rand(256, 256, 3)
    save_path = os.path.join(self.get_temp_dir(), image_file_name)
    image = PIL.Image.fromarray(image_data, 'RGB')
    image.save(save_path)

    data = {
        'folder': '',
        'filename': image_file_name,
        'size': {
            'height': 256,
            'width': 256,
        },
        'object': [
            {
                'difficult': 1,
                'bndbox': {
                    'xmin': 64,
                    'ymin': 64,
                    'xmax': 192,
                    'ymax': 192,
                },
                'name': 'person',
                'truncated': 0,
                'pose': '',
            },
        ],
    }

    label_map_dict = {
        'background': 0,
        'person': 1,
        'notperson': 2,
    }

    example = create_pascal_tfrecord.dict_to_tf_example(
        data, self.get_temp_dir(), label_map_dict, image_subdirectory='')
    self._assertProtoEqual(
        example.features.feature['image/height'].int64_list.value, [256])
    self._assertProtoEqual(
        example.features.feature['image/width'].int64_list.value, [256])
    self._assertProtoEqual(
        example.features.feature['image/filename'].bytes_list.value,
        [six.b(image_file_name)])
    self._assertProtoEqual(
        example.features.feature['image/source_id'].bytes_list.value,
        [six.b(str(1))])
    self._assertProtoEqual(
        example.features.feature['image/format'].bytes_list.value,
        [six.b('jpeg')])
    self._assertProtoEqual(
        example.features.feature['image/object/bbox/xmin'].float_list.value,
        [0.25])
    self._assertProtoEqual(
        example.features.feature['image/object/bbox/ymin'].float_list.value,
        [0.25])
    self._assertProtoEqual(
        example.features.feature['image/object/bbox/xmax'].float_list.value,
        [0.75])
    self._assertProtoEqual(
        example.features.feature['image/object/bbox/ymax'].float_list.value,
        [0.75])
    self._assertProtoEqual(
        example.features.feature['image/object/class/text'].bytes_list.value,
        [six.b('person')])
    self._assertProtoEqual(
        example.features.feature['image/object/class/label'].int64_list.value,
        [1])
    self._assertProtoEqual(
        example.features.feature['image/object/difficult'].int64_list.value,
        [1])
    self._assertProtoEqual(
        example.features.feature['image/object/truncated'].int64_list.value,
        [0])
    self._assertProtoEqual(
        example.features.feature['image/object/view'].bytes_list.value,
        [six.b('')])
def create_snapshot(self, src, dst, flags):
    self._call(COHO1_CREATE_SNAPSHOT,
               [(six.b(src), self.packer.pack_string),
                (six.b(dst), self.packer.pack_string),
                (flags, self.packer.pack_uint)])
def generate_from_custom(annotations_path, output_path, log_step=5000,
                         force_uppercase=True, save_filename=False):
    logging.info('Building a dataset from %s.', annotations_path)
    logging.info('Output dir: %s', output_path)

    train_file = os.path.join(output_path, 'training.tfrecords')
    test_file = os.path.join(output_path, 'testing.tfrecords')
    writerTrain = tf.python_io.TFRecordWriter(train_file)
    writerTest = tf.python_io.TFRecordWriter(test_file)

    longest_label = ''
    idx = 0
    with open(annotations_path, 'r') as annotations:
        for idx, line in enumerate(annotations):
            line = line.rstrip('\n')
            columns = line.split(' ')
            img_path = columns[0]
            img = cv2.imread(img_path)

            # extract every LP
            number_of_lp = int((len(columns) - 1) / 5)
            for i in range(number_of_lp):
                x = int(columns[i * 5 + 1])
                y = int(columns[i * 5 + 2])
                w = int(columns[i * 5 + 3])
                h = int(columns[i * 5 + 4])
                label = columns[i * 5 + 5]

                if force_uppercase:
                    label = label.upper()

                lp_img = img[y:y + h, x:x + w, :]
                #lp_img = cv2.cvtColor(lp_img, cv2.COLOR_BGR2RGB)
                cv2.imwrite('tmp.png', lp_img)
                with open('tmp.png', 'rb') as img_file:
                    lp_img = img_file.read()

                feature = {}
                feature['image'] = _bytes_feature(lp_img)
                feature['label'] = _bytes_feature(b(label))
                if save_filename:
                    feature['comment'] = _bytes_feature(b(img_path))

                example = tf.train.Example(features=tf.train.Features(
                    feature=feature))

                # send to train or test
                dst = random.choices(['train', 'test'],
                                     weights=[0.95, 0.05], k=1)
                if 'train' in dst:
                    writerTrain.write(example.SerializeToString())
                else:
                    writerTest.write(example.SerializeToString())

            if (idx + number_of_lp) % log_step == 0:
                logging.info('Processed %s pairs.', idx + 1)

    if idx:
        logging.info('Dataset is ready: %i pairs.', idx + 1)

    writerTrain.close()
    writerTest.close()
def processAuth(self, datagram, host, port):
    try:
        bas = self.find_nas(host)
        if not bas:
            raise PacketError(
                '[Radiusd] :: Dropping packet from unknown host %s' % host)

        secret, vendor_id = bas['bas_secret'], bas['vendor_id']
        req = self.createAuthPacket(packet=datagram,
                                    dict=self.dict,
                                    secret=six.b(str(secret)),
                                    vendor_id=vendor_id)

        self.do_stat(req.code)

        logger.info("[Radiusd] :: Received radius request: %s" % (repr(req)))
        if self.config.system.debug:
            logger.debug(req.format_str())

        if req.code != packet.AccessRequest:
            raise PacketError(
                'non-AccessRequest packet on authentication socket')

        reply = req.CreateReply()
        reply.vendor_id = req.vendor_id

        aaa_request = dict(account_number=req.get_user_name(),
                           domain=req.get_domain(),
                           macaddr=req.client_mac,
                           nasaddr=req.get_nas_addr(),
                           vlanid1=req.vlanid1,
                           vlanid2=req.vlanid2)

        auth_resp = RadiusAuth(self.db_engine, self.mcache, self.aes,
                               aaa_request).authorize()

        if auth_resp['code'] > 0:
            reply['Reply-Message'] = auth_resp['msg']
            reply.code = packet.AccessReject
            return reply

        if 'bypass' in auth_resp and int(auth_resp['bypass']) == 0:
            is_pwd_ok = True
        else:
            is_pwd_ok = req.is_valid_pwd(auth_resp.get('passwd'))

        if not is_pwd_ok:
            reply['Reply-Message'] = "password not match"
            reply.code = packet.AccessReject
            return reply
        else:
            if u"input_rate" in auth_resp and u"output_rate" in auth_resp:
                reply = rate_process.process(
                    reply,
                    input_rate=auth_resp['input_rate'],
                    output_rate=auth_resp['output_rate'])

            attrs = auth_resp.get("attrs") or {}
            for attr_name in attrs:
                try:
                    # todo: May have a type matching problem
                    reply.AddAttribute(utils.safestr(attr_name),
                                       attrs[attr_name])
                except Exception as err:
                    errstr = "RadiusError:current radius cannot support attribute {0},{1}".format(
                        attr_name, utils.safestr(err.message))
                    logger.error(errstr)

            for attr, attr_val in req.resp_attrs.iteritems():
                reply[attr] = attr_val

        reply['Reply-Message'] = 'success!'
        reply.code = packet.AccessAccept
        if not req.VerifyReply(reply):
            raise PacketError('VerifyReply error')
        return reply
    except Exception as err:
        self.do_stat(0)
        errstr = 'RadiusError:Dropping invalid auth packet from {0} {1},{2}'.format(
            host, port, utils.safeunicode(err))
        logger.error(errstr)
        import traceback
        traceback.print_exc()
def calc_hash(self, extra=""):
    return hashlib.md5(six.b(self.make_param_name() + extra)).hexdigest()
def test_hash_file(self):
    data = 'Mary had a little lamb, its fleece as white as snow'
    flo = six.StringIO(data)
    h1 = utils.hash_file(flo)
    h2 = hashlib.sha1(six.b(data)).hexdigest()
    self.assertEqual(h1, h2)
def read(self, *args, **kwargs):
    if self.closed:
        return six.b('')
    # not a new-style object in Python 2
    return six.BytesIO.read(self, *args, **kwargs)
def test_ntlmv2(self):
    print("####### 4.2.4 NTLMv2 Authentication")
    ntlm.USE_NTLMv2 = True
    serverName = b('\x02\x00\x0c\x00\x44\x00\x6f\x00\x6d\x00\x61\x00\x69\x00\x6e\x00\x01\x00\x0c\x00\x53\x00\x65\x00\x72\x00\x76\x00\x65\x00\x72\x00\x00\x00\x00\x00')
    # Still the aTime won't be set to zero. That must be changed in
    # ntlm.computeResponseNTLM2. Gotta make this more automated

    flags = ntlm.NTLMSSP_NEGOTIATE_KEY_EXCH | ntlm.NTLMSSP_NEGOTIATE_56 | ntlm.NTLMSSP_NEGOTIATE_128 | \
            ntlm.NTLMSSP_NEGOTIATE_VERSION | ntlm.NTLMSSP_NEGOTIATE_TARGET_INFO | \
            ntlm.NTLMSSP_NEGOTIATE_EXTENDED_SESSIONSECURITY | ntlm.NTLMSSP_TARGET_TYPE_SERVER | \
            ntlm.NTLMSSP_NEGOTIATE_ALWAYS_SIGN | ntlm.NTLMSSP_NEGOTIATE_NTLM | ntlm.NTLMSSP_NEGOTIATE_SEAL | \
            ntlm.NTLMSSP_NEGOTIATE_SIGN | ntlm.NTLM_NEGOTIATE_OEM | ntlm.NTLMSSP_NEGOTIATE_UNICODE
    print("Flags")
    hexdump(struct.pack('<L', flags))
    print("\n")

    print("4.2.4.1.1 NTOWFv2 and LMOWFv2")
    res = ntlm.NTOWFv2(self.user, self.password, self.domain)
    hexdump(res)
    self.assertTrue(res == bytearray(b'\x0c\x86\x8a@;\xfdz\x93\xa3\x00\x1e\xf2.\xf0.?'))
    print("\n")
    print("\n")

    print("4.2.4.1.2 Session Base Key")
    ntResponse, lmResponse, sessionBaseKey = ntlm.computeResponseNTLMv2(
        flags, self.serverChallenge, self.clientChallenge, serverName,
        self.domain, self.user, self.password, '', '')
    hexdump(sessionBaseKey)
    self.assertTrue(sessionBaseKey == bytearray(b'\x8d\xe4\x0c\xca\xdb\xc1\x4a\x82\xf1\x5c\xb0\xad\x0d\xe9\x5c\xa3'))
    print("\n")

    print("4.2.4.2.1 LMv2 Response")
    hexdump(lmResponse)
    self.assertTrue(lmResponse == bytearray(b'\x86\xc3P\x97\xac\x9c\xec\x10%TvJW\xcc\xcc\x19\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa'))
    print("\n")

    print("4.2.4.2.2 NTLMv2 Response")
    hexdump(ntResponse[:16])
    self.assertTrue(ntResponse[:16] == bytearray(b'\x68\xcd\x0a\xb8\x51\xe5\x1c\x96\xaa\xbc\x92\x7b\xeb\xef\x6a\x1c'))
    print("\n")

    print("4.2.4.2.3 Encrypted Session Key")
    keyExchangeKey = ntlm.KXKEY(flags, sessionBaseKey, lmResponse,
                                self.serverChallenge, self.password, '', '')
    encryptedSessionKey = ntlm.generateEncryptedSessionKey(
        keyExchangeKey, self.randomSessionKey)
    hexdump(encryptedSessionKey)
    self.assertTrue(encryptedSessionKey == bytearray(b'\xC5\xDA\xD2\x54\x4F\xC9\x79\x90\x94\xCE\x1C\xE9\x0B\xC9\xD0\x3E'))
    print("\n")

    print("AUTHENTICATE MESSAGE")
    encryptedSessionKey = ntlm.generateEncryptedSessionKey(
        keyExchangeKey, self.randomSessionKey)
    ntlmChallengeResponse = ntlm.NTLMAuthChallengeResponse(
        self.user, self.password, self.serverChallenge)
    ntlmChallengeResponse['flags'] = flags
    ntlmChallengeResponse['host_name'] = self.workstationName.encode('utf-16le')
    ntlmChallengeResponse['domain_name'] = self.domain.encode('utf-16le')
    ntlmChallengeResponse['lanman'] = lmResponse
    ntlmChallengeResponse['ntlm'] = ntResponse
    ntlmChallengeResponse['session_key'] = encryptedSessionKey
    hexdump(ntlmChallengeResponse.getData())
    self.assertTrue(ntlmChallengeResponse.getData() == bytearray(b'NTLMSSP\x00\x03\x00\x00\x00\x18\x00\x18\x00|\x00\x00\x00T\x00T\x00\x94\x00\x00\x00\x0c\x00\x0c\x00X\x00\x00\x00\x08\x00\x08\x00d\x00\x00\x00\x10\x00\x10\x00l\x00\x00\x00\x10\x00\x10\x00\xe8\x00\x00\x003\x82\x8a\xe2D\x00o\x00m\x00a\x00i\x00n\x00U\x00s\x00e\x00r\x00C\x00O\x00M\x00P\x00U\x00T\x00E\x00R\x00\x86\xc3P\x97\xac\x9c\xec\x10%TvJW\xcc\xcc\x19\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaah\xcd\n\xb8Q\xe5\x1c\x96\xaa\xbc\x92{\xeb\xefj\x1c\x01\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\x00\x00\x00\x00\x02\x00\x0c\x00D\x00o\x00m\x00a\x00i\x00n\x00\x01\x00\x0c\x00S\x00e\x00r\x00v\x00e\x00r\x00\x00\x00\x00\x00\x00\x00\x00\x00\xc5\xda\xd2TO\xc9y\x90\x94\xce\x1c\xe9\x0b\xc9\xd0>'))
    print("\n")

    print("4.2.4.4 GSS_WrapEx")
    print("Plaintext")
    hexdump(self.plaintext)
    print("\n")

    print("Output of SEAL()")
    exportedSessionKey = self.randomSessionKey
    clientSigningKey = ntlm.SIGNKEY(flags, exportedSessionKey)
    clientSealingKey = ntlm.SEALKEY(flags, exportedSessionKey)

    from Cryptodome.Cipher import ARC4
    cipher2 = ARC4.new(clientSealingKey)
    client_sealing_h = cipher2.encrypt

    print("SEALKEY()")
    hexdump(clientSealingKey)
    self.assertTrue(clientSealingKey == bytearray(b'Y\xf6\x00\x97<\xc4\x96\n%H\n|\x19nLX'))
    print("\n")

    print("SIGNKEY()")
    hexdump(clientSigningKey)
    self.assertTrue(clientSigningKey == bytearray(b'G\x88\xdc\x86\x1bG\x82\xf3]C\xfd\x98\xfe\x1a-9'))
    print("\n")

    print("Sealed Data")
    sealedMsg, signature = ntlm.SEAL(flags, clientSealingKey, clientSigningKey,
                                     self.plaintext, self.plaintext,
                                     self.seqNum, client_sealing_h)
    #signature = ntlm.SIGN(flags, clientSigningKey, plaintext, seqNum, client_sealing_h)
    hexdump(sealedMsg)
    self.assertTrue(sealedMsg == bytearray(b'T\xe5\x01e\xbf\x196\xdc\x99` \xc1\x81\x1b\x0f\x06\xfb_'))
    print("\n")

    print("Signature")
    hexdump(signature.getData())
    self.assertTrue(signature.getData() == bytearray(b'\x01\x00\x00\x00\x00\xc1a\xa1\x1e@\x03\x9f\x00\x00\x00\x00'))
    #print (repr(bytearray(str(signature))))
    #raise
    print("\n")
"protection_eligibility": b"Ineligible", "last_name": b"User", "txn_id": b"51403485VH153354B", "receiver_email": TEST_RECEIVER_EMAIL, "payment_status": b"Completed", "payment_gross": b"10.00", "tax": b"0.00", "residence_country": b"US", "invoice": b"0004", "payer_status": b"verified", "txn_type": b"express_checkout", "handling_amount": b"0.00", "payment_date": b"23:04:06 Feb 02, 2009 PST", "first_name": b"J\xF6rg", "item_name": b"", "charset": b(CHARSET), "custom": b"website_id=13&user_id=21", "notify_version": b"2.6", "transaction_subject": b"", "test_ipn": b"1", "item_number": b"", "receiver_id": b"258DLEHY2BDK6", "payer_id": b"BN5JZ2V7MLEV4", "verify_sign": b"An5ns1Kso7MWUdW4ErQKJJJ4qi4-AqdZy6dD.sGO3sDhTf1wAbuO2IZ7", "payment_fee": b"0.59", "mc_fee": b"0.59", "mc_currency": b"USD", "shipping": b"0.00", "payer_email": b"*****@*****.**", "payment_type": b"instant", "mc_gross": b"10.00",
def http_request(cls, url, method='GET', body=None, headers=None):
    """Make an HTTP request with the given method to the given URL,
    returning the resulting `http_client.HTTPResponse` instance.

    If the `body` argument is a `Resource` instance, it is serialized
    to XML by calling its `to_element()` method before submitting it.
    Requests are authenticated per the Recurly API specification using
    the ``recurly.API_KEY`` value for the API key.

    Requests and responses are logged at the ``DEBUG`` level to the
    ``recurly.http.request`` and ``recurly.http.response`` loggers
    respectively.
    """
    if recurly.API_KEY is None:
        raise recurly.UnauthorizedError('recurly.API_KEY not set')

    is_non_ascii = lambda s: any(ord(c) >= 128 for c in s)

    if is_non_ascii(recurly.API_KEY) or is_non_ascii(recurly.SUBDOMAIN):
        raise recurly.ConfigurationError("""Setting API_KEY or SUBDOMAIN to
            unicode strings may cause problems. Please use strings.
            Issue described here:
            https://gist.github.com/maximehardy/d3a0a6427d2b6791b3dc""")

    urlparts = urlsplit(url)
    connection_options = {}
    if recurly.SOCKET_TIMEOUT_SECONDS:
        connection_options['timeout'] = recurly.SOCKET_TIMEOUT_SECONDS
    if urlparts.scheme != 'https':
        connection = http_client.HTTPConnection(urlparts.netloc,
                                                **connection_options)
    elif recurly.CA_CERTS_FILE is None:
        connection = http_client.HTTPSConnection(urlparts.netloc,
                                                 **connection_options)
    else:
        connection_options['context'] = ssl.create_default_context(
            cafile=recurly.CA_CERTS_FILE)
        connection = http_client.HTTPSConnection(urlparts.netloc,
                                                 **connection_options)

    headers = {} if headers is None else dict(headers)
    headers.setdefault('Accept', 'application/xml')
    headers.update({'User-Agent': recurly.USER_AGENT})
    headers['X-Api-Version'] = recurly.api_version()
    headers['Authorization'] = 'Basic %s' % base64.b64encode(
        six.b('%s:' % recurly.API_KEY)).decode()

    log = logging.getLogger('recurly.http.request')
    if log.isEnabledFor(logging.DEBUG):
        log.debug("%s %s HTTP/1.1", method, url)
        for header, value in six.iteritems(headers):
            if header == 'Authorization':
                value = '<redacted>'
            log.debug("%s: %s", header, value)
        log.debug('')
        if method in ('POST', 'PUT') and body is not None:
            if isinstance(body, Resource):
                log.debug(body.as_log_output())
            else:
                log.debug(body)

    if isinstance(body, Resource):
        body = ElementTree.tostring(body.to_element(), encoding='UTF-8')
        headers['Content-Type'] = 'application/xml; charset=utf-8'

    if method in ('POST', 'PUT') and body is None:
        headers['Content-Length'] = '0'
    connection.request(method, url, body, headers)
    resp = connection.getresponse()
    resp_headers = cls.headers_as_dict(resp)

    log = logging.getLogger('recurly.http.response')
    if log.isEnabledFor(logging.DEBUG):
        log.debug("HTTP/1.1 %d %s", resp.status, resp.reason)
        log.debug(resp_headers)
        log.debug('')

    recurly.cache_rate_limit_headers(resp_headers)
    return resp
def _parse_calibration(self):
    try:
        return self.image_calibration.get(
            six.b('SLxCalibration'), {}).get(six.b('dCalibration'))
    except KeyError:
        return None
def test_paypal_date_invalid_format(self):
    params = IPN_POST_PARAMS.copy()
    params.update({"time_created": b("2015-10-25 01:21:32")})
    self.paypal_post(params)
    self.assertTrue(PayPalIPN.objects.latest('id').flag)
    self.assertIn(
        PayPalIPN.objects.latest('id').flag_info,
        ['Invalid form. (time_created: Invalid date format '
         '2015-10-25 01:21:32: need more than 2 values to unpack)',
         'Invalid form. (time_created: Invalid date format '
         '2015-10-25 01:21:32: not enough values to unpack '
         '(expected 5, got 2))']
    )

    # day not int convertible
    params = IPN_POST_PARAMS.copy()
    params.update({"payment_date": b("01:21:32 Jan 25th 2015 PDT")})
    self.paypal_post(params)
    self.assertTrue(PayPalIPN.objects.latest('id').flag)
    self.assertEqual(
        PayPalIPN.objects.latest('id').flag_info,
        "Invalid form. (payment_date: Invalid date format "
        "01:21:32 Jan 25th 2015 PDT: invalid literal for int() with "
        "base 10: '25th')"
    )

    # month not in Mmm format
    params = IPN_POST_PARAMS.copy()
    params.update({"next_payment_date": b("01:21:32 01 25 2015 PDT")})
    self.paypal_post(params)
    self.assertTrue(PayPalIPN.objects.latest('id').flag)
    self.assertIn(
        PayPalIPN.objects.latest('id').flag_info,
        ["Invalid form. (next_payment_date: Invalid date format "
         "01:21:32 01 25 2015 PDT: u'01' is not in list)",
         "Invalid form. (next_payment_date: Invalid date format "
         "01:21:32 01 25 2015 PDT: '01' is not in list)"]
    )

    # month not in Mmm format
    params = IPN_POST_PARAMS.copy()
    params.update({"retry_at": b("01:21:32 January 25 2015 PDT")})
    self.paypal_post(params)
    self.assertTrue(PayPalIPN.objects.latest('id').flag)
    self.assertIn(
        PayPalIPN.objects.latest('id').flag_info,
        ["Invalid form. (retry_at: Invalid date format "
         "01:21:32 January 25 2015 PDT: u'January' is not in list)",
         "Invalid form. (retry_at: Invalid date format "
         "01:21:32 January 25 2015 PDT: 'January' is not in list)"]
    )

    # no seconds in time part
    params = IPN_POST_PARAMS.copy()
    params.update({"subscr_date": b("01:28 Jan 25 2015 PDT")})
    self.paypal_post(params)
    self.assertTrue(PayPalIPN.objects.latest('id').flag)
    self.assertIn(
        PayPalIPN.objects.latest('id').flag_info,
        ["Invalid form. (subscr_date: Invalid date format "
         "01:28 Jan 25 2015 PDT: need more than 2 values to unpack)",
         "Invalid form. (subscr_date: Invalid date format "
         "01:28 Jan 25 2015 PDT: not enough values to unpack "
         "(expected 3, got 2))"]
    )

    # string not valid datetime
    params = IPN_POST_PARAMS.copy()
    params.update({"case_creation_date": b("01:21:32 Jan 49 2015 PDT")})
    self.paypal_post(params)
    self.assertTrue(PayPalIPN.objects.latest('id').flag)
    self.assertEqual(
        PayPalIPN.objects.latest('id').flag_info,
        "Invalid form. (case_creation_date: Invalid date format "
        "01:21:32 Jan 49 2015 PDT: day is out of range for month)"
    )
def gen_x(hash_class, salt, username, password): username = username.encode() if hasattr(username, 'encode') else username password = password.encode() if hasattr(password, 'encode') else password return H(hash_class, salt, H(hash_class, username + six.b(':') + password))
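# A self-contained sketch of gen_x in use, assuming an H helper that hashes
# the concatenation of its byte arguments. This H is illustrative only; the
# real module-level H may differ (e.g. it may return an integer).
import hashlib


def H(hash_class, *args):
    h = hash_class()
    for arg in args:
        h.update(arg)
    return h.digest()


x = gen_x(hashlib.sha256, b'salt', 'alice', 'password123')
assert x == H(hashlib.sha256, b'salt',
              H(hashlib.sha256, b'alice:password123'))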
class ServersSampleBase(api_sample_base.ApiSampleTestBaseV21):
    microversion = None
    sample_dir = 'servers'

    user_data_contents = six.b('#!/bin/bash\n/bin/su\necho "I am in you!"\n')
    user_data = base64.b64encode(user_data_contents)

    common_req_names = [
        (None, '2.36', 'server-create-req'),
        ('2.37', '2.56', 'server-create-req-v237'),
        ('2.57', None, 'server-create-req-v257')
    ]

    def _get_request_name(self, use_common, sample_name=None):
        if not use_common:
            return sample_name or 'server-create-req'

        api_version = self.microversion or '2.1'
        # min_ver/max_ver avoid shadowing the min()/max() builtins.
        for min_ver, max_ver, name in self.common_req_names:
            if avr.APIVersionRequest(api_version).matches(
                    avr.APIVersionRequest(min_ver),
                    avr.APIVersionRequest(max_ver)):
                return name

    def _post_server(self, use_common_server_api_samples=True, name=None,
                     extra_subs=None, sample_name=None):
        """Post a server-create request built from the sample files.

        :param use_common_server_api_samples: Whether tests use the common
            sample files for the server POST request and response. Defaults
            to True, which means _get_sample_path will fetch the common
            server sample files from the 'servers' directory. Set to False
            if tests need to use extension-specific sample files.
        """
        subs = {
            'image_id': fake.get_valid_image_id(),
            'host': self._get_host(),
            'compute_endpoint': self._get_compute_endpoint(),
            'versioned_compute_endpoint': self._get_vers_compute_endpoint(),
            'glance_host': self._get_glance_host(),
            'access_ip_v4': '1.2.3.4',
            'access_ip_v6': '80fe::',
            'user_data': (self.user_data if six.PY2
                          else self.user_data.decode('utf-8')),
            'uuid': '[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}'
                    '-[0-9a-f]{4}-[0-9a-f]{12}',
            'name': 'new-server-test' if name is None else name,
        }
        # If the template is requesting an explicit availability zone and
        # the test is set up to have AZs, use the first one in the list,
        # which should default to "us-west".
        if self.availability_zones:
            subs['availability_zone'] = self.availability_zones[0]
        if extra_subs:
            subs.update(extra_subs)
        orig_value = self.__class__._use_common_server_api_samples
        try:
            self.__class__._use_common_server_api_samples = (
                use_common_server_api_samples)
            # If using common samples, we could only put samples under
            # api_samples/servers. We will put a lot of samples when we
            # have more and more microversions.
            # Callers can specify the sample_name param so that we can add
            # samples into api_samples/servers/v2.xx.
            response = self._do_post(
                'servers',
                self._get_request_name(use_common_server_api_samples,
                                       sample_name), subs)
            status = self._verify_response('server-create-resp', subs,
                                           response, 202)
            return status
        finally:
            self.__class__._use_common_server_api_samples = orig_value

    def setUp(self):
        super(ServersSampleBase, self).setUp()
        self.api.microversion = self.microversion
def test_read_block_enhanced_packet_bigendian():
    scanner = FileScanner(io.BytesIO(six.b(
        # ---------- Section header
        "\x0a\x0d\x0d\x0a"  # Magic number
        "\x00\x00\x00\x20"  # Block size (32 bytes)
        "\x1a\x2b\x3c\x4d"  # Byte order magic
        "\x00\x01\x00\x00"  # Version
        "\xff\xff\xff\xff\xff\xff\xff\xff"  # Undefined section length
        "\x00\x00\x00\x00"  # Empty options
        "\x00\x00\x00\x20"  # Block size (32 bytes)

        # ---------- Interface description
        '\x00\x00\x00\x01'  # block magic
        '\x00\x00\x00\x40'  # block size (64 bytes)
        '\x00\x01'  # link type
        '\x00\x00'  # reserved block
        '\x00\x00\xff\xff'  # size limit
        '\x00\x02\x00\x04''eth0'  # if_name
        '\x00\x09\x00\x01''\x06\x00\x00\x00'  # if_tsresol (+padding)
        '\x00\x0c\x00\x13''Linux 3.2.0-4-amd64\x00'  # if_os
        '\x00\x00\x00\x00'  # end of options
        '\x00\x00\x00\x40'  # block size (64 bytes)

        # ---------- Enhanced packet
        '\x00\x00\x00\x06'  # block magic
        '\x00\x00\x00\x78'  # block size (120 bytes)
        '\x00\x00\x00\x00'  # interface id (first one, eth0)
        '\x00\x04\xf8\x1e''\x3c\x3e\xd5\xa9'  # timestamp (microseconds)
        '\x00\x00\x00\x51'  # Captured length
        '\x00\x00\x00\x51'  # Original length

        # Packet data (81 bytes)
        '\x00\x02\x157\xa2D\x00\xae\xf3R\xaa\xd1\x08\x00'  # Ethernet
        'E\x00\x00C\x00\x01\x00\x00@\x06x<\xc0\xa8\x05\x15B#\xfa\x97'  # IP
        '\x00\x14\x00P\x00\x00\x00\x00\x00\x00\x00\x00P\x02 '  # TCP
        '\x00\xbb9\x00\x00'  # TCP(cont)
        'GET /index.html HTTP/1.0 \n\n'  # HTTP
        '\x00\x00\x00'  # Padding

        # todo: add options?
        '\x00\x00\x00\x00'  # Empty options
        '\x00\x00\x00\x78'  # block size (120 bytes)
    )))

    blocks = list(scanner)
    assert len(blocks) == 3

    assert isinstance(blocks[0], SectionHeader)
    assert blocks[0].endianness == '>'
    assert blocks[0].interfaces == {0: blocks[1]}

    assert isinstance(blocks[1], InterfaceDescription)
    assert blocks[1].section == blocks[0]
    assert blocks[1].link_type == 0x01
    assert blocks[1].snaplen == 0xffff
    assert blocks[1].options['if_name'] == 'eth0'
    assert blocks[1].options['if_tsresol'] == b'\x06'

    assert isinstance(blocks[2], EnhancedPacket)
    assert blocks[2].section == blocks[0]
    assert blocks[2].interface_id == 0
    assert blocks[2].interface == blocks[1]
    assert blocks[2].timestamp_high == 0x0004f81e
    assert blocks[2].timestamp_low == 0x3c3ed5a9
    assert blocks[2].timestamp_resolution == 1e-6
    assert blocks[2].timestamp == 1398708650.3008409
    assert blocks[2].captured_len == 0x51
    assert blocks[2].packet_len == 0x51
    assert blocks[2].packet_data == (
        b'\x00\x02\x157\xa2D\x00\xae\xf3R\xaa\xd1\x08\x00'  # Ethernet
        b'E\x00\x00C\x00\x01\x00\x00@\x06x<\xc0\xa8\x05\x15B#\xfa\x97'  # IP
        b'\x00\x14\x00P\x00\x00\x00\x00\x00\x00\x00\x00P\x02 '  # TCP
        b'\x00\xbb9\x00\x00'  # TCP(cont)
        b'GET /index.html HTTP/1.0 \n\n')  # HTTP
    assert len(blocks[2].options) == 0
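# The asserted timestamp follows from the two 32-bit timestamp fields and
# the 1-microsecond if_tsresol declared above; values copied from the test:
ts_high, ts_low = 0x0004f81e, 0x3c3ed5a9
ts = ((ts_high << 32) | ts_low) * 1e-6  # 64-bit tick count at 1 us resolution
assert abs(ts - 1398708650.3008409) < 1e-6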
def _parse_date(self): try: return parse_date(self.image_text_info[six.b('SLxImageTextInfo')]) except KeyError: return None
def sign(csr, issuer_name, ca_key, ca_key_password=None,
         skip_validation=False):
    """Sign a given csr

    :param csr: certificate signing request object or pem encoded csr
    :param issuer_name: issuer name
    :param ca_key: private key of CA
    :param ca_key_password: private key password for given ca key
    :param skip_validation: skip csr validation if true
    :returns: generated certificate
    """
    if not isinstance(ca_key, rsa.RSAPrivateKey):
        ca_key = serialization.load_pem_private_key(ca_key,
                                                    password=ca_key_password,
                                                    backend=default_backend())

    if not isinstance(issuer_name, six.text_type):
        issuer_name = six.text_type(issuer_name.decode('utf-8'))

    if isinstance(csr, six.text_type):
        csr = six.b(str(csr))
    if not isinstance(csr, x509.CertificateSigningRequest):
        try:
            csr = x509.load_pem_x509_csr(csr, backend=default_backend())
        except ValueError:
            LOG.exception(_LE("Received invalid csr {0}.").format(csr))
            raise exception.InvalidCsr(csr=csr)

    term_of_validity = cfg.CONF.x509.term_of_validity
    one_day = datetime.timedelta(1, 0, 0)
    expire_after = datetime.timedelta(term_of_validity, 0, 0)

    builder = x509.CertificateBuilder()
    builder = builder.subject_name(csr.subject)
    # issuer_name is set as common name
    builder = builder.issuer_name(
        x509.Name([
            x509.NameAttribute(x509.OID_COMMON_NAME, issuer_name),
        ]))
    builder = builder.not_valid_before(datetime.datetime.today() - one_day)
    builder = builder.not_valid_after(datetime.datetime.today() + expire_after)
    builder = builder.serial_number(int(uuid.uuid4()))
    builder = builder.public_key(csr.public_key())

    if skip_validation:
        extensions = csr.extensions
    else:
        extensions = validator.filter_extensions(csr.extensions)

    for extension in extensions:
        builder = builder.add_extension(extension.value,
                                        critical=extension.critical)

    certificate = builder.sign(
        private_key=ca_key, algorithm=hashes.SHA256(),
        backend=default_backend()).public_bytes(serialization.Encoding.PEM)

    return certificate
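# A hedged caller sketch showing how inputs to sign() can be produced with
# the `cryptography` package. Key sizes and names are arbitrary, and the
# magnum config (cfg.CONF.x509.term_of_validity) must already be registered
# for sign() to run.
from cryptography import x509
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.asymmetric import rsa

ca_key = rsa.generate_private_key(public_exponent=65537, key_size=2048,
                                  backend=default_backend())
client_key = rsa.generate_private_key(public_exponent=65537, key_size=2048,
                                      backend=default_backend())
csr = x509.CertificateSigningRequestBuilder().subject_name(
    x509.Name([x509.NameAttribute(x509.OID_COMMON_NAME, u'client')])
).sign(client_key, hashes.SHA256(), default_backend())

pem_cert = sign(csr, u'my-ca', ca_key, skip_validation=True)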
def to_bytes(s): if isinstance(s, six.string_types): return six.b(s) else: return s
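# Behavior at a glance: text is encoded to bytes, anything else passes
# through unchanged.
assert to_bytes('abc') == b'abc'
assert to_bytes(42) == 42  # non-strings are returned as-is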
def command(self, command, full_output=False):
    """Send a command to HAProxy over UNIX stats socket.

    Newline character returned from haproxy is stripped off.

    :param command: A valid command to execute
    :type command: string
    :param full_output: (optional) Return all output, by default returns
        only the 1st line of the output
    :type full_output: ``bool``
    :return: 1st line of the output or the whole output as a list
    :rtype: ``string`` or ``list`` if full_output is True
    """
    data = []  # hold data returned from socket
    raised = None  # hold possible exception raised during connect phase
    attempt = 0  # times to attempt to connect after a connection failure
    if self.retry == 0:  # 0 means retry indefinitely
        attempt = -1
    elif self.retry is None:  # None means don't retry
        attempt = 1
    else:  # any other value means retry N times
        attempt = self.retry + 1
    while attempt != 0:
        try:
            unix_socket = None
            tcpsocket = None
            if is_unix_socket(self.sock):
                unix_socket = socket.socket(socket.AF_UNIX,
                                            socket.SOCK_STREAM)
                # I haven't seen a case where a running process which holds
                # a UNIX socket will take more than a few nanoseconds to
                # accept a connection. But, I have seen cases where it takes
                # ~0.5 seconds to get a response from the socket. Thus we
                # hard-code a timeout of 0.5 seconds.
                # TODO: consider having a configuration file for it
                unix_socket.settimeout(0.5)
                unix_socket.connect(self.sock)
                unix_socket.send(six.b(command + '\n'))
                file_handle = unix_socket.makefile()
                data = file_handle.read().splitlines()
            else:
                tcpsocket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
                parts = urisplit(self.sock)
                if (type(parts.gethost()) == ipaddress.IPv4Address
                        or hostname_resolves(parts.gethost())):
                    tcpsocket.settimeout(2.0)
                    tcpsocket.connect((parts.gethost(), parts.getport()))
                    tcpsocket.send(six.b(command + '\n'))
                    file_handle = tcpsocket.makefile()
                    data = file_handle.read().splitlines()
                else:
                    raise ValueError(
                        "URI is neither a valid IPAddress nor a resolvable "
                        "hostname"
                    )
        except socket.timeout:
            raised = SocketTimeout(sock=self.sock)
        except OSError as exc:
            # While stress testing HAProxy and querying for all frontend
            # metrics I sometimes get:
            # OSError: [Errno 106] Transport endpoint is already connected
            # Catch this one only and re-raise it with our own exception.
            if exc.errno == errno.EISCONN:
                raised = SocketTransportError(sock=self.sock)
            elif exc.errno == errno.ECONNREFUSED:
                raised = SocketConnectionError(self.sock)
            else:
                # for the rest of OSError exceptions just reraise them
                raised = exc
        else:
            # HAProxy always sends an empty string at the end. We remove it
            # as it adds noise for things like ACL/MAP etc. We only do that
            # when we get more than 1 line, which only happens when we ask
            # for ACL/MAP/etc and not when we issue commands such as
            # disable/enable server.
            if len(data) > 1 and data[-1] == '':
                data.pop()
            # make sure possible previous errors are cleared
            raised = None
            # get out from the retry loop
            break
        finally:
            if unix_socket:
                unix_socket.close()
            elif tcpsocket:
                tcpsocket.close()
            if raised:
                time.sleep(self.retry_interval)

        attempt -= 1

    if raised:
        raise raised
    elif data:
        if full_output:
            return data
        else:
            return data[0]
    else:
        raise ValueError("no data returned from socket {}".format(self.sock))
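# The retry knob maps to connection attempts as sketched by this
# hypothetical helper, which mirrors the branching at the top of command():
def attempts_for(retry):
    if retry == 0:  # retry indefinitely: the -1 sentinel never reaches 0
        return -1
    if retry is None:  # don't retry: a single attempt
        return 1
    return retry + 1  # first try plus N retries


assert attempts_for(None) == 1
assert attempts_for(3) == 4
assert attempts_for(0) == -1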
            O[{out_idxs} : {out_dims}] = =(I[{in_idxs}]);
        }}""".format(in_dims=', '.join(in_dims),
                     out_dims=', '.join(out_dims),
                     in_idxs=', '.join(in_idxs),
                     out_idxs=', '.join(out_idxs))

        value_input = []
        outshape = tile.Shape(data.shape.dtype, shape_dims)

        super(PadConstant, self).__init__(code, [('I', data)] + value_input,
                                          [('O', outshape)])


_CONV_PADDING_MODE = {
    # TODO: Implement edge and reflection padding.
    six.b('constant'): PadConstant,
    # six.b('edge'): PadEdge,
    # six.b('reflect'): PadReflect
}


class Transpose(tile.Operation):
    """Transposes a tensor.

    When `perm` is omitted, the axes are reversed, which matches the ONNX
    default for Transpose.
    """

    def __init__(self, data, perm=None):
        if not perm:
            perm = range(data.shape.ndims - 1, -1, -1)
        ndims = data.shape.ndims