def normalize_header_key(
    value: typing.AnyStr, encoding: typing.Optional[str] = None
) -> bytes:
    """
    Coerce str/bytes into a strictly byte-wise HTTP header key.

    Bytes input is lowercased as-is; str input is first encoded
    (ASCII by default, matching HTTP header-name rules) and then
    lowercased.  Note: PEP 484 requires Optional[str] here — the
    original `encoding: str = None` was an implicit Optional.
    """
    if isinstance(value, bytes):
        return value.lower()
    # None/empty means "use the HTTP-safe default codec".
    return value.encode(encoding or "ascii").lower()
def write_file(
    self,
    content: t.AnyStr,
    file_name: str,
    file_category: FileCategory,
    ids: t.Optional[str] = None,
    custom_metadata: t.Optional[t.Dict[str, str]] = None,
    custom_tags: t.Optional[t.List[str]] = None,
    source_type: t.Optional[str] = None,
) -> File:
    """
    Store *content* under *file_name* in the in-memory fake storage and
    return an S3-style file pointer dict.

    str content is UTF-8 encoded so the stored body is always bytes.
    Metadata keys mirror the TS_* tags; ``custom_metadata``/``custom_tags``
    are stored as given (may be None).

    Fixes over the original: ``isinstance`` instead of the
    ``type(content) == str`` anti-pattern (also accepts str subclasses),
    and PEP 484-correct Optional annotations for the ``= None`` defaults.
    """
    if isinstance(content, str):
        content = content.encode('UTF-8')
    self._storage[file_name] = {
        "metadata": {
            "TS_IDS": ids,
            "TS_SOURCE_TYPE": source_type,
            "TS_FILE_CATEGORY": file_category,
        },
        "body": content,
        "custom_metadata": custom_metadata,
        "custom_tags": custom_tags,
    }
    # Fake S3 pointer — bucket name is fixed for unit tests.
    return {
        "type": "s3",
        "bucket": "fake-unittest-bucket",
        "fileKey": file_name,
    }
def encode(_input_message: typing.AnyStr = '') -> typing.AnyStr:
    """Base64-encode a message with a one-character obfuscation prefix.

    CRLF sequences are normalized to LF before encoding.  The character
    at index 4 of the base64 text is duplicated at the front; decode()
    strips the first character back off.  Empty input yields ''.
    """
    if _input_message == '':
        return ''
    normalized = re.sub(r'\r\n', '\n', _input_message)
    b64_text = base64.b64encode(normalized.encode('utf-8')).decode('utf-8')
    # Slice [4:5] (not [4]) so base64 strings of exactly 4 chars are safe.
    return b64_text[4:5] + b64_text
def decode(_input_base64: typing.AnyStr = '') -> typing.AnyStr:
    """Reverse encode(): drop the leading obfuscation character, then
    base64-decode the remainder back to a UTF-8 string.

    Empty input yields ''.
    """
    if _input_base64 == '':
        return ''
    # The first character is the duplicated prefix added by encode().
    payload = _input_base64[1:]
    raw = base64.b64decode(payload.encode('utf-8'))
    return raw.decode('utf-8')
def in_hsts_preload(host: typing.AnyStr) -> bool:
    """Determines if an IDNA-encoded host is on the HSTS preload list.

    Accepts str or bytes; str input is ASCII-encoded (the host is
    expected to be IDNA-encoded already).  Returns True when the host
    itself, or a preloaded parent domain flagged include-subdomains,
    appears in the packaged ``hstspreload.bin`` data.
    """
    # Normalize to lowercase bytes: the preload data stores labels as bytes.
    if isinstance(host, str):
        host = host.encode("ascii")
    labels = host.lower().split(b".")

    # Fast-branch for gTLDs that are registered to preload all sub-domains.
    if labels[-1] in _GTLD_INCLUDE_SUBDOMAINS:
        return True

    with open_pkg_binary("hstspreload.bin") as f:
        # Walk the labels right-to-left (TLD first); one "layer" per label.
        for layer, label in enumerate(labels[::-1]):
            # None of our layers are greater than 4 deep.
            if layer > 3:
                return False

            # Read the jump table for the layer and label
            jump_info = _JUMPTABLE[layer][_crc8(label)]
            if jump_info is None:
                # No entry: host is not preloaded
                return False

            # Read the set of entries for that layer and label.
            # jump_info[0] is used as the seek offset, jump_info[1] as the
            # entry-block size in bytes.
            f.seek(jump_info[0])
            data = bytearray(jump_info[1])
            f.readinto(data)

            for is_leaf, include_subdomains, ent_label in _iter_entries(data):
                # We found a potential leaf
                if is_leaf:
                    if ent_label == host:
                        return True
                    if include_subdomains and host.endswith(b"." + ent_label):
                        return True

                # Continue traversing as we're not at a leaf.
                elif label == ent_label:
                    break
            else:
                # for/else: no entry in this block matched our label,
                # so the host cannot be preloaded.
                return False
    return False
def in_hsts_preload(host: typing.AnyStr) -> bool:
    """Determines if an IDNA-encoded host is on the HSTS preload list.

    Variant that reads the preload database from ``_HSTSPRELOAD_BIN_PATH``
    and resolves jump offsets via ``_get_offset_and_size``.  Accepts str or
    bytes; str input is ASCII-encoded (host is expected to be IDNA-encoded
    already).
    """
    # Normalize to lowercase bytes: the preload data stores labels as bytes.
    if isinstance(host, str):
        host = host.encode("ascii")
    labels = host.lower().split(b".")

    # Fast-branch for gTLDs that are registered to preload all sub-domains.
    if labels[-1] in _GTLD_INCLUDE_SUBDOMAINS:
        return True

    with open(_HSTSPRELOAD_BIN_PATH, "rb") as f:
        # Walk the labels right-to-left (TLD first); one "layer" per label.
        for layer, label in enumerate(labels[::-1]):
            # None of our layers are greater than 4 deep.
            if layer > 3:
                return False

            # Read the jump table for the layer and label
            offset, size = _get_offset_and_size(f, layer, label)
            if offset == 0:
                # Zero offset is the "no entry" sentinel: not preloaded.
                return False

            # Read the set of entries for that layer.
            # NOTE(review): whence=1 seeks relative to the CURRENT file
            # position (presumably where _get_offset_and_size left it) —
            # confirm against that helper's contract.
            f.seek(offset, 1)
            data = bytearray(size)
            f.readinto(data)

            for is_leaf, include_subdomains, ent_label in _iter_entries(data):
                # We found a potential leaf
                if is_leaf:
                    if ent_label == host:
                        return True
                    if include_subdomains and host.endswith(b"." + ent_label):
                        return True

                # Continue traversing as we're not at a leaf.
                elif label == ent_label:
                    break
            else:
                # for/else: no entry in this block matched our label,
                # so the host cannot be preloaded.
                return False
    return False
def _encode(value: t.AnyStr, encoding: str = "utf-8") -> bytes: if isinstance(value, str): return value.encode(encoding) return value
def write(self, s: typing.AnyStr):
    """Write *s* to the wrapped file object ``self.fd``.

    str input is encoded to bytes first, but only when the file's mode
    contains 'w'.  NOTE(review): this presumably targets binary write
    modes like 'wb' — a plain text-mode 'w' would also match; confirm
    how self.fd is opened.
    """
    needs_encoding = isinstance(s, str) and 'w' in self.fd.mode
    self.fd.write(s.encode() if needs_encoding else s)