def upload(api, input_file=sys.stdin, verbose=False, encrypt=False, webui=False): """ Upload and (optionally) encrypt a specified file. :param ipfsapi.Client api: The IPFS API client instance to use :param input_file: The file to upload :param bool verbose: Verbosity on/off :param bool encrypt: Encryption on/off :returns: A hash of the uploaded file :rtype: str """ # Read stdin... if input_file is sys.stdin: if verbose: sys.stderr.write( 'Waiting for standard input... ' + '(type your doc and press Ctrl+D)\n' ) sys.stderr.flush() file_contents = sys.stdin.buffer.read() sys.stderr.write('\n') # ...or the specified file else: with open(input_file, 'rb') as f: file_contents = f.read() suffix = '' if encrypt or webui: # WebUI implies encryption secret = Fernet.generate_key() if verbose: sys.stderr.write("Using secret %r...\n" % secret) cipher = Fernet(secret) file_contents = cipher.encrypt(ensure_bytes(file_contents)) # Convert the secret to base58 for consistence secret58 = b58encode(secret) suffix += '/#' if webui: file_contents = render(ensure_unicode( file_contents), verbose=verbose) suffix += WEBUI_KEY_PREFIX suffix += secret58 addr = api.add_bytes(ensure_bytes(file_contents)) + suffix return addr
def encode(self):
    """
    Return this CID's raw buffer in base58-encoded form.

    :return: encoded representation of the CID
    :rtype: bytes
    """
    encoded = base58.b58encode(self.buffer)
    return ensure_bytes(encoded)
def __init__(self, data, links, serialized, multihash):
    """
    Initialise a DAG node.

    :param data: the node's payload (coerced to bytes)
    :param links: iterable of child links, or None for no links
    :param bytes serialized: the serialized form of this node
    :param multihash: base58-encoded multihash, as str or bytes
    :raises TypeError: if multihash is neither str nor bytes
    """
    self._data = ensure_bytes(data)
    # Fix: the error message promised "str or bytes" but the original
    # check only accepted bytes; base58.b58decode handles both.
    if isinstance(multihash, (str, bytes)):
        self._multihash = base58.b58decode(multihash)
    else:
        raise TypeError('multihash should be either a str or bytes object')
    self._serialized = serialized
    self._links = [] if links is None else links
    # Node size = serialized size plus the sizes of all linked children.
    self._size = sum((link.size for link in self._links),
                     len(self._serialized))
def create(cls, data, links=None, hash_algorithm='sha2-256', serializer=json.dumps):
    """
    Build a new Node from raw data and optional links.

    :param data: the node payload
    :param links: optional iterable of links; non-Link entries are dropped
    :param str hash_algorithm: multihash algorithm for the digest
    :param serializer: callable serializing the node dict to a string
    :return: the freshly constructed Node
    :rtype: Node
    """
    if links is None:
        valid_links = []
    else:
        valid_links = [link for link in links if isinstance(link, Link)]
    payload = serializer({'data': data, 'links': valid_links})
    serialized = ensure_bytes(payload)
    mh = multihash.digest(serialized, hash_algorithm).encode('base58')
    return Node(data, valid_links, serialized, mh)
def decode(data):
    """
    Decode the multibase decoded data

    :param data: multibase encoded data
    :type data: str or bytes
    :return: decoded data
    :rtype: str
    :raises ValueError: if the data is not multibase encoded
    """
    raw = ensure_bytes(data, 'utf8')
    codec = get_codec(raw)
    # Strip the leading codec prefix before handing off to the converter.
    payload = raw[CODE_LENGTH:]
    return codec.converter.decode(payload)
def __init__(self, version, codec, multihash):
    """
    Create a new CID object.

    This class should not be used directly; use
    :py:class:`cid.cid.CIDv0` or :py:class:`cid.cid.CIDv1` instead.

    :param int version: CID version (0 or 1)
    :param str codec: codec to be used for encoding the hash
    :param str multihash: the multihash
    """
    self._version = version
    self._codec = codec
    # Store the multihash normalised to bytes.
    self._multihash = ensure_bytes(multihash)
def from_string(cidstr):
    """
    Creates a CID object from a encoded form

    :param str cidstr: can be

        - base58-encoded multihash
        - multihash
        - multibase-encoded multihash

    :return: a CID object
    :rtype: :py:class:`cid.CIDv0` or :py:class:`cid.CIDv1`
    """
    # Normalise to bytes, then delegate to the bytes-based constructor.
    as_bytes = ensure_bytes(cidstr, 'utf-8')
    return from_bytes(as_bytes)
def get_codec(data):
    """
    Returns the codec used to encode the given data

    :param data: multibase encoded data
    :type data: str or bytes
    :return: the :py:obj:`multibase.Encoding` object for the data's codec
    :raises ValueError: if the codec is not supported
    """
    # The codec is identified by the fixed-length prefix of the data.
    key = ensure_bytes(data[:CODE_LENGTH], 'utf8')
    try:
        return ENCODINGS_LOOKUP[key]
    except KeyError:
        raise ValueError('Can not determine encoding for {}'.format(data))
def encode(encoding, data):
    """
    Encodes the given data using the encoding that is specified

    :param str encoding: encoding to use, should be one of the supported
        encodings
    :param data: data to encode
    :type data: str or bytes
    :return: multibase encoded data
    :rtype: bytes
    :raises ValueError: if the encoding is not supported
    """
    data = ensure_bytes(data, 'utf8')
    # Fix: look the codec up once, and keep the try body minimal so a
    # KeyError raised *inside* converter.encode() is no longer misreported
    # as an unsupported encoding.
    try:
        codec = ENCODINGS_LOOKUP[encoding]
    except KeyError:
        raise ValueError('Encoding {} not supported.'.format(encoding))
    return codec.code + codec.converter.encode(data)
def _encode_bytes(self, bytes_, group_bytes, encoding_bits, decoding_bits):
    """
    Re-group the bits of *bytes_* and map them onto ``self.digits``.

    :param bytes bytes_: the raw input bytes
    :param int group_bytes: number of input bytes consumed per iteration
    :param int encoding_bits: bit-width used to render each input byte
    :param int decoding_bits: bit-width of each output digit chunk
    :return: the encoded output bytes
    :rtype: bytes
    """
    buffer = BytesIO(bytes_)
    encoded_bytes = BytesIO()
    while True:
        byte_ = buffer.read(group_bytes)
        if not byte_:
            break
        # convert all bytes to a binary format and concatenate them into
        # one bit string (group_bytes * encoding_bits bits when a full
        # group was read; shorter on the final partial group)
        binstringfmt = '{{:0{}b}}'.format(encoding_bits)
        binstring = ''.join([binstringfmt.format(x) for x in byte_])
        # break the bit string into pieces of decoding_bits bits each
        # (zero-padded at the end) and convert each piece to an integer
        digits = (int(''.join(x), 2) for x in self._chunk_with_padding(
            binstring, decoding_bits, '0'))
        for digit in digits:
            # map the integer onto the codec's digit alphabet
            encoded_bytes.write(ensure_bytes(self.digits[digit]))
    return encoded_bytes.getvalue()
def test_decode(_, data, encoded_data):
    """Decoding the encoded fixture yields the original data as bytes."""
    expected = ensure_bytes(data)
    assert expected == decode(encoded_data)
def test_encode(encoding, data, encoded_data):
    """Encoding the fixture yields the expected multibase bytes."""
    expected = ensure_bytes(encoded_data)
    assert expected == encode(encoding, data)
def encode(self, bytes):
    """
    Encode the input bytes by treating them as one big-endian unsigned
    integer and rendering that integer with the parent converter.

    NOTE: the ``bytes`` parameter shadows the builtin of the same name;
    kept unchanged for interface compatibility.

    :return: the encoded representation
    :rtype: bytes
    """
    as_int = int.from_bytes(bytes, byteorder='big', signed=False)
    encoded = super(BaseStringConverter, self).encode(as_int)
    return ensure_bytes(encoded)
def encode(self, bytes):
    """
    Hex-encode the input: two lowercase hex digits per input byte.

    :return: the hex-encoded representation
    :rtype: bytes
    """
    hex_digits = ''.join('{:02x}'.format(octet) for octet in bytes)
    return ensure_bytes(hex_digits)
def decode(self, bytes):
    """
    Decode the input by regrouping 8-wide digit chunks back into 8-bit
    bytes (5 bits consumed per encoded digit — looks like a base32-style
    codec; confirm against the encoder).

    :return: the decoded bytes
    :rtype: bytes
    """
    raw = ensure_bytes(bytes)
    return self._decode_bytes(raw, 8, 5, 8)
def encode(self, bytes):
    """
    Encode the input by regrouping 5-byte groups of 8-bit input into
    5-bit output digits (looks like a base32-style codec; confirm
    against the decoder).

    :return: the encoded bytes
    :rtype: bytes
    """
    raw = ensure_bytes(bytes)
    return self._encode_bytes(raw, 5, 8, 5)