Example #1
0
    async def read_file(
        id: bytes,
        metadata_location: str,
        version: Optional[DropVersion] = None,
        get_latest: bool = False,
    ) -> Optional['DropMetadata']:
        """Read a drop metadata file from disk

        :param id: the drop id
        :param metadata_location: where to look for the file
        :param version: the drop version; if None the file name is looked
            up on disk instead
        :param get_latest: when version is None, read the "latest" pointer
            instead of the "current" one
        :return: A DropMetadata object, or None if no file can be found
        """
        logger.debug("reading from file")
        if version is None:
            logger.debug(
                "Version is None, looking it up in %s",
                metadata_location,
            )
            if get_latest:
                logger.info("reading latest")
                file_name = await DropMetadata.read_latest(
                    id,
                    metadata_location,
                )
            else:
                logger.info("reading current")
                file_name = await DropMetadata.read_current(
                    id,
                    metadata_location,
                )
        else:
            logger.debug("Getting version %s", version)
            file_name = DropMetadata.make_filename(id, version)
        if file_name is None:
            logger.warning(
                "current drop metadata not found for %s",
                crypto_util.b64encode(id),
            )
            return None

        full_path = os.path.join(metadata_location, file_name)
        if not os.path.isfile(full_path):
            logger.warning(
                "drop metadata not found for %s",
                crypto_util.b64encode(id),
            )
            return None

        async with aiofiles.open(full_path, 'rb') as f:
            # read() with no size argument returns the entire file, so a
            # single call replaces the old accumulate-until-empty loop
            b = await f.read()
            return await DropMetadata.decode(b)
Example #2
0
async def write_chunk(
    filepath: str,
    position: int,
    contents: bytes,
    chunk_hash: bytes,
    chunk_size: int = DEFAULT_CHUNK_SIZE,
) -> None:
    """
    Takes a filepath, position, contents, and contents hash and writes it to
    a file correctly.  Assumes the file has been created.  Will check the hash,
    and raise a VerificationException if the provided chunk_hash doesn't match.
    May raise relevant IO exceptions.

    If the file extension indicates the file is complete, does nothing.

    :param filepath: the path of the file to write to
    :param position: the posiiton in the file to write to
    :param contents: the contents to write
    :param chunk_hash: the expected hash of contents
    :param chunk_size: (optional) override the chunk size, used to calculate \
    the position in the file
    :raises crypto_util.VerificationException: When the hash of the provided \
            bytes does not match the provided hash
    :return: None
    """
    if is_complete(filepath):
        logger.info("file %s already done, not writing", filepath)
        return

    filepath += DEFAULT_INCOMPLETE_EXT
    computed_hash = await crypto_util.hash(contents)
    if computed_hash != chunk_hash:
        raise crypto_util.VerificationException(
            "Computed: %s, expected: %s" % (
                crypto_util.b64encode(computed_hash),
                crypto_util.b64encode(chunk_hash),
            ), )
    logger.debug(
        "writing chunk with filepath %s and hash %s",
        filepath,
        crypto_util.b64encode(chunk_hash),
    )

    # Release the per-file lock in a finally block: previously an IO error
    # while opening/seeking/writing left the lock held forever, deadlocking
    # every later write to this file.
    await write_locks[filepath].acquire()
    try:
        async with aiofiles.open(filepath, 'r+b') as f:
            pos_bytes = position * chunk_size
            await f.seek(pos_bytes)
            await f.write(contents)
            await f.flush()
    finally:
        write_locks[filepath].release()
Example #3
0
    async def read_file(
        file_id: bytes,
        metadata_location: str,
        file_name: str,
    ) -> Optional['FileMetadata']:
        """Read a file metadata file and return FileMetadata

        :param file_id: The hash of the file to read
        :param metadata_location: drop location with default metadata location
        :param file_name: currently ignored -- it is overwritten below with
            the b64-encoded file id; kept only for interface compatibility.
            TODO(review): confirm callers do not expect it to be honored
        :return: a FileMetadata object or None if it does not exist
        """
        logger.debug("reading from file")
        # The on-disk name is always derived from the file id, discarding
        # the file_name argument (behavior preserved from the original).
        file_name = crypto_util.b64encode(file_id).decode("utf-8")
        full_path = os.path.join(metadata_location, file_name)
        if not os.path.exists(full_path):
            return None

        async with aiofiles.open(full_path, 'rb') as f:
            # read() with no size argument returns the whole file in one
            # call, so the old accumulate-until-empty loop is unnecessary
            b = await f.read()
            return FileMetadata.decode(b)
Example #4
0
async def start_drop_from_id(drop_id: bytes, save_dir: str) -> None:
    """Given a drop_id and save directory, sets up the directory for syncing
    and adds the info to the global dir

    Should be followed by calls to `get_drop_metadata`, `get_file_metadata` for
    each file, and `sync_file_contents`

    :param drop_id: The drop id to add
    :param save_dir: where to download the drop to
    """

    logger.info(
        "Adding drop from id %s to %s",
        crypto_util.b64encode(drop_id),
        save_dir,
    )
    # Create both metadata directories (drop first, then file), tolerating
    # directories that already exist.
    for metadata_subdir in (
            DEFAULT_DROP_METADATA_LOCATION,
            DEFAULT_FILE_METADATA_LOCATION,
    ):
        os.makedirs(os.path.join(save_dir, metadata_subdir), exist_ok=True)
    await save_drop_location(drop_id, save_dir)
Example #5
0
async def send_drops_to_dps(
    ip: str,
    port: int,
    shutdown_flag: threading.Event,
) -> None:
    """For each drop tell the dps that ip/port has that drop

    :param ip: The ip/address to tell the dps
    :param port: The port to tell the dps
    :param shutdown_flag: Stop when this is set
    """
    private_key = await load_private_key_from_disk()
    this_node_id = await node_id_from_private_key(private_key, )
    dps = await get_drop_peer_store(this_node_id)

    # Re-announce at half the tracker TTL (minus a safety margin) so our
    # availability records never lapse; this value never changes, so it is
    # computed once outside the loop.
    sleep_time = TRACKER_DROP_AVAILABILITY_TTL / 2 - 1

    while not shutdown_flag.is_set():
        logger.info("Sending drops to dps")
        for drop in list_drops():
            logger.debug("Sending drop %s", crypto_util.b64encode(drop))

            await dps.add_drop_peer(drop, ip, port)
        logger.debug("Sleeping for %s", sleep_time)
        await asyncio.sleep(sleep_time)
Example #6
0
 def make_filename(
     id: bytes,
     version: Union[str, DropVersion],
 ) -> str:
     """Build the on-disk filename for a drop metadata file.

     The name is the b64-encoded drop id joined to the version with an
     underscore.
     """
     encoded_id = crypto_util.b64encode(id).decode("utf-8")
     return "%s_%s" % (encoded_id, str(version))
Example #7
0
    def unsubscribe(self) -> None:
        """Removes the reference in the .5yncr folder therefore preventing
        future updates

        :return: None
        """
        save_path = _get_save_path()
        encoded_drop_id = crypto_util.b64encode(self.id).decode('utf-8')
        drop_loc_file = os.path.join(save_path, encoded_drop_id)
        # Bug fix: the message had no %s placeholder, so drop_loc_file was
        # passed as a stray argument and never appeared in the log output.
        logger.info("removing file: %s", drop_loc_file)
        os.remove(drop_loc_file)
Example #8
0
async def drop_metadata_to_response(md: DropMetadata) -> Dict[str, Any]:
    """
    Converts dropMetadata object into frontend readable dictionary.

    :param md: DropMetadata object
    :return: Dictionary for frontend
    """
    files = await get_file_names_percent(md.id)
    # Express per-file download progress as whole percentages
    file_percents = {
        name: int(fraction * 100)
        for name, fraction in files.items()
    }
    encode = crypto_util.b64encode
    return {
        'drop_id': encode(md.id),
        'name': md.name,
        'version': "%s" % md.version,
        'previous_versions': ["%s" % v for v in md.previous_versions],
        'primary_owner': encode(md.owner),
        'other_owners': [encode(o) for o in md.other_owners],
        'signed_by': encode(md.signed_by),
        'files': file_percents,
    }
Example #9
0
async def send_my_pub_key() -> None:
    """Send the pub key for this node to the Key Store"""
    # Load the private key once and reuse it: the original read it from
    # disk twice (once for the node id, once for the public key).
    priv_key = await load_private_key_from_disk()
    this_node_id = await node_id_from_private_key(priv_key)
    logger.info(
        "Sending pub key for %s to tracker",
        crypto_util.b64encode(this_node_id),
    )
    public_key_store = await get_public_key_store(this_node_id)
    pub_key_bytes = crypto_util.dump_public_key(priv_key.public_key())
    await public_key_store.set_key(pub_key_bytes)
Example #10
0
    def log(self) -> logging.Logger:
        """
        A logger for this object

        :return: a logger object for this class, created lazily on first
            access and cached on the instance
        """
        if self._log is None:
            logger_name = '.'.join([
                __name__,
                self.__class__.__name__,
                crypto_util.b64encode(self.id).decode('utf-8'),
            ])
            self._log = get_logger(logger_name)
        return self._log
Example #11
0
async def get_drop_location(drop_id: bytes) -> str:
    """Get a drop's location from the central data dir

    :param drop_id: The drop id to look up
    :return: The drops save dir
    """
    # Locations are stored one file per drop, named by the b64 drop id
    encoded_drop_id = crypto_util.b64encode(drop_id).decode('utf-8')
    location_file = os.path.join(_get_save_path(), encoded_drop_id)

    async with aiofiles.open(location_file, 'r') as f:
        return await f.read()
Example #12
0
    async def write_file(
        self,
        metadata_location: str,
    ) -> None:
        """Write this file metadata to a file

        :param metadata_location: where to save it
        """
        self.log.debug("writing file")
        file_name = crypto_util.b64encode(self.file_id).decode("utf-8")
        # exist_ok avoids the check-then-create race of the previous
        # os.path.exists() guard
        os.makedirs(metadata_location, exist_ok=True)
        async with aiofiles.open(
                os.path.join(metadata_location, file_name),
                'wb',
        ) as f:
            await f.write(self.encode())
Example #13
0
async def save_drop_location(drop_id: bytes, location: str) -> None:
    """Save a drop's location in the central data dir

    :param drop_id: The unencoded drop id
    :param location: Where the drop is located on disk
    """
    save_path = _get_save_path()

    encoded_drop_id = crypto_util.b64encode(drop_id).decode('utf-8')

    # exist_ok avoids the check-then-create race of the previous
    # os.path.exists() guard
    os.makedirs(save_path, exist_ok=True)

    async with aiofiles.open(
            os.path.join(save_path, encoded_drop_id),
            'w',
    ) as f:
        await f.write(location)
Example #14
0
async def get_file_metadata(
    drop_id: bytes,
    file_id: bytes,
    save_dir: str,
    file_name: str,
    peers: List[Tuple[str, int]],
) -> FileMetadata:
    """Get file metadata, given a file id, drop id and save dir.  If the file
    metadata is not on disk already, attempt to download from peers.

    :param drop_id: the drop id
    :param file_id: the file id
    :param save_dir: where the drop is saved
    :param file_name: the file's name, passed through to the metadata reader
    :param peers: where to look on the network for data
    :raises Exception: if the metadata cannot be fetched from any peer
    :return: A file metadata object
    """
    logger.info("getting file metadata for %s", crypto_util.b64encode(file_id))
    metadata_dir = os.path.join(save_dir, DEFAULT_FILE_METADATA_LOCATION)
    metadata = await FileMetadata.read_file(
        file_id=file_id,
        metadata_location=metadata_dir,
        file_name=file_name,
    )
    if metadata is None:
        logger.debug("file metadata not on disk, getting from network")
        if not peers:
            peers = await get_drop_peers(drop_id)
        metadata = await send_requests.do_request(
            request_fun=send_requests.send_file_metadata_request,
            peers=peers,
            fun_args={
                'drop_id': drop_id,
                'file_id': file_id
            },
        )

        if metadata is None:
            # Was a bare `raise Exception`; keep the type (callers may
            # catch Exception) but explain what actually failed
            raise Exception(
                "could not retrieve file metadata from any peer",
            )

        await metadata.write_file(metadata_dir)

    return metadata
Example #15
0
async def send_drops_once(
    ip: str,
    port: int,
    dps: Optional['DropPeerStore'] = None,
) -> None:
    """For each drop tell the dps our ip and port, then exit

    :param ip: ip/address to tell the dps
    :param port: port to tell the dps
    :param dps: if provided, use this dps, else get it from config
    """
    if dps is None:
        # No peer store supplied: derive our node id from the private key
        # on disk and look the store up from config
        private_key = await load_private_key_from_disk()
        this_node_id = await node_id_from_private_key(private_key, )
        dps = await get_drop_peer_store(this_node_id)

    logger.info("Sending drops to dps")
    for drop in list_drops():
        logger.debug("Sending drop %s", crypto_util.b64encode(drop))

        await dps.add_drop_peer(drop, ip, port)
Example #16
0
async def get_drop_peers(drop_id: bytes) -> List[Tuple[str, int]]:
    """
    Gets the peers that have a drop. Also shuffles the list

    :param drop_id: id of drop
    :raises PeerStoreError: If peers cannot be found
    :return: A list of peers in format (ip, port)
    """
    priv_key = await node_init.load_private_key_from_disk()
    node_id = await crypto_util.node_id_from_public_key(priv_key.public_key())
    drop_peer_store_instance = await drop_peer_store.get_drop_peer_store(
        node_id, )
    success, drop_peers = await drop_peer_store_instance.request_peers(drop_id)
    if not success:
        raise PeerStoreError(
            "No peers found for drop %s" % crypto_util.b64encode(drop_id),
        )

    # Keep only (ip, port) and randomize the order so load spreads
    # across peers
    peers = []
    for _peer_name, ip, port in drop_peers:
        peers.append((ip, int(port)))
    shuffle(peers)

    return peers
Example #17
0
async def get_drop_metadata(
    drop_id: bytes,
    peers: List[Tuple[str, int]],
    save_dir: Optional[str] = None,
    version: Optional[DropVersion] = None,
) -> DropMetadata:
    """Get drop metadata, given a drop id and save dir.  If the drop metadata
    is not on disk already, attempt to download from peers.

    :param drop_id: the drop id
    :param peers: where to look on the network for data
    :param save_dir: where the drop is saved; looked up from the central
        config if not provided
    :param version: if provided, request this specific drop version from
        the network (the on-disk copy is still consulted first)
    :return: A drop metadata object
    """
    logger.info("getting drop metadata for %s", crypto_util.b64encode(drop_id))
    if save_dir is None:
        logger.debug("save_dir not set, trying to look it up")
        save_dir = await get_drop_location(drop_id)
    logger.debug("save_dir is %s", save_dir)
    metadata_dir = os.path.join(save_dir, DEFAULT_DROP_METADATA_LOCATION)
    metadata = await DropMetadata.read_file(
        id=drop_id,
        metadata_location=metadata_dir,
    )

    if metadata is None:
        logger.debug("drop metadata not on disk, getting from network")
        metadata = await do_metadata_request(drop_id, peers, version)
        # mypy can't figure out that this won't be None
        metadata = cast(DropMetadata, metadata)

        await metadata.write_file(
            is_latest=True,
            metadata_location=metadata_dir,
        )

    return metadata
Example #18
0
async def initialize_drop(directory: str) -> bytes:
    """
    Initialize a drop from a directory. Generates the necessary drop and
    file metadata files and writes the drop location to the central config dir

    :param directory: The directory to initialize a drop from
    :return: The b64 encoded id of the created drop
    """
    logger.info("initializing drop in dir %s", directory)
    private_key = await node_init.load_private_key_from_disk()
    owner_id = await crypto_util.node_id_from_public_key(
        private_key.public_key(), )
    drop_meta, file_metas = await make_drop_metadata(
        path=directory,
        drop_name=os.path.basename(directory),
        owner=owner_id,
    )

    # Persist the drop metadata (marked current and latest), then each
    # file's metadata, then record where the drop lives
    drop_meta_dir = os.path.join(directory, DEFAULT_DROP_METADATA_LOCATION)
    await drop_meta.write_file(
        is_current=True,
        is_latest=True,
        metadata_location=drop_meta_dir,
    )
    file_meta_dir = os.path.join(directory, DEFAULT_FILE_METADATA_LOCATION)
    for file_meta in file_metas.values():
        await file_meta.write_file(file_meta_dir)
    await save_drop_location(drop_meta.id, directory)
    logger.info("drop initialized with %s files", len(file_metas))

    scanned_files = await fileio_util.scan_current_files(directory)
    await fileio_util.write_timestamp_file(scanned_files, directory)

    return crypto_util.b64encode(drop_meta.id)
Example #19
0
async def get_pub_key(node_id: bytes) -> crypto_util.rsa.RSAPublicKey:
    """
    Gets the public key from disk if possible otherwise request it from
    PublicKeyStore

    :param node_id: bytes for the node you want public key of
    :raises VerificationException: If the pub key cannot be retrieved
    :return: PublicKey
    """
    init_directory = get_full_init_directory(None)
    pub_key_directory = os.path.join(
        init_directory,
        DEFAULT_PUB_KEY_LOOKUP_LOCATION,
    )

    # exist_ok avoids the check-then-create race of the old isdir() guard
    os.makedirs(pub_key_directory, exist_ok=True)

    # Bug fix: b64encode returns bytes; formatting it directly produced
    # filenames like "b'...'.pub".  Decode first, matching how encoded ids
    # are used for filenames everywhere else in this project.
    encoded_node_id = crypto_util.b64encode(node_id).decode('utf-8')
    key_file_name = "{}.pub".format(encoded_node_id)
    key_path = os.path.join(pub_key_directory, key_file_name)

    if os.path.isfile(key_path):
        async with aiofiles.open(key_path, 'rb') as pub_file:
            pub_key = await pub_file.read()
            return load_public_key(pub_key)
    else:
        key_bytes = await load_private_key_from_disk()
        this_node_id = await node_id_from_private_key(key_bytes)
        public_key_store = await get_public_key_store(this_node_id)
        key_request = await public_key_store.request_key(node_id)
        if key_request[0] and key_request[1] is not None:
            pub_key = key_request[1].encode('utf-8')
            # Cache the fetched key on disk for next time
            await _save_key_to_disk(key_path, pub_key)
            return load_public_key(pub_key)
        else:
            raise VerificationException()
Example #20
0
async def read_chunk(
    filepath: str,
    position: int,
    file_hash: Optional[bytes] = None,
    chunk_size: int = DEFAULT_CHUNK_SIZE,
) -> Tuple[bytes, bytes]:
    """Reads a chunk for a file, returning the contents and its hash.  May
    raise relevant IO exceptions

    If file_hash is provided, will check the chunk that is read

    :param filepath: the path of the file to read from
    :param position: where to read from
    :param file_hash: if provided, will check the file hash
    :param chunk_size: (optional) override the chunk size
    :raises crypto_util.VerificationException: If the hash of the bytes read \
            does not match the provided hash
    :return: a double of (contents, hash), both bytes
    """
    # Incomplete downloads live under a temporary extension
    if not is_complete(filepath):
        logger.debug("file %s not done, adding extention", filepath)
        filepath += DEFAULT_INCOMPLETE_EXT

    offset = position * chunk_size
    async with aiofiles.open(filepath, 'rb') as f:
        logger.debug("async reading %s", filepath)
        await f.seek(offset)
        data = await f.read(chunk_size)

    digest = await crypto_util.hash(data)
    logger.debug("async read hash: %s", crypto_util.b64encode(digest))
    if file_hash is not None:
        logger.info("input file_hash is not None, checking")
        if digest != file_hash:
            raise crypto_util.VerificationException()
    return (data, digest)