def delete_bucket_on_project(current_session, project_name, bucket_name):
    """
    Remove a bucket and its relationship to a project
    """
    bucket = current_session.query(Bucket).filter_by(name=bucket_name).first()
    if not bucket:
        msg = "".join(["Bucket name ", bucket_name, " not found"])
        raise NotFound(msg)
    provider = (current_session.query(CloudProvider).filter(
        CloudProvider.id == bucket.provider_id).first())
    project = (current_session.query(Project).filter(
        Project.name == project_name).first())
    if not project:
        msg = "".join(["Project name ", project_name, " not found"])
        raise NotFound(msg)
    proj_to_bucket = (current_session.query(ProjectToBucket).filter(
        ProjectToBucket.bucket_id == bucket.id,
        ProjectToBucket.project_id == project.id,
    ).first())
    if proj_to_bucket:
        current_session.delete(proj_to_bucket)
        current_session.delete(bucket)
        return {"result": "success", "provider": provider}
    else:
        current_session.delete(bucket)
        msg = ("WARNING: Project-to-bucket "
               "relationship not found, deleting bucket anyway")
        return {"result": msg, "provider": provider}
Example #2
    def index_document(self):
        indexd_server = config.get("INDEXD") or config["BASE_URL"] + "/index"
        url = indexd_server + "/index/"
        try:
            res = requests.get(url + self.file_id)
        except Exception as e:
            logger.error("failed to reach indexd at {0}: {1}".format(
                url + self.file_id, e))
            raise UnavailableError(
                "Failed to reach id service to find data location")
        if res.status_code == 200:
            try:
                json_response = res.json()
                if "urls" not in json_response:
                    logger.error("URLs are not included in response from "
                                 "indexd: {}".format(url + self.file_id))
                    raise InternalError("URLs and metadata not found")
                return json_response
            except Exception as e:
                logger.error("indexd response for {} is not valid JSON".format(
                    url + self.file_id))
                raise InternalError("internal error from indexd: {}".format(e))
        elif res.status_code == 404:
            logger.error("Not Found. indexd could not find {}: {}".format(
                url + self.file_id, res.text))
            raise NotFound("No indexed document found with id {}".format(
                self.file_id))
        else:
            raise UnavailableError(res.text)
Example #3
def create_user_by_username_project(current_session, new_user, proj):
    """
    Create a user for a specific project
    """
    project = (current_session.query(Project).filter(
        Project.auth_id == proj["auth_id"]).first())
    if not project:
        msg = "".join(["error: auth_id name ", proj["auth_id"], " not found"])
        raise NotFound(msg)

    # Enforce a full match on (user_id, project_id).
    # The table's keys only cover two of the arguments; partial matching
    # may be addressed later. For now, a full match is required to
    # replace or update an existing privilege.
    priv = (current_session.query(AccessPrivilege).filter(
        AccessPrivilege.user_id == new_user.id,
        AccessPrivilege.project_id == project.id,
    ).first())
    if priv:
        # Update the only updatable field (privilege)
        priv.privilege = proj["privilege"]
    else:
        priv = AccessPrivilege(user_id=new_user.id,
                               project_id=project.id,
                               privilege=proj["privilege"])
        current_session.add(priv)
        current_session.flush()

    return {"user": new_user, "project": project, "privileges": priv}
Example #4
def delete_keypair(user, current_session, access_key):
    result = (current_session.query(HMACKeyPair).filter(
        HMACKeyPair.access_key == access_key).filter(
            HMACKeyPair.user_id == user.id).first())
    if not result:
        raise NotFound("Access key doesn't exist")
    result.archive_keypair(current_session)
Example #5
    def _get_or_create_storage_user(self, username, provider, session):
        """
        Return a user.

        Depending on the provider, this may call the backend to get or create
        the user, or just search amanuensis's db.

        Args:
            username (str): User's name
            provider (str): backend provider
            session (userdatamodel.driver.SQLAlchemyDriver.session): amanuensis's db
                session to query for Users

        Returns:
            amanuensis.models.User: User with username
        """
        if provider == GOOGLE_PROVIDER:
            user = query_for_user(session=session, username=username.lower())

            if not user:
                raise NotFound(
                    "User not found with username {}. For Google Storage "
                    "Backend user's must already exist in the db and have a "
                    "Google Proxy Group.".format(username))
            return user

        return self.clients[provider].get_or_create_user(username)
Example #6
def remove_user_from_project(current_session, user, project):
    access = udm.get_user_project_access_privilege(current_session, user,
                                                   project)
    if access:
        current_session.delete(access)
    else:
        raise NotFound("Project {0} not connected to user {1}".format(
            project.name, user.username))
Example #7
def remove_project_from_group(current_session, group, project):
    to_be_removed = udm.get_project_group_access_privilege(
        current_session, project, group)
    if to_be_removed:
        current_session.delete(to_be_removed)
        msg = "Project: {0} SUCCESFULLY removed from Group: {1}".format(
            project.name, group.name)
        return {"result": msg}
    else:
        raise NotFound("Project {0} and Group {1} are not linked".format(
            project.name, group.name))
def create_bucket_on_project(current_session, project_name, bucket_name,
                             provider_name):
    """
    Create a bucket and assign it to a project
    """
    project = (current_session.query(Project).filter(
        Project.name == project_name).first())
    if not project:
        msg = "".join(["Project ", project_name, " not found"])
        raise NotFound(msg)
    provider = (current_session.query(CloudProvider).filter(
        CloudProvider.name == provider_name).first())
    if not provider:
        msg = "".join(["Provider ", provider_name, " not found"])
        raise NotFound(msg)
    bucket = (current_session.query(Bucket).filter(
        Bucket.name == bucket_name, Bucket.provider_id == provider.id).first())
    if not bucket:
        bucket = Bucket(name=bucket_name, provider_id=provider.id)
        current_session.add(bucket)
        current_session.flush()
        proj_to_bucket = ProjectToBucket(project_id=project.id,
                                         bucket_id=bucket.id,
                                         privilege=["owner"])
        current_session.add(proj_to_bucket)
        # Find the users that need to be updated
        users_in_project = current_session.query(AccessPrivilege).filter(
            AccessPrivilege.project_id == project.id)
        users_to_update = []
        for row in users_in_project:
            usr = current_session.query(User).filter(
                User.id == row.user_id).first()
            users_to_update.append((usr, row.privilege))
        return {
            "result": "success",
            "provider": provider,
            "bucket": bucket,
            "users_to_update": users_to_update,
        }
    else:
        raise UserError("Error, name already in use for that storage system")
Example #9
    def _get_signed_url(self, protocol, action, expires_in, force_signed_url,
                        r_pays_project, file_name):
        if action == "upload":
            # NOTE: self.index_document ensures the GUID exists in indexd and raises
            #       an error if not (which is expected to be caught upstream in the
            #       app)
            blank_record = BlankIndex(uploader="",
                                      guid=self.index_document.get("did"))
            return blank_record.make_signed_url(file_name=file_name,
                                                expires_in=expires_in)

        if not protocol:
            # no protocol specified, return first location as signed url
            try:
                return self.indexed_file_locations[0].get_signed_url(
                    action,
                    expires_in,
                    public_data=self.public,
                    force_signed_url=force_signed_url,
                    r_pays_project=r_pays_project,
                )
            except IndexError:
                raise NotFound("Can't find any file locations.")

        for file_location in self.indexed_file_locations:
            # allow the file location to be https, even if they specified http
            if (file_location.protocol
                    == protocol) or (protocol == "http"
                                     and file_location.protocol == "https"):
                return file_location.get_signed_url(
                    action,
                    expires_in,
                    public_data=self.public,
                    force_signed_url=force_signed_url,
                    r_pays_project=r_pays_project,
                )

        raise NotFound("File {} does not have a location with specified "
                       "protocol {}.".format(self.file_id, protocol))
Example #10
def remove_user_from_group(current_session, user, group):
    to_be_removed = udm.get_user_group_access_privilege(
        current_session, user, group)
    if to_be_removed:
        current_session.delete(to_be_removed)
        return {
            "result":
            ("User: {0} SUCCESFULLY "
             "removed from Group: {1}".format(user.username, group.name))
        }
    else:
        raise NotFound("User {0} and Group {1} are not linked".format(
            user.username, group.name))
Example #11
def download_certificate(certificate):
    if not flask.g.user.application:
        flask.g.user.application = Application()
        current_session.merge(flask.g.user)
    cert = (current_session.query(Certificate).filter(
        Certificate.name == certificate).filter(
            Certificate.application_id == flask.g.user.application.id).first())
    if cert:
        resp = flask.make_response(cert.data)
        resp.headers["Content-Type"] = "application/octet-stream"
        resp.headers[
            "Content-Disposition"] = "attachment; filename={}.{}".format(
                cert.name, cert.extension)
        return resp
    else:
        raise NotFound("No certificate with name {} found".format(certificate))
def get_provider(current_session, provider_name):
    """
    Get the provider info from the userdatamodel
    """
    provider = (current_session.query(CloudProvider).filter(
        CloudProvider.name == provider_name).first())
    if not provider:
        msg = "".join(["error, cloud provider ", provider_name, " not found"])
        raise NotFound(msg)
    info = {
        "name": provider.name,
        "backend": provider.backend,
        "endpoint": provider.endpoint,
        "description": provider.description,
        "service": provider.service,
    }
    return info
Example #13
def create_project(current_session, name, auth_id, storage_accesses):
    """
    Creates a project with an associated auth_id and storage access
    """
    new_project = Project(name=name, auth_id=auth_id)
    current_session.add(new_project)
    current_session.flush()
    for storage in storage_accesses:
        provider = (current_session.query(CloudProvider).filter(
            CloudProvider.name == storage).first())
        if provider:
            new_storage_access = StorageAccess(provider_id=provider.id,
                                               project_id=new_project.id)
            current_session.add(new_storage_access)
        else:
            raise NotFound("Provider {} not found".format(storage))
    return new_project
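# A minimal usage sketch, assuming the listed cloud providers already exist in
# the db and that the caller commits; all names below are placeholders.
def example_create_project(current_session):
    project = create_project(
        current_session,
        name="myproject",
        auth_id="phs000123",
        storage_accesses=["aws", "gcs"],
    )
    current_session.commit()
    return project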
Example #14
def send_email(from_email, to_emails, subject, text, smtp_domain):
    """
    Send email to group of emails using mail gun api.

    https://app.mailgun.com/

    Args:
        from_email(str): from email
        to_emails(list): list of emails to receive the messages
        subject(str): email subject
        text(str): the text message
        smtp_domain(str): name of the smtp domain entry in config["GUN_MAIL"],
            whose value looks like:

            {
                "smtp_hostname": "smtp.mailgun.org",
                "default_login": "******",
                "api_url": "https://api.mailgun.net/v3/mailgun.planx-pla.net",
                "smtp_password": "******",
                "api_key": "api key"
            }

    Returns:
        Http response

    Exceptions:
        KeyError

    """
    if smtp_domain not in config["GUN_MAIL"] or not config["GUN_MAIL"].get(
        smtp_domain
    ).get("smtp_password"):
        raise NotFound(
            "SMTP Domain '{}' does not exist in configuration for GUN_MAIL or "
            "smtp_password was not provided. "
            "Cannot send email.".format(smtp_domain)
        )

    api_key = config["GUN_MAIL"][smtp_domain].get("api_key", "")
    email_url = config["GUN_MAIL"][smtp_domain].get("api_url", "") + "/messages"

    return requests.post(
        email_url,
        auth=("api", api_key),
        data={"from": from_email, "to": to_emails, "subject": subject, "text": text},
    )
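# A minimal usage sketch. The GUN_MAIL entry mirrors the structure documented
# in the docstring above; the domain, addresses, and key values are placeholders.
#
# config["GUN_MAIL"] = {
#     "mailgun.example.org": {
#         "api_url": "https://api.mailgun.net/v3/mailgun.example.org",
#         "api_key": "<api key>",
#         "smtp_password": "<password>",
#     }
# }
def example_send_email():
    return send_email(
        from_email="noreply@example.org",
        to_emails=["user@example.org"],
        subject="Test message",
        text="Hello from mailgun",
        smtp_domain="mailgun.example.org",
    )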
def delete_provider(current_session, provider_name):
    """
    Delete a cloud provider if it has no
    ongoing relationships
    """
    provider = (current_session.query(CloudProvider).filter(
        CloudProvider.name == provider_name).first())
    if not provider:
        msg = "provider name {}, not found"
        raise NotFound(msg.format(provider_name))

    projects = (current_session.query(StorageAccess).filter(
        StorageAccess.provider_id == provider.id).first())
    if projects:
        msg = ("Provider name {} in use in projects."
               " Please remove these references and retry")
        raise UserError(msg.format(provider_name))

    current_session.delete(provider)
    return {"response": "success"}
Example #16
def list_buckets_on_project(current_session, project_name):
    """
    List all the buckets assigned to a project
    """
    project = (current_session.query(Project).filter(
        Project.name == project_name).first())
    if not project:
        msg = "".join(["Project name ", project_name, " not found"])
        raise NotFound(msg)
    buckets = current_session.query(ProjectToBucket).filter(
        ProjectToBucket.project_id == project.id)
    response = {"buckets": []}
    for bucket in buckets:
        buck = (current_session.query(Bucket).filter(
            Bucket.id == bucket.bucket_id).first())
        provider = (current_session.query(CloudProvider).filter(
            CloudProvider.id == buck.provider_id).first())
        new_buck = {"name": buck.name, "provider": provider.name}
        response["buckets"].append(new_buck)
    return response
Example #17
def get_project_info(current_session, project_name):
    """
    Get project info from userdatamodel
    from its name
    """
    proj = get_project(current_session, project_name)
    if not proj:
        msg = "".join(["Error: project ", project_name, " not found"])
        raise NotFound(msg)
    info = {
        "id": proj.id,
        "name": proj.name,
        "auth_id": proj.auth_id,
        "description": proj.description,
        "associated buckets": [],
    }
    buckets = current_session.query(ProjectToBucket).filter(
        ProjectToBucket.project_id == proj.id)
    for row in buckets:
        bucket = (current_session.query(Bucket).filter(
            Bucket.id == row.bucket_id).first())
        info["associated buckets"].append(bucket.name)
    return info
Example #18
def find_user(username, session):
    user = query_for_user(session=session, username=username)
    if not user:
        raise NotFound("user {} not found".format(username))
    return user
Example #19
def send_email_ses(body, to_emails, subject):
    """
    Send email to a group of emails using the AWS SES api.

    Args:
        body(str): the text message
        to_emails(list): list of emails to receive the message
        subject(str): email subject

    Returns:
        Http response

    Exceptions:
        KeyError

    """
    # NOTE: requires boto3 and botocore.exceptions.ClientError to be imported
    # at module level.
    if not config["AWS_SES"]:
        raise NotFound(
            "AWS SES does not exist in configuration. Cannot send email.")
    if "sender" not in config["AWS_SES"]:
        raise NotFound(
            "AWS SES sender does not exist in configuration. Cannot send email.")
    if "AWS_ACCESS_KEY" not in config["AWS_SES"] or "AWS_SECRET_KEY" not in config["AWS_SES"]:
        raise NotFound(
            "AWS SES credentials are missing in configuration. Cannot send email.")

    # TODO retrieve body from a template (pass as external param above)
    if not body:
        raise Exception("You must provide a text or html body.")

    sender = config["AWS_SES"]["sender"]
    aws_access_key = config["AWS_SES"]["AWS_ACCESS_KEY"]
    aws_secret_key = config["AWS_SES"]["AWS_SECRET_KEY"]
    region = config["AWS_SES"].get("AWS_REGION") or "us-east-1"
    # TODO get a general team email for this
    recipient = config["AWS_SES"].get("RECIPIENT") or "*****@*****.**"

    client = boto3.client(
        "ses",
        region_name=region,
        aws_access_key_id=aws_access_key,
        aws_secret_access_key=aws_secret_key,
    )
    try:
        response = client.send_email(
            Destination={
                "ToAddresses": to_emails or [recipient],
            },
            Message={
                "Body": {
                    "Text": {
                        "Charset": "UTF-8",
                        "Data": body,
                    },
                },
                "Subject": {
                    "Charset": "UTF-8",
                    "Data": subject,
                },
            },
            Source=sender,
        )
    except ClientError as e:
        logging.error(e.response["Error"]["Message"])
        raise
    logging.info("Email sent! Message ID: {}".format(response["MessageId"]))
    return response
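# A minimal usage sketch, assuming config["AWS_SES"] carries the keys read
# above; all values shown are placeholders.
#
# config["AWS_SES"] = {
#     "sender": "noreply@example.org",
#     "RECIPIENT": "team@example.org",
#     "AWS_ACCESS_KEY": "<access key>",
#     "AWS_SECRET_KEY": "<secret key>",
#     "AWS_REGION": "us-east-1",
# }
def example_send_email_ses():
    return send_email_ses(
        body="Hello from SES",
        to_emails=["user@example.org"],
        subject="Test message",
    )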
Example #20
def delete_user(current_session, username):
    """
    Remove a user from both the userdatamodel
    and the associated storage for that project/bucket.
    Returns a dictionary.

    The amanuensis db may not always be in perfect sync with Google.  We err on the
    side of safety (we prioritise making sure the user is really cleared out of
    Google to prevent unauthorized data access issues; we prefer cirrus/Google
    over the amanuensis db as the source of truth.) So, if the amanuensis-Google sync
    situation changes, do edit this code accordingly.
    """

    logger.debug("Beginning delete user.")

    with GoogleCloudManager() as gcm:
        # Delete user's service accounts, SA keys, user proxy group from Google.
        # Noop if Google not in use.

        user = query_for_user(session=current_session, username=username)
        if not user:
            raise NotFound("user name {} not found".format(username))

        logger.debug("Found user in amanuensis db: {}".format(user))

        # First: Find this user's proxy group.
        google_proxy_group_from_fence_db = (
            current_session.query(GoogleProxyGroup).filter(
                GoogleProxyGroup.id == user.google_proxy_group_id).first()
            # one_or_none() would be better, but is only in sqlalchemy 1.0.9
        )

        if google_proxy_group_from_fence_db:
            gpg_email = google_proxy_group_from_fence_db.email
            logger.debug(
                "Found Google proxy group in amanuensis db: {}".format(
                    gpg_email))
        else:
            # Construct the proxy group name that would have been used
            # and check if it exists in cirrus, in case amanuensis db just
            # didn't know about it.
            logger.debug(
                "Could not find Google proxy group for this user in amanuensis db. Checking cirrus..."
            )
            pgname = get_proxy_group_name_for_user(user.id,
                                                   user.username,
                                                   prefix='a')
            google_proxy_group_from_google = gcm.get_group(pgname)
            gpg_email = (google_proxy_group_from_google.get("email")
                         if google_proxy_group_from_google else None)

        if not gpg_email:
            logger.info(
                "Could not find Google proxy group for user in amanuensis db or in cirrus. "
                "Assuming Google not in use as IdP. Proceeding with amanuensis deletes."
            )
        else:
            logger.debug(
                "Found Google proxy group email of user to delete: {}."
                "Proceeding with Google deletions.".format(gpg_email))
            # Note: amanuensis db deletes here are interleaved with Google deletes.
            # This is so that if (for example) Google succeeds in deleting one SA
            # and then fails on the next, and the deletion process aborts, there
            # will not remain a record in amanuensis of the first, now-nonexistent SA.

            delete_google_service_accounts_and_keys(current_session, gcm,
                                                    gpg_email)
            delete_google_proxy_group(current_session, gcm, gpg_email,
                                      google_proxy_group_from_fence_db, user)

    logger.debug("Deleting all user data from amanuensis database...")
    current_session.delete(user)
    current_session.commit()
    logger.info("Deleted all user data from amanuensis database. Returning.")

    return {"result": "success"}
Example #21
    def _update_access_to_bucket(
        self,
        bucket,
        provider,
        storage_user,
        storage_username,
        access,
        session,
        google_bulk_mapping=None,
    ):
        # Need different logic for google (since buckets can have multiple
        # access groups)
        if provider != GOOGLE_PROVIDER:
            self.clients[provider].add_bucket_acl(bucket.name,
                                                  storage_username,
                                                  access=access)
            return

        if not bucket.google_bucket_access_groups:
            raise NotFound(
                "Google bucket {} does not have any access groups.".format(
                    bucket.name))

        access = StorageManager._get_bucket_access_privileges(access)

        for bucket_access_group in bucket.google_bucket_access_groups:
            bucket_privileges = bucket_access_group.privileges or []
            if set(bucket_privileges).issubset(access):
                bucket_name = bucket_access_group.email

                if google_bulk_mapping is not None:
                    google_bulk_mapping.setdefault(bucket_name,
                                                   []).append(storage_username)
                    self.logger.info(
                        "User {}'s Google proxy group ({}) added to bulk mapping for Google Bucket Access Group {}."
                        .format(storage_user.email, storage_username,
                                bucket_name))
                else:
                    # NOTE: bucket_name for Google is the Google Access Group's
                    #       email address.
                    # TODO Update storageclient API for more clarity
                    self.clients[provider].add_bucket_acl(
                        bucket_name, storage_username)

                    self.logger.info(
                        "User {}'s Google proxy group ({}) added to Google Bucket Access Group {}."
                        .format(storage_user.email, storage_username,
                                bucket_name))

                StorageManager._add_google_db_entry_for_bucket_access(
                    storage_user, bucket_access_group, session)

            else:
                # In the case of google, since we have multiple groups
                # with access to the bucket, we need to also remove access
                # here in case a users permissions change from read & write
                # to just read.
                StorageManager._remove_google_db_entry_for_bucket_access(
                    storage_user, bucket_access_group, session)

                bucket_name = bucket_access_group.email

                if google_bulk_mapping is not None:
                    google_bulk_mapping.setdefault(bucket_name, [])
                    while storage_username in google_bulk_mapping[bucket_name]:
                        google_bulk_mapping[bucket_name].remove(
                            storage_username)
                        self.logger.debug(
                            "User {}'s Google proxy group ({}) removed from bulk mapping in Google Bucket Access Group {}."
                            .format(storage_user.email, storage_username,
                                    bucket_name))

                else:
                    self.clients[provider].delete_bucket_acl(
                        bucket_name, storage_username)

                    self.logger.info(
                        "User {}'s Google proxy group ({}) removed or never existed in Google Bucket Access Group {}."
                        .format(storage_user.email, storage_username,
                                bucket_name))
Example #22
def get_group_projects(current_session, groupname):
    group = get_group(current_session, groupname)
    if not group:
        raise NotFound("Group {0} does not exist".format(groupname))
    return udm.get_group_projects(current_session, group)
Example #23
def _force_update_user_google_account(
    user_id, google_email, proxy_group_id, _allow_new=False, requested_expires_in=None
):
    """
    Adds user's google account to proxy group and/or updates expiration for
    that google account's access.

    WARNING: This assumes that provided arguments represent valid information.
             This BLINDLY adds without verification. Do verification
             before this.

    Specifically, this ASSUMES that the proxy group provided belongs to the
    given user and that the user has ALREADY authenticated to prove the
    provided google_email is also theirs.

    Args:
        user_id (str): User's identifier
        google_email (str): User's Google email
        proxy_group_id (str): User's Proxy Google group id
        _allow_new (bool, optional): Whether or not a new linkage between
            Google email and the given user should be allowed
        requested_expires_in (int, optional): Requested time (in seconds)
            during which the link will be valid

    Raises:
        NotFound: Linked Google account not found
        Unauthorized: Couldn't determine user

    Returns:
        Expiration time of the newly updated google account's access
    """
    user_google_account = (
        current_session.query(UserGoogleAccount)
        .filter(UserGoogleAccount.email == google_email)
        .first()
    )

    if not user_google_account:
        if _allow_new:
            if user_id is not None:
                user_google_account = add_new_user_google_account(
                    user_id, google_email, current_session
                )
                logger.info(
                    "Linking Google account {} to user with id {}.".format(
                        google_email, user_id
                    )
                )
            else:
                raise Unauthorized(
                    "Could not determine authed user "
                    "from session. Unable to link Google account."
                )
        else:
            raise NotFound(
                "User does not have a linked Google account. Update "
                "was attempted and failed."
            )

    # timestamp at which the link will expire
    expiration = get_default_google_account_expiration()
    if requested_expires_in:
        requested_expiration = int(time.time()) + requested_expires_in
        expiration = min(requested_expiration, expiration)

    force_update_user_google_account_expiration(
        user_google_account, proxy_group_id, google_email, expiration, current_session
    )

    logger.info(
        "Adding user's (id: {}) Google account to their proxy group (id: {})."
        " Expiration: {}".format(
            user_google_account.user_id, proxy_group_id, expiration
        )
    )

    current_session.commit()

    return expiration
Example #24
def get_user(current_session, username):
    user = udm.get_user(current_session, username)
    if not user:
        raise NotFound("user {} not found".format(username))
    return user