Example #1
    def __get_presentation_credential_info(self,
                                           presentation_json,
                                           props=None):
        if "verifiableCredential" not in presentation_json:
            raise BadRequestException(
                msg='Verifiable credentials do not exist.')

        vcs_json = presentation_json["verifiableCredential"]
        if not isinstance(vcs_json, list):
            raise BadRequestException(
                msg="Verifiable credentials are not the list.")

        if not vcs_json or not vcs_json[0]:
            raise BadRequestException(msg='The credential is invalid.')
        vc_json = vcs_json[0]
        if "credentialSubject" not in vc_json or type(vc_json["credentialSubject"]) != dict\
                or "issuer" not in vc_json:
            raise BadRequestException(
                'The credential subject is invalid or the issuer does not exist.'
            )
        credential_info = vc_json["credentialSubject"]

        required_props = [
            'id',
        ]
        if props:
            required_props.extend(props)
        not_exist_props = list(
            filter(lambda p: p not in credential_info, required_props))
        if not_exist_props:
            raise BadRequestException(
                f"The credentialSubject's prop ({not_exist_props}) does not exists."
            )

        credential_info[
            "expTime"] = self.__get_presentation_credential_expire_time(
                vcs_json)
        credential_info["userDid"] = vc_json["issuer"]
        return credential_info
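A hypothetical presentation payload that would pass the checks above; the DID values and the extra property name are placeholders, and the expiration field is only an assumption (it is read by __get_presentation_credential_expire_time, which is not shown here):

    presentation_json = {
        "verifiableCredential": [{
            "issuer": "did:elastos:<user-did>",            # copied into credential_info["userDid"]
            "expirationDate": "2030-01-01T00:00:00Z",      # assumed field; expiry handling is not shown above
            "credentialSubject": {
                "id": "did:elastos:<app-instance-did>",    # 'id' is always required
                "appDid": "did:elastos:<app-did>"          # extra props can be required via the `props` argument
            }
        }]
    }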
Example #2
 def __create_challenge(self, app_instance_did, nonce, expire_time):
     """
     Create challenge for sign in response.
     """
     builder = lib.DIDDocument_GetJwtBuilder(
         self.doc)  # service instance doc
     if not builder:
         raise BadRequestException(
             msg=f'Cannot get challenge builder: {self.get_error_message()}.')
     lib.JWTBuilder_SetHeader(builder, "type".encode(), "JWT".encode())
     lib.JWTBuilder_SetHeader(builder, "version".encode(), "1.0".encode())
     lib.JWTBuilder_SetSubject(builder, "DIDAuthChallenge".encode())
     lib.JWTBuilder_SetAudience(builder, app_instance_did.encode())
     lib.JWTBuilder_SetClaim(builder, "nonce".encode(), nonce.encode())
     lib.JWTBuilder_SetExpiration(builder, expire_time)
     lib.JWTBuilder_Sign(builder, ffi.NULL, self.storepass)
     token = lib.JWTBuilder_Compact(builder)
     lib.JWTBuilder_Destroy(builder)
     if not token:
         raise BadRequestException(msg="Failed to create challenge token.")
     return ffi.string(token).decode()
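The compact token returned above is a standard JWT, so its claims can be inspected (without verifying the DID signature) with PyJWT; this is only an illustration, not part of the project:

    import jwt  # PyJWT

    def inspect_challenge(token: str) -> dict:
        # Decode the payload only; real verification against the issuer's DID
        # document is done by the Elastos DID library, not shown here.
        return jwt.decode(token, options={"verify_signature": False})

    # Expected claims based on the builder calls above:
    # {'sub': 'DIDAuthChallenge', 'aud': '<app instance did>', 'nonce': '...', 'exp': <expire_time>, ...}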
Example #3
    def _upload_file_from_request_stream(self, user_did, app_did, path):
        full_path, err = query_upload_get_filepath(user_did, app_did, path)
        if err:
            raise BadRequestException(
                msg=f'Failed to get upload file full path: "{str(err)}"')

        temp_file, old_file_size = self._upload_file2temp(), 0
        if full_path.exists():
            old_file_size = os.path.getsize(full_path.as_posix())
            full_path.unlink()
        shutil.move(temp_file.as_posix(), full_path.as_posix())

        return full_path, old_file_size
Example #4
    def check_backup_status(self, user_did, is_restore=False):
        doc = cli.find_one_origin(DID_INFO_DB_NAME,
                                  VAULT_BACKUP_INFO_COL, {USER_DID: user_did},
                                  create_on_absence=True)
        if doc and doc[VAULT_BACKUP_INFO_STATE] != VAULT_BACKUP_STATE_STOP \
                and doc[VAULT_BACKUP_INFO_TIME] < (datetime.utcnow().timestamp() - 60 * 60 * 24):
            raise BackupIsInProcessingException(
                'The backup/restore is in progress.')

        if is_restore and not (
                doc[VAULT_BACKUP_INFO_STATE] == VAULT_BACKUP_STATE_STOP
                or doc[VAULT_BACKUP_INFO_MSG] == VAULT_BACKUP_MSG_SUCCESS):
            raise BadRequestException(
                msg='No successful backup exists for restore.')
Example #5
    def write_file_by_response(self, response, file_path: Path, is_temp=False):
        if not self.create_parent_dir(file_path):
            raise BadRequestException(msg=f'Failed to create parent folder for file {file_path.name}')

        if is_temp:
            def on_save_to_temp(temp_file):
                self.write_file_by_response(response, temp_file)
            self.__save_with_temp_file(file_path, on_save_to_temp)
        else:
            with open(file_path.as_posix(), 'bw') as f:
                f.seek(0)
                for chunk in response.iter_content(chunk_size=CHUNK_SIZE):
                    if chunk:
                        f.write(chunk)
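A minimal standalone sketch of the same streaming pattern with the requests library; the URL, chunk size, and helper name are illustrative and not taken from the project:

    import requests
    from pathlib import Path

    CHUNK_SIZE = 4096  # placeholder value

    def download_to_file(url: str, file_path: Path):
        file_path.parent.mkdir(parents=True, exist_ok=True)  # stands in for create_parent_dir()
        with requests.get(url, stream=True) as response, open(file_path, 'wb') as f:
            for chunk in response.iter_content(chunk_size=CHUNK_SIZE):
                if chunk:  # skip keep-alive chunks
                    f.write(chunk)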
Example #6
 def __save_nonce_to_db(self, app_instance_did):
     nonce, expire_time = create_nonce(), int(
         datetime.now().timestamp()) + hive_setting.AUTH_CHALLENGE_EXPIRED
     try:
         if not get_did_info_by_app_instance_did(app_instance_did):
             add_did_nonce_to_db(app_instance_did, nonce, expire_time)
         else:
             update_did_info_by_app_instance_did(app_instance_did, nonce,
                                                 expire_time)
     except Exception as e:
         logging.getLogger("HiveAuth").error(
             f"Exception in __save_nonce_to_db: {e}")
         raise BadRequestException(msg='Failed to generate nonce.')
     return nonce, expire_time
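create_nonce is defined elsewhere in the project; a plausible stand-in for experimenting with this flow is a random UUID string (an assumption, not the project's implementation):

    import uuid

    def create_nonce() -> str:
        # Any unpredictable, single-use string works as a challenge nonce.
        return str(uuid.uuid4())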
Example #7
    def __validate_type(json_data, layer):
        if layer > 5:
            raise BadRequestException(msg='Too many nested conditions.')

        validate_exists(json_data, 'condition', ['name', 'type', 'body'])

        condition_type = json_data['type']
        if condition_type not in ['or', 'and', 'queryHasResults']:
            raise BadRequestException(
                msg=f"Unsupported condition type {condition_type}")

        if condition_type in ['and', 'or']:
            if not isinstance(json_data['body'], list)\
                    or len(json_data['body']) < 1:
                raise BadRequestException(
                    msg=f"Condition body MUST be a list containing "
                    f"at least one element for the type '{condition_type}'"
                )
            for data in json_data['body']:
                Condition.__validate_type(data, layer + 1)
        else:
            validate_exists(json_data['body'], 'condition.body', [
                'collection',
            ])
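A hypothetical condition document that satisfies the validation above; the names and collections are made up:

    condition = {
        "name": "user_is_member_and_not_banned",
        "type": "and",                                # 'and'/'or' take a list body, nested at most 5 levels deep
        "body": [{
            "name": "user_in_group",
            "type": "queryHasResults",
            "body": {"collection": "groups"}          # 'collection' is required for this type
        }, {
            "name": "user_not_banned",
            "type": "queryHasResults",
            "body": {"collection": "banned_users"}
        }]
    }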
Example #8
    def __create_access_token(self, credential_info, subject):
        doc = lib.DIDStore_LoadDID(self.store, self.did)
        if not doc:
            raise BadRequestException(
                msg='Cannot load service instance document in creating access token.'
            )

        builder = lib.DIDDocument_GetJwtBuilder(doc)
        if not builder:
            raise BadRequestException(
                msg='Cannot get builder from doc in creating access token.')

        lib.JWTBuilder_SetHeader(builder, "typ".encode(), "JWT".encode())
        lib.JWTBuilder_SetHeader(builder, "version".encode(), "1.0".encode())
        lib.JWTBuilder_SetSubject(builder, subject.encode())
        lib.JWTBuilder_SetAudience(builder, credential_info["id"].encode())
        lib.JWTBuilder_SetExpiration(builder, credential_info["expTime"])

        props = {
            k: credential_info[k]
            for k in credential_info if k not in ['id', 'expTime']
        }
        if not lib.JWTBuilder_SetClaim(builder, "props".encode(),
                                       json.dumps(props).encode()):
            lib.JWTBuilder_Destroy(builder)
            raise BadRequestException(
                msg='Cannot set claim in creating access token.')

        lib.JWTBuilder_Sign(builder, ffi.NULL, self.storepass)
        token = lib.JWTBuilder_Compact(builder)
        lib.JWTBuilder_Destroy(builder)
        if not token:
            raise BadRequestException(
                msg='Cannot build token in creating access token.')

        return ffi.string(token).decode()
Example #9
    def ipfs_promotion(self):
        user_did, app_did, backup = self._check_auth_backup()
        if backup[STATE] != STATE_FINISH:
            raise BadRequestException(msg='No backup data exists.')

        from src.view.subscription import vault_subscription
        vault = vault_subscription.get_checked_vault(user_did,
                                                     throw_exception=False)
        if vault:
            raise AlreadyExistsException(
                msg='The vault already exists; no promotion is needed.')

        vault_subscription.create_vault(
            user_did, vault_subscription.get_price_plan('vault', 'Free'), True)
        cli.import_mongodb_in_backup_server(user_did)
Example #10
 def execute(self):
     cli.check_vault_access(self.script.user_did, VAULT_ACCESS_R)
     body = self.get_populated_body()
     logging.info(
         f'get file hash: is_ipfs={self.is_ipfs}, path={body["path"]}')
     if self.is_ipfs:
         doc = self.ipfs_files.get_file_metadata(self.get_target_did(),
                                                 self.get_target_app_did(),
                                                 body['path'])
         return self.get_output_data({"SHA256": doc[COL_IPFS_FILES_SHA256]})
     data, err = query_hash(self.get_target_did(),
                            self.get_target_app_did(), body['path'])
     if err:
         raise BadRequestException(
             'Failed to get file hash code with error message: ' + str(err))
     return self.get_output_data(data)
Example #11
 def create_vault(self, user_did, price_plan, is_upgraded=False):
     now = datetime.utcnow().timestamp()  # seconds in UTC
     end_time = -1 if price_plan['serviceDays'] == -1 else now + price_plan['serviceDays'] * 24 * 60 * 60
     doc = {VAULT_SERVICE_DID: user_did,
            VAULT_SERVICE_MAX_STORAGE: int(price_plan["maxStorage"]) * 1024 * 1024,
            VAULT_SERVICE_FILE_USE_STORAGE: 0,
            VAULT_SERVICE_DB_USE_STORAGE: 0,
            IS_UPGRADED: is_upgraded,
            VAULT_SERVICE_START_TIME: now,
            VAULT_SERVICE_END_TIME: end_time,
            VAULT_SERVICE_MODIFY_TIME: now,
            VAULT_SERVICE_STATE: VAULT_SERVICE_STATE_RUNNING,
            VAULT_SERVICE_PRICING_USING: price_plan['name']}
     cli.insert_one_origin(DID_INFO_DB_NAME, VAULT_SERVICE_COL, doc, create_on_absence=True, is_extra=False)
      # INFO: the user database will be created with the first collection creation.
     if not fm.create_dir(get_vault_path(user_did)):
         raise BadRequestException('Failed to create folder for the user.')
     return doc
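A hypothetical price plan showing the fields create_vault reads; the units are inferred from the conversions above:

    free_plan = {
        'name': 'Free',
        'maxStorage': 512,     # interpreted as MB and converted to bytes via * 1024 * 1024
        'serviceDays': -1,     # -1 means the vault never expires (end_time stays -1)
    }
    # create_vault(user_did, free_plan) would then store a 536870912-byte quota
    # (VAULT_SERVICE_MAX_STORAGE) and VAULT_SERVICE_END_TIME = -1.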
Example #12
    def restore_database(self, root_dir: Path):
        if not root_dir.exists():
            logging.info('The backup root dir does not exist, skip restore.')
            return

        # restore the data of the database from every 'dump_file'.
        dump_files = [x for x in root_dir.iterdir() if x.suffix == BACKUP_FILE_SUFFIX]
        for dump_file in dump_files:
            if self.is_mongo_atlas:
                line2 = f"mongorestore --uri={self.host}" \
                        f" --drop --archive='{dump_file.as_posix()}'"
            else:
                line2 = f"mongorestore -h {self.host} --port {self.port}" \
                        f" --drop --archive='{dump_file.as_posix()}'"
            logging.info(f'[db_client] restore database with command: {line2}')
            return_code = subprocess.call(line2, shell=True)
            if return_code != 0:
                raise BadRequestException(msg=f'Failed to restore mongodb data from file {dump_file.as_posix()}.')
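For comparison, a minimal sketch of the same mongorestore invocation in argument-list form, which avoids shell=True; this is an alternative sketch, not the project's code:

    import subprocess
    from pathlib import Path

    def restore_archive(host: str, port: int, dump_file: Path) -> None:
        cmd = ['mongorestore', '-h', host, '--port', str(port),
               '--drop', f'--archive={dump_file.as_posix()}']
        if subprocess.call(cmd) != 0:
            raise RuntimeError(f'Failed to restore mongodb data from {dump_file.as_posix()}.')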
Example #13
    def dump_database_data_to_backup_cids(self, user_did):
        names = cli.get_all_user_database_names(user_did)
        metadata_list = list()
        for name in names:
            d = {'path': gene_temp_file_name(), 'name': name}
            # dump the database data to a snapshot file.
            succeeded = export_mongo_db_to_full_path(d['name'], d['path'])
            if not succeeded:
                raise BadRequestException(
                    f'Failed to dump {d["name"]} for {user_did}')

            # upload this snapshot file onto the IPFS node.
            d['cid'] = fm.ipfs_upload_file_from_path(d['path'])
            d['sha256'] = fm.get_file_content_sha256(d['path'])
            d['size'] = d['path'].stat().st_size
            d['path'].unlink()

            metadata_list.append(d)
        return metadata_list
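Each entry of the returned metadata_list has roughly the shape below (values are illustrative); the restore path in Example #18 reads the 'name', 'cid', 'sha256', and 'size' fields:

    from pathlib import Path

    example_entry = {
        'name': 'hive_user_db',                    # user database name (placeholder)
        'path': Path('/tmp/dump-xxxxxx'),          # temporary dump file, already unlinked above
        'cid': 'Qm...example-cid',                 # IPFS CID of the uploaded dump
        'sha256': '<hex digest of the dump file>',
        'size': 1048576,                           # dump file size in bytes
    }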
Example #14
 def internal_backup(self, cid, sha256, size, is_force):
     user_did, app_did, doc = self._check_auth_backup()
     if not is_force and doc.get(
             BKSERVER_REQ_STATE) == BACKUP_REQUEST_STATE_INPROGRESS:
         raise BadRequestException(
             msg='Failed because the backup is already in progress.')
     fm.ipfs_pin_cid(cid)
     update = {
         BKSERVER_REQ_ACTION: BACKUP_REQUEST_ACTION_BACKUP,
         BKSERVER_REQ_STATE: BACKUP_REQUEST_STATE_INPROGRESS,
         BKSERVER_REQ_STATE_MSG: None,
         BKSERVER_REQ_CID: cid,
         BKSERVER_REQ_SHA256: sha256,
         BKSERVER_REQ_SIZE: size
     }
     self.update_backup_request(user_did, update)
     BackupServerExecutor(user_did, self,
                          self.find_backup_request(user_did,
                                                   False)).start()
Example #15
    def handle_transaction(self, transaction_id, is_download=False):
        check_auth_and_vault(
            VAULT_ACCESS_R if is_download else VAULT_ACCESS_WR)

        # check by transaction id
        row_id, target_did, target_app_did = self.parse_transaction_id(
            transaction_id)
        col_filter = {"_id": ObjectId(row_id)}
        trans = cli.find_one(target_did, target_app_did,
                             SCRIPTING_SCRIPT_TEMP_TX_COLLECTION, col_filter)
        if not trans:
            raise BadRequestException("Cannot find the transaction by id.")

        # executing uploading or downloading
        data = None
        logging.info(
            f'handle transaction by id: is_ipfs={self.is_ipfs}, '
            f'is_download={is_download}, file_name={trans["document"]["file_name"]}'
        )
        if self.is_ipfs:
            if is_download:
                data = self.ipfs_files.download_file_with_path(
                    target_did, target_app_did, trans['document']['file_name'])
            else:
                self.ipfs_files.upload_file_with_path(
                    target_did, target_app_did, trans['document']['file_name'])
        else:
            if is_download:
                data = self.get_files().download_file_by_did(
                    target_did, target_app_did, trans['document']['file_name'])
            else:
                self.get_files().upload_file_by_did(
                    target_did, target_app_did, trans['document']['file_name'])

        # recalculate the storage usage of the database
        cli.delete_one(target_did, target_app_did,
                       SCRIPTING_SCRIPT_TEMP_TX_COLLECTION, col_filter)
        update_used_storage_for_mongodb_data(
            target_did, get_mongo_database_size(target_did, target_app_did))

        # return the content of the file
        return data
Example #16
    def execute(self):
        cli.check_vault_access(self.get_target_did(), VAULT_ACCESS_WR)

        document = self.get_document()
        msg = populate_with_params_values(self.get_did(), self.get_app_id(),
                                          document, self.get_params())
        if msg:
            raise BadRequestException(
                msg='Cannot get parameter value for the executable document: '
                + msg)

        data = cli.insert_one(self.get_target_did(), self.get_target_app_did(),
                              self.get_collection_name(), document,
                              populate_options_insert_one(self.body))

        update_used_storage_for_mongodb_data(
            self.get_did(),
            get_mongo_database_size(self.get_target_did(),
                                    self.get_target_app_did()))

        return self.get_output_data(data)
Example #17
    def backup_finish(self, checksum_list):
        user_did, _, doc = self._check_auth_backup()

        backup_root = get_vault_backup_path(user_did)
        # TODO: remove this check.
        if not backup_root.exists():
            create_full_path_dir(backup_root)

        local_checksum_list = get_file_checksum_list(backup_root)
        for checksum in checksum_list:
            if checksum not in local_checksum_list:
                raise BadRequestException(
                    msg='Failed to finish backup process.')

        cli.update_one_origin(DID_INFO_DB_NAME, VAULT_BACKUP_SERVICE_COL,
                              {VAULT_BACKUP_SERVICE_DID: user_did}, {
                                  "$set": {
                                      VAULT_BACKUP_SERVICE_USE_STORAGE:
                                      get_dir_size(backup_root.as_posix(), 0)
                                  }
                              })
Example #18
 def restore_database_by_dump_files(self, request_metadata):
     databases = request_metadata['databases']
     if not databases:
         logging.info(
             '[IpfsBackupClient] No user databases dump files, skip.')
         return
     for d in databases:
         temp_file = gene_temp_file_name()
         msg = fm.ipfs_download_file_to_path(d['cid'],
                                             temp_file,
                                             is_proxy=True,
                                             sha256=d['sha256'],
                                             size=d['size'])
         if msg:
             logging.error(
                 f'[IpfsBackupClient] Failed to download dump file for database {d["name"]}.'
             )
             temp_file.unlink()
             raise BadRequestException(msg=msg)
         import_mongo_db_by_full_path(temp_file)
         temp_file.unlink()
         logging.info(
             f'[IpfsBackupClient] Successfully restored the dump file for database {d["name"]}.'
         )
Example #19
def reading_operation(path):
    """ Download/get the properties of/get the hash of the file, list the files of the folder.
    Download the content of the file by path if no URL parameter.

    .. :quickref: 04 Files; Download/properties/hash/list

    **Request**:

    .. sourcecode:: http

        None

    **Response OK**:

    .. sourcecode:: http

        HTTP/1.1 200 OK

    .. code-block:: json

        <The bytes of the content of the file.>

    **Response Error**:

    .. sourcecode:: http

        HTTP/1.1 400 Bad Request

    .. sourcecode:: http

        HTTP/1.1 401 Unauthorized

    .. sourcecode:: http

        HTTP/1.1 403 Forbidden

    .. sourcecode:: http

        HTTP/1.1 404 Not Found

    List the files of the directory by the path if the URL parameter is 'comp=children'.

    **Request**:

    .. sourcecode:: http

        None

    **Response OK**:

    .. sourcecode:: http

        HTTP/1.1 200 OK

    .. code-block:: json

        {
            "value": [{
                "name": "<path/to/res>",
                "is_file": true,
                "size": <Integer>
            }, {
                "name": "<path/to/dir>",
                "is_file": false
            }]
        }

    **Response Error**:

    .. sourcecode:: http

        HTTP/1.1 400 Bad Request

    .. sourcecode:: http

        HTTP/1.1 401 Unauthorized

    .. sourcecode:: http

        HTTP/1.1 403 Forbidden

    .. sourcecode:: http

        HTTP/1.1 404 Not Found

    Get the properties of the file by the path if the URL parameter is 'comp=metadata'.

    **Request**:

    .. sourcecode:: http

        None

    **Response OK**:

    .. sourcecode:: http

        HTTP/1.1 200 OK

    .. code-block:: json

        {
            "name": <path/to/res>,
            "is_file": <true: file, false: folder>,
            "size": <size>,
            "created": <created timestamp>,
            "updated": <updated timestamp>
        }

    **Response Error**:

    .. sourcecode:: http

        HTTP/1.1 400 Bad Request

    .. sourcecode:: http

        HTTP/1.1 401 Unauthorized

    .. sourcecode:: http

        HTTP/1.1 403 Forbidden

    .. sourcecode:: http

        HTTP/1.1 404 Not Found

    Get the hash of the file by the path if the URL parameter is 'comp=hash'.

    **Request**:

    .. sourcecode:: http

        None

    **Response OK**:

    .. sourcecode:: http

        HTTP/1.1 200 OK

    .. code-block:: json

        {
            "name": <the path of the file>
            “algorithm”: <“algorithm name: currently support SHA256”>
            "hash":  <SHA-256 computation value of the file content>
        }


    **Response Error**:

    .. sourcecode:: http

        HTTP/1.1 400 Bad Request

    .. sourcecode:: http

        HTTP/1.1 401 Unauthorized

    .. sourcecode:: http

        HTTP/1.1 403 Forbidden

    .. sourcecode:: http

        HTTP/1.1 404 Not Found

    """

    component, _ = rqargs.get_str('comp')
    if not path and component != 'children':
        return InvalidParameterException(
            msg='Resource path is mandatory, but it is missing.'
        ).get_error_response()

    if not component:
        return ipfs_files.download_file(path)
    elif component == 'children':
        return ipfs_files.list_folder(path)
    elif component == 'metadata':
        return ipfs_files.get_properties(path)
    elif component == 'hash':
        return ipfs_files.get_hash(path)
    else:
        return BadRequestException(
            msg=f'Unsupported value "{component}" for the URL parameter "comp".'
        ).get_error_response()
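Hypothetical client-side calls against this endpoint; the base URL, route prefix, and auth header are placeholders, and only the 'comp' values come from the docstring above:

    import requests

    headers = {'Authorization': 'token <access_token>'}    # placeholder auth header
    base = 'http://localhost:5000/api/v2/vault/files'      # placeholder route prefix

    requests.get(f'{base}/docs/readme.txt', headers=headers)                 # download the file content
    requests.get(f'{base}/docs?comp=children', headers=headers)              # list the folder
    requests.get(f'{base}/docs/readme.txt?comp=metadata', headers=headers)   # file properties
    requests.get(f'{base}/docs/readme.txt?comp=hash', headers=headers)       # SHA-256 hash of the file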