class HiveManage:
    """Node-management endpoints: report the hive node's version and commit hash."""

    def __init__(self):
        self.app = None
        self.response = ServerResponse("HiveManage")

    def init_app(self, app):
        """Bind the Flask application instance."""
        self.app = app

    def get_hive_version(self):
        """Return the node's version string as {"version": ...}."""
        data = {"version": hive_setting.HIVE_VERSION}
        # BUGFIX: dropped the leftover debug print to stdout; use lazy
        # %-style logging so the message is only built when DEBUG is on.
        logger.debug("version: %s", hive_setting.HIVE_VERSION)
        return self.response.response_ok(data)

    def get_hive_commit_hash(self):
        """Return the node's git commit hash as {"commit_hash": ...}."""
        data = {"commit_hash": hive_setting.HIVE_COMMIT_HASH}
        logger.debug("commit_hash: %s", hive_setting.HIVE_COMMIT_HASH)
        return self.response.response_ok(data)
class HivePubSub:
    """Publish/subscribe endpoints: channel management, subscriptions, messages."""

    def __init__(self):
        self.app = None
        self.response = ServerResponse("HivePubSub")

    def init_app(self, app):
        """Bind the Flask application instance."""
        self.app = app

    def publish_channel(self):
        """Create a pub/sub channel owned by the caller's did/app_id."""
        did, app_id, content, err = post_json_param_pre_proc(
            self.response, "channel_name")
        if err:
            return err
        channel_name = content["channel_name"]
        channel_id = pub_setup_channel(did, app_id, channel_name)
        if channel_id:
            return self.response.response_ok()
        else:
            # A falsy channel_id means the channel already exists.
            return self.response.response_err(
                ALREADY_EXIST, f"Channel {channel_name} already exists")

    def remove_channel(self):
        """Delete a channel owned by the caller; idempotent on missing channels."""
        did, app_id, content, err = post_json_param_pre_proc(
            self.response, "channel_name")
        if err:
            return err
        channel_name = content["channel_name"]
        pub_remove_channel(did, app_id, channel_name)
        return self.response.response_ok()

    def __channel_name_response(self, channel_list, did):
        """Build {"channels": [names]} from a channel list, or NOT_FOUND when empty.

        Shared by get_pub_channels/get_sub_channels, which previously
        duplicated this logic line-for-line.
        """
        if not channel_list:
            return self.response.response_err(NOT_FOUND,
                                              "not found channel of " + did)
        channel_name_list = [channel[PUB_CHANNEL_NAME] for channel in channel_list]
        return self.response.response_ok({"channels": channel_name_list})

    def get_pub_channels(self):
        """List the names of channels the caller has published."""
        did, app_id, err = pre_proc(self.response)
        if err:
            return err
        channel_list = pub_get_pub_channels(did, app_id)
        return self.__channel_name_response(channel_list, did)

    def get_sub_channels(self):
        """List the names of channels the caller has subscribed to."""
        did, app_id, err = pre_proc(self.response)
        if err:
            return err
        channel_list = pub_get_sub_channels(did, app_id)
        return self.__channel_name_response(channel_list, did)

    def subscribe_channel(self):
        """Subscribe the caller to another identity's published channel."""
        did, app_id, content, err = post_json_param_pre_proc(
            self.response, "pub_did", "pub_app_id", "channel_name")
        if err:
            return err
        pub_did = content["pub_did"]
        pub_appid = content["pub_app_id"]
        channel_name = content["channel_name"]

        info = pub_get_channel(pub_did, pub_appid, channel_name)
        if not info:
            return self.response.response_err(
                NOT_FOUND,
                f"There is no channel:{channel_name} published by did:{pub_did}, appid:{pub_appid}"
            )

        pub_add_subscriber(pub_did, pub_appid, channel_name, did, app_id)
        return self.response.response_ok()

    def unsubscribe_channel(self):
        """Remove the caller's subscription; idempotent on missing subscriptions."""
        did, app_id, content, err = post_json_param_pre_proc(
            self.response, "pub_did", "pub_app_id", "channel_name")
        if err:
            return err
        pub_did = content["pub_did"]
        pub_appid = content["pub_app_id"]
        channel_name = content["channel_name"]
        pub_remove_subscribe(pub_did, pub_appid, channel_name, did, app_id)
        return self.response.response_ok()

    def push_message(self):
        """Publish a message, timestamped now, to one of the caller's channels."""
        did, app_id, content, err = post_json_param_pre_proc(
            self.response, "channel_name", "message")
        if err:
            return err
        channel_name = content["channel_name"]
        message = content["message"]
        info = pub_get_channel(did, app_id, channel_name)
        if not info:
            return self.response.response_err(
                NOT_FOUND,
                f"There is no channel:{channel_name} published by did:{did}, appid:{app_id}"
            )
        pubsub_push_message(did, app_id, channel_name, message,
                            datetime.utcnow().timestamp())
        return self.response.response_ok()

    def pop_messages(self):
        """Pop up to message_limit pending messages from a subscribed channel."""
        did, app_id, content, err = post_json_param_pre_proc(
            self.response, "pub_did", "pub_app_id", "channel_name",
            "message_limit")
        if err:
            return err
        pub_did = content["pub_did"]
        pub_appid = content["pub_app_id"]
        channel_name = content["channel_name"]
        limit = int(content["message_limit"])
        message_list = sub_pop_messages(pub_did, pub_appid, channel_name, did,
                                        app_id, limit)
        data = {"messages": message_list}
        return self.response.response_ok(data)
class HiveInternal:
    """Internal node-to-node backup endpoints: transfer, patch and verify vault backups."""

    # Shared across instances; set by init_app.
    mode = HIVE_MODE_DEV

    def __init__(self):
        self.app = None
        self.response = ServerResponse("HiveInternal")
        self.backup_ftp = None

    def init_app(self, app, mode):
        """Bind the Flask app and ensure the backup base directory exists."""
        backup_path = Path(hive_setting.BACKUP_VAULTS_BASE_DIR)
        # BUGFIX: ".exists" without parentheses is a bound method and is
        # always truthy, so the base directory was never created; call it.
        if not backup_path.exists():
            create_full_path_dir(backup_path)
        self.app = app
        HiveInternal.mode = mode

    def backup_save_finish(self):
        """Verify uploaded backup files against a checksum list, then record usage."""
        did, content, err = did_post_json_param_pre_proc(self.response, "checksum_list")
        if err:
            return err

        checksum_list = content["checksum_list"]
        backup_path = get_vault_backup_path(did)
        if not backup_path.exists():
            return self.response.response_err(NOT_FOUND, f"{did} backup vault not found")

        # Every checksum the client claims to have uploaded must be present.
        backup_checksum_list = get_file_checksum_list(backup_path)
        for checksum in checksum_list:
            if checksum not in backup_checksum_list:
                return self.response.response_err(CHECKSUM_FAILED, f"{did} backup file checksum failed")

        total_size = get_dir_size(backup_path.as_posix(), 0.0)
        update_vault_backup_service_item(did, VAULT_BACKUP_SERVICE_USE_STORAGE, total_size)
        return self.response.response_ok()

    def backup_restore_finish(self):
        """Return the checksum list of the caller's backup files for restore verification."""
        did, content, err = did_post_json_param_pre_proc(self.response)
        if err:
            return err

        backup_path = get_vault_backup_path(did)
        if not backup_path.exists():
            return self.response.response_err(NOT_FOUND, f"{did} backup vault not found")

        backup_checksum_list = get_file_checksum_list(backup_path)
        data = {"checksum_list": backup_checksum_list}
        return self.response.response_ok(data)

    def get_backup_service(self):
        """Return the caller's backup service record, creating its folder if needed."""
        did, content, err = did_get_param_pre_proc(self.response)
        if err:
            return self.response.response_err(UNAUTHORIZED, "Backup internal backup_communication_start auth failed")

        # check backup service exist
        info = get_vault_backup_service(did)
        if not info:
            return self.response.response_err(BAD_REQUEST, "There is no backup service of " + did)

        backup_path = get_vault_backup_path(did)
        if not backup_path.exists():
            create_full_path_dir(backup_path)

        # Strip the database id, which is not JSON-serializable.
        del info["_id"]

        data = {"backup_service": info}
        return self.response.response_ok(data)

    def get_backup_files(self):
        """List [md5, relative-path] pairs for every file in the caller's backup."""
        did, content, err = did_get_param_pre_proc(self.response,  access_backup=BACKUP_ACCESS)
        if err:
            return self.response.response_err(UNAUTHORIZED, "Backup internal get_transfer_files auth failed")

        backup_path = get_vault_backup_path(did)
        if not backup_path.exists():
            # BUGFIX: the empty-list response was built but not returned,
            # so execution fell through and scanned a nonexistent directory.
            return self.response.response_ok({"backup_files": list()})

        file_md5_gene = deal_dir(backup_path.as_posix(), get_file_md5_info)
        file_md5_list = list()
        for md5 in file_md5_gene:
            # Store paths relative to the backup root so they are portable.
            md5_info = [md5[0], Path(md5[1]).relative_to(backup_path).as_posix()]
            file_md5_list.append(md5_info)
        return self.response.response_ok({"backup_files": file_md5_list})

    def put_file(self):
        """Stream the request body into the named file inside the caller's backup."""
        did, content, response = did_get_param_pre_proc(self.response, "file", access_backup=BACKUP_ACCESS)
        if response is not None:
            return response

        file_name = filter_path_root(content["file"])

        backup_path = get_vault_backup_path(did)
        full_path_name = (backup_path / file_name).resolve()

        if not full_path_name.parent.exists():
            if not create_full_path_dir(full_path_name.parent):
                return self.response.response_err(SERVER_MKDIR_ERROR,
                                                  "internal put_file error to create dir:" + full_path_name.parent.as_posix())

        # Write to a temp file first so a failed upload never clobbers the target.
        temp_file = gene_temp_file_name()
        try:
            with open(temp_file, "bw") as f:
                chunk_size = CHUNK_SIZE
                while True:
                    chunk = request.stream.read(chunk_size)
                    if len(chunk) == 0:
                        break
                    f.write(chunk)
        except Exception as e:
            logger.error(f"exception of put_file error is {str(e)}")
            # BUGFIX: remove the partial temp file instead of leaking it.
            if temp_file.exists():
                temp_file.unlink()
            return self.response.response_err(SERVER_SAVE_FILE_ERROR, f"Exception: {str(e)}")

        if full_path_name.exists():
            full_path_name.unlink()
        shutil.move(temp_file.as_posix(), full_path_name.as_posix())
        return self.response.response_ok()

    def __get_backup_file_check(self, resp):
        """Authenticate the caller and resolve the 'file' query arg to a backup path.

        Returns (resp, path) on success; (resp with error status_code, None)
        on auth failure, missing file, or a non-regular file.
        """
        did, app_id = did_auth()
        if did is None:
            resp.status_code = UNAUTHORIZED
            return resp, None
        r, msg = can_access_backup(did)
        if r != SUCCESS:
            resp.status_code = r
            return resp, None

        file_name = request.args.get('file')
        file_name = filter_path_root(file_name)
        backup_path = get_vault_backup_path(did)
        file_full_name = (backup_path / file_name).resolve()

        if not file_full_name.exists():
            resp.status_code = NOT_FOUND
            return resp, None

        if not file_full_name.is_file():
            resp.status_code = FORBIDDEN
            return resp, None

        return resp, file_full_name

    def get_file(self):
        """Serve a backup file with HTTP range/ETag support."""
        resp = Response()
        resp, file_full_name = self.__get_backup_file_check(resp)
        if not file_full_name:
            return resp

        size = file_full_name.stat().st_size
        with open(file_full_name, 'rb') as f:
            etag = RangeRequest.make_etag(f)
        last_modified = datetime.utcnow()

        # NOTE(review): RangeRequest is assumed to close the handle it
        # streams — confirm, otherwise this leaks a file descriptor.
        data = RangeRequest(open(file_full_name, 'rb'),
                            etag=etag,
                            last_modified=last_modified,
                            size=size).make_response()
        return data

    def move_file(self, is_copy):
        """Move (or copy, when is_copy is True) a file inside the caller's backup."""
        did, content, response = did_post_json_param_pre_proc(self.response, "src_file", "dst_file",
                                                              access_backup=BACKUP_ACCESS)
        if response is not None:
            return response

        src_name = content.get('src_file')
        src_name = filter_path_root(src_name)

        dst_name = content.get('dst_file')
        dst_name = filter_path_root(dst_name)

        backup_path = get_vault_backup_path(did)

        src_full_path_name = (backup_path / src_name).resolve()
        dst_full_path_name = (backup_path / dst_name).resolve()

        if not src_full_path_name.exists():
            return self.response.response_err(NOT_FOUND, "src_name not exists")

        # Overwrite semantics: an existing destination file is replaced.
        if dst_full_path_name.exists():
            dst_full_path_name.unlink()

        dst_parent_folder = dst_full_path_name.parent
        if not dst_parent_folder.exists():
            if not create_full_path_dir(dst_parent_folder):
                return self.response.response_err(SERVER_MKDIR_ERROR, "move_file make dst parent path dir error")
        try:
            if is_copy:
                shutil.copy2(src_full_path_name.as_posix(), dst_full_path_name.as_posix())
            else:
                shutil.move(src_full_path_name.as_posix(), dst_full_path_name.as_posix())
        except Exception as e:
            logger.error(f"exception of move_file error is {str(e)}")
            return self.response.response_err(SERVER_MOVE_FILE_ERROR, "Exception:" + str(e))

        return self.response.response_ok()

    def delete_file(self):
        """Delete the named file from the caller's backup; idempotent if absent."""
        did, content, response = did_get_param_pre_proc(self.response, "file", access_backup=BACKUP_ACCESS)
        if response is not None:
            return response

        file_name = content.get('file')
        file_name = filter_path_root(file_name)

        backup_path = get_vault_backup_path(did)
        full_path_name = (backup_path / file_name).resolve()

        if full_path_name.exists():
            full_path_name.unlink()
        # todo delete all empty path dir
        return self.response.response_ok()

    def get_file_patch_hash(self):
        """Stream rsync-style block checksums of a backup file to the caller."""
        resp = Response()
        resp, file_full_name = self.__get_backup_file_check(resp)
        if not file_full_name:
            return resp

        # NOTE(review): the handle is handed to the checksum generator and is
        # never explicitly closed — confirm the generator exhausts/closes it.
        open_file = open(file_full_name, 'rb')
        resp = Response(gene_blockchecksums(open_file, blocksize=CHUNK_SIZE))
        resp.status_code = SUCCESS
        return resp

    def patch_file_delta(self):
        """Apply an rsync-style delta (uploaded in the request body) to a backup file."""
        resp = Response()
        resp, file_full_name = self.__get_backup_file_check(resp)
        if not file_full_name:
            return resp

        # Spool the uploaded delta to a temp file.
        patch_delta_file = gene_temp_file_name()
        try:
            with open(patch_delta_file, "bw") as f:
                chunk_size = CHUNK_SIZE
                while True:
                    chunk = request.stream.read(chunk_size)
                    if len(chunk) == 0:
                        break
                    f.write(chunk)
        except Exception as e:
            logger.error(f"exception of post_file_patch_delta read error is {str(e)}")
            # BUGFIX: remove the partial temp file instead of leaking it.
            if patch_delta_file.exists():
                patch_delta_file.unlink()
            resp.status_code = SERVER_SAVE_FILE_ERROR
            return resp

        # SECURITY: pickle.load on request-supplied data can execute arbitrary
        # code. The caller is authenticated via __get_backup_file_check, but a
        # non-executable serialization format would be safer here.
        with open(patch_delta_file, "rb") as f:
            delta_list = pickle.load(f)

        try:
            # Patch into a fresh temp file, then atomically swap it in.
            new_file = gene_temp_file_name()
            with open(file_full_name, "br") as unpatched:
                with open(new_file, "bw") as save_to:
                    unpatched.seek(0)
                    patchstream(unpatched, save_to, delta_list)
            patch_delta_file.unlink()
            if file_full_name.exists():
                file_full_name.unlink()
            shutil.move(new_file.as_posix(), file_full_name.as_posix())
        except Exception as e:
            logger.error(f"exception of post_file_patch_delta patch error is {str(e)}")
            resp.status_code = SERVER_PATCH_FILE_ERROR
            return resp

        resp.status_code = SUCCESS
        return resp

    def get_file_delta(self):
        """Compute and stream the rsync delta between a backup file and the
        block hashes posted in the request body (one "weak,strong" pair per line)."""
        resp = Response()
        resp, file_full_name = self.__get_backup_file_check(resp)
        if not file_full_name:
            return resp

        data = request.get_data()
        lines = data.split(b'\n')
        hashes = list()
        for line in lines:
            if not line:
                continue
            data = line.split(b',')
            h = (int(data[0]), data[1].decode("utf-8"))
            hashes.append(h)

        with open(file_full_name, "rb") as f:
            delta_list = rsyncdelta(f, hashes, blocksize=CHUNK_SIZE)

        patch_delta_file = gene_temp_file_name()
        try:
            with open(patch_delta_file, "wb") as f:
                pickle.dump(delta_list, f)
        except Exception as e:
            logging.getLogger("HiveBackup").error(
                f"get_file_delta dump {file_full_name} delta exception:{str(e)}")
            patch_delta_file.unlink()
            resp.status_code = SERVER_SAVE_FILE_ERROR
            return resp

        size = patch_delta_file.stat().st_size
        with open(patch_delta_file, 'rb') as f:
            etag = RangeRequest.make_etag(f)
        last_modified = datetime.utcnow()

        data = RangeRequest(open(patch_delta_file, 'rb'),
                            etag=etag,
                            last_modified=last_modified,
                            size=size).make_response()

        # Unlinking while the response still holds an open handle is fine on
        # POSIX: the inode lives until the handle is closed.
        # BUGFIX: removed the stray debug print of the already-unlinked path.
        patch_delta_file.unlink()
        return data
class HiveFile:
    """Vault file endpoints: upload, download, move/copy, list, hash and delete."""

    def __init__(self, app=None):
        self.app = app
        self.response = ServerResponse("HiveFile")

    def init_app(self, app):
        """Bind the Flask app and configure upload limits (100 MiB max body)."""
        self.app = app
        self.app.config['UPLOAD_FOLDER'] = "./temp_file"
        self.app.config['MAX_CONTENT_PATH'] = 10000000
        self.app.config['MAX_CONTENT_LENGTH'] = 100 * 1024 * 1024

    def move(self, is_copy):
        """Move (or copy, when is_copy is True) a file/folder inside the vault.

        Copies are charged to the vault's storage quota; moves are not.
        """
        did, app_id, content, response = post_json_param_pre_proc(
            self.response,
            "src_path",
            "dst_path",
            access_vault=VAULT_ACCESS_WR)
        if response is not None:
            return response

        src_name = content.get('src_path')
        src_name = filter_path_root(src_name)

        dst_name = content.get('dst_path')
        dst_name = filter_path_root(dst_name)

        path = get_save_files_path(did, app_id)
        src_full_path_name = (path / src_name).resolve()
        dst_full_path_name = (path / dst_name).resolve()

        if not src_full_path_name.exists():
            return self.response.response_err(NOT_FOUND, "src_name not exists")

        if dst_full_path_name.exists() and dst_full_path_name.is_file():
            return self.response.response_err(METHOD_NOT_ALLOWED,
                                              "dst_name file exists")

        dst_parent_folder = dst_full_path_name.parent
        if not dst_parent_folder.exists():
            if not create_full_path_dir(dst_parent_folder):
                return self.response.response_err(
                    INTERNAL_SERVER_ERROR, "make dst parent path dir error")
        try:
            if is_copy:
                if src_full_path_name.is_file():
                    shutil.copy2(src_full_path_name.as_posix(),
                                 dst_full_path_name.as_posix())
                    file_size = os.path.getsize(dst_full_path_name.as_posix())
                    inc_vault_file_use_storage_byte(did, file_size)
                else:
                    shutil.copytree(src_full_path_name.as_posix(),
                                    dst_full_path_name.as_posix())
                    # BUGFIX: the computed size was discarded (the helper
                    # returns it; see its other call sites), so copied
                    # folders were billed as zero bytes.
                    dir_size = get_dir_size(dst_full_path_name.as_posix(), 0.0)
                    inc_vault_file_use_storage_byte(did, dir_size)
            else:
                shutil.move(src_full_path_name.as_posix(),
                            dst_full_path_name.as_posix())
        except Exception as e:
            return self.response.response_err(INTERNAL_SERVER_ERROR,
                                              "Exception:" + str(e))

        return self.response.response_ok()

    def upload_file(self, file_name):
        """Stream the request body into the named vault file and charge its size."""
        did, app_id, response = pre_proc(self.response,
                                         access_vault=VAULT_ACCESS_WR)
        if response is not None:
            return response

        file_name = filter_path_root(file_name)

        full_path_name, err = query_upload_get_filepath(did, app_id, file_name)
        if err:
            return self.response.response_err(err["status_code"],
                                              err["description"])
        try:
            with open(full_path_name, "bw") as f:
                chunk_size = 4096
                while True:
                    chunk = request.stream.read(chunk_size)
                    if len(chunk) == 0:
                        break
                    f.write(chunk)
            file_size = os.path.getsize(full_path_name.as_posix())
            inc_vault_file_use_storage_byte(did, file_size)
        except Exception as e:
            return self.response.response_err(INTERNAL_SERVER_ERROR,
                                              f"Exception: {str(e)}")

        return self.response.response_ok()

    def download_file(self):
        """Serve the vault file named by the 'path' query argument."""
        resp = Response()
        did, app_id = did_auth()
        if (did is None) or (app_id is None):
            resp.status_code = UNAUTHORIZED
            return resp
        r, msg = can_access_vault(did, VAULT_ACCESS_R)
        if not r:
            resp.status_code = FORBIDDEN
            return resp

        file_name = request.args.get('path')
        data, status_code = query_download(did, app_id, file_name)
        if status_code != SUCCESS:
            resp.status_code = status_code
            return resp

        return data

    def get_property(self):
        """Return filesystem properties of the vault entry named by 'path'."""
        did, app_id, content, response = get_pre_proc(
            self.response, "path", access_vault=VAULT_ACCESS_R)
        if response is not None:
            return response

        name = content['path']
        data, err = query_properties(did, app_id, name)
        if err:
            return self.response.response_err(err["status_code"],
                                              err["description"])

        return self.response.response_ok(data)

    def list_files(self):
        """List the entries of a vault folder ('path' query arg; root if omitted)."""
        did, app_id = did_auth()
        if (did is None) or (app_id is None):
            return self.response.response_err(UNAUTHORIZED, "auth failed")

        r, msg = can_access_vault(did, VAULT_ACCESS_R)
        if not r:
            return self.response.response_err(BAD_REQUEST, msg)

        path = get_save_files_path(did, app_id)

        name = request.args.get('path')
        if name is None:
            full_path_name = path
        else:
            name = filter_path_root(name)
            full_path_name = (path / name).resolve()

        if not (full_path_name.exists() and full_path_name.is_dir()):
            return self.response.response_err(NOT_FOUND, "folder not exists")

        try:
            files = os.listdir(full_path_name.as_posix())
        except Exception:
            # Deliberate best-effort: an unreadable folder reports as empty.
            return self.response.response_ok({"files": []})

        file_info_list = list()
        for file in files:
            full_file = full_path_name / file
            stat_info = full_file.stat()
            file_info = {
                "type": "file" if full_file.is_file() else "folder",
                "name": file,
                "size": stat_info.st_size,
                "last_modify": stat_info.st_mtime,
            }
            file_info_list.append(file_info)

        return self.response.response_ok({"file_info_list": file_info_list})

    def file_hash(self):
        """Return the hash of the vault file named by 'path'."""
        did, app_id, content, response = get_pre_proc(
            self.response, "path", access_vault=VAULT_ACCESS_R)
        if response is not None:
            return response

        name = content['path']
        data, err = query_hash(did, app_id, name)
        if err:
            return self.response.response_err(err["status_code"],
                                              err["description"])

        return self.response.response_ok(data)

    def delete(self):
        """Delete a vault file or folder and release its storage quota."""
        did, app_id, content, response = post_json_param_pre_proc(
            self.response, "path", access_vault=VAULT_ACCESS_DEL)
        if response is not None:
            return response

        filename = content.get('path')
        filename = filter_path_root(filename)

        path = get_save_files_path(did, app_id)
        file_full_name = (path / filename).resolve()
        if file_full_name.exists():
            if file_full_name.is_dir():
                # BUGFIX: the computed size was discarded, so deleting a
                # folder never released any quota. Measure before rmtree.
                dir_size = get_dir_size(file_full_name.as_posix(), 0.0)
                shutil.rmtree(file_full_name)
                inc_vault_file_use_storage_byte(did, -dir_size)
            else:
                file_size = os.path.getsize(file_full_name.as_posix())
                file_full_name.unlink()
                inc_vault_file_use_storage_byte(did, -file_size)

        return self.response.response_ok()
class HivePayment:
    """Payment endpoints: pricing plans, orders, and vault/backup lifecycle."""

    def __init__(self):
        self.app = None
        self.response = ServerResponse("HivePayment")

    def init_app(self, app):
        """Bind the Flask app and load the payment configuration."""
        self.app = app
        PaymentConfig.init_config()

    def get_version(self):
        """Return the payment configuration version."""
        did, app_id, err = pre_proc(self.response)
        if err:
            return err
        version = PaymentConfig.get_version()
        return self.response.response_ok({"version": version})

    def get_vault_package_info(self):
        """Return every configured pricing/backup package."""
        did, app_id, err = pre_proc(self.response)
        if err:
            return err
        data = PaymentConfig.get_all_package_info()
        return self.response.response_ok(data)

    def get_vault_pricing_plan(self):
        """Return the pricing plan named by the 'name' query argument."""
        did, app_id, content, err = get_pre_proc(self.response, "name")
        if err:
            return err

        data = PaymentConfig.get_pricing_plan(content["name"])
        if data:
            return self.response.response_ok(data)
        else:
            return self.response.response_err(
                NOT_FOUND, "not found pricing name of:" + content["name"])

    def get_vault_backup_plan(self):
        """Return the backup plan named by the 'name' query argument."""
        did, app_id, content, err = get_pre_proc(self.response, "name")
        if err:
            return err

        data = PaymentConfig.get_backup_plan(content["name"])
        if data:
            return self.response.response_ok(data)
        else:
            return self.response.response_err(
                BAD_REQUEST, "not found backup name of:" + content["name"])

    def create_vault_package_order(self):
        """Create an order for a pricing plan ("pricing_name") or backup plan
        ("backup_name"); exactly one of the two must be in the body."""
        did, app_id, content, err = post_json_param_pre_proc(self.response)
        if err:
            return err

        if "pricing_name" in content:
            package_info = PaymentConfig.get_pricing_plan(
                content["pricing_name"])
            if not package_info:
                return self.response.response_err(
                    NOT_FOUND,
                    "not found pricing_name of:" + content["pricing_name"])
            order_id = create_order_info(did,
                                         app_id,
                                         package_info,
                                         order_type=VAULT_ORDER_TYPE_VAULT)
            return self.response.response_ok({"order_id": str(order_id)})
        elif "backup_name" in content:
            backup_info = PaymentConfig.get_backup_plan(content["backup_name"])
            if not backup_info:
                return self.response.response_err(
                    NOT_FOUND,
                    "not found backup_name of:" + content["backup_name"])
            order_id = create_order_info(did,
                                         app_id,
                                         backup_info,
                                         order_type=VAULT_ORDER_TYPE_BACKUP)
            return self.response.response_ok({"order_id": str(order_id)})
        else:
            return self.response.response_err(
                BAD_REQUEST, "parameter pricing_name and backup_name is null")

    def pay_vault_package_order(self):
        """Attach payment transaction ids to an order and mark it waiting for tx."""
        did, app_id, content, err = post_json_param_pre_proc(
            self.response, "order_id", "pay_txids")
        if err:
            return err

        info = get_order_info_by_id(ObjectId(content["order_id"]))
        # BUGFIX: an unknown order id previously fell through and crashed
        # below on info[...] with a TypeError; reject it explicitly.
        if not info:
            return self.response.response_err(
                NOT_FOUND, "not found order of:" + content["order_id"])

        # If the order is already effective or already has txids, do not pay again.
        if info[VAULT_ORDER_STATE] == VAULT_ORDER_STATE_SUCCESS:
            return self.response.response_ok(
                {"message": "order has been effective"})
        if info[VAULT_ORDER_TXIDS]:
            return self.response.response_ok(
                {"message": "order has been payed no need to pay again"})

        # check whether txids have been used by other order which not be canceled
        for txid in content["pay_txids"]:
            info_cursor = find_txid(txid)
            for info_c in info_cursor:
                if (info_c["_id"] != content["order_id"]) \
                        and ((info_c[VAULT_ORDER_STATE] != VAULT_ORDER_STATE_CANCELED) \
                             or (info_c[VAULT_ORDER_DID] != did)):
                    return self.response.response_err(
                        BAD_REQUEST, "txid:" + txid + " has been used")

        info[VAULT_ORDER_TXIDS] = content["pay_txids"]
        info[VAULT_ORDER_STATE] = VAULT_ORDER_STATE_WAIT_TX
        info[VAULT_ORDER_PAY_TIME] = datetime.utcnow().timestamp()
        update_order_info(info["_id"], info)
        return self.response.response_ok()

    def __id_to_order_id(self, info):
        """Rename the Mongo '_id' field to a JSON-friendly 'order_id' string."""
        info["order_id"] = str(info["_id"])
        del info["_id"]
        return info

    def get_vault_package_order(self):
        """Return a single order by its 'order_id' query argument."""
        did, app_id, content, err = get_pre_proc(self.response, "order_id")
        if err is not None:
            return err

        order_id = content['order_id']

        info = get_order_info_by_id(ObjectId(order_id))
        # BUGFIX: an unknown order id previously crashed in __id_to_order_id.
        if not info:
            return self.response.response_err(
                NOT_FOUND, "not found order of:" + order_id)
        self.__id_to_order_id(info)
        return self.response.response_ok({"order_info": info})

    def get_vault_package_order_list(self):
        """Return every order belonging to the caller's did/app_id."""
        did, app_id, err = pre_proc(self.response)
        if err:
            return err
        info_list = list(get_order_info_list(did, app_id))
        for info in info_list:
            self.__id_to_order_id(info)
        return self.response.response_ok({"order_info_list": info_list})

    def cancel_vault_package_order(self):
        """Cancel an order that is still waiting for payment or tx confirmation."""
        did, app_id, content, err = post_json_param_pre_proc(
            self.response, "order_id")
        if err:
            return err
        order_id = content['order_id']
        info = get_order_info_by_id(ObjectId(order_id))
        if not info:
            return self.response.response_err(
                NOT_FOUND, "not found order of:" + order_id)
        if info[VAULT_ORDER_STATE] == VAULT_ORDER_STATE_WAIT_TX \
                or info[VAULT_ORDER_STATE] == VAULT_ORDER_STATE_WAIT_PAY:
            info[VAULT_ORDER_STATE] = VAULT_ORDER_STATE_CANCELED
            # BUGFIX: the canceled state was never written back, and the
            # handler returned None (no HTTP response at all).
            update_order_info(info["_id"], info)
        return self.response.response_ok()

    def create_free_vault(self):
        """Create the caller's free vault service; report if one already exists."""
        did, app_id, err = pre_proc(self.response)
        if err:
            return err

        service = get_vault_service(did)
        if service:
            data = {"existing": True}
            return self.response.response_ok(data)

        free_info = PaymentConfig.get_free_vault_info()

        setup_vault_service(did, free_info["maxStorage"],
                            free_info["serviceDays"])
        return self.response.response_ok()

    def remove_vault(self):
        """Delete the caller's vault and all its data."""
        did, app_id, err = pre_proc(self.response)
        if err:
            return err
        delete_user_vault(did)
        return self.response.response_ok()

    def freeze_vault(self):
        """Freeze the caller's vault (read-only)."""
        did, app_id, err = pre_proc(self.response)
        if err:
            return err
        freeze_vault(did)
        return self.response.response_ok()

    def unfreeze_vault(self):
        """Unfreeze the caller's vault."""
        did, app_id, err = pre_proc(self.response)
        if err:
            return err
        unfreeze_vault(did)
        return self.response.response_ok()

    def get_vault_service_info(self):
        """Return the caller's vault service record with sizes converted to MB."""
        did, app_id, err = pre_proc(self.response)
        if err:
            return err
        info = get_vault_service(did)
        if not info:
            return self.response.response_err(NOT_FOUND,
                                              "vault service not found")
        else:
            del info["_id"]
            data = dict()
            # Max storage below 1 MiB is stored as raw bytes, otherwise as MB
            # — preserved as-is; NOTE(review): this asymmetry looks suspect.
            info[VAULT_SERVICE_MAX_STORAGE] = float(info[VAULT_SERVICE_MAX_STORAGE]) \
                if info[VAULT_SERVICE_MAX_STORAGE] < 1024 * 1024 \
                else info[VAULT_SERVICE_MAX_STORAGE] / (1024 * 1024)
            info[VAULT_SERVICE_FILE_USE_STORAGE] = info[
                VAULT_SERVICE_FILE_USE_STORAGE] / (1024 * 1024)
            info[VAULT_SERVICE_DB_USE_STORAGE] = info[
                VAULT_SERVICE_DB_USE_STORAGE] / (1024 * 1024)
            data["vault_service_info"] = info

            return self.response.response_ok(data)

    def create_free_vault_backup(self):
        """Create the caller's free backup service; report if one already exists."""
        did, app_id, err = pre_proc(self.response)
        if err:
            return err

        service = get_vault_backup_service(did)
        if service:
            data = {"existing": True}
            return self.response.response_ok(data)

        free_info = PaymentConfig.get_free_backup_info()

        setup_vault_backup_service(did, free_info["maxStorage"],
                                   free_info["serviceDays"])
        return self.response.response_ok()

    def get_vault_backup_service_info(self):
        """Return the caller's backup service record with usage converted to MB."""
        did, app_id, err = pre_proc(self.response)
        if err:
            return err
        info = get_vault_backup_service(did)
        if not info:
            return self.response.response_err(
                NOT_FOUND, "vault backup service not found")
        else:
            del info["_id"]
            data = dict()
            info[VAULT_BACKUP_SERVICE_USE_STORAGE] = info[
                VAULT_BACKUP_SERVICE_USE_STORAGE] / (1024 * 1024)
            data["vault_service_info"] = info
            return self.response.response_ok(data)
# Example #6
class HiveAuth(Entity):
    """DID-based authentication service.

    Implements the two-step handshake (sign_in challenge, request_did_auth
    token issuance), access-token validation, and the backup-service
    authentication flow, on top of the Elastos DID native library (`lib`/`ffi`).
    """
    # Last issued access token (class-level default; not used by the flows below).
    access_token = None

    def __init__(self):
        self.app = None
        # NOTE(review): tag reads "HiveSync" — looks like it was meant to be
        # "HiveAuth"; kept unchanged because the tag may surface in logs/responses.
        self.response = ServerResponse("HiveSync")

    def init_app(self, app):
        """Bind the Flask app and initialise this node's DID entity from settings."""
        self.app = app
        self.mnemonic = hive_setting.DID_MNEMONIC
        self.passphrase = hive_setting.DID_PASSPHRASE
        self.storepass = hive_setting.DID_STOREPASS
        Entity.__init__(self, "hive.auth")

    def sign_in(self):
        """Start the DID auth handshake.

        Validates the client's DID document, caches it locally, stores a fresh
        nonce for the app-instance DID, and returns a signed JWT challenge.
        """
        body = request.get_json(force=True, silent=True)
        if body is None:
            return self.response.response_err(
                UNAUTHORIZED, "parameter is not application/json")
        document = body.get('document', None)
        if document is None:
            return self.response.response_err(BAD_REQUEST,
                                              "The did document is null")

        doc_str = json.dumps(body.get('document', None))
        doc = lib.DIDDocument_FromJson(doc_str.encode())
        if (not doc) or (lib.DIDDocument_IsValid(doc) != 1):
            # Fix: message used to read "Thd did document is vaild".
            return self.response.response_err(BAD_REQUEST,
                                              "The did document is invalid")

        did = lib.DIDDocument_GetSubject(doc)
        if not did:
            return self.response.response_err(
                BAD_REQUEST, "The did document is invalid, can't get did.")

        # Cache the document on disk so later DID resolution can work locally;
        # a failed write is logged but does not abort the handshake.
        spec_did_str = ffi.string(lib.DID_GetMethodSpecificId(did)).decode()
        try:
            with open(hive_setting.DID_DATA_LOCAL_DIDS + os.sep + spec_did_str,
                      "w") as f:
                f.write(doc_str)
        except Exception as e:
            logging.getLogger("HiveAuth").error(
                f"Exception in sign_in:{str(e)}")

        did_str = "did:" + ffi.string(
            lib.DID_GetMethod(did)).decode() + ":" + spec_did_str

        # Persist nonce + expiry keyed by the app-instance DID.
        nonce = create_nonce()
        exp = int(
            datetime.now().timestamp()) + hive_setting.AUTH_CHALLENGE_EXPIRED
        if not self.__save_nonce_to_db(nonce, did_str, exp):
            return self.response.response_err(INTERNAL_SERVER_ERROR,
                                              "save to db fail!")

        # Build the signed challenge JWT from this node's own DID document.
        builder = lib.DIDDocument_GetJwtBuilder(self.doc)
        if not builder:
            return self.response.response_err(INTERNAL_SERVER_ERROR,
                                              "Can't get jwt builder.")

        lib.JWTBuilder_SetHeader(builder, "type".encode(), "JWT".encode())
        lib.JWTBuilder_SetHeader(builder, "version".encode(), "1.0".encode())

        lib.JWTBuilder_SetSubject(builder, "DIDAuthChallenge".encode())
        lib.JWTBuilder_SetAudience(builder, did_str.encode())
        lib.JWTBuilder_SetClaim(builder, "nonce".encode(), nonce.encode())
        lib.JWTBuilder_SetExpiration(builder, exp)

        lib.JWTBuilder_Sign(builder, ffi.NULL, self.storepass)
        token = lib.JWTBuilder_Compact(builder)
        if not token:
            # Fix: release the native builder before the error return
            # (it was leaked on this path).
            lib.JWTBuilder_Destroy(builder)
            return self.response.response_err(
                INTERNAL_SERVER_ERROR, "Compact builder to a token is fail.")

        token = ffi.string(token).decode()
        lib.JWTBuilder_Destroy(builder)
        data = {
            "challenge": token,
        }
        return self.response.response_ok(data)

    def request_did_auth(self):
        """Exchange the signed challenge response for an access token."""
        # check auth token
        auth_info, err = self.__get_auth_token_info(["appDid"])
        if auth_info is None:
            return self.response.response_err(UNAUTHORIZED, err)

        # create access token
        access_token, err = self.__create_token(auth_info, "AccessToken")
        if err is not None:
            return self.response.response_err(UNAUTHORIZED, err)

        # save to db
        if not self.__save_auth_info_to_db(auth_info, access_token):
            return self.response.response_err(UNAUTHORIZED, "save to db fail!")

        # response token
        data = {
            "access_token": access_token,
        }
        return self.response.response_ok(data)

    def __get_auth_token_info(self, props):
        """Parse and verify the client's challenge-response JWT.

        Checks the embedded presentation (validity, nonce, realm, credential),
        and returns (credential_info, None) on success or (None, message).
        *props* lists credentialSubject fields that must be present.
        """
        # get jwt
        body = request.get_json(force=True, silent=True)
        if body is None:
            return None, "The parameter is not application/json"
        jwt = body.get('jwt', None)

        if jwt is None:
            return None, "The jwt is none."

        # check jwt token
        jws = lib.DefaultJWSParser_Parse(jwt.encode())
        if not jws:
            return None, self.get_error_message("JWS parser")

        vp_str = lib.JWT_GetClaimAsJson(jws, "presentation".encode())
        if not vp_str:
            lib.JWT_Destroy(jws)
            return None, "The jwt's presentation is none."

        vp = lib.Presentation_FromJson(vp_str)
        if not vp:
            lib.JWT_Destroy(jws)
            return None, "The presentation string is error, unable to rebuild to a presentation object."

        vp_json = json.loads(ffi.string(vp_str).decode())
        lib.JWT_Destroy(jws)

        # check vp
        if lib.Presentation_IsValid(vp) != 1:
            return None, self.get_error_message("Presentation isValid")

        # check nonce
        nonce = lib.Presentation_GetNonce(vp)
        if not nonce:
            return None, "The nonce is none."
        nonce = ffi.string(nonce).decode()
        if nonce is None:
            return None, "The nonce isn't valid."

        # check did:nonce from db
        info = get_did_info_by_nonce(nonce)
        if info is None:
            return None, "The nonce is error."

        # check realm — must be this node's own DID string.
        realm = lib.Presentation_GetRealm(vp)
        if not realm:
            return None, "The realm is none."
        realm = ffi.string(realm).decode()
        if realm is None:
            return None, "The realm isn't valid."

        if realm != self.get_did_string():
            return None, "The realm is error."

        # check vc
        count = lib.Presentation_GetCredentialCount(vp)
        if count < 1:
            return None, "The credential count is error."

        if not "verifiableCredential" in vp_json:
            return None, "The credential isn't exist."

        vcs_json = vp_json["verifiableCredential"]
        if not isinstance(vcs_json, list):
            return None, "The verifiableCredential isn't valid"

        vc_json = vcs_json[0]
        if vc_json is None:
            return None, "The credential isn't exist"

        vc_str = json.dumps(vc_json)

        credential_info, err = self.get_credential_info(vc_str, props)
        if credential_info is not None:
            # The credential's subject id must match the app-instance DID the
            # nonce was issued to, and the nonce must still be fresh.
            if credential_info["id"] != info[APP_INSTANCE_DID]:
                return None, "The app instance did is error."
            credential_info["nonce"] = nonce
            if info[DID_INFO_NONCE_EXPIRED] < int(datetime.now().timestamp()):
                return None, "The nonce is expired"

        return credential_info, err

    def __create_token(self, auth_info, subject):
        """Sign a JWT of the given *subject* carrying *auth_info* as 'props'.

        Returns (token, None) on success or (None, message) on failure.
        """
        if not isinstance(auth_info, dict):
            return None, "auth info isn't dict type"

        doc = lib.DIDStore_LoadDID(self.store, self.did)
        if not doc:
            return None, self.get_error_message("The doc load from did")

        builder = lib.DIDDocument_GetJwtBuilder(doc)
        if not builder:
            return None, "Can't get jwt builder."

        lib.JWTBuilder_SetHeader(builder, "typ".encode(), "JWT".encode())
        lib.JWTBuilder_SetHeader(builder, "version".encode(), "1.0".encode())

        lib.JWTBuilder_SetSubject(builder, subject.encode())
        lib.JWTBuilder_SetAudience(builder, auth_info["id"].encode())
        lib.JWTBuilder_SetExpiration(builder, auth_info["expTime"])

        # Everything except the audience/expiry becomes token payload.
        props = {}
        for key in auth_info:
            if key != "expTime" and key != "id":
                props[key] = auth_info[key]

        props_str = json.dumps(props)
        ret = lib.JWTBuilder_SetClaim(builder, "props".encode(),
                                      props_str.encode())
        if not ret:
            # Fix: release the native builder before the error return
            # (it was leaked on this path).
            lib.JWTBuilder_Destroy(builder)
            return None, self.get_error_message(
                "JWTBuilder_SetClaim 'props' to a token")

        lib.JWTBuilder_Sign(builder, ffi.NULL, self.storepass)
        token = lib.JWTBuilder_Compact(builder)
        if not token:
            # Fix: release the native builder before the error return.
            lib.JWTBuilder_Destroy(builder)
            return None, self.get_error_message("Compact builder to a token")

        token = ffi.string(token).decode()
        lib.JWTBuilder_Destroy(builder)

        return token, None

    def __save_nonce_to_db(self, nonce, app_instance_did, exp):
        """Insert or refresh the nonce record for an app-instance DID.

        Returns True on success, False when the database write fails.
        """
        info = get_did_info_by_app_instance_did(app_instance_did)

        try:
            if info is None:
                add_did_nonce_to_db(app_instance_did, nonce, exp)
            else:
                update_did_info_by_app_instance_did(app_instance_did, nonce,
                                                    exp)
        except Exception as e:
            logging.getLogger("HiveAuth").error(
                f"Exception in __save_nonce_to_db:: {e}")
            return False

        return True

    def __save_auth_info_to_db(self, auth_info, token):
        """Persist the issued access token for the authenticated identity.

        Returns True on success, False when the database write fails.
        """
        user_did = auth_info["userDid"]
        app_id = auth_info["appDid"]
        nonce = auth_info["nonce"]
        app_instance_did = auth_info["id"]
        exp = auth_info["expTime"]

        try:
            update_token_of_did_info(user_did, app_id, app_instance_did, nonce,
                                     token, exp)
        except Exception as e:
            logging.getLogger("HiveAuth").error(
                f"Exception in __save_auth_info_to_db:: {e}")
            return False

        return True

    def check_token(self):
        """HTTP endpoint: 200 when the Authorization token is valid, 401 otherwise."""
        info, err = self.get_token_info()
        if info is None:
            return self.response.response_err(UNAUTHORIZED, err)
        else:
            return self.response.response_ok()

    def get_token_info(self):
        """Extract the bearer token from the Authorization header and decode it.

        Returns (props_dict, None) on success or (None, message) on failure.
        """
        auth = request.headers.get("Authorization")
        if auth is None:
            return None, "Can't find the Authorization!"

        if not auth.strip().lower().startswith(("token", "bearer")):
            return None, "Can't find the token!"

        auth_splits = auth.split(" ")
        if len(auth_splits) < 2:
            return None, "Can't find the token!"

        access_token = auth_splits[1]
        if access_token == "":
            return None, "The token is None!"

        return self.get_info_from_token(access_token)

    def get_info_from_token(self, token):
        """Verify a JWT issued by this node and return its 'props' payload.

        Checks the three-part shape, signature/parse, issuer, and expiry;
        the returned dict also carries the audience as APP_INSTANCE_DID.
        """
        if token is None:
            return None, "The token is none!"

        token_splits = token.split(".")
        if token_splits is None:
            return None, "The token is invalid!"

        if (len(token_splits) != 3) or token_splits[2] == "":
            return None, "The token is invalid!"

        jws = lib.DefaultJWSParser_Parse(token.encode())
        if not jws:
            return None, self.get_error_message("JWS parser")

        issuer = lib.JWT_GetIssuer(jws)
        if not issuer:
            lib.JWT_Destroy(jws)
            return None, self.get_error_message("JWT getIssuer")

        issuer = ffi.string(issuer).decode()
        if issuer != self.get_did_string():
            lib.JWT_Destroy(jws)
            return None, "The issuer is invalid!"

        expired = lib.JWT_GetExpiration(jws)
        now = int(datetime.now().timestamp())
        if now > expired:
            lib.JWT_Destroy(jws)
            return None, "The token is expired!"

        props = lib.JWT_GetClaim(jws, "props".encode())
        if not props:
            lib.JWT_Destroy(jws)
            return None, "The props is none!"

        props_str = ffi.string(props).decode()
        props_json = json.loads(props_str)

        # Fix: NULL-check the audience before ffi.string() — the original
        # dereferenced it first and would crash on a token with no audience.
        audience = lib.JWT_GetAudience(jws)
        if not audience:
            lib.JWT_Destroy(jws)
            return None, "The app instance id is none!"
        app_instance_did = ffi.string(audience).decode()

        props_json[APP_INSTANCE_DID] = app_instance_did

        lib.JWT_Destroy(jws)

        return props_json, None

    def backup_auth_request(self, content):
        """Run the full backup-auth flow against the target backup node.

        Returns (target_host, backup_token, None) on success or
        (None, None, message) on failure.
        """
        vc_str = content.get('backup_credential')

        # check backup request vc
        credential_info, err = self.get_credential_info(
            vc_str, ["targetHost", "targetDID"])
        if credential_info is None:
            return None, None, err

        # sign in and get auth token
        auth_token, issuer, err = self.get_auth_token_by_sign_in(
            credential_info["targetHost"], vc_str, "DIDBackupAuthResponse")
        if auth_token is None:
            return None, None, err

        # get backup token
        backup_token, err = self.get_backup_auth_from_node(
            credential_info["targetHost"], auth_token, issuer)
        if backup_token is None:
            return None, None, err
        else:
            return credential_info["targetHost"], backup_token, None

    def get_credential_info(self, vc_str, props):
        """Validate a verifiable credential and return its credentialSubject.

        Ensures the listed *props* exist, records the issuer as 'userDid',
        and caps 'expTime' at now + ACCESS_TOKEN_EXPIRED.
        Returns (credentialSubject, None) or (None, message).
        """
        if vc_str is None:
            return None, "The credential is none."

        vc = lib.Credential_FromJson(vc_str.encode(), ffi.NULL)
        if not vc:
            return None, "The credential string is error, unable to rebuild to a credential object."

        if lib.Credential_IsValid(vc) != 1:
            return None, self.get_error_message("Credential isValid")

        vc_json = json.loads(vc_str)
        if not "credentialSubject" in vc_json:
            return None, "The credentialSubject isn't exist."
        credentialSubject = vc_json["credentialSubject"]

        if not "id" in credentialSubject:
            return None, "The credentialSubject's id isn't exist."

        for prop in props:
            if not prop in credentialSubject:
                return None, "The credentialSubject's '" + prop + "' isn't exist."

        if not "issuer" in vc_json:
            return None, "The credential issuer isn't exist."
        credentialSubject["userDid"] = vc_json["issuer"]

        expTime = lib.Credential_GetExpirationDate(vc)
        if expTime == 0:
            return None, self.get_error_message("Credential getExpirationDate")

        # Never issue a token that outlives the configured access-token TTL.
        exp = int(
            datetime.now().timestamp()) + hive_setting.ACCESS_TOKEN_EXPIRED
        if expTime > exp:
            expTime = exp

        credentialSubject["expTime"] = expTime

        return credentialSubject, None

    def backup_auth(self):
        """Issue a backup token from a validated backup-auth challenge response."""
        # check backup auth token
        auth_info, err = self.__get_auth_token_info(
            ["targetHost", "targetDID"])
        if auth_info is None:
            return self.response.response_err(UNAUTHORIZED, err)

        # create backup token
        backup_token, err = self.__create_token(auth_info, "BackupToken")
        if err is not None:
            return self.response.response_err(UNAUTHORIZED, err)

        # response token
        data = {
            "backup_token": backup_token,
        }
        return self.response.response_ok(data)
class HiveFile:
    """v1 file-service endpoints, bridged to the IPFS-backed implementation
    (IpfsFiles) through v2_wrapper."""

    def __init__(self, app=None):
        self.app = app
        # v1-style JSON response helper tagged with this component's name.
        self.response = ServerResponse("HiveFile")
        self.ipfs_files = IpfsFiles()

    def init_app(self, app):
        """Attach the Flask app and configure upload limits."""
        self.app = app
        cfg = self.app.config
        cfg['UPLOAD_FOLDER'] = "./temp_file"
        cfg['MAX_CONTENT_PATH'] = 10000000
        cfg['MAX_CONTENT_LENGTH'] = 100 * 1024 * 1024

    def move(self, is_copy):
        """Move (or copy when *is_copy* is True) a file within the vault."""
        did, app_id, body, err = post_json_param_pre_proc(self.response, "src_path", "dst_path",
                                                          access_vault=VAULT_ACCESS_WR)
        if err is not None:
            return err

        _, failure = v2_wrapper(self.ipfs_files.move_copy_file)(
            did, app_id, body.get('src_path'), body.get('dst_path'), is_copy=is_copy
        )
        return failure if failure else self.response.response_ok()

    def upload_file(self, file_name):
        """Store the request body as *file_name* in the caller's vault."""
        did, app_id, err = pre_proc(self.response, access_vault=VAULT_ACCESS_WR)
        if err is not None:
            return err

        _, failure = v2_wrapper(self.ipfs_files.upload_file_with_path)(did, app_id, file_name)
        return failure if failure else self.response.response_ok()

    def download_file(self):
        """Stream a vault file back to the client; raw status codes on auth errors."""
        did, app_id = did_auth()
        if did is None or app_id is None:
            unauthorized = Response()
            unauthorized.status_code = UNAUTHORIZED
            return unauthorized

        status, _msg = can_access_vault(did, VAULT_ACCESS_R)
        if status != SUCCESS:
            denied = Response()
            denied.status_code = status
            return denied

        payload, failure = v2_wrapper(self.ipfs_files.download_file_with_path)(did, app_id, request.args.get('path'))
        return failure if failure else payload

    def get_property(self):
        """Return type/name/size/mtime properties of a vault path."""
        did, app_id, args, err = get_pre_proc(self.response, "path", access_vault=VAULT_ACCESS_R)
        if err is not None:
            return err

        meta, failure = v2_wrapper(self.ipfs_files.get_file_metadata)(did, app_id, args['path'])
        if failure:
            return failure

        return self.response.response_ok(HiveFile.get_info_by_metadata(meta))

    @staticmethod
    def get_info_by_metadata(metadata):
        """Map an internal metadata document to the v1 file-info shape."""
        is_file = metadata[COL_IPFS_FILES_IS_FILE]
        return {
            "type": "file" if is_file else "folder",
            "name": metadata[COL_IPFS_FILES_PATH],
            "size": metadata[SIZE],
            "last_modify": metadata['modified'],
        }

    def list_files(self):
        """List the folder at the 'path' query argument."""
        did, app_id = did_auth()
        if did is None or app_id is None:
            return self.response.response_err(UNAUTHORIZED, "auth failed")

        status, msg = can_access_vault(did, VAULT_ACCESS_R)
        if status != SUCCESS:
            return self.response.response_err(status, msg)

        docs, failure = v2_wrapper(self.ipfs_files.list_folder_with_path)(did, app_id, request.args.get('path'))
        if failure:
            return failure

        infos = [HiveFile.get_info_by_metadata(doc) for doc in docs]
        return self.response.response_ok({"file_info_list": infos})

    def file_hash(self):
        """Return the SHA-256 recorded for a vault path."""
        did, app_id, args, err = get_pre_proc(self.response, "path", access_vault=VAULT_ACCESS_R)
        if err is not None:
            return err

        meta, failure = v2_wrapper(self.ipfs_files.get_file_metadata)(did, app_id, args['path'])
        if failure:
            return failure

        return self.response.response_ok({"SHA256": meta[COL_IPFS_FILES_SHA256]})

    def delete(self):
        """Delete the vault path given in the JSON body."""
        did, app_id, body, err = post_json_param_pre_proc(self.response, "path", access_vault=VAULT_ACCESS_DEL)
        if err is not None:
            return err

        _, failure = v2_wrapper(self.ipfs_files.delete_file_with_path)(did, app_id, body.get('path'))
        return failure if failure else self.response.response_ok()
class HiveScripting:
    def __init__(self, app=None):
        # Flask app; may also be attached later via init_app().
        self.app = app
        # v1-style JSON response helper tagged with this component's name.
        self.response = ServerResponse("HiveScripting")
        # IPFS-backed file implementation used by the file-type executables.
        self.ipfs_files = IpfsFiles()

    def init_app(self, app):
        """Attach the Flask application instance."""
        self.app = app

    def __upsert_script_to_db(self, did, app_id, content):
        """Ensure the scripting collections exist, then upsert the script.

        The script is keyed by its "name" field; an existing script with the
        same name is replaced.  Returns (data, None) on success or
        (None, error_message) on failure.
        """
        # NOTE(review): a fresh MongoClient is built on every call and never
        # closed explicitly — presumably relying on pymongo's connection
        # pooling; confirm before changing.
        if hive_setting.MONGO_URI:
            uri = hive_setting.MONGO_URI
            connection = MongoClient(uri)
        else:
            connection = MongoClient(host=hive_setting.MONGO_HOST,
                                     port=hive_setting.MONGO_PORT)

        db_name = gene_mongo_db_name(did, app_id)
        db = connection[db_name]

        # Create both scripting collections if missing; CollectionInvalid
        # means the collection already exists and is not an error.
        # (Deduplicated: the original repeated this try/except per collection.)
        for collection_name in (SCRIPTING_SCRIPT_COLLECTION,
                                SCRIPTING_SCRIPT_TEMP_TX_COLLECTION):
            try:
                db.create_collection(collection_name)
            except CollectionInvalid:
                pass
            except Exception as e:
                return None, f"Could not create collection. Please try again later. Exception : {str(e)}"

        col = get_collection(did, app_id, SCRIPTING_SCRIPT_COLLECTION)
        query = {"name": content.get("name")}
        options = {"upsert": True, "bypass_document_validation": False}
        try:
            ret = col.replace_one(query, convert_oid(content), **options)
            data = {
                "acknowledged": ret.acknowledged,
                "matched_count": ret.matched_count,
                "modified_count": ret.modified_count,
                "upserted_id": str(ret.upserted_id),
            }
        except Exception as e:
            return None, f"Exception: method: '__upsert_script_to_db', Err: {str(e)}"

        # Refresh the vault's database-usage accounting after the write.
        db_size = get_mongo_database_size(did, app_id)
        update_vault_db_use_storage_byte(did, db_size)

        return data, None

    def __condition_validation(self, condition):
        """Recursively check that a condition tree is well-formed.

        AND/OR nodes must carry a list body; each element is validated by
        peeling the head off and recursing on the remainder.  Leaf nodes must
        be queryHasResults conditions with the required fields.
        """
        ctype = condition.get('type')
        cbody = condition.get('body')

        if ctype in (SCRIPTING_CONDITION_TYPE_AND, SCRIPTING_CONDITION_TYPE_OR):
            if not isinstance(cbody, list):
                return False
            if len(cbody) == 1:
                return self.__condition_validation(cbody[0])
            rest = {
                "type": condition.get('type'),
                "body": cbody[1:]
            }
            return (self.__condition_validation(cbody[0])
                    and self.__condition_validation(rest))

        if ctype == SCRIPTING_CONDITION_TYPE_QUERY_HAS_RESULTS:
            if check_json_param(condition, "condition",
                                args=["type", "name", "body"]):
                return False
            if check_json_param(cbody, "condition.body",
                                args=["collection", "filter"]):
                return False
            return True

        # Unknown condition type.
        return False

    def __executable_validation(self, executable):
        """Validate one executable definition.

        Returns a falsy value when valid, otherwise an error message string.
        """
        err = check_json_param(executable, "executable", args=["type", "body"])
        if err:
            return err

        executable_type = executable.get('type')
        executable_body = executable.get('body')

        if executable_type == SCRIPTING_EXECUTABLE_TYPE_AGGREGATED:
            if not isinstance(executable_body, list):
                return f"Invalid parameters passed for executable type '{executable_type}'"
            if len(executable_body) == 1:
                return self.__executable_validation(executable_body[0])
            # Fix: the original recursed only on body[1:], so every element
            # except the last was silently skipped; validate the head too.
            head_err = self.__executable_validation(executable_body[0])
            if head_err:
                return head_err
            return self.__executable_validation({
                "type": executable_type,
                "body": executable_body[1:]
            })

        if executable_type == SCRIPTING_EXECUTABLE_TYPE_FILE_DOWNLOAD:
            executable_name = executable.get('name')
            # We need to make sure that the script's name is not "_download" as it's a reserved field
            if executable_name == SCRIPTING_EXECUTABLE_DOWNLOADABLE:
                return f"invalid executable name '{executable_name}'. This name is reserved. Please use a different name"
            return check_json_param(executable_body,
                                    f"{executable_name}",
                                    args=["path"])

        # Remaining types only differ in which body fields are required.
        required_args = {
            SCRIPTING_EXECUTABLE_TYPE_FIND: ["collection"],
            SCRIPTING_EXECUTABLE_TYPE_INSERT: ["collection", "document"],
            SCRIPTING_EXECUTABLE_TYPE_DELETE: ["collection", "filter"],
            SCRIPTING_EXECUTABLE_TYPE_UPDATE: ["collection", "filter", "update"],
            SCRIPTING_EXECUTABLE_TYPE_FILE_UPLOAD: ["path"],
            SCRIPTING_EXECUTABLE_TYPE_FILE_PROPERTIES: ["path"],
            SCRIPTING_EXECUTABLE_TYPE_FILE_HASH: ["path"],
        }
        if executable_type in required_args:
            return check_json_param(executable_body,
                                    f"{executable.get('name')}",
                                    args=required_args[executable_type])

        return f"invalid executable type '{executable_type}'"

    def __condition_execution(self, did, app_did, target_did, target_app_did,
                              condition, params):
        """Evaluate a condition tree.

        AND/OR nodes combine their elements recursively with short-circuit
        semantics; leaf nodes are delegated to run_condition().
        """
        ctype = condition.get('type')
        cbody = condition.get('body')

        if ctype not in (SCRIPTING_CONDITION_TYPE_AND,
                         SCRIPTING_CONDITION_TYPE_OR):
            # Leaf: run the query-based condition against the database.
            return run_condition(did, app_did, target_did, target_app_did,
                                 cbody, params)

        if len(cbody) == 1:
            return self.__condition_execution(did, app_did, target_did,
                                              target_app_did, cbody[0], params)

        rest = {"type": ctype, "body": cbody[1:]}
        head = self.__condition_execution(did, app_did, target_did,
                                          target_app_did, cbody[0], params)
        if ctype == SCRIPTING_CONDITION_TYPE_AND:
            # Short-circuit: the tail only runs when the head succeeded.
            return head and self.__condition_execution(
                did, app_did, target_did, target_app_did, rest, params)
        # OR: the tail only runs when the head failed.
        return head or self.__condition_execution(
            did, app_did, target_did, target_app_did, rest, params)

    def __executable_execution(self,
                               did,
                               app_did,
                               target_did,
                               target_app_did,
                               executable,
                               params,
                               output=None,
                               output_key=None,
                               capture_output=False):
        """Run one executable (possibly aggregated) and collect its result.

        The result — or an error message — is stored in *output* under
        *output_key*; the dict is returned so the caller can read it back.
        Aggregated executables recurse, each element writing under its own
        name (or "output<i>").
        """
        # Fix: the original declared a mutable default (output={}), which
        # leaked results between independent top-level calls.
        if output is None:
            output = {}

        executable_type = executable.get('type')
        executable_body = executable.get('body')
        if not output_key:
            output_key = executable.get('name')

        if not capture_output:
            capture_output = executable.get('output', False)

        if executable_type == SCRIPTING_EXECUTABLE_TYPE_AGGREGATED:
            # Sub-executables write into the shared output dict themselves;
            # there is no aggregate value of our own to record.
            # (Fix: 'data' was unbound on this branch, raising NameError when
            # an aggregated executable had output=True.)
            data, err_message = None, None
            for i, e in enumerate(executable_body):
                self.__executable_execution(did, app_did, target_did,
                                            target_app_did, e, params, output,
                                            e.get('name', f"output{i}"),
                                            e.get('output', False))
        elif executable_type == SCRIPTING_EXECUTABLE_TYPE_FIND:
            data, err_message = run_executable_find(did, app_did, target_did,
                                                    target_app_did,
                                                    executable_body, params)
        elif executable_type == SCRIPTING_EXECUTABLE_TYPE_INSERT:
            data, err_message = run_executable_insert(did, app_did, target_did,
                                                      target_app_did,
                                                      executable_body, params)
        elif executable_type == SCRIPTING_EXECUTABLE_TYPE_UPDATE:
            data, err_message = run_executable_update(did, app_did, target_did,
                                                      target_app_did,
                                                      executable_body, params)
        elif executable_type == SCRIPTING_EXECUTABLE_TYPE_DELETE:
            data, err_message = run_executable_delete(did, app_did, target_did,
                                                      target_app_did,
                                                      executable_body, params)
        elif executable_type == SCRIPTING_EXECUTABLE_TYPE_FILE_UPLOAD:
            data, err_message = run_executable_file_upload(
                did, app_did, target_did, target_app_did, executable_body,
                params)
        elif executable_type == SCRIPTING_EXECUTABLE_TYPE_FILE_DOWNLOAD:
            data, err_message = run_executable_file_download(
                did, app_did, target_did, target_app_did, executable_body,
                params)
        elif executable_type == SCRIPTING_EXECUTABLE_TYPE_FILE_PROPERTIES:
            data, err_message = run_executable_file_properties(
                did, app_did, target_did, target_app_did, executable_body,
                params)
        elif executable_type == SCRIPTING_EXECUTABLE_TYPE_FILE_HASH:
            data, err_message = run_executable_file_hash(
                did, app_did, target_did, target_app_did, executable_body,
                params)
        else:
            data, err_message = None, f"invalid executable type '{executable_type}'"

        # Errors always land in the output; successful data only when requested.
        if err_message:
            output[output_key] = err_message
        elif capture_output:
            output[output_key] = data
        return output

    def __count_nested_condition(self, condition):
        content = copy.deepcopy(condition)
        count = {}
        for index, body in enumerate(content.get('body')):
            content_body = content.get('body')
            while isinstance(content_body, list):
                if index in count.keys():
                    count[index] += 1
                else:
                    count[index] = 1
                content_body = content_body[index].get('body')
        return count

    def set_script(self):
        """Validate and register (create or update) a script for the caller's vault.

        Expects 'name' and 'executable' in the JSON body; 'condition' and the
        two anonymity flags are optional. Returns the upsert result on success
        or an error response describing the validation failure.
        """
        # Request Validation
        did, app_id, content, err = post_json_param_pre_proc(
            self.response, "name", "executable", access_vault=VAULT_ACCESS_WR)
        if err:
            return err

        # Anonymity Options: absent flags default to False.
        for flag in ('allowAnonymousUser', 'allowAnonymousApp'):
            content[flag] = content.get(flag, False)

        logging.debug(
            f"Registering a script named '{content.get('name')}' with params: DID: '{did}', App DID: '{app_id}', "
            f"Anonymous User Access: {content['allowAnonymousUser']}, Anonymous App Access: {content['allowAnonymousApp']}"
        )

        # Anonymity Validation: an anonymous user with an authenticated app is
        # contradictory, since app identity is proven via the user identity.
        if (content['allowAnonymousUser'] is True
                and content['allowAnonymousApp'] is False):
            msg = "Error while validating anonymity options: Cannot set allowAnonymousUser to be True but " \
                  "allowAnonymousApp to be False as we cannot request an auth to prove an app identity without " \
                  "proving the user identity"
            logging.debug(msg)
            return self.response.response_err(BAD_REQUEST, msg)

        # Data Validation: escape '$'-prefixed keys for Mongo storage, then
        # structurally validate the executable tree.
        executable = content.get('executable')
        massage_keys_with_dollar_signs(executable)
        problem = self.__executable_validation(executable)
        if problem:
            logging.debug(f"Error while validating executables: {problem}")
            return self.response.response_err(BAD_REQUEST, problem)

        # Condition Validation (only when a condition is supplied).
        condition = content.get('condition', None)
        if condition:
            problem = check_json_param(condition, "condition",
                                       args=["type", "name", "body"])
            if not problem:
                nesting = self.__count_nested_condition(condition)
                if any(depth >= 5 for depth in nesting.values()):
                    problem = "conditions cannot be nested more than 5 times"
                elif not self.__condition_validation(condition):
                    problem = "some of the parameters are not set for 'condition'"
            if problem:
                logging.debug(f"Error while validating conditions: {problem}")
                return self.response.response_err(BAD_REQUEST, problem)

        # Create collection "scripts" if it doesn't exist and
        # create/update script in the database
        data, db_error = self.__upsert_script_to_db(did, app_id, content)
        if db_error:
            return self.response.response_err(INTERNAL_SERVER_ERROR, db_error)
        return self.response.response_ok(data)

    def __run_script(self, script_name, caller_did, caller_app_did, target_did,
                     target_app_did, params):
        """Fetch the script named *script_name* from the target vault and execute it.

        Checks vault access, the script's anonymity options and its optional
        condition before running the executable tree. Returns the executable
        output dict on success, otherwise an error response.
        """
        r, msg = can_access_vault(target_did, VAULT_ACCESS_R)
        if r != SUCCESS:
            logging.debug(
                f"Error while executing script named '{script_name}': vault can not be accessed"
            )
            return self.response.response_err(r, msg)

        # Find the script in the database
        col = get_collection(target_did, target_app_did,
                             SCRIPTING_SCRIPT_COLLECTION)
        content_filter = {"name": script_name}

        # Base not-found message; extended with exception details below.
        err_message = f"could not find script '{script_name}' in the database. Please register the script " \
                      f"first with set_script' API endpoint"
        try:
            script = col.find_one(content_filter)
        except Exception as e:
            err_message = f"{err_message}. Exception: {str(e)}"
            logging.debug(
                f"Error while executing script named '{script_name}': {err_message}"
            )
            return self.response.response_err(INTERNAL_SERVER_ERROR,
                                              err_message)

        if not script:
            logging.debug(
                f"Error while executing script named '{script_name}': {err_message}"
            )
            return self.response.response_err(NOT_FOUND, err_message)

        # Validate anonymity options
        # allowAnonymousUser without allowAnonymousApp is contradictory: app
        # identity cannot be proven without first proving the user identity.
        allow_anonymous_user = script.get('allowAnonymousUser', False)
        allow_anonymous_app = script.get('allowAnonymousApp', False)
        if (allow_anonymous_user is True) and (allow_anonymous_app is False):
            err_message = "Error while validating anonymity options: Cannot set allowAnonymousUser to be True but " \
                          "allowAnonymousApp to be False as we cannot request an auth to prove an app identity without " \
                          "proving the user identity"
            logging.debug(err_message)
            return self.response.response_err(BAD_REQUEST, err_message)
        # Anonymous access clears the caller identity; otherwise it is required.
        if allow_anonymous_user is True:
            caller_did = None
        else:
            if not caller_did:
                logging.debug(
                    f"Error while executing script named '{script_name}': Auth failed. caller_did "
                    f"not set")
                return self.response.response_err(
                    UNAUTHORIZED, "Auth failed. caller_did not set")
        if allow_anonymous_app is True:
            caller_app_did = None
        else:
            if not caller_app_did:
                logging.debug(
                    f"Error while executing script named '{script_name}': Auth failed. "
                    f"caller_app_did not set")
                return self.response.response_err(
                    UNAUTHORIZED, "Auth failed. caller_app_did not set")

        logging.debug(
            f"Executing a script named '{script_name}' with params: "
            f"Caller DID: '{caller_did}', Caller App DID: '{caller_app_did}', "
            f"Target DID: '{target_did}', Target App DID: '{target_app_did}', "
            f"Anonymous User Access: {allow_anonymous_user}, Anonymous App Access: {allow_anonymous_app}"
        )

        condition = script.get('condition', None)
        if condition:
            # Currently, there's only one kind of condition("count" db query)
            # NOTE(review): this re-checks vault access already verified at the
            # top of this method -- presumably redundant; confirm before removing.
            r, msg = can_access_vault(target_did, VAULT_ACCESS_R)
            if r != SUCCESS:
                logging.debug(
                    f"Error while executing script named '{script_name}': vault can not be accessed"
                )
                return self.response.response_err(r, msg)
            passed = self.__condition_execution(caller_did, caller_app_did,
                                                target_did, target_app_did,
                                                condition, params)
            if not passed:
                err_message = f"the conditions were not met to execute this script"
                logging.debug(
                    f"Error while executing script named '{script_name}': {err_message}"
                )
                return self.response.response_err(FORBIDDEN, err_message)

        # Dollar-prefixed keys were escaped for Mongo storage (set_script);
        # restore them before execution.
        executable = script.get("executable")
        unmassage_keys_with_dollar_signs(executable)
        output = {}
        data = self.__executable_execution(caller_did,
                                           caller_app_did,
                                           target_did,
                                           target_app_did,
                                           executable,
                                           params,
                                           output=output,
                                           output_key=executable.get(
                                               'name', "output0"))
        return data

    def run_script_url(self, target_did, target_app_did, script_name, params):
        """Execute a registered script for the authenticated caller (URL variant).

        Target identity comes from the URL; caller identity from the auth token.
        """
        # Resolve the caller's identity from the request's auth token.
        caller_did, caller_app_did = did_auth()
        result = self.__run_script(script_name, caller_did, caller_app_did,
                                   target_did, target_app_did, params)
        return self.response.response_ok(result)

    def run_script(self):
        """Execute the registered script named in the JSON request body.

        An optional 'context' object overrides the target DID / app DID,
        which otherwise default to the caller's own identity.
        """
        # Request script content first
        content, err = get_script_content(self.response, "name")
        if err:
            return err

        script_name = content.get('name')
        caller_did, caller_app_did = did_auth()
        # Default the target to the caller; 'context' (when present) overrides both.
        target_did, target_app_did = caller_did, caller_app_did
        context = content.get('context', {})
        if context:
            target_did = context.get('target_did', None)
            target_app_did = context.get('target_app_did', None)

        # Both target identities must be resolvable before execution.
        for value, label in ((target_did, "target_did"),
                             (target_app_did, "target_app_did")):
            if not value:
                logging.debug(
                    f"Error while executing script named '{script_name}': {label} not set"
                )
                return self.response.response_err(BAD_REQUEST,
                                                  f"{label} not set")

        result = self.__run_script(script_name, caller_did, caller_app_did,
                                   target_did, target_app_did,
                                   content.get('params', None))
        return self.response.response_ok(result)

    def run_script_upload(self, transaction_id):
        """Complete a file-upload transaction previously issued by a script."""
        row_id, target_did, target_app_did, file_name, err = \
            self.run_script_fileapi_setup(transaction_id, "upload")
        if err:
            logging.debug(err[1])
            return self.response.response_err(err[0], err[1])

        # Delegate the actual upload to the v2 IPFS file service.
        _, resp_err = v2_wrapper(self.ipfs_files.upload_file_with_path)(
            target_did, target_app_did, file_name)
        if resp_err:
            return resp_err

        teardown_error = self.run_script_fileapi_teardown(
            row_id, target_did, target_app_did, "upload")
        if teardown_error:
            logging.debug(teardown_error)
            return self.response.response_err(INTERNAL_SERVER_ERROR,
                                              teardown_error)
        return self.response.response_ok()

    def run_script_download(self, transaction_id):
        """Complete a file-download transaction previously issued by a script."""
        row_id, target_did, target_app_did, file_name, err = \
            self.run_script_fileapi_setup(transaction_id, "download")
        if err:
            logging.debug(err[1])
            return self.response.response_err(err[0], err[1])

        # Delegate the actual download to the v2 IPFS file service.
        data, resp_err = v2_wrapper(self.ipfs_files.download_file_with_path)(
            target_did, target_app_did, file_name)
        if resp_err:
            return resp_err

        teardown_error = self.run_script_fileapi_teardown(
            row_id, target_did, target_app_did, "download")
        if teardown_error:
            logging.debug(teardown_error)
            return self.response.response_err(INTERNAL_SERVER_ERROR,
                                              teardown_error)
        # Return the raw file payload from the v2 layer, not a JSON envelope.
        return data

    def run_script_fileapi_setup(self, transaction_id, fileapi_type):
        """Decode a transaction JWT and resolve its temporary file transaction.

        Returns (row_id, target_did, target_app_did, file_name, None) on
        success, or (None, None, None, None, [code, message]) on failure.
        """
        def fail(code, message):
            # Uniform failure shape: four None payload slots + [code, message].
            return None, None, None, None, [code, message]

        # Unpack the transaction token signed with the node's store password.
        try:
            transaction_detail = jwt.decode(transaction_id,
                                            hive_setting.DID_STOREPASS,
                                            algorithms=['HS256'])
            row_id = transaction_detail.get('row_id', None)
            target_did = transaction_detail.get('target_did', None)
            target_app_did = transaction_detail.get('target_app_did', None)
        except Exception as e:
            return fail(
                INTERNAL_SERVER_ERROR,
                f"Error while executing file {fileapi_type} via scripting: Could not unpack details "
                f"from transaction_id jwt token. Exception: {str(e)}")

        r, m = can_access_vault(target_did, VAULT_ACCESS_R)
        if r != SUCCESS:
            return fail(
                r,
                f"Error while executing file {fileapi_type} via scripting: vault can not be accessed")

        # Find the temporary tx in the database
        try:
            col = get_collection(target_did, target_app_did,
                                 SCRIPTING_SCRIPT_TEMP_TX_COLLECTION)
            script_temp_tx = col.find_one({"_id": ObjectId(row_id)})
        except Exception as e:
            return fail(
                NOT_FOUND,
                f"Error while executing file {fileapi_type} via scripting: Exception: {str(e)}")

        if not script_temp_tx:
            return fail(
                NOT_FOUND,
                f"Error while executing file {fileapi_type} via scripting: "
                f"Exception: Could not find the transaction ID '{transaction_id}' in the database")

        file_name = script_temp_tx.get('file_name', None)
        if not file_name:
            return fail(
                NOT_FOUND,
                f"Error while executing file {fileapi_type} via scripting: Could not find a file_name "
                f"'{file_name}' to be used to upload")

        return row_id, target_did, target_app_did, file_name, None

    def run_script_fileapi_teardown(self, row_id, target_did, target_app_did,
                                    fileapi_type):
        """Delete the consumed temp transaction row and refresh DB storage usage.

        Returns an error message string on failure, None on success.
        """
        prefix = f"Error while executing file {fileapi_type} via scripting"
        try:
            col = get_collection(target_did, target_app_did,
                                 SCRIPTING_SCRIPT_TEMP_TX_COLLECTION)
            _, delete_error = query_delete_one(
                col, {"filter": {"_id": ObjectId(row_id)}})
            if delete_error:
                return f"{prefix}: {delete_error}"
            # Deleting the row shrinks the DB; record the new size for quota tracking.
            update_vault_db_use_storage_byte(
                target_did, get_mongo_database_size(target_did, target_app_did))
        except Exception as e:
            return f"{prefix}: Exception: {str(e)}"
        return None
# NOTE(review): the two lines below ("Example #9" / "0") are scraped-page
# artifacts, not Python; commented out so the module parses.
# Example #9
# 0
class HiveMongoDb:
    """REST handlers exposing per-(DID, app) MongoDB collections.

    Every handler authenticates and validates the request via
    post_json_param_pre_proc, resolves the caller's collection and, for
    mutating operations, refreshes the vault's database-storage accounting.
    """

    def __init__(self, app=None):
        self.app = app
        self.response = ServerResponse("HiveMongoDb")

    def init_app(self, app):
        """Attach the Flask application instance."""
        self.app = app

    def create_collection(self):
        """Create the named collection; succeeds even if it already exists."""
        did, app_id, content, err = post_json_param_pre_proc(self.response, "collection", access_vault=VAULT_ACCESS_WR)
        if err:
            return err

        collection_name = content.get('collection')

        connection = MongoClient(host=hive_setting.MONGO_HOST, port=hive_setting.MONGO_PORT)
        try:
            db = connection[gene_mongo_db_name(did, app_id)]
            try:
                # Return value unused; the creation side effect is all we need.
                db.create_collection(collection_name)
            except CollectionInvalid:
                # Collection already exists -- creation is idempotent.
                pass
            except Exception as e:
                return self.response.response_err(INTERNAL_SERVER_ERROR, "Exception:" + str(e))
        finally:
            # FIX: the original leaked this client; close it explicitly.
            connection.close()
        return self.response.response_ok()

    def delete_collection(self):
        """Drop the named collection and refresh the vault's storage usage."""
        did, app_id, content, err = post_json_param_pre_proc(self.response, "collection", access_vault=VAULT_ACCESS_DEL)
        if err:
            return err

        collection_name = content.get('collection', None)
        if collection_name is None:
            return self.response.response_err(BAD_REQUEST, "parameter is null")

        connection = MongoClient(host=hive_setting.MONGO_HOST, port=hive_setting.MONGO_PORT)
        try:
            db = connection[gene_mongo_db_name(did, app_id)]
            try:
                db.drop_collection(collection_name)
                db_size = get_mongo_database_size(did, app_id)
                update_vault_db_use_storage_byte(did, db_size)
            except CollectionInvalid:
                pass
            except Exception as e:
                return self.response.response_err(INTERNAL_SERVER_ERROR, "Exception:" + str(e))
        finally:
            # FIX: the original leaked this client; close it explicitly.
            connection.close()
        return self.response.response_ok()

    def insert_one(self):
        """Insert a single document into the named collection."""
        did, app_id, content, err = post_json_param_pre_proc(self.response, "collection", "document",
                                                             access_vault=VAULT_ACCESS_WR)
        if err:
            return err

        options = populate_options_insert_one(content)

        col = get_collection(did, app_id, content["collection"])
        if not col:
            return self.response.response_err(NOT_FOUND, "collection not exist")

        data, err_message = query_insert_one(col, content, options)
        if err_message:
            return self.response.response_err(INTERNAL_SERVER_ERROR, err_message)

        # Inserting grows the database; update the vault's usage figure.
        db_size = get_mongo_database_size(did, app_id)
        update_vault_db_use_storage_byte(did, db_size)
        return self.response.response_ok(data)

    def insert_many(self):
        """Insert a list of documents, stamping each with created/modified."""
        did, app_id, content, err = post_json_param_pre_proc(self.response, "collection", "document",
                                                             access_vault=VAULT_ACCESS_WR)
        if err:
            return err

        col = get_collection(did, app_id, content["collection"])
        if not col:
            return self.response.response_err(NOT_FOUND, "collection not exist")

        options = options_filter(content, ("bypass_document_validation", "ordered"))

        try:
            new_document = []
            for document in content["document"]:
                # FIX: use a single timestamp per document so created ==
                # modified exactly (original called utcnow() twice per doc).
                now = datetime.utcnow()
                document["created"] = now
                document["modified"] = now
                new_document.append(convert_oid(document))

            ret = col.insert_many(new_document, **options)
            db_size = get_mongo_database_size(did, app_id)
            update_vault_db_use_storage_byte(did, db_size)
            data = {
                "acknowledged": ret.acknowledged,
                "inserted_ids": [str(_id) for _id in ret.inserted_ids]
            }
            return self.response.response_ok(data)
        except Exception as e:
            return self.response.response_err(INTERNAL_SERVER_ERROR, "Exception:" + str(e))

    def update_one(self):
        """Update the first document matching 'filter' with 'update'."""
        did, app_id, content, err = post_json_param_pre_proc(self.response, "collection", "filter", "update",
                                                             access_vault=VAULT_ACCESS_WR)
        if err:
            return err

        options = populate_options_update_one(content)

        col = get_collection(did, app_id, content["collection"])
        if not col:
            return self.response.response_err(NOT_FOUND, "collection not exist")

        data, err_message = query_update_one(col, content, options)
        if err_message:
            return self.response.response_err(INTERNAL_SERVER_ERROR, err_message)

        db_size = get_mongo_database_size(did, app_id)
        update_vault_db_use_storage_byte(did, db_size)
        return self.response.response_ok(data)

    def update_many(self):
        """Update every document matching 'filter', maintaining timestamps."""
        did, app_id, content, err = post_json_param_pre_proc(self.response, "collection", "filter", "update",
                                                             access_vault=VAULT_ACCESS_WR)
        if err:
            return err

        col = get_collection(did, app_id, content["collection"])
        if not col:
            return self.response.response_err(NOT_FOUND, "collection not exist")

        options = options_filter(content, ("upsert", "bypass_document_validation"))

        try:
            # Upserted documents get a 'created' stamp via $setOnInsert;
            # updated documents refresh 'modified' when $set is used.
            update_set_on_insert = content.get('update').get('$setOnInsert', None)
            if update_set_on_insert:
                content["update"]["$setOnInsert"]['created'] = datetime.utcnow()
            else:
                content["update"]["$setOnInsert"] = {
                    "created": datetime.utcnow()
                }
            if "$set" in content["update"]:
                content["update"]["$set"]["modified"] = datetime.utcnow()
            ret = col.update_many(convert_oid(content["filter"]), convert_oid(content["update"], update=True),
                                  **options)
            data = {
                "acknowledged": ret.acknowledged,
                "matched_count": ret.matched_count,
                "modified_count": ret.modified_count,
                "upserted_id": str(ret.upserted_id)
            }
            db_size = get_mongo_database_size(did, app_id)
            update_vault_db_use_storage_byte(did, db_size)
            return self.response.response_ok(data)
        except Exception as e:
            return self.response.response_err(INTERNAL_SERVER_ERROR, "Exception:" + str(e))

    def delete_one(self):
        """Delete the first document matching 'filter'."""
        did, app_id, content, err = post_json_param_pre_proc(self.response, "collection", "filter",
                                                             access_vault=VAULT_ACCESS_DEL)
        if err:
            return err

        col = get_collection(did, app_id, content["collection"])
        if not col:
            return self.response.response_err(NOT_FOUND, "collection not exist")

        data, err_message = query_delete_one(col, content)
        if err_message:
            return self.response.response_err(INTERNAL_SERVER_ERROR, err_message)

        db_size = get_mongo_database_size(did, app_id)
        update_vault_db_use_storage_byte(did, db_size)
        return self.response.response_ok(data)

    def delete_many(self):
        """Delete every document matching 'filter'."""
        did, app_id, content, err = post_json_param_pre_proc(self.response, "collection", "filter",
                                                             access_vault=VAULT_ACCESS_DEL)
        if err:
            return err

        col = get_collection(did, app_id, content["collection"])
        if not col:
            return self.response.response_err(NOT_FOUND, "collection not exist")

        try:
            ret = col.delete_many(convert_oid(content["filter"]))
            data = {
                "acknowledged": ret.acknowledged,
                "deleted_count": ret.deleted_count,
            }
            db_size = get_mongo_database_size(did, app_id)
            update_vault_db_use_storage_byte(did, db_size)
            return self.response.response_ok(data)
        except Exception as e:
            return self.response.response_err(INTERNAL_SERVER_ERROR, "Exception:" + str(e))

    def count_documents(self):
        """Count documents matching 'filter' (read-only; no storage update)."""
        did, app_id, content, err = post_json_param_pre_proc(self.response, "collection", "filter",
                                                             access_vault=VAULT_ACCESS_R)
        if err:
            return err

        options = populate_options_count_documents(content)

        col = get_collection(did, app_id, content["collection"])
        if not col:
            return self.response.response_err(NOT_FOUND, "collection not exist")

        data, err_message = query_count_documents(col, content, options)
        if err_message:
            return self.response.response_err(INTERNAL_SERVER_ERROR, err_message)

        return self.response.response_ok(data)

    def find_one(self):
        """Return the first document matching the optional 'filter'."""
        did, app_id, content, err = post_json_param_pre_proc(self.response, "collection", access_vault=VAULT_ACCESS_R)
        if err:
            return err

        col = get_collection(did, app_id, content["collection"])
        if not col:
            return self.response.response_err(NOT_FOUND, "collection not exist")

        options = options_filter(content, ("projection",
                                           "skip",
                                           "sort",
                                           "allow_partial_results",
                                           "return_key",
                                           "show_record_id",
                                           "batch_size"))
        if "sort" in options:
            # Translate the JSON sort spec into pymongo's (key, direction) list.
            options["sort"] = gene_sort(options["sort"])

        try:
            if "filter" in content:
                result = col.find_one(convert_oid(content["filter"]), **options)
            else:
                result = col.find_one(**options)

            # json_util round-trip converts BSON types (ObjectId, dates) to JSON.
            data = {"items": json.loads(json_util.dumps(result))}
            return self.response.response_ok(data)
        except Exception as e:
            return self.response.response_err(INTERNAL_SERVER_ERROR, "Exception:" + str(e))

    def find_many(self):
        """Return all documents matching the query described in the body."""
        did, app_id, content, err = post_json_param_pre_proc(self.response, "collection", access_vault=VAULT_ACCESS_R)
        if err:
            return err

        options = populate_options_find_many(content)

        # Consistency: index like the sibling handlers ("collection" is
        # guaranteed present by post_json_param_pre_proc above).
        col = get_collection(did, app_id, content["collection"])
        if not col:
            return self.response.response_err(NOT_FOUND, "collection not exist")

        data, err_message = query_find_many(col, content, options)
        if err_message:
            return self.response.response_err(INTERNAL_SERVER_ERROR, err_message)

        return self.response.response_ok(data)
class HiveBackup:
    mode = HIVE_MODE_DEV

    def __init__(self):
        """Create an unbound HiveBackup; call init_app() before serving."""
        self.app = None        # Flask app, attached later via init_app()
        self.backup_ftp = None  # placeholder for an FTP-based transport
        self.response = ServerResponse("HiveBackup")

    def init_app(self, app, mode):
        """Bind the Flask app and run mode; ensure the backup root exists.

        :param app: Flask application instance.
        :param mode: one of the HIVE_MODE_* constants, stored on the class.
        """
        backup_path = Path(hive_setting.BACKUP_VAULTS_BASE_DIR)
        # BUG FIX: the original tested the bound method ``backup_path.exists``
        # (always truthy), so the directory was never created. Call it.
        if not backup_path.exists():
            create_full_path_dir(backup_path)
        self.app = app
        HiveBackup.mode = mode

    # ------------------ common start ----------------------------

    @staticmethod
    def restore_vault_data(did):
        """Restore *did*'s vault folder from its configured backup target.

        Returns the (possibly refreshed) backup info dict, or None when no
        backup is configured or the backup type is unknown.
        """
        info = get_vault_backup_info(did)
        if not info:
            return None
        # Mark the vault as restoring before any file transfer starts.
        update_vault_backup_state(did, VAULT_BACKUP_STATE_RESTORE,
                                  VAULT_BACKUP_MSG_SUCCESS)
        vault_folder = get_vault_path(did)
        if not vault_folder.exists():
            create_full_path_dir(vault_folder)

        vault_backup_msg = VAULT_BACKUP_MSG_SUCCESS
        if info[VAULT_BACKUP_INFO_TYPE] == VAULT_BACKUP_INFO_TYPE_GOOGLE_DRIVE:
            # NOTE(review): this branch never sets vault_backup_msg to FAILED,
            # so Google Drive restore failures go unreported -- confirm intended.
            HiveBackup.__restore_google_drive(vault_folder,
                                              info[VAULT_BACKUP_INFO_DRIVE])
        elif info[VAULT_BACKUP_INFO_TYPE] == VAULT_BACKUP_INFO_TYPE_HIVE_NODE:
            HiveBackup.restore_from_hive_node_start(
                vault_folder, did, info[VAULT_BACKUP_INFO_DRIVE],
                info[VAULT_BACKUP_INFO_TOKEN])
            ret = HiveBackup.restore_backup_finish(
                did, info[VAULT_BACKUP_INFO_DRIVE] +
                INTER_BACKUP_RESTORE_FINISH_URL, info[VAULT_BACKUP_INFO_TOKEN])
            if not ret:
                vault_backup_msg = VAULT_BACKUP_MSG_FAILED
        else:
            logging.getLogger("HiveBackup").error(
                "restore_vault_data not support backup type:" +
                info[VAULT_BACKUP_INFO_TYPE])
            info = None

        if vault_backup_msg == VAULT_BACKUP_MSG_SUCCESS:
            # Files are in place: re-import the mongo dumps, then clear state
            # and remove the export files.
            import_mongo_db(did)
            update_vault_backup_state(did, VAULT_BACKUP_STATE_STOP,
                                      vault_backup_msg)
            delete_mongo_db_export(did)
        else:
            update_vault_backup_state(did, VAULT_BACKUP_STATE_STOP,
                                      vault_backup_msg)
        return info

    @staticmethod
    def export_mongo_db_did(did):
        """Export the mongo database of every (did, app) pair registered for *did*."""
        for did_info in get_all_did_info_by_did(did):
            export_mongo_db(did_info[DID], did_info[APP_ID])

    @staticmethod
    def save_vault_data(did):
        """Back up *did*'s vault folder to its configured backup target.

        Exports mongo databases first, transfers the vault folder, then
        clears the backup state. Returns the refreshed info dict, or None
        when no backup is configured or the backup type is unknown.
        """
        info = get_vault_backup_info(did)
        if not info:
            return None
        update_vault_backup_state(did, VAULT_BACKUP_STATE_BACKUP,
                                  VAULT_BACKUP_MSG_SUCCESS)
        # Dump mongo data to files so it travels with the vault folder.
        HiveBackup.export_mongo_db_did(did)
        did_vault_folder = get_vault_path(did)
        vault_backup_msg = VAULT_BACKUP_MSG_SUCCESS
        if info[VAULT_BACKUP_INFO_TYPE] == VAULT_BACKUP_INFO_TYPE_GOOGLE_DRIVE:
            # NOTE(review): like restore_vault_data, this branch never marks
            # vault_backup_msg FAILED on error -- confirm intended.
            HiveBackup.__save_google_drive(did_vault_folder,
                                           info[VAULT_BACKUP_INFO_DRIVE])
        elif info[VAULT_BACKUP_INFO_TYPE] == VAULT_BACKUP_INFO_TYPE_HIVE_NODE:
            checksum_list = get_file_checksum_list(did_vault_folder)
            if not checksum_list:
                logging.getLogger("HiveBackup").error(
                    f"{did} vault data is empty, no need to backup")
            else:
                HiveBackup.save_to_hive_node_start(
                    did_vault_folder, did, info[VAULT_BACKUP_INFO_DRIVE],
                    info[VAULT_BACKUP_INFO_TOKEN])
                ret = HiveBackup.save_to_hive_node_finish(
                    did, info[VAULT_BACKUP_INFO_DRIVE] +
                    INTER_BACKUP_SAVE_FINISH_URL,
                    info[VAULT_BACKUP_INFO_TOKEN], checksum_list)
                if not ret:
                    vault_backup_msg = VAULT_BACKUP_MSG_FAILED
        else:
            # NOTE(review): message says "restore_vault_data" but this is the
            # save path -- looks copy-pasted; confirm before changing the text.
            logging.getLogger("HiveBackup").error(
                "restore_vault_data not support backup type:" +
                info[VAULT_BACKUP_INFO_TYPE])
            info = None

        update_vault_backup_state(did, VAULT_BACKUP_STATE_STOP,
                                  vault_backup_msg)
        delete_mongo_db_export(did)
        if info:
            # if all ok, we return updated info
            info = get_vault_backup_info(did)
        return info

    # ------------------ common end ----------------------------

    # ------------------ backup to google start ----------------------------
    def __proc_google_drive_param(self):
        """Validate the Google Drive request, record backup info and rclone config.

        Returns (did, drive_name, None) on success, or (None, None, response)
        when validation fails or an early response must be sent.
        """
        did, content, err = did_post_json_param_pre_proc(
            self.response,
            'token',
            'refresh_token',
            'expiry',
            'client_id',
            'client_secret',
            access_vault=VAULT_ACCESS_R)
        if err:
            return None, None, err

        info = get_vault_backup_info(did)
        if info and info[VAULT_BACKUP_INFO_STATE] != VAULT_BACKUP_STATE_STOP:
            # If sync process more than one day, we think it is failed
            # NOTE(review): the busy-state response is returned only when the
            # running backup is OLDER than one day; a fresh in-progress backup
            # falls through and gets restarted. That looks inverted relative
            # to the comment above -- confirm intended semantics.
            if info[VAULT_BACKUP_INFO_TIME] < (datetime.utcnow().timestamp() -
                                               60 * 60 * 24):
                data = dict()
                data["vault_backup_state"] = info[VAULT_BACKUP_INFO_STATE]
                return None, None, self.response.response_ok(data)

        # Write the caller's OAuth material into a per-DID rclone config file.
        config_data = RcloneTool.get_config_data(content, did)
        drive_name = HiveBackup.gene_did_google_drive_name(did)

        RcloneTool.create_rclone_config_file(drive_name, config_data)
        upsert_vault_backup_info(did, VAULT_BACKUP_INFO_TYPE_GOOGLE_DRIVE,
                                 drive_name)
        return did, drive_name, None

    def save_to_google_drive(self):
        """Kick off an asynchronous backup of the caller's vault to Google Drive."""
        did, drive_name, response = self.__proc_google_drive_param()
        if response:
            return response
        # Test mode skips the worker thread so tests remain deterministic.
        if HiveBackup.mode != HIVE_MODE_TEST:
            _thread.start_new_thread(HiveBackup.save_vault_data, (did, ))
        return self.response.response_ok()

    def restore_from_google_drive(self):
        """Kick off an asynchronous restore of the caller's vault from Google Drive."""
        did, drive_name, err = self.__proc_google_drive_param()
        if err:
            return err
        # Test mode skips the worker thread so tests remain deterministic.
        if HiveBackup.mode != HIVE_MODE_TEST:
            _thread.start_new_thread(HiveBackup.restore_vault_data, (did, ))
        return self.response.response_ok()

    def get_sync_state(self):
        """Report the vault's current backup/restore state for the caller."""
        did, app_id = did_auth()
        if did is None or app_id is None:
            return self.response.response_err(UNAUTHORIZED, "auth failed")

        info = get_vault_backup_info(did)
        if not info:
            # No backup record yet: report an idle, successful state.
            data = {
                "hive_backup_state": VAULT_BACKUP_STATE_STOP,
                "result": VAULT_BACKUP_MSG_SUCCESS
            }
        else:
            data = {
                "hive_backup_state": info[VAULT_BACKUP_INFO_STATE],
                "result": info.get(VAULT_BACKUP_INFO_MSG,
                                   VAULT_BACKUP_MSG_SUCCESS)
            }
        return self.response.response_ok(data)

    @staticmethod
    def gene_did_google_drive_name(did):
        """Build the rclone remote name used for this DID's Google Drive config."""
        return f"gdrive_{did_tail_part(did)}"

    @staticmethod
    def __restore_google_drive(did_folder, drive_name):
        """Pull the vault folder down from Google Drive via rclone, then drop the config."""
        rclone_config = RcloneTool.find_rclone_config_file(drive_name)
        if not rclone_config.exists():
            return
        cmd = (f'rclone  --config {rclone_config.as_posix()} '
               f'sync {drive_name}:elastos_hive_node_data {did_folder.as_posix()}')
        if HiveBackup.mode != HIVE_MODE_TEST:
            subprocess.call(cmd, shell=True)
        RcloneTool.remove_rclone_config_file(drive_name)

    @staticmethod
    def __save_google_drive(did_folder, drive_name):
        """Push the vault folder up to Google Drive via rclone, then drop the config."""
        rclone_config = RcloneTool.find_rclone_config_file(drive_name)
        if not rclone_config.exists():
            return
        cmd = (f'rclone --config {rclone_config.as_posix()} '
               f'sync {did_folder.as_posix()} {drive_name}:elastos_hive_node_data')
        if HiveBackup.mode != HIVE_MODE_TEST:
            subprocess.call(cmd, shell=True)
        RcloneTool.remove_rclone_config_file(drive_name)

    # ------------------ backup to google end ----------------------------

    # ------------------ backup to node start ----------------------------
    def __proc_hive_node_param(self):
        """Validate a backup-to-node request and register the backup target.

        Checks the caller's credential, authenticates against the target
        backup node, records the backup target in the vault-backup info,
        and fetches the target's backup-service descriptor.

        Returns (did, backup_service, None) on success, or
        (None, None, response) where response is either an error response
        or a response_ok carrying the current backup state.
        """
        did, content, err = did_post_json_param_pre_proc(
            self.response, "backup_credential", access_vault=VAULT_ACCESS_R)
        if err:
            return None, None, err
        # Exchange the caller's credential for the target node's host + token.
        host, backup_token, err = view.h_auth.backup_auth_request(content)
        if err:
            return None, None, self.response.response_err(UNAUTHORIZED, err)

        info = get_vault_backup_info(did)
        if info and info[VAULT_BACKUP_INFO_STATE] != VAULT_BACKUP_STATE_STOP:
            # NOTE(review): this returns the in-progress state only when the
            # running backup started MORE than a day ago; a fresher one falls
            # through and is restarted. That looks inverted — confirm intent.
            if info[VAULT_BACKUP_INFO_TIME] < (datetime.utcnow().timestamp() -
                                               60 * 60 * 24):
                data = dict()
                data["vault_backup_state"] = info[VAULT_BACKUP_INFO_STATE]
                return None, None, self.response.response_ok(data)

        upsert_vault_backup_info(did, VAULT_BACKUP_INFO_TYPE_HIVE_NODE, host,
                                 backup_token)

        # Ask the target node for its backup service descriptor (quota etc.).
        data, err = self.get_backup_service(host + INTER_BACKUP_SERVICE_URL,
                                            backup_token)
        if err:
            return None, None, err

        backup_service = data["backup_service"]
        return did, backup_service, None

    def get_backup_service(self, url, backup_token):
        """Fetch the backup-service descriptor from the target backup node.

        Returns (json_data, None) on success or (None, error_response) on
        connection failure / non-success status.
        """
        headers = {
            "Content-Type": "application/json",
            "Authorization": "token " + backup_token
        }
        try:
            r = requests.get(url, headers=headers)
        except Exception as e:
            logging.getLogger("HiveBackup").error(
                f"start_internal_backup exception:{str(e)}, host:{url} backup_token:{backup_token}"
            )
            return None, self.response.response_err(BAD_REQUEST,
                                                    "start node backup error")

        if r.status_code == SUCCESS:
            return r.json(), None

        logging.getLogger("HiveBackup").error(
            "start_internal_backup error, host:" + url + " backup_token:" +
            backup_token + "error code:" + str(r.status_code))
        return None, self.response.response_err(
            r.status_code,
            "start internal backup error. content:" + str(r.content))

    @staticmethod
    def save_to_hive_node_finish(did, url, backup_token, checksum_list):
        """Tell the backup node the upload finished, sending the checksum list.

        Returns True when the node acknowledges with a success status.
        """
        log = logging.getLogger("HiveBackup")
        try:
            r = requests.post(url,
                              json={"checksum_list": checksum_list},
                              headers={
                                  "Content-Type": "application/json",
                                  "Authorization": "token " + backup_token
                              })
        except Exception as e:
            log.error(
                f"internal_save_app_list exception:{str(e)}, host:{url} backup_token:{backup_token}"
            )
            return False

        if r.status_code == SUCCESS:
            return True
        log.error(
            "internal_save_app_list error, host:" + url +
            " backup_token:" + backup_token + "error code:" +
            str(r.status_code) + " content:" + str(r.content))
        return False

    @staticmethod
    def restore_backup_finish(did, url, backup_token):
        """Notify the backup node a restore finished and verify local integrity.

        Posts the finish notification, then checks that every checksum the
        node reports is present among the restored local vault files.
        Returns True only when the vault exists and all checksums match.
        """
        log = logging.getLogger("HiveBackup")
        try:
            r = requests.post(url,
                              json={},
                              headers={
                                  "Content-Type": "application/json",
                                  "Authorization": "token " + backup_token
                              })
        except Exception as e:
            log.error(
                f"internal_restore_data exception:{str(e)}, did:{did} host:{url}"
            )
            return False

        if r.status_code != SUCCESS:
            log.error(
                f"internal_restore_data error, did:{did} host:{url} error code {str(r.status_code)} content {str(r.content)}"
            )
            return False

        checksum_list = r.json()["checksum_list"]
        vault_path = get_vault_path(did)
        if not vault_path.exists():
            log.error(
                f"internal_restore_data error, did:{did} host:{url} vault not exist"
            )
            return False

        local_checksums = get_file_checksum_list(vault_path)
        for checksum in checksum_list:
            if checksum not in local_checksums:
                log.error(
                    f"internal_restore_data error, did:{did} host:{url} vault restore check failed"
                )
                return False
        return True

    @staticmethod
    def __token_to_node_backup_data(access_token):
        """Split an internal FTP access token ("port:user:password") into parts.

        Uses maxsplit=2 so a password that itself contains ':' characters is
        not truncated — the exact inverse of __data_to_node_backup_token,
        which joins the three parts with ':'.
        Raises IndexError for a malformed token (fewer than three parts),
        matching the previous behavior.
        """
        alist = access_token.split(":", 2)
        ftp_port = alist[0]
        user = alist[1]
        password = alist[2]
        return ftp_port, user, password

    @staticmethod
    def __data_to_node_backup_token(ftp_port, user, password):
        """Pack FTP connection parts into the "port:user:password" token format."""
        return "{0}:{1}:{2}".format(ftp_port, user, password)

    @staticmethod
    def classify_save_files(saved_file_list, local_file_list, vault_folder):
        file_put_list = list()
        file_delete_list = list()
        file_patch_list = list()

        saved_file_dict = dict()
        for info in saved_file_list:
            name = info[1]
            checksum = info[0]
            saved_file_dict[name] = checksum

        # simple way of classifying
        for info in local_file_list:
            file_checksum = info[0]
            file_full_name = info[1]
            file_name = Path(info[1]).relative_to(vault_folder).as_posix()
            if file_name in saved_file_dict:
                save_checksum = saved_file_dict[file_name]
                if save_checksum != file_checksum:
                    file_patch_list.append([file_full_name, file_name])
                del saved_file_dict[file_name]
            else:
                file_put_list.append([file_full_name, file_name])

        for name in saved_file_dict.keys():
            file_delete_list.append(name)

        return file_put_list, file_patch_list, file_delete_list

    @staticmethod
    def put_files(file_put_list, host, token):
        """Upload each [local_path, remote_name] pair to the backup node.

        Per-file failures are logged and skipped so the rest still upload.
        """
        if not file_put_list:
            return

        log = logging.getLogger("HiveBackup")
        for src_file, dst_file in file_put_list:
            url = host + INTER_BACKUP_FILE_URL + '?file=' + dst_file
            try:
                with open(src_file, "br") as f:
                    f.seek(0)
                    r = requests.put(
                        url,
                        data=f,
                        headers={"Authorization": "token " + token})
            except Exception as e:
                log.error(f"__put_files exception:{str(e)}, host:{host}")
                continue
            if r.status_code != SUCCESS:
                log.error(f"__put_files err code:{r.status_code}, host:{host}")

    @staticmethod
    def get_unpatch_file_hash(file_name, host, token):
        """Download the remote file's block-hash list for rsync-style patching.

        Returns a list of (weak_hash, strong_hash_str) tuples, or None on
        connection failure / non-success status.
        """
        url = host + INTER_BACKUP_PATCH_HASH_URL + "?file=" + file_name
        try:
            r = requests.get(url,
                             headers={"Authorization": "token " + token},
                             stream=True)
        except Exception as e:
            logging.getLogger("HiveBackup").error(
                f"get_unpatch_file_hash exception:{str(e)}, host:{host}")
            return None
        if r.status_code != SUCCESS:
            logging.getLogger("HiveBackup").error(
                f"get_unpatch_file_hash error code is:" + str(r.status_code))
            return None

        hashes = list()
        # Each non-empty line is "<weak>,<strong>" in bytes.
        for raw in r.iter_lines(chunk_size=CHUNK_SIZE):
            if not raw:
                continue
            fields = raw.split(b',')
            hashes.append((int(fields[0]), fields[1].decode("utf-8")))
        return hashes

    @staticmethod
    def patch_remote_file(src_file_name, dst_file_name, host, token):
        """Update a remote backup file from a local one with an rsync-style delta.

        Fetches the remote block hashes, computes the delta from the local
        file, pickles it to a temp file and posts it to the backup node.
        Returns SUCCESS or an error code.
        """
        hashes = HiveBackup.get_unpatch_file_hash(dst_file_name, host, token)
        try:
            with open(src_file_name, "rb") as f:
                delta_list = rsyncdelta(f, hashes, blocksize=CHUNK_SIZE)
        except Exception as e:
            print(
                f"patch_remote_file get {src_file_name} delta exception:{str(e)}, host:{host}"
            )
            logging.getLogger("HiveBackup").error(
                f"patch_remote_file get {src_file_name} delta exception:{str(e)}, host:{host}"
            )
            return SERVER_OPEN_FILE_ERROR

        patch_delta_file = gene_temp_file_name()
        try:
            with open(patch_delta_file, "wb") as f:
                pickle.dump(delta_list, f)
        except Exception as e:
            logging.getLogger("HiveBackup").error(
                f"patch_remote_file dump {dst_file_name} delta exception:{str(e)}, host:{host}"
            )
            patch_delta_file.unlink()
            return SERVER_SAVE_FILE_ERROR

        post_delta_url = host + INTER_BACKUP_PATCH_DELTA_URL + "?file=" + dst_file_name
        try:
            with open(patch_delta_file.as_posix(), 'rb') as f:
                r = requests.post(post_delta_url,
                                  data=f,
                                  headers={"Authorization": "token " + token})
        except Exception as e:
            logging.getLogger("HiveBackup").error(
                f"patch_remote_file post {dst_file_name} exception:{str(e)}, host:{host}"
            )
            return SERVER_PATCH_FILE_ERROR
        finally:
            # BUGFIX: previously the temp delta file leaked whenever the POST
            # raised or returned a non-success status; always clean it up.
            patch_delta_file.unlink()
        if r.status_code != SUCCESS:
            return r.status_code
        return SUCCESS

    @staticmethod
    def patch_save_files(file_patch_list, host, token):
        """Send an rsync-style patch for every [local_path, remote_name] pair."""
        if not file_patch_list:
            return
        for src_file, dst_file in file_patch_list:
            HiveBackup.patch_remote_file(src_file, dst_file, host, token)

    @staticmethod
    def delete_files(file_delete_list, host, token):
        """Delete each named file on the backup node; failures are logged and skipped."""
        if not file_delete_list:
            return

        log = logging.getLogger("HiveBackup")
        for name in file_delete_list:
            url = host + INTER_BACKUP_FILE_URL + "?file=" + name
            try:
                r = requests.delete(
                    url, headers={"Authorization": "token " + token})
            except Exception as e:
                log.error(f"__delete_files exception:{str(e)}, host:{host}")
                continue
            if r.status_code != SUCCESS:
                log.error(f"__delete_files err code:{r.status_code}, host:{host}")

    @staticmethod
    def save_to_hive_node_start(vault_folder, did, host, backup_token):
        """Push the local vault to a backup node.

        Diffs the local files against the node's current file list, then
        uploads new files, patches changed ones and deletes removed ones.
        Returns False on an early failure; no value on success (unchanged
        from the original behavior).
        """
        log = logging.getLogger("HiveBackup")
        # 1. ask the backup node for its current file list (checksum + name)
        try:
            r = requests.get(host + INTER_BACKUP_FILE_LIST_URL,
                             headers={
                                 "Content-Type": "application/json",
                                 "Authorization": "token " + backup_token
                             })
        except Exception as e:
            log.error(
                f"save_to_hive_node_start INTER_BACKUP_FILE_LIST_URL exception:{str(e)}, did:{did} host:{host}"
            )
            return False
        if r.status_code != SUCCESS:
            log.error(
                f"save_to_hive_node_start INTER_BACKUP_FILE_LIST_URL error, did:{did} host:{host} error code {str(r.status_code)}"
            )
            return False

        # 2. classify local files against the remote list
        saved_file_list = r.json()["backup_files"]
        local_md5_gene = deal_dir(vault_folder.as_posix(), get_file_md5_info)
        put_list, patch_list, delete_list = HiveBackup.classify_save_files(
            saved_file_list, local_md5_gene, vault_folder)

        # 3. replay the classification against the backup node
        HiveBackup.put_files(put_list, host, backup_token)
        HiveBackup.patch_save_files(patch_list, host, backup_token)
        HiveBackup.delete_files(delete_list, host, backup_token)

    @staticmethod
    def classify_restore_files(saved_file_list, local_file_list, vault_folder):
        file_get_list = list()
        file_patch_list = list()
        file_delete_list = list()

        if not saved_file_list:
            return file_get_list, file_patch_list, file_delete_list

        local_file_dict = dict()
        for info in local_file_list:
            file_full_name = info[1]
            checksum = info[0]
            local_file_dict[file_full_name] = checksum

        # simple way of classifying
        for info in saved_file_list:
            file_checksum = info[0]
            file_name = filter_path_root(info[1])
            file_full_name = (vault_folder / file_name).as_posix()
            if file_full_name in local_file_dict:
                save_checksum = local_file_dict[file_full_name]
                if save_checksum != file_checksum:
                    file_patch_list.append([file_name, file_full_name])
                del local_file_dict[file_full_name]
            else:
                file_get_list.append([file_name, file_full_name])

        for file_full_name in local_file_dict.keys():
            file_delete_list.append(file_full_name)

        return file_get_list, file_patch_list, file_delete_list

    @staticmethod
    def get_files(file_get_list, host, token):
        """Download each [remote_name, local_path] pair from the backup node.

        Each file is streamed to a temp file first, then moved over the
        destination. Per-file failures are logged and skipped, and the temp
        file is removed on failure.
        """
        if not file_get_list:
            return

        log = logging.getLogger("HiveBackup")
        for info in file_get_list:
            src_file = info[0]
            # BUGFIX: Path.resolve() returns a new path (it does not mutate
            # in place), so the result must be assigned — the old
            # 'dst_file.resolve()' statement was a no-op.
            dst_file = Path(info[1]).resolve()
            temp_file = gene_temp_file_name()

            if not dst_file.parent.exists():
                if not create_full_path_dir(dst_file.parent):
                    log.error(
                        f"__get_files error mkdir :{dst_file.parent.as_posix()}, host:{host}"
                    )
                    continue
            try:
                r = requests.get(host + INTER_BACKUP_FILE_URL + "?file=" +
                                 src_file,
                                 stream=True,
                                 headers={"Authorization": "token " + token})
                with open(temp_file, 'bw') as f:
                    f.seek(0)
                    for chunk in r.iter_content(chunk_size=CHUNK_SIZE):
                        if chunk:
                            f.write(chunk)
            except Exception as e:
                log.error(f"__get_files exception:{str(e)}, host:{host}")
                temp_file.unlink()
                continue
            if r.status_code != SUCCESS:
                log.error(f"__get_files err code:{r.status_code}, host:{host}")
                temp_file.unlink()
                continue

            if dst_file.exists():
                dst_file.unlink()
            shutil.move(temp_file.as_posix(), dst_file.as_posix())

    @staticmethod
    def patch_local_file(src_file_name, dst_file_name, host, token):
        """Bring a local file up to date with the backup node's copy via rsync delta.

        Sends our block checksums, receives a pickled delta, applies it to a
        fresh temp file and atomically replaces the destination.
        Returns SUCCESS or an error code.
        """
        full_dst_file_name = Path(dst_file_name).resolve()
        # 1. send our block checksums so the node can compute a delta
        try:
            with open(full_dst_file_name, 'rb') as open_file:
                gene = gene_blockchecksums(open_file, blocksize=CHUNK_SIZE)
                hashes = ""
                for h in gene:
                    hashes += h
                r = requests.post(host + INTER_BACKUP_GENE_DELTA_URL +
                                  "?file=" + src_file_name,
                                  data=hashes,
                                  stream=True,
                                  headers={
                                      "content-type": "application/json",
                                      "Authorization": "token " + token
                                  })
        except Exception as e:
            logging.getLogger("HiveBackup").error(
                f"__delete_files exception:{str(e)}, host:{host}")
            return INTERNAL_SERVER_ERROR
        if r.status_code != SUCCESS:
            logging.getLogger("HiveBackup").error(
                f"__delete_files err code:{r.status_code}, host:{host}")
            return r.status_code

        # 2. spool the delta to a temp file, then unpickle it
        patch_delta_file = gene_temp_file_name()
        try:
            with open(patch_delta_file, 'wb') as f:
                f.seek(0)
                for chunk in r.iter_content(CHUNK_SIZE):
                    f.write(chunk)

            with open(patch_delta_file, 'rb') as f:
                # NOTE(review): pickle.load on data from the network — the
                # channel is token-authenticated, but pickle is unsafe on
                # untrusted input; confirm the trust model.
                delta_list = pickle.load(f)

            # 3. patch into a fresh temp file and swap it into place
            new_file = gene_temp_file_name()
            try:
                with open(full_dst_file_name, "br") as unpatched:
                    with open(new_file, "bw") as save_to:
                        unpatched.seek(0)
                        patchstream(unpatched, save_to, delta_list)
                if full_dst_file_name.exists():
                    full_dst_file_name.unlink()
                shutil.move(new_file.as_posix(), full_dst_file_name.as_posix())
            except Exception as e:
                logging.getLogger("HiveBackup").error(
                    f"exception of post_file_patch_delta patch error is {str(e)}")
                # BUGFIX: remove the orphaned patched temp file on failure
                if new_file.exists():
                    new_file.unlink()
                return SERVER_PATCH_FILE_ERROR
        finally:
            # BUGFIX: the delta temp file previously leaked on the error paths
            if patch_delta_file.exists():
                patch_delta_file.unlink()

        return SUCCESS

    @staticmethod
    def patch_restore_files(file_patch_list, host, token):
        """Apply a remote delta to every [remote_name, local_path] pair."""
        if not file_patch_list:
            return
        for src_file, dst_file in file_patch_list:
            HiveBackup.patch_local_file(src_file, dst_file, host, token)

    @staticmethod
    def restore_from_hive_node_start(vault_folder, did, host, backup_token):
        """Pull the vault back from a backup node.

        Diffs the node's file list against local files, then downloads
        missing files, patches changed ones and deletes extraneous local
        ones. Returns False on an early failure; no value on success
        (unchanged from the original behavior).
        """
        log = logging.getLogger("HiveBackup")
        # 1. ask the backup node for its current file list (checksum + name)
        try:
            r = requests.get(host + INTER_BACKUP_FILE_LIST_URL,
                             headers={
                                 "Content-Type": "application/json",
                                 "Authorization": "token " + backup_token
                             })
        except Exception as e:
            log.error(
                f"restore_from_hive_node_start INTER_BACKUP_FILE_LIST_URL exception:{str(e)}, did:{did} host:{host}"
            )
            return False
        if r.status_code != SUCCESS:
            log.error(
                f"restore_from_hive_node_start INTER_BACKUP_FILE_LIST_URL error, did:{did} host:{host} error code {str(r.status_code)}"
            )
            return False

        # 2. classify the remote list against local files
        saved_file_list = r.json()["backup_files"]
        local_file_gene = deal_dir(vault_folder.as_posix(), get_file_md5_info)
        get_list, patch_list, delete_list = HiveBackup.classify_restore_files(
            saved_file_list, local_file_gene, vault_folder)

        # 3. bring the local vault in line with the backup
        HiveBackup.get_files(get_list, host, backup_token)
        HiveBackup.patch_restore_files(patch_list, host, backup_token)
        HiveBackup.delete_files(delete_list, host, backup_token)

    def save_to_hive_node(self):
        """Start an asynchronous backup of the caller's vault to another hive node.

        Rejects the request when the vault's used storage exceeds the backup
        service's quota on the target node.
        """
        did, backup_service, err = self.__proc_hive_node_param()
        if err:
            return err

        use_storage = get_vault_used_storage(did)
        if use_storage > backup_service[VAULT_BACKUP_SERVICE_MAX_STORAGE]:
            # BUGFIX: error message was garbled ("dose not enough space") and
            # carried a pointless f-prefix.
            return self.response.response_err(
                INSUFFICIENT_STORAGE,
                "The backup hive node does not have enough space for backup")

        if HiveBackup.mode != HIVE_MODE_TEST:
            _thread.start_new_thread(HiveBackup.save_vault_data, (did, ))
        return self.response.response_ok()

    def restore_from_hive_node(self):
        """Start an asynchronous restore of the caller's vault from another hive node."""
        did, backup_service, failure = self.__proc_hive_node_param()
        if failure:
            return failure
        # Restore runs in a background thread; tests skip it.
        if HiveBackup.mode != HIVE_MODE_TEST:
            _thread.start_new_thread(HiveBackup.restore_vault_data, (did, ))
        return self.response.response_ok()

    def activate_to_vault(self):
        """Replace the live vault's contents with the locally held backup copy.

        Freezes the vault, wipes its current data, copies the local backup
        data in, re-imports the mongo dump, removes the export files and
        unfreezes the vault.
        """
        did, content, err = did_post_json_param_pre_proc(self.response)
        if err:
            return self.response.response_err(
                UNAUTHORIZED, "Backup backup_to_vault auth failed")

        # Both an active vault service and a backup service must exist.
        vault_service = get_vault_service(did)
        if not vault_service:
            return self.response.response_err(
                BAD_REQUEST, f"There is not vault service of {did} to active")

        backup_service = get_vault_backup_service(did)
        if not backup_service:
            return self.response.response_err(
                BAD_REQUEST, f"There is not vault backup service of {did}")

        freeze_vault(did)
        delete_user_vault_data(did)
        copy_local_backup_to_vault(did)
        # NOTE(review): elsewhere in this file import_mongo_db and
        # delete_mongo_db_export are called with (did, app_id); here they get
        # a single argument — confirm the helpers accept this form.
        import_mongo_db(did)
        delete_mongo_db_export(did)
        unfreeze_vault(did)
        return self.response.response_ok()
# Example #11
# 0
class HiveBackup:
    mode = HIVE_MODE_DEV

    def __init__(self):
        """Create the backup component; real wiring happens in init_app()."""
        self.app = None  # Flask app, assigned by init_app()
        self.response = ServerResponse("HiveBackup")  # response builder for this module
        self.backup_ftp = None  # FtpServer, started by init_app() outside test mode

    def init_app(self, app, mode):
        """Attach the Flask app, record the run mode and, outside test mode,
        start the internal backup FTP server in a background thread.
        """
        backup_path = Path(hive_setting.BACKUP_VAULTS_BASE_DIR)
        # BUGFIX: 'backup_path.exists' (without parentheses) is a bound method
        # object and therefore always truthy, so the backup directory was
        # never created; it must be called.
        if not backup_path.exists():
            create_full_path_dir(backup_path)
        self.app = app
        HiveBackup.mode = mode
        if mode != HIVE_MODE_TEST:
            print("hive_setting.BACKUP_VAULTS_BASE_DIR:" +
                  hive_setting.BACKUP_VAULTS_BASE_DIR)
            self.backup_ftp = FtpServer(hive_setting.BACKUP_VAULTS_BASE_DIR,
                                        hive_setting.BACKUP_FTP_PORT)
            self.backup_ftp.max_cons = 256
            self.backup_ftp.max_cons_per_ip = 10
            _thread.start_new_thread(self.backup_ftp.run, ())

    # ------------------ common start ----------------------------
    @staticmethod
    def import_did_mongodb_data(did):
        """Re-import the exported mongo dump for every app registered under this DID."""
        for did_info in get_all_did_info_by_did(did):
            import_mongo_db(did_info[DID], did_info[APP_ID])

    @staticmethod
    def export_did_mongodb_data(did):
        """Export the mongo database for every app registered under this DID."""
        for did_info in get_all_did_info_by_did(did):
            export_mongo_db(did_info[DID], did_info[APP_ID])

    @staticmethod
    def delete_did_mongodb_export_data(did):
        """Remove the mongo export files for every app registered under this DID."""
        for did_info in get_all_did_info_by_did(did):
            delete_mongo_db_export(did_info[DID], did_info[APP_ID])

    @staticmethod
    def restore_vault_data(did):
        """Restore the DID's vault from its configured backup target.

        Marks the backup state as RESTORE, pulls the vault folder from the
        configured target (Google Drive or another hive node), re-imports
        the mongo data and resets the state to STOP.
        Returns the backup info document, or None when there is none.
        """
        info = get_vault_backup_info(did)
        if not info:
            return None
        update_vault_backup_state(did, VAULT_BACKUP_STATE_RESTORE,
                                  VAULT_BACKUP_MSG_SUCCESS)
        did_folder = HiveBackup.get_did_vault_path(did)
        if not did_folder.exists():
            create_full_path_dir(did_folder)

        # Dispatch on the stored backup type.
        if info[VAULT_BACKUP_INFO_TYPE] == VAULT_BACKUP_INFO_TYPE_GOOGLE_DRIVE:
            HiveBackup.__restore_google_drive(did_folder,
                                              info[VAULT_BACKUP_INFO_DRIVE])
        elif info[VAULT_BACKUP_INFO_TYPE] == VAULT_BACKUP_INFO_TYPE_HIVE_NODE:
            HiveBackup.__restore_hive_node(did_folder,
                                           info[VAULT_BACKUP_INFO_FTP], did,
                                           info[VAULT_BACKUP_INFO_DRIVE],
                                           info[VAULT_BACKUP_INFO_TOKEN])
        else:
            logger.error("restore_vault_data not support backup type:" +
                         info[VAULT_BACKUP_INFO_TYPE])

        # Bring the restored mongo dumps back into the database, then clean up.
        HiveBackup.import_did_mongodb_data(did)
        update_vault_backup_state(did, VAULT_BACKUP_STATE_STOP,
                                  VAULT_BACKUP_MSG_SUCCESS)
        HiveBackup.delete_did_mongodb_export_data(did)
        return info

    @staticmethod
    def save_vault_data(did):
        """Back up the DID's vault to its configured backup target.

        Marks the backup state as BACKUP, exports mongo data alongside the
        vault files, pushes everything to the configured target (Google
        Drive or another hive node) and resets the state to STOP.
        Returns the backup info document, or None when there is none or the
        backup type is unknown.
        """
        info = get_vault_backup_info(did)
        if not info:
            return None
        update_vault_backup_state(did, VAULT_BACKUP_STATE_BACKUP,
                                  VAULT_BACKUP_MSG_SUCCESS)
        HiveBackup.export_did_mongodb_data(did)
        did_folder = HiveBackup.get_did_vault_path(did)

        # Dispatch on the stored backup type.
        if info[VAULT_BACKUP_INFO_TYPE] == VAULT_BACKUP_INFO_TYPE_GOOGLE_DRIVE:
            HiveBackup.__save_google_drive(did_folder,
                                           info[VAULT_BACKUP_INFO_DRIVE])
        elif info[VAULT_BACKUP_INFO_TYPE] == VAULT_BACKUP_INFO_TYPE_HIVE_NODE:
            HiveBackup.__save_hive_node(did_folder,
                                        info[VAULT_BACKUP_INFO_FTP], did,
                                        info[VAULT_BACKUP_INFO_DRIVE],
                                        info[VAULT_BACKUP_INFO_TOKEN])
            HiveBackup.internal_save_app_list(
                did, info[VAULT_BACKUP_INFO_DRIVE] + INTER_BACKUP_SAVE_URL,
                info[VAULT_BACKUP_INFO_TOKEN])
        else:
            # BUGFIX: the log message previously said "restore_vault_data"
            # (copy-paste from the restore path).
            logger.error("save_vault_data not support backup type:" +
                         info[VAULT_BACKUP_INFO_TYPE])
            info = None

        update_vault_backup_state(did, VAULT_BACKUP_STATE_STOP,
                                  VAULT_BACKUP_MSG_SUCCESS)
        HiveBackup.delete_did_mongodb_export_data(did)
        return info

    # ------------------ common end ----------------------------

    # ------------------ backup to google start ----------------------------
    def __proc_google_drive_param(self):
        """Validate a Google Drive backup/restore request and prepare rclone.

        Verifies the OAuth parameters, writes a per-DID rclone config file
        and records the backup target.

        Returns (did, drive_name, None) on success, or (None, None, response)
        where response is an error response or a response_ok carrying the
        current backup state.
        """
        did, app_id, content, err = post_json_param_pre_proc(
            self.response,
            'token',
            'refresh_token',
            'expiry',
            'client_id',
            'client_secret',
            access_vault=VAULT_ACCESS_R)
        if err:
            return None, None, err

        info = get_vault_backup_info(did)
        if info and info[VAULT_BACKUP_INFO_STATE] != VAULT_BACKUP_STATE_STOP:
            # If sync process more than one day, we think it is failed
            # NOTE(review): despite that comment, the code returns the
            # in-progress state only when the running backup is OLDER than a
            # day, and restarts a fresher one — the comparison looks
            # inverted; confirm intent.
            if info[VAULT_BACKUP_INFO_TIME] < (datetime.utcnow().timestamp() -
                                               60 * 60 * 24):
                data = dict()
                data["vault_backup_state"] = info[VAULT_BACKUP_INFO_STATE]
                return None, None, self.response.response_ok(data)

        config_data = RcloneTool.get_config_data(content, did)
        drive_name = HiveBackup.gene_did_google_drive_name(did)

        RcloneTool.create_rclone_config_file(drive_name, config_data)
        upsert_vault_backup_info(did, VAULT_BACKUP_INFO_TYPE_GOOGLE_DRIVE,
                                 drive_name)
        return did, drive_name, None

    def save_to_google_drive(self):
        """Validate the request and launch an async vault backup to Google Drive."""
        did, drive_name, failure = self.__proc_google_drive_param()
        if failure:
            return failure
        # The actual upload runs in a background thread; tests skip it.
        if HiveBackup.mode != HIVE_MODE_TEST:
            _thread.start_new_thread(HiveBackup.save_vault_data, (did, ))
        return self.response.response_ok()

    def restore_from_google_drive(self):
        """Validate the request and launch an async vault restore from Google Drive."""
        did, drive_name, failure = self.__proc_google_drive_param()
        if failure:
            return failure
        # Restore runs in a background thread; tests skip it.
        if HiveBackup.mode != HIVE_MODE_TEST:
            _thread.start_new_thread(HiveBackup.restore_vault_data, (did, ))
        return self.response.response_ok()

    def get_sync_state(self):
        """Return the vault backup state for the authenticated caller."""
        did, app_id = did_auth()
        if did is None or app_id is None:
            return self.response.response_err(UNAUTHORIZED, "auth failed")

        info = get_vault_backup_info(did)
        # No backup record means the backup machinery is idle.
        state = info[VAULT_BACKUP_INFO_STATE] if info else VAULT_BACKUP_STATE_STOP
        return self.response.response_ok({"hive_backup_state": state})

    @staticmethod
    def gene_did_google_drive_name(did):
        """Derive the rclone remote (drive) name used for this DID."""
        return "gdrive_" + did_tail_part(did)

    @staticmethod
    def get_did_vault_path(did):
        """Absolute, resolved path of this DID's vault folder under VAULTS_BASE_DIR."""
        base = pathlib.Path(hive_setting.VAULTS_BASE_DIR)
        # Relative base dirs are anchored to the current working directory.
        if not base.is_absolute():
            base = base.resolve()
        return (base / did_tail_part(did)).resolve()

    @staticmethod
    def __restore_google_drive(did_folder, drive_name):
        """Pull the vault folder down from Google Drive via rclone, then drop the config."""
        rclone_config = RcloneTool.find_rclone_config_file(drive_name)
        if not rclone_config.exists():
            return
        cmd = (f'rclone  --config {rclone_config.as_posix()} '
               f'sync {drive_name}:elastos_hive_node_data {did_folder.as_posix()}')
        if HiveBackup.mode != HIVE_MODE_TEST:
            subprocess.call(cmd, shell=True)
        RcloneTool.remove_rclone_config_file(drive_name)

    @staticmethod
    def __save_google_drive(did_folder, drive_name):
        """Push the vault folder up to Google Drive via rclone, then drop the config."""
        rclone_config = RcloneTool.find_rclone_config_file(drive_name)
        if not rclone_config.exists():
            return
        cmd = (f'rclone --config {rclone_config.as_posix()} '
               f'sync {did_folder.as_posix()} {drive_name}:elastos_hive_node_data')
        if HiveBackup.mode != HIVE_MODE_TEST:
            subprocess.call(cmd, shell=True)
        RcloneTool.remove_rclone_config_file(drive_name)

    # ------------------ backup to google end ----------------------------

    # ------------------ backup to node start ----------------------------
    def __proc_hive_node_param(self):
        """Validate a backup-to-node request and start the node's internal FTP.

        Authenticates against the target backup node, records the backup
        target, asks the node to start its internal FTP endpoint and stores
        the returned FTP access token in the backup info.

        Returns (did, access_token, backup_service, None) on success, or
        (None, None, None, response) where response is an error response or
        a response_ok carrying the current backup state.
        """
        did, app_id, content, err = post_json_param_pre_proc(
            self.response, "backup_credential")
        if err:
            return None, None, None, err
        # Exchange the caller's credential for the target node's host + token.
        host, backup_token, err = view.h_auth.backup_auth_request(content)
        if err:
            return None, None, None, self.response.response_err(
                UNAUTHORIZED, err)

        info = get_vault_backup_info(did)
        if info and info[VAULT_BACKUP_INFO_STATE] != VAULT_BACKUP_STATE_STOP:
            # NOTE(review): returns the in-progress state only when the
            # running backup started MORE than a day ago; a fresher one falls
            # through and is restarted — looks inverted, confirm intent.
            if info[VAULT_BACKUP_INFO_TIME] < (datetime.utcnow().timestamp() -
                                               60 * 60 * 24):
                data = dict()
                data["vault_backup_state"] = info[VAULT_BACKUP_INFO_STATE]
                return None, None, None, self.response.response_ok(data)

        upsert_vault_backup_info(did, VAULT_BACKUP_INFO_TYPE_HIVE_NODE, host,
                                 backup_token)

        data, err = self.start_internal_ftp(host + INTER_BACKUP_FTP_START_URL,
                                            backup_token)
        if err:
            return None, None, None, err

        backup_service = data["backup_service"]
        access_token = data["token"]
        # Persist the FTP access token for the background backup worker.
        update_vault_backup_info_item(did, VAULT_BACKUP_INFO_FTP, access_token)
        return did, access_token, backup_service, None

    def start_internal_ftp(self, url, backup_token):
        """Ask the remote backup node to start its internal FTP service.

        POSTs an empty JSON body to ``url`` with the backup token in the
        Authorization header.

        Returns:
            (data, None) with the node's JSON payload on success, otherwise
            (None, err_response).
        """
        param = {}
        try:
            r = requests.post(url,
                              json=param,
                              headers={
                                  "Content-Type": "application/json",
                                  "Authorization": "token " + backup_token
                              })
        except Exception as e:
            logger.error(
                f"start_internal_backup exception:{str(e)}, host:{url} backup_token:{backup_token}"
            )
            return None, self.response.response_err(BAD_REQUEST,
                                                    "start node backup error")

        if r.status_code != SUCCESS:
            ret = r.json()
            logger.error("start_internal_backup error, host:" + url +
                         " backup_token:" + backup_token + "error code:" +
                         str(r.status_code))
            # .get(): a missing "_error" key previously raised KeyError here
            # instead of falling back to the generic error response.
            if not ret.get("_error"):
                return None, self.response.response_err(
                    r.status_code,
                    "start internal backup error. content:" + str(r.content))
            else:
                return None, self.response.response_err(
                    ret["_error"]["code"], ret["_error"]["message"])
        else:
            data = r.json()
            return data, None

    @staticmethod
    def stop_internal_ftp(did, url, backup_token):
        """Ask the remote backup node to stop its internal FTP service.

        Best-effort cleanup: every failure is logged and swallowed so the
        caller's backup/restore flow is never aborted here.
        """
        param = {}
        try:
            r = requests.post(url,
                              json=param,
                              headers={
                                  "Content-Type": "application/json",
                                  "Authorization": "token " + backup_token
                              })
        except Exception as e:
            logger.error(
                f"stop_internal_backup exception:{str(e)}, host:{url} backup_token:{backup_token}"
            )
            return

        if r.status_code != SUCCESS:
            ret = r.json()
            # .get() avoids KeyError on a body without "_error";
            # str(r.status_code) fixes the original int+str TypeError
            # (str(r.status_code + " content:" ...)) that crashed this
            # logging path instead of logging anything.
            if not ret.get("_error"):
                logger.error("stop_internal_backup error, host:" + url +
                             " backup_token:" + backup_token + "error code:" +
                             str(r.status_code) + " content:" + str(r.content))
            else:
                logger.error("stop_internal_backup error, host:" + url +
                             " backup_token:" + backup_token + "error code:" +
                             str(r.status_code) + " message:" +
                             ret["_error"]["message"])

    @staticmethod
    def internal_save_app_list(did, url, backup_token):
        """Send the backup node the list of app ids registered under ``did``.

        Best-effort: every failure is logged and swallowed.
        """
        app_id_list = [
            did_info[APP_ID] for did_info in get_all_did_info_by_did(did)
        ]

        param = {"app_id_list": app_id_list}
        try:
            r = requests.post(url,
                              json=param,
                              headers={
                                  "Content-Type": "application/json",
                                  "Authorization": "token " + backup_token
                              })
        except Exception as e:
            logger.error(
                f"internal_save_app_list exception:{str(e)}, host:{url} backup_token:{backup_token}"
            )
            return

        if r.status_code != SUCCESS:
            ret = r.json()
            # .get() avoids KeyError on a body without "_error";
            # str(r.status_code) fixes the original int+str TypeError
            # (str(r.status_code + " content:" ...)) in this logging path.
            if not ret.get("_error"):
                logger.error("internal_save_app_list error, host:" + url +
                             " backup_token:" + backup_token + "error code:" +
                             str(r.status_code) + " content:" + str(r.content))
            else:
                logger.error("internal_save_app_list error, host:" + url +
                             " backup_token:" + backup_token + "error code:" +
                             str(r.status_code) + " message:" +
                             ret["_error"]["message"])

    @staticmethod
    def __token_to_node_backup_data(access_token):
        alist = access_token.split(":")
        ftp_port = alist[0]
        user = alist[1]
        password = alist[2]
        return ftp_port, user, password

    @staticmethod
    def __data_to_node_backup_token(ftp_port, user, password):
        return f"{ftp_port}:{user}:{password}"

    @staticmethod
    def __get_rclone_obscure(password):
        """Return ``rclone obscure <password>`` output (rclone's reversible
        password encoding for --ftp-pass), without the trailing newline.

        Uses subprocess.run instead of a hand-managed Popen: stdout is read,
        closed and the child reaped in one call, so nothing leaks if the read
        raises (the original only closed the pipe on the happy path).
        """
        proc = subprocess.run(["rclone", "obscure", password],
                              stdout=subprocess.PIPE,
                              encoding="utf-8")
        return proc.stdout.strip("\n")

    @staticmethod
    def __save_hive_node(did_folder, access_token, did, host, backup_token):
        """Mirror the DID's vault folder onto the backup node over FTP,
        then tell that node to end the FTP session."""
        ftp_port, user, password = HiveBackup.__token_to_node_backup_data(
            access_token)
        if HiveBackup.mode != HIVE_MODE_TEST:
            obscured = HiveBackup.__get_rclone_obscure(password)
            remote_host = get_host(host)
            cmd = f"rclone sync {did_folder.as_posix()} :ftp: --ftp-host={remote_host} --ftp-port={ftp_port} --ftp-user={user} --ftp-pass={obscured}"
            subprocess.call(cmd, shell=True)
        HiveBackup.stop_internal_ftp(did, host + INTER_BACKUP_FTP_END_URL,
                                     backup_token)

    @staticmethod
    def __restore_hive_node(did_folder, access_token, did, host, backup_token):
        """Pull the DID's vault folder back from the backup node over FTP,
        then tell that node to end the FTP session."""
        ftp_port, user, password = HiveBackup.__token_to_node_backup_data(
            access_token)
        if HiveBackup.mode != HIVE_MODE_TEST:
            obscured = HiveBackup.__get_rclone_obscure(password)
            remote_host = get_host(host)
            cmd = f"rclone sync :ftp: {did_folder.as_posix()} --ftp-host={remote_host} --ftp-port={ftp_port} --ftp-user={user} --ftp-pass={obscured}"
            subprocess.call(cmd, shell=True)
        HiveBackup.stop_internal_ftp(did, host + INTER_BACKUP_FTP_END_URL,
                                     backup_token)

    def save_to_hive_node(self):
        """Start backing this vault up to another hive node.

        Validates the request via __proc_hive_node_param, checks the remote
        backup service has enough storage for the vault's current usage, then
        kicks off the copy on a background thread and returns OK immediately.
        """
        did, access_token, backup_service, err = self.__proc_hive_node_param()
        if err:
            return err

        use_storage = get_vault_used_storage(did)
        if use_storage > backup_service[VAULT_BACKUP_SERVICE_MAX_STORAGE]:
            # Fixed garbled message ("dose not enough space") and dropped the
            # stray f-prefix on a string with no placeholders.
            return self.response.response_err(
                INSUFFICIENT_STORAGE,
                "The backup hive node does not have enough space for backup")

        if HiveBackup.mode != HIVE_MODE_TEST:
            _thread.start_new_thread(HiveBackup.save_vault_data, (did, ))
        return self.response.response_ok()

    def restore_from_hive_node(self):
        """Restore this vault's data from its backup hive node.

        The copy itself runs on a background thread; the HTTP call returns
        OK immediately after validation.
        """
        did, _token, _service, err = self.__proc_hive_node_param()
        if err:
            return err
        if HiveBackup.mode != HIVE_MODE_TEST:
            _thread.start_new_thread(HiveBackup.restore_vault_data, (did, ))
        return self.response.response_ok()

    # ------------------ backup to node end ----------------------------
    def inter_backup_save(self):
        """Internal endpoint: record the app id list a client is backing up.

        Stores content["app_id_list"] on the DID's backup-service document so
        backup_to_vault can later know which apps to restore.
        """
        did, content, err = did_post_json_param_pre_proc(
            self.response, "app_id_list")
        if err:
            # Fixed copy-pasted message that blamed backup_communication_start.
            return self.response.response_err(
                UNAUTHORIZED,
                "Backup internal inter_backup_save auth failed")

        update_vault_backup_service_item(did, VAULT_BACKUP_SERVICE_DATA,
                                         content["app_id_list"])
        return self.response.response_ok()

    def inter_backup_ftp_start(self):
        """Internal endpoint: open an FTP account so a peer node can push
        backup data for the authenticated DID onto this node.

        On success returns a JSON payload with a "token"
        ("port:user:password", built by __data_to_node_backup_token) and the
        DID's backup-service record stripped of internal fields.
        """
        did, content, err = did_post_json_param_pre_proc(self.response)
        if err:
            return self.response.response_err(
                UNAUTHORIZED,
                "Backup internal backup_communication_start auth failed")

        # check backup service exist
        info = get_vault_backup_service(did)
        if not info:
            return self.response.response_err(
                BAD_REQUEST, "There is no backup service of " + did)

        backup_path = get_vault_backup_path(did)
        if not backup_path.exists():
            create_full_path_dir(backup_path)

        # add user to backup ftp server
        user, passwd = gene_vault_backup_ftp_record(did)

        if self.mode != HIVE_MODE_TEST:
            # 'elradfmwMT' looks like a pyftpdlib-style permission string
            # (full read/write on the backup dir) — TODO confirm against the
            # backup_ftp implementation.
            self.backup_ftp.add_user(user, passwd, backup_path, 'elradfmwMT')

        # Remove internal fields (mongo _id, ftp record, saved app list)
        # before returning the record to the peer.  "_id" is deleted
        # unconditionally; the other two only when present.
        del info["_id"]
        if VAULT_BACKUP_SERVICE_FTP in info:
            del info[VAULT_BACKUP_SERVICE_FTP]
        if VAULT_BACKUP_SERVICE_DATA in info:
            del info[VAULT_BACKUP_SERVICE_DATA]

        data = {
            "token":
            HiveBackup.__data_to_node_backup_token(
                hive_setting.BACKUP_FTP_PORT, user, passwd),
            "backup_service":
            info
        }
        return self.response.response_ok(data)

    def inter_backup_ftp_end(self):
        """Internal endpoint: tear down the temporary backup FTP account
        created by inter_backup_ftp_start for the authenticated DID."""
        did, app_id = did_auth()
        if not did:
            return self.response.response_err(
                UNAUTHORIZED,
                "Backup internal backup_communication_end auth failed")

        ftp_user, _passwd = get_vault_backup_ftp_record(did)
        if not ftp_user:
            return self.response.response_err(
                BAD_REQUEST, "There is not backup process for " + did)

        if self.mode != HIVE_MODE_TEST:
            self.backup_ftp.remove_user(ftp_user)
        remove_vault_backup_ftp_record(did)
        return self.response.response_ok()

    def backup_to_vault(self):
        """Activate a vault from the backup data stored on this node: wipe the
        live vault, then re-import every saved app's files and mongo db.

        The vault is frozen for the duration of the import; a ``finally``
        guarantees it is unfrozen even when an import step raises (the
        original left the vault frozen forever on any error).
        """
        did, content, err = did_post_json_param_pre_proc(self.response)
        if err:
            return self.response.response_err(
                UNAUTHORIZED, "Backup backup_to_vault auth failed")

        vault_service = get_vault_service(did)
        if not vault_service:
            return self.response.response_err(
                BAD_REQUEST, f"There is not vault service of {did} to active")

        backup_service = get_vault_backup_service(did)
        if not backup_service:
            return self.response.response_err(
                BAD_REQUEST, f"There is not vault backup service of {did}")

        # Check *before* destroying the live vault: a backup record without a
        # saved app list previously wiped the vault and then crashed with
        # KeyError, leaving it frozen and empty.
        app_id_list = backup_service.get(VAULT_BACKUP_SERVICE_DATA)
        if not app_id_list:
            return self.response.response_err(
                BAD_REQUEST, f"There is no backup data of {did} to restore")

        freeze_vault(did)
        try:
            delete_user_vault(did)
            for app_id in app_id_list:
                import_files_from_backup(did, app_id)
                import_mongo_db_from_backup(did, app_id)
        finally:
            unfreeze_vault(did)
        return self.response.response_ok()