def create_service_entity_model(org_uuid, service_uuid, payload, status):
    """Build a ``Service`` entity model from a request payload.

    Args:
        org_uuid: UUID of the organization that owns the service.
        service_uuid: UUID of the service being created.
        payload: dict of service attributes (``service_id``, ``display_name``,
            ``groups``, ``contributors``, ...); missing keys fall back to
            empty defaults.
        status: name of a ``ServiceStatus`` enum member (e.g. ``"DRAFT"``).

    Returns:
        A populated ``Service`` entity.

    Raises:
        InvalidServiceStateException: if ``status`` is not a valid
            ``ServiceStatus`` member name.
    """
    try:
        # getattr on an Enum class raises AttributeError for unknown member
        # names. The previous bare `except:` also swallowed unrelated errors
        # (even KeyboardInterrupt) and mislabeled them as an invalid state.
        service_state = getattr(ServiceStatus, status).value
    except AttributeError:
        raise InvalidServiceStateException()
    service_state_entity_model = ServiceFactory.create_service_state_entity_model(
        org_uuid, service_uuid, service_state)
    service_group_entity_model_list = [
        ServiceFactory.create_service_group_entity_model(org_uuid, service_uuid, group)
        for group in payload.get("groups", [])
    ]
    # Keep only contributors that pass factory validation; invalid entries
    # are silently dropped (original behavior).
    contributors = [
        contributor for contributor in payload.get("contributors", [])
        if ServiceFactory.is_valid_contributor(contributor)
    ]
    return Service(
        org_uuid,
        service_uuid,
        payload.get("service_id", ""),
        payload.get("display_name", ""),
        payload.get("short_description", ""),
        payload.get("description", ""),
        payload.get("project_url", ""),
        payload.get("proto", {}),
        payload.get("assets", {}),
        payload.get("ranking", DEFAULT_SERVICE_RANKING),
        payload.get("rating", {}),
        contributors,
        payload.get("tags", []),
        payload.get("mpe_address", ""),
        payload.get("metadata_uri", ""),
        service_group_entity_model_list,
        service_state_entity_model)
def convert_service_db_model_to_entity_model(service):
    """Translate a service DB model (including its state row and group rows)
    into a ``Service`` domain entity."""
    # Map the one-to-one state row first.
    state_row = service.service_state
    service_state = ServiceState(state_row.org_uuid, state_row.service_uuid,
                                 state_row.state, state_row.transaction_hash)
    # Then map each group row to a ServiceGroup entity.
    group_entities = []
    for group_row in service.groups:
        group_entities.append(
            ServiceGroup(org_uuid=group_row.org_uuid,
                         service_uuid=group_row.service_uuid,
                         group_id=group_row.group_id,
                         group_name=group_row.group_name,
                         endpoints=group_row.endpoints,
                         pricing=group_row.pricing,
                         free_calls=group_row.free_calls,
                         daemon_address=group_row.daemon_address,
                         free_call_signer_address=group_row.free_call_signer_address))
    return Service(org_uuid=service.org_uuid,
                   uuid=service.uuid,
                   service_id=service.service_id,
                   display_name=service.display_name,
                   short_description=service.short_description,
                   description=service.description,
                   project_url=service.project_url,
                   proto=service.proto,
                   metadata_ipfs_hash=service.metadata_ipfs_hash,
                   assets=service.assets,
                   rating=service.rating,
                   ranking=service.ranking,
                   contributors=service.contributors,
                   tags=service.tags,
                   mpe_address=service.mpe_address,
                   service_state=service_state,
                   groups=group_entities)
def publish_service_data(self):
    """Publish this service's data: validate and push metadata to IPFS,
    decide whether an on-chain publish is required, and push off-chain
    configs to the contract API.

    Returns:
        dict: always contains ``"publish_to_blockchain"`` (bool); when an
        on-chain publish is required, also ``"service_metadata_ipfs_hash"``
        in the form ``"ipfs://<hash>"``.

    Raises:
        InvalidMetadataException: if the assembled service metadata fails
            validation.
    """
    # Validate service metadata
    current_service = self.publish_service_data_to_ipfs()
    is_valid = Service.is_metadata_valid(
        service_metadata=current_service.to_metadata())
    logger.info(
        f"is_valid :: {is_valid} :: validated current_metadata :: {current_service.to_metadata()}"
    )
    if not is_valid:
        raise InvalidMetadataException()
    # Monitor blockchain and offchain changes
    current_org = OrganizationPublisherRepository().get_organization(
        org_uuid=self._org_uuid)
    # Fetch what is currently registered for this service id (if anything)
    # so we can diff against it; no prior registration -> empty metadata.
    existing_service_data = self.get_existing_service_details_from_contract_api(
        current_service.service_id, current_org.id)
    if existing_service_data:
        existing_metadata = ipfs_util.read_file_from_ipfs(
            existing_service_data["ipfs_hash"])
    else:
        existing_metadata = {}
    # An on-chain publish is only needed when blockchain-relevant attributes
    # differ from what is already registered.
    publish_to_blockchain = self.are_blockchain_attributes_got_updated(
        existing_metadata, current_service.to_metadata())
    existing_offchain_configs = self.get_existing_offchain_configs(
        existing_service_data)
    current_offchain_attributes = ServicePublisherRepository(
    ).get_offchain_service_config(org_uuid=self._org_uuid,
                                  service_uuid=self._service_uuid)
    new_offchain_attributes = self.get_offchain_changes(
        current_offchain_config=current_offchain_attributes.configs,
        existing_offchain_config=existing_offchain_configs,
        current_service=current_service)
    status = {"publish_to_blockchain": publish_to_blockchain}
    if publish_to_blockchain:
        filename = f"{METADATA_FILE_PATH}/{current_service.uuid}_service_metadata.json"
        ipfs_hash = ServicePublisherService.publish_to_ipfs(
            filename, current_service.to_metadata())
        status["service_metadata_ipfs_hash"] = "ipfs://" + ipfs_hash
    # Off-chain configs are pushed unconditionally, even with no on-chain change.
    self.publish_offchain_service_configs(
        org_id=current_org.id,
        service_id=current_service.service_id,
        payload=json.dumps(new_offchain_attributes))
    # if there is no blockchain change update state as published
    # else status will be marked based event received from blockchain
    if not publish_to_blockchain:
        ServicePublisherRepository().save_service(
            username=self._username,
            service=current_service, state=ServiceStatus.PUBLISHED.value)
    return status
def create_service_from_service_metadata(org_uuid, service_uuid, service_id,
                                         service_metadata, tags_data, ranking,
                                         rating, status):
    """Construct a ``Service`` entity from service metadata plus
    separately-supplied tags, ranking, rating and status."""
    state = ServiceFactory.create_service_state_entity_model(
        org_uuid, service_uuid, getattr(ServiceStatus, status).value)
    groups = []
    for group in service_metadata.get("groups", []):
        # NOTE(review): org_uuid is passed as "" for group entities here
        # (unlike the sibling factory methods) — preserved as-is; confirm
        # this is intentional.
        groups.append(
            ServiceFactory.create_service_group_entity_model("", service_uuid, group))
    fetch = service_metadata.get
    return Service(org_uuid, service_uuid, service_id,
                   fetch("display_name", ""),
                   fetch("short_description", ""),
                   fetch("description", ""),
                   fetch("project_url", ""),
                   fetch("proto", {}),
                   fetch("assets", {}),
                   ranking,
                   rating,
                   fetch("contributors", []),
                   tags_data,
                   fetch("mpe_address", ""),
                   fetch("metadata_ipfs_hash", ""),
                   groups,
                   state)
def create_service_entity_model(org_uuid, service_uuid, payload, status):
    """Build a ``Service`` entity from a payload dict; missing keys fall
    back to empty defaults. ``status`` must name a ``ServiceStatus`` member
    (an unknown name raises ``AttributeError`` from the lookup)."""
    state_value = getattr(ServiceStatus, status).value
    service_state = ServiceFactory.create_service_state_entity_model(
        org_uuid, service_uuid, state_value)
    service_groups = []
    for group in payload.get("groups", []):
        service_groups.append(
            ServiceFactory.create_service_group_entity_model(
                org_uuid, service_uuid, group))
    return Service(
        org_uuid,
        service_uuid,
        payload.get("service_id", ""),
        payload.get("display_name", ""),
        payload.get("short_description", ""),
        payload.get("description", ""),
        payload.get("project_url", ""),
        payload.get("proto", {}),
        payload.get("assets", {}),
        payload.get("ranking", DEFAULT_SERVICE_RANKING),
        payload.get("rating", {}),
        payload.get("contributors", []),
        payload.get("tags", []),
        payload.get("mpe_address", ""),
        payload.get("metadata_ipfs_hash", ""),
        service_groups,
        service_state)
def _process_service_data(self, org_id, service_id, service_metadata, transaction_hash, metadata_uri):
    """Create or update a Service entity from on-chain service metadata.

    Persists differently depending on the existing service's recorded
    transaction hash:
      * no existing service  -> add a brand-new service record
      * hash is None         -> save the existing service as DRAFT
      * hash differs         -> save as DRAFT (major/minor diffing is TODO)
      * hash matches         -> curate in marketplace and mark PUBLISHED
    """
    org_uuid, existing_service = self._get_existing_service_details(
        org_id, service_id)
    service_uuid = str(uuid4())
    display_name = service_metadata.get("display_name", "")
    description_dict = service_metadata.get("service_description", {})
    short_description = description_dict.get("short_description", "")
    description = description_dict.get("description", "")
    project_url = description_dict.get("url", "")
    proto = {
        "encoding": service_metadata.get("encoding", ""),
        "service_type": service_metadata.get("service_type", ""),
        "model_ipfs_hash": service_metadata.get("model_ipfs_hash", "")
    }
    assets = service_metadata.get("assets", {})
    mpe_address = service_metadata.get("mpe_address", "")
    # NOTE: rebinds the `metadata_uri` parameter with the ipfs:// scheme prefix.
    metadata_uri = "ipfs://" + metadata_uri
    contributors = service_metadata.get("contributors", [])
    tags_data = service_metadata.get("tags", [])
    # Freshly-ingested services start in PUBLISHED_UNAPPROVED state.
    state = \
        ServiceFactory.create_service_state_entity_model(org_uuid, service_uuid,
                                                         getattr(ServiceStatus, "PUBLISHED_UNAPPROVED").value)
    self._add_validation_attribute_to_endpoint(
        service_metadata.get("groups", []))
    groups = [
        ServiceFactory.create_service_group_entity_model(
            org_uuid, service_uuid, group)
        for group in service_metadata.get("groups", [])
    ]
    if existing_service:
        # In-place update of the existing entity's blockchain-derived fields.
        existing_service.display_name = display_name
        existing_service.short_description = short_description
        existing_service.description = description
        existing_service.project_url = project_url
        existing_service.proto = proto
        existing_service.assets = ServiceFactory.parse_service_metadata_assets(
            assets, existing_service.assets)
        existing_service.mpe_address = mpe_address
        existing_service.metadata_uri = metadata_uri
        existing_service.contributors = contributors
        existing_service.tags = tags_data
        # Groups are rebuilt against the EXISTING service's uuid, not the
        # fresh `service_uuid` generated above.
        existing_service.groups = [
            ServiceFactory.create_service_group_entity_model(
                org_uuid, existing_service.uuid, group)
            for group in service_metadata.get("groups", [])
        ]
    recieved_service = \
        Service(org_uuid, str(uuid4()), service_id, display_name,
                short_description, description, project_url, proto, assets,
                DEFAULT_SERVICE_RANKING, {}, contributors, tags_data,
                mpe_address, metadata_uri, groups, state)
    if not existing_service:
        self._service_repository.add_service(recieved_service, BLOCKCHAIN_USER)
    elif existing_service.service_state.transaction_hash is None:
        self._service_repository.save_service(BLOCKCHAIN_USER,
                                              existing_service,
                                              ServiceStatus.DRAFT.value)
    elif existing_service.service_state.transaction_hash != transaction_hash:
        # TODO: Implement major & minor changes
        self._service_repository.save_service(BLOCKCHAIN_USER,
                                              existing_service,
                                              ServiceStatus.DRAFT.value)
    elif existing_service.service_state.transaction_hash == transaction_hash:
        # Hash matches the event's transaction: this publish round-tripped;
        # curate and mark the service published.
        self.__curate_service_in_marketplace(service_id, org_id, curated=True)
        self._service_repository.save_service(
            BLOCKCHAIN_USER, existing_service, ServiceStatus.PUBLISHED.value)
def test_get_service_metadata(self):
    """get_service_metadata should serialize a fully-populated Service into
    the registry metadata dict (version, groups, media, contributors, tags)."""
    # Fixture: a Service with one group and three asset entries; only the
    # hero_image asset is expected to surface in the `media` section.
    service = Service(
        org_uuid="test_org_uuid",
        uuid="test_service_uuid",
        service_id="test_service_id",
        display_name="test_display_name",
        short_description="test_short_description",
        description="test_description",
        project_url="https://dummy.io",
        proto={
            "encoding": "proto",
            "service_type": "grpc",
            "model_ipfs_hash": "test_model_ipfs_hash"
        },
        assets={
            "proto_files": {
                "url": "https://ropsten-marketplace-service-assets.s3.amazonaws.com/9887ec2e099e4afd92c4a052737eaa"
                       "97/services/7420bf47989e4afdb1797d1bba8090aa/proto/20200327130256_proto_files.zip",
                "ipfs_hash": "QmUfDprFisFeaRnmLEqks1AFN6iam5MmTh49KcomXHEiQY"
            },
            "hero_image": {
                "url": "QmUfDprFisFeaRnmLEqks1AFN6iam5MmTh49KcomXHEiQY/20200323130126_asset.png",
                "ipfs_hash": "QmUfDprFisFeaRnmLEqks1AFN6iam5MmTh49KcomXHEiQY"
            },
            "demo_files": {
                "url": "https://ropsten-marketplace-service-assets.s3.amazonaws.com/9887ec2e099e4afd92c4a052737eaa"
                       "97/services/7420bf47989e4afdb1797d1bba8090aa/component/20200401121414_component.zip",
                "ipfs_hash": "QmUfDprFisFeaRnmLEqks1AFN6iam5MmTh49KcomXHEiQY"
            }
        },
        ranking=1,
        rating={},
        contributors=[{
            'name': 'df',
            'email_id': '*****@*****.**'
        }],
        tags=["1234", "4321"],
        mpe_address="0x123",
        metadata_uri="",
        groups=[
            # "verfied" below is a (pre-existing) key spelled exactly as the
            # production code expects — do not "fix" it here.
            ServiceGroup(org_uuid="test_org_uuid",
                         service_uuid="test_service_uuid",
                         group_id="test",
                         group_name="test_name",
                         endpoints={
                             "https://dummydaemonendpoint.io": {
                                 "verfied": True
                             }
                         },
                         test_endpoints=["https://dummydaemonendpoint.io"],
                         pricing=[{
                             'default': True,
                             'price_model': 'fixed_price',
                             'price_in_cogs': 1
                         }],
                         free_calls=10,
                         daemon_address=["0xq2w3e4rr5t6y7u8i9"],
                         free_call_signer_address="0xq2s3e4r5t6y7u8i9o0")
        ],
        service_state=None)
    service_metadata = ServicePublisherDomainService(
        "", "", "").get_service_metadata(service, EnvironmentType.MAIN.value)
    # Disable unittest's diff truncation so dict mismatches print in full.
    self.assertDictEqual.__self__.maxDiff = None
    self.assertDictEqual(
        {
            'version': 1,
            'display_name': 'test_display_name',
            'encoding': 'proto',
            'service_type': 'grpc',
            'model_ipfs_hash': 'test_model_ipfs_hash',
            'mpe_address': '0x123',
            'groups': [{
                'free_calls': 10,
                'free_call_signer_address': '0xq2s3e4r5t6y7u8i9o0',
                'daemon_addresses': ['0xq2w3e4rr5t6y7u8i9'],
                'pricing': [{
                    'default': True,
                    'price_model': 'fixed_price',
                    'price_in_cogs': 1
                }],
                'endpoints': ['https://dummydaemonendpoint.io'],
                'group_id': 'test',
                'group_name': 'test_name'
            }],
            'service_description': {
                'url': 'https://dummy.io',
                'short_description': 'test_short_description',
                'description': 'test_description'
            },
            'media': [{
                'order': 1,
                'url': 'QmUfDprFisFeaRnmLEqks1AFN6iam5MmTh49KcomXHEiQY/20200323130126_asset.png',
                'file_type': 'image',
                'asset_type': 'hero_image',
                'alt_text': ''
            }],
            'contributors': [{
                'name': 'df',
                'email_id': '*****@*****.**'
            }],
            'tags': ['1234', '4321']
        }, service_metadata)