def create_vcrecord(self, cred_dict: dict) -> VCRecord:
    """Return a ``VCRecord`` built from a W3C credential dict.

    Args:
        cred_dict: credential as a JSON-LD dict. May be mutated in place
            (``@graph`` hoisting, empty ``credentialSubject`` default).

    Returns:
        VCRecord carrying contexts, expanded types, issuer id, subject ids,
        schema ids, proof types, the (possibly adjusted) given id and the
        credential value itself.
    """
    # Normalize "proof" to a list; it may be a single dict or absent.
    proofs = cred_dict.get("proof") or []
    if isinstance(proofs, dict):
        proofs = [proofs]
    proof_types = [proof.get("type") for proof in proofs] if proofs else None

    # Record only string context URLs; tolerate a missing "@context".
    contexts = [
        ctx for ctx in (cred_dict.get("@context") or []) if isinstance(ctx, str)
    ]

    # A derived credential may wrap its payload in "@graph"; hoist the
    # enclosed credential data (urn-identified, with a credentialSubject)
    # up to the top level.
    if "@graph" in cred_dict:
        for enclosed_data in cred_dict.get("@graph"):
            if (
                enclosed_data.get("id", "").startswith("urn:")
                and "credentialSubject" in enclosed_data
            ):
                cred_dict.update(enclosed_data)
                del cred_dict["@graph"]
                break

    # Derived credential ids are not stable; replace with a fresh UUID.
    given_id = cred_dict.get("id")
    if given_id and self.check_if_cred_id_derived(given_id):
        given_id = str(uuid4())

    # issuer may be a plain id string or an object holding an "id"
    issuer = cred_dict.get("issuer")
    if isinstance(issuer, dict):
        issuer = issuer.get("id")

    # subjects: normalize to a list and collect the ids that are present
    subject_ids = None
    subjects = cred_dict.get("credentialSubject")
    if subjects:
        if isinstance(subjects, dict):
            subjects = [subjects]
        subject_ids = [subject["id"] for subject in subjects if "id" in subject]
    else:
        cred_dict["credentialSubject"] = {}

    # Schemas: normalize to a list and collect ids
    schemas = cred_dict.get("credentialSchema", [])
    if isinstance(schemas, dict):
        schemas = [schemas]
    schema_ids = [schema.get("id") for schema in schemas]

    expanded = jsonld.expand(cred_dict)
    types = JsonLdProcessor.get_values(expanded[0], "@type")

    return VCRecord(
        contexts=contexts,
        expanded_types=types,
        issuer_id=issuer,
        subject_ids=subject_ids,
        proof_types=proof_types,
        given_id=given_id,
        cred_value=cred_dict,
        schema_ids=schema_ids,
    )
def normalize_jsonld(json_ld_to_normalize, document_loader=preloaded_context_document_loader,
                     detect_unmapped_fields=False):
    """
    Canonicalize the JSON-LD certificate.

    The detect_unmapped_fields parameter is a temporary, incomplete, workaround to detecting
    fields that do not correspond to items in the JSON-LD schemas. It works in the Blockcerts
    context because:
    - Blockcerts doesn't use a default vocab
    - fallback.org is not expected to occur

    Because unmapped fields get dropped during canonicalization, this uses a trick of adding
    {"@vocab": "http://fallback.org/"} to the json ld, which will cause any unmapped fields
    to be prefixed with http://fallback.org/.

    If a @vocab is already there (i.e. an issuer adds this in their extensions), then tampering
    will change the normalized form, hence the hash of the certificate, so we will still
    detect this during verification.

    This issue will be addressed in a first-class manner in the future by the pyld library.

    :param json_ld_to_normalize: JSON-LD document to canonicalize.
    :param document_loader: pyld document loader used during normalization.
    :param detect_unmapped_fields: raise if any field fails to map to the schema.
    :return: canonicalized (normalized) string form of the document.
    :raises BlockcertValidationError: when detect_unmapped_fields is set and
        unmapped fields are found in the normalized output.
    """
    json_ld = json_ld_to_normalize
    options = deepcopy(JSONLD_OPTIONS)
    if document_loader:
        options['documentLoader'] = document_loader

    if detect_unmapped_fields:
        # Work on a copy so the caller's document is not mutated.
        json_ld = deepcopy(json_ld_to_normalize)
        prev_context = JsonLdProcessor.get_values(json_ld_to_normalize, '@context')
        # If the document already declares a @vocab, unmapped fields will be
        # detected via the hash instead; only add the fallback when absent.
        add_fallback = not any(
            isinstance(pc, dict) and '@vocab' in pc for pc in prev_context
        )
        if add_fallback:
            prev_context.append(FALLBACK_CONTEXT)
        json_ld['@context'] = prev_context

    normalized = jsonld.normalize(json_ld, options=options)

    if detect_unmapped_fields and FALLBACK_VOCAB in normalized:
        # Raw string: '\.' must reach the regex engine, not be a Python escape.
        unmapped_fields = [
            m.group(0)
            for m in re.finditer(r'<http://fallback\.org/(.*)>', normalized)
        ]
        error_string = ', '.join(unmapped_fields)
        raise BlockcertValidationError(
            'There are some fields in the certificate that do not correspond '
            'to the expected schema. This has likely been tampered with. '
            'Unmapped fields are: ' + error_string)
    return normalized
async def _verify_presentation(
    *,
    presentation: dict,
    suites: List[LinkedDataProof],
    document_loader: DocumentLoaderMethod,
    challenge: str = None,
    domain: str = None,
    purpose: ProofPurpose = None,
):
    """Verify presentation structure, credentials, proof purpose and signature.

    Args:
        presentation: presentation document; must contain a "proof".
        suites: signature suites available for verification.
        document_loader: loader used to resolve JSON-LD documents.
        challenge: required when no explicit purpose is supplied.
        domain: optional domain bound into the default purpose.
        purpose: proof purpose; defaults to AuthenticationProofPurpose
            built from challenge/domain.

    Returns:
        PresentationVerificationResult combining the presentation proof
        result with per-credential results.

    Raises:
        LinkedDataProofException: if neither purpose nor challenge is
            provided, or the presentation lacks a "proof".
    """
    if not purpose and not challenge:
        raise LinkedDataProofException(
            'A "challenge" param is required for AuthenticationProofPurpose.'
        )
    if not purpose:
        purpose = AuthenticationProofPurpose(challenge=challenge, domain=domain)

    # TODO validate presentation structure here
    if "proof" not in presentation:
        raise LinkedDataProofException('presentation must contain "proof"')

    presentation_result = await ld_proofs_verify(
        document=presentation,
        suites=suites,
        purpose=purpose,
        document_loader=document_loader,
    )

    # Verify all embedded credentials concurrently.
    credentials = JsonLdProcessor.get_values(presentation, "verifiableCredential")
    credential_results = await asyncio.gather(
        *[
            verify_credential(
                credential=credential,
                suites=suites,
                document_loader=document_loader,
                # FIXME: we don't want to inherit the authentication purpose
                # from the presentation. However we do want to have subject
                # authentication I guess
                # purpose=purpose,
            )
            for credential in credentials
        ]
    )

    verified = all(result.verified for result in credential_results)

    return PresentationVerificationResult(
        verified=verified,
        presentation_result=presentation_result,
        credential_results=credential_results,
        errors=presentation_result.errors,
    )
def validate(
    self,
    *,
    proof: dict,
    document: dict,
    suite: LinkedDataProof,
    verification_method: dict,
    document_loader: DocumentLoaderMethod,
) -> PurposeResult:
    """Validate if the issuer matches the controller of the verification method."""
    try:
        base_result = super().validate(
            proof=proof,
            document=document,
            suite=suite,
            verification_method=verification_method,
            document_loader=document_loader,
        )

        # Nothing further to check when the base validation already failed.
        if not base_result.valid:
            return base_result

        # FIXME: Other implementations don't expand, but
        # if we don't expand we can't get the property using
        # the full CREDENTIALS_ISSUER_URL.
        [expanded_doc] = jsonld.expand(
            document,
            {
                "documentLoader": document_loader,
            },
        )
        issuers: List[dict] = JsonLdProcessor.get_values(
            expanded_doc, CREDENTIALS_ISSUER_URL
        )

        if not issuers:
            raise LinkedDataProofException("Credential issuer is required.")

        # The credential issuer must be the entity controlling the key.
        if base_result.controller.get("id") != issuers[0].get("@id"):
            raise LinkedDataProofException(
                "Credential issuer must match the verification method controller."
            )

        return base_result
    except Exception as e:
        return PurposeResult(valid=False, error=e)
async def _get_proofs(document: dict, proof_types: Union[List[str], None] = None) -> list:
    """Get proof set from document, optionally filtered by proof_types."""
    proofs = JsonLdProcessor.get_values(document, "proof")

    # When a filter is supplied, keep only proofs of a matching type.
    if proof_types:
        proofs = [proof for proof in proofs if proof["type"] in proof_types]

    if not proofs:
        raise LinkedDataProofException(
            "No matching proofs found in the given document")

    # Shallow copy proofs and add document context or SECURITY_CONTEXT_URL
    context = document.get("@context") or SECURITY_CONTEXT_URL
    return [{"@context": context, **proof} for proof in proofs]
def sign(to_sign, private_key, options, chain_name='mainnet'):
    """Sign a certificate with a bitcoin-message (ECDSA) signature.

    Args:
        to_sign: certificate JSON-LD dict to sign (not mutated).
        private_key: WIF-encoded bitcoin private key.
        options: signing options providing ``creator`` and ``created``.
        chain_name: bitcoin chain to select, default ``'mainnet'``.

    Returns:
        A deep copy of ``to_sign`` with a compacted ``signature`` block
        and the security context appended to ``@context``.
    """
    from copy import deepcopy

    # Do not shadow the ``copy`` module with a local of the same name.
    signed = deepcopy(to_sign)
    if 'signature' in signed:
        del signed['signature']

    # normalize and get data to hash
    # NOTE(review): normalization runs on the original ``to_sign`` (which may
    # still carry a prior 'signature'), not on the stripped copy — confirm
    # callers always pass an unsigned document.
    normalized = normalize_jsonld(to_sign)
    to_hash = _getDataToHash(normalized, options=options)

    # TODO: obtain lock while modifying global state
    bitcoin.SelectParams(chain_name)
    message = BitcoinMessage(to_hash)
    secret_key = CBitcoinSecret(private_key)
    signature = SignMessage(secret_key, message)

    # compact just signature part against all contexts
    signature_payload = {
        '@context': SECURITY_CONTEXT_URL,
        'type': algorithm,
        'creator': options.creator,
        'created': options.created,
        'signatureValue': signature.decode('utf-8'),
    }
    tmp = {'https://w3id.org/security#signature': signature_payload}

    prev_contexts = JsonLdProcessor.get_values(to_sign, '@context')
    if SECURITY_CONTEXT_URL not in prev_contexts:
        prev_contexts.append(SECURITY_CONTEXT_URL)
    c = {'@context': prev_contexts}
    res = jsonld.compact(
        tmp, c, options={'documentLoader': cached_document_loader}
    )

    signed['@context'] = prev_contexts
    signed['signature'] = res['signature']
    return signed
def get_values(self, property):
    """Return all values of *property* on this node as a list.

    Thin delegation to ``JsonLdProcessor.get_values`` with ``self`` as the
    subject. Note the parameter name shadows the ``property`` builtin; it is
    kept for caller (keyword-argument) compatibility.
    """
    return JsonLdProcessor.get_values(self, property)
def validate(
    self,
    *,
    proof: dict,
    document: dict,
    suite: "LinkedDataProof",
    verification_method: dict,
    document_loader: DocumentLoaderMethod,
) -> PurposeResult:
    """Validate whether verification method of proof is authorized by controller.

    Runs the base-class validation first, then frames the controller document
    and checks that the verification method id appears under ``self.term``
    (the proof-purpose relation, e.g. ``authentication``) on the controller.

    Any exception is captured and returned as an invalid ``PurposeResult``
    rather than propagated.
    """
    try:
        result = super().validate(
            proof=proof,
            document=document,
            suite=suite,
            verification_method=verification_method,
            document_loader=document_loader,
        )

        # Return early if super check was invalid
        if not result.valid:
            return result

        verification_id = verification_method.get("id")
        controller = verification_method.get("controller")

        # "controller" may be either an embedded object or a bare id string.
        if isinstance(controller, dict):
            controller_id = controller.get("id")
        elif isinstance(controller, str):
            controller_id = controller
        else:
            raise LinkedDataProofException(
                '"controller" must be a string or dict')

        # Get the controller document, framed so that only the proof-purpose
        # relation (self.term) is kept, with referenced methods not embedded.
        result.controller = jsonld.frame(
            controller_id,
            frame={
                "@context": SECURITY_CONTEXT_URL,
                "id": controller_id,
                self.term: {
                    "@embed": "@never",
                    "id": verification_id
                },
            },
            options={
                "documentLoader": document_loader,
                "expandContext": SECURITY_CONTEXT_URL,
                # if we don't set base explicitly it will remove the base in returned
                # document (e.g. use key:z... instead of did:key:z...)
                # same as compactToRelative in jsonld.js
                "base": None,
            },
        )

        # Retrieve all verification methods on controller associated with term
        verification_methods = JsonLdProcessor.get_values(
            result.controller, self.term)

        # Check if any of the verification methods matches with the verification id
        result.valid = any(method == verification_id
                           for method in verification_methods)

        if not result.valid:
            raise LinkedDataProofException(
                f"Verification method {verification_id} not authorized"
                f" by controller for proof purpose {self.term}")

        return result
    except Exception as e:
        # Surface failures as an invalid result instead of raising.
        return PurposeResult(valid=False, error=e)
async def store_credential(self, cred_ex_record: V20CredExRecord, cred_id: str = None) -> None: """Store linked data proof credential.""" # Get attachment data cred_dict: dict = cred_ex_record.cred_issue.attachment( LDProofCredFormatHandler.format) # Deserialize objects credential = VerifiableCredential.deserialize(cred_dict, unknown=INCLUDE) # Get signature suite, proof purpose and document loader suite = await self._get_suite(proof_type=credential.proof.type) purpose = self._get_proof_purpose( proof_purpose=credential.proof.proof_purpose, challenge=credential.proof.challenge, domain=credential.proof.domain, ) document_loader = self.profile.inject(DocumentLoader) # Verify the credential result = await verify_credential( credential=cred_dict, suites=[suite], document_loader=document_loader, purpose=purpose, ) if not result.verified: raise V20CredFormatError(f"Received invalid credential: {result}") # Saving expanded type as a cred_tag expanded = jsonld.expand(cred_dict) types = JsonLdProcessor.get_values( expanded[0], "@type", ) # create VC record for storage vc_record = VCRecord( contexts=credential.context_urls, expanded_types=types, issuer_id=credential.issuer_id, subject_ids=credential.credential_subject_ids, schema_ids=[], # Schemas not supported yet proof_types=[credential.proof.type], cred_value=credential.serialize(), given_id=credential.id, record_id=cred_id, cred_tags=None, # Tags should be derived from credential values ) # Create detail record with cred_id_stored detail_record = V20CredExRecordLDProof( cred_ex_id=cred_ex_record.cred_ex_id, cred_id_stored=vc_record.record_id) # save credential and detail record async with self.profile.session() as session: vc_holder = session.inject(VCHolder) await vc_holder.store_credential(vc_record) # Store detail record, emit event await detail_record.save(session, reason="store credential v2.0", event=True)