def generate_signed_legacy(address: bytes, output: Output):
    decoded_address = Base58.decode(address)
    # start from a known serialized legacy claim and re-sign it
    claim = OldClaimMessage()
    claim.ParseFromString(unhexlify(
        '080110011aee04080112a604080410011a2b4865726520617265203520526561736f6e73204920e29da4e'
        'fb88f204e657874636c6f7564207c20544c4722920346696e64206f7574206d6f72652061626f7574204e'
        '657874636c6f75643a2068747470733a2f2f6e657874636c6f75642e636f6d2f0a0a596f752063616e206'
        '6696e64206d65206f6e20746865736520736f6369616c733a0a202a20466f72756d733a2068747470733a'
        '2f2f666f72756d2e6865617679656c656d656e742e696f2f0a202a20506f64636173743a2068747470733'
        'a2f2f6f6666746f706963616c2e6e65740a202a2050617472656f6e3a2068747470733a2f2f7061747265'
        '6f6e2e636f6d2f7468656c696e757867616d65720a202a204d657263683a2068747470733a2f2f7465657'
        '37072696e672e636f6d2f73746f7265732f6f6666696369616c2d6c696e75782d67616d65720a202a2054'
        '77697463683a2068747470733a2f2f7477697463682e74762f786f6e64616b0a202a20547769747465723'
        'a2068747470733a2f2f747769747465722e636f6d2f7468656c696e757867616d65720a0a2e2e2e0a6874'
        '7470733a2f2f7777772e796f75747562652e636f6d2f77617463683f763d4672546442434f535f66632a0'
        'f546865204c696e75782047616d6572321c436f7079726967687465642028636f6e746163742061757468'
        '6f722938004a2968747470733a2f2f6265726b2e6e696e6a612f7468756d626e61696c732f46725464424'
        '34f535f666352005a001a41080110011a30040e8ac6e89c061f982528c23ad33829fd7146435bf7a4cc22'
        'f0bff70c4fe0b91fd36da9a375e3e1c171db825bf5d1f32209766964656f2f6d70342a5c080110031a406'
        '2b2dd4c45e364030fbfad1a6fefff695ebf20ea33a5381b947753e2a0ca359989a5cc7d15e5392a0d354c'
        '0b68498382b2701b22c03beb8dcb91089031b871e72214feb61536c007cdf4faeeaab4876cb397feaf6b51'
    ))
    # the signature covers the claim without its publisherSignature field
    claim.ClearField("publisherSignature")
    digest = sha256(b''.join([
        decoded_address,
        claim.SerializeToString(),
        output.claim_hash[::-1]
    ]))
    signature = output.private_key.sign_digest_deterministic(digest, hashfunc=hashlib.sha256)
    claim.publisherSignature.version = 1
    claim.publisherSignature.signatureType = 1
    claim.publisherSignature.signature = signature
    claim.publisherSignature.certificateId = output.claim_hash[::-1]
    return claim
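# A minimal verification sketch for the claim produced above, assuming
# `output.private_key` is an `ecdsa.SigningKey` (so `get_verifying_key()` and
# `verify_digest()` are available) and the default signature encoding was used.
# `check_signed_legacy` is a hypothetical helper, not part of the codebase.
def check_signed_legacy(address: bytes, output: Output, signed_claim) -> bool:
    unsigned = OldClaimMessage()
    unsigned.CopyFrom(signed_claim)
    unsigned.ClearField("publisherSignature")  # digest is over the unsigned form
    digest = sha256(b''.join([
        Base58.decode(address),
        unsigned.SerializeToString(),
        output.claim_hash[::-1]
    ]))
    # verify_digest returns True on success and raises BadSignatureError otherwise
    return output.private_key.get_verifying_key().verify_digest(
        signed_claim.publisherSignature.signature, digest)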
def get_signature_digest(self, ledger):
    if self.signable.unsigned_payload:
        # claims that still carry an unsigned_payload use the older digest layout:
        # address, original unsigned payload, reversed signing channel hash
        pieces = [
            Base58.decode(self.get_address(ledger)),
            self.signable.unsigned_payload,
            self.signable.signing_channel_hash[::-1]
        ]
    else:
        # current layout: first input's txo hash, signing channel hash,
        # serialized claim message
        pieces = [
            self.tx_ref.tx.inputs[0].txo_ref.hash,
            self.signable.signing_channel_hash,
            self.signable.to_message_bytes()
        ]
    return sha256(b''.join(pieces))
def address_to_hash160(address):
    # Base58Check layout: 1 version byte + 20-byte hash160 + 4-byte checksum
    return Base58.decode(address)[1:21]
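# Illustrative sketch of why the slice above is [1:21], assuming `Base58.decode`
# returns the full 25-byte Base58Check payload: one version/prefix byte, the
# 20-byte hash160, then a 4-byte checksum. `split_address` is a hypothetical
# helper for clarity, not part of the codebase.
def split_address(address):
    raw = Base58.decode(address)
    return raw[:1], raw[1:21], raw[21:]  # (prefix, hash160, checksum)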
def expand_query(**kwargs):
    if "amount_order" in kwargs:
        kwargs["limit"] = 1
        kwargs["order_by"] = "effective_amount"
        kwargs["offset"] = int(kwargs["amount_order"]) - 1
    if 'name' in kwargs:
        kwargs['name'] = normalize_name(kwargs.pop('name'))
    if kwargs.get('is_controlling') is False:
        kwargs.pop('is_controlling')
    query = {'must': [], 'must_not': []}
    collapse = None
    for key, value in kwargs.items():
        key = key.replace('claim.', '')
        many = key.endswith('__in') or isinstance(value, list)
        if many:
            key = key.replace('__in', '')
            value = list(filter(None, value))
        if value is None or isinstance(value, list) and len(value) == 0:
            continue
        key = REPLACEMENTS.get(key, key)
        if key in FIELDS:
            partial_id = False
            if key == 'claim_type':
                if isinstance(value, str):
                    value = CLAIM_TYPES[value]
                else:
                    value = [CLAIM_TYPES[claim_type] for claim_type in value]
            if key == '_id':
                if isinstance(value, Iterable):
                    value = [item[::-1].hex() for item in value]
                else:
                    value = value[::-1].hex()
            if not many and key in ('_id', 'claim_id') and len(value) < 20:
                partial_id = True
            if key == 'public_key_id':
                key = 'public_key_hash'
                value = Base58.decode(value)[1:21].hex()
            if key == 'signature_valid':
                continue  # handled later
            if key in TEXT_FIELDS:
                key += '.keyword'
            ops = {'<=': 'lte', '>=': 'gte', '<': 'lt', '>': 'gt'}
            if partial_id:
                query['must'].append({"prefix": {"claim_id": value}})
            elif key in RANGE_FIELDS and isinstance(value, str) and value[0] in ops:
                operator_length = 2 if value[:2] in ops else 1
                operator, value = value[:operator_length], value[operator_length:]
                if key == 'fee_amount':
                    value = str(Decimal(value)*1000)
                query['must'].append({"range": {key: {ops[operator]: value}}})
            elif many:
                query['must'].append({"terms": {key: value}})
            else:
                if key == 'fee_amount':
                    value = str(Decimal(value)*1000)
                query['must'].append({"term": {key: {"value": value}}})
        elif key == 'not_channel_ids':
            for channel_id in value:
                query['must_not'].append({"term": {'channel_id.keyword': channel_id}})
                query['must_not'].append({"term": {'_id': channel_id}})
        elif key == 'channel_ids':
            query['must'].append({"terms": {'channel_id.keyword': value}})
        elif key == 'claim_ids':
            query['must'].append({"terms": {'claim_id.keyword': value}})
        elif key == 'media_types':
            query['must'].append({"terms": {'media_type.keyword': value}})
        elif key == 'stream_types':
            query['must'].append({"terms": {'stream_type': [STREAM_TYPES[stype] for stype in value]}})
        elif key == 'any_languages':
            query['must'].append({"terms": {'languages': clean_tags(value)}})
        elif key == 'all_languages':
            query['must'].extend([{"term": {'languages': tag}} for tag in value])
        elif key == 'any_tags':
            query['must'].append({"terms": {'tags.keyword': clean_tags(value)}})
        elif key == 'all_tags':
            query['must'].extend([{"term": {'tags.keyword': tag}} for tag in clean_tags(value)])
        elif key == 'not_tags':
            query['must_not'].extend([{"term": {'tags.keyword': tag}} for tag in clean_tags(value)])
        elif key == 'not_claim_id':
            query['must_not'].extend([{"term": {'claim_id.keyword': cid}} for cid in value])
        elif key == 'limit_claims_per_channel':
            collapse = ('channel_id.keyword', value)
    if kwargs.get('has_channel_signature'):
        query['must'].append({"exists": {"field": "signature_digest"}})
        if 'signature_valid' in kwargs:
            query['must'].append({"term": {"signature_valid": bool(kwargs["signature_valid"])}})
    elif 'signature_valid' in kwargs:
        query.setdefault('should', [])
        query["minimum_should_match"] = 1
        query['should'].append({"bool": {"must_not": {"exists": {"field": "signature_digest"}}}})
        query['should'].append({"term": {"signature_valid": bool(kwargs["signature_valid"])}})
    if kwargs.get('text'):
        query['must'].append(
            {"simple_query_string": {
                "query": kwargs["text"], "fields": [
                    "claim_name^4", "channel_name^8", "title^1", "description^.5", "author^1", "tags^.5"
                ]}})
    query = {
        "_source": {"excludes": ["description", "title"]},
        'query': {'bool': query},
        "sort": [],
    }
    if "limit" in kwargs:
        query["size"] = kwargs["limit"]
    if 'offset' in kwargs:
        query["from"] = kwargs["offset"]
    if 'order_by' in kwargs:
        if isinstance(kwargs["order_by"], str):
            kwargs["order_by"] = [kwargs["order_by"]]
        for value in kwargs['order_by']:
            if 'trending_group' in value:
                # fixme: trending_mixed is 0 for all records on variable decay, making sort slow.
                continue
            is_asc = value.startswith('^')
            value = value[1:] if is_asc else value
            value = REPLACEMENTS.get(value, value)
            if value in TEXT_FIELDS:
                value += '.keyword'
            query['sort'].append({value: "asc" if is_asc else "desc"})
    if collapse:
        query["collapse"] = {
            "field": collapse[0],
            "inner_hits": {
                "name": collapse[0],
                "size": collapse[1],
                "sort": query["sort"]
            }
        }
    return query
def address(self, address: str):
    self.address_bytes = Base58.decode(address)