@classmethod
def create_or_update_listing(cls, listing_data):
    """ Creates a new or updates an existing Listing row in the database. """
    listing_obj = Listing.query.filter_by(
        contract_address=listing_data['contract_address']).first()

    # Load IPFS data. Note: we filter out pictures since those should
    # not get persisted in the database.
    listing_data['ipfs_data'] = \
        IPFSHelper().file_from_hash(listing_data['ipfs_hash'],
                                    root_attr='data',
                                    exclude_fields=['pictures'])

    if not listing_obj:
        # No existing Listing in the DB: insert a new row.
        listing_obj = Listing(**listing_data)
        db.session.add(listing_obj)
    else:
        # Update the existing Listing in the DB.
        if listing_obj.ipfs_hash != listing_data['ipfs_hash']:
            listing_obj.ipfs_hash = listing_data['ipfs_hash']
            listing_obj.ipfs_data = listing_data['ipfs_data']
        listing_obj.price = listing_data['price']
        listing_obj.units = listing_data['units']

    db.session.commit()
    return listing_obj
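
# Usage sketch for create_or_update_listing(). The dict keys are inferred from
# the fields accessed above (contract_address, ipfs_hash, price, units); the
# EventHandler class name and the example values are hypothetical, not taken
# from the code above.
example_listing_data = {
    'contract_address': '0x0000000000000000000000000000000000000001',
    'ipfs_hash': 'QmYwAPJzv5CZsnA625s3Xf2nemtYgPpHdWEz79ojWnPbdG',
    'price': 0.5,
    'units': 1,
}
listing = EventHandler.create_or_update_listing(example_listing_data)
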
def verify_twitter(oauth_verifier, eth_address):
    ipfs_helper = IPFSHelper()

    # Verify authenticity of the user.
    if 'request_token' not in session:
        raise TwitterVerificationError('Session not found.')

    oauth = OAuth1(settings.TWITTER_CONSUMER_KEY,
                   settings.TWITTER_CONSUMER_SECRET,
                   session['request_token']['oauth_token'],
                   session['request_token']['oauth_token_secret'],
                   verifier=oauth_verifier)
    response = requests.post(url=twitter_access_token_url, auth=oauth)

    try:
        response.raise_for_status()
    except requests.exceptions.HTTPError as exc:
        logger.exception(exc)
        raise TwitterVerificationError(
            'The verifier you provided is invalid.')

    query_string = urllib.parse.parse_qs(response.content)
    screen_name = query_string[b'screen_name'][0].decode('utf-8')

    ipfs_hash = ipfs_helper.add_json({
        'schemaId': 'https://schema.originprotocol.com/twitter-attestation_1.0.0.json',
        'screen_name': screen_name
    })

    signature = attestations.generate_signature(signing_key, eth_address,
                                                TOPICS['twitter'],
                                                base58_to_hex(ipfs_hash))

    attestation = Attestation(method=AttestationTypes.TWITTER,
                              eth_address=eth_address,
                              value=screen_name,
                              signature=signature,
                              remote_ip_address=request.remote_addr)
    db.session.add(attestation)
    db.session.commit()

    return VerificationServiceResponse({
        'signature': signature,
        'claim_type': TOPICS['twitter'],
        'data': ipfs_hash
    })
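
# The verifier consumed above comes from an earlier OAuth request-token leg
# that stores the token pair in the Flask session. A hedged sketch of that leg
# follows; the helper name, twitter_request_token_url, and
# settings.TWITTER_OAUTH_CALLBACK_URL are assumptions, not taken from the code
# above.
from requests_oauthlib import OAuth1

twitter_request_token_url = 'https://api.twitter.com/oauth/request_token'

def request_twitter_oauth_token():
    oauth = OAuth1(settings.TWITTER_CONSUMER_KEY,
                   settings.TWITTER_CONSUMER_SECRET,
                   callback_uri=settings.TWITTER_OAUTH_CALLBACK_URL)
    response = requests.post(url=twitter_request_token_url, auth=oauth)
    response.raise_for_status()
    token = urllib.parse.parse_qs(response.content.decode('utf-8'))
    # Persist the token pair so verify_twitter() can sign the access-token call.
    session['request_token'] = {
        'oauth_token': token['oauth_token'][0],
        'oauth_token_secret': token['oauth_token_secret'][0],
    }
    return token['oauth_token'][0]
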
def _scan_listings(dry_run):
    """
    Pins IPFS hashes with an associated listing and unpins hashes without one.
    """
    logging.info("started")
    ipfs_helper = IPFSHelper()
    pinned_ipfs_hashes = set(ipfs_helper.directly_pinned_hashes())
    logging.info("currently pinned hashes: %s", pinned_ipfs_hashes)
    listing_ipfs_hashes = _ipfs_hashes_for_listings()
    hashes_to_pin = listing_ipfs_hashes - pinned_ipfs_hashes
    hashes_to_unpin = pinned_ipfs_hashes - listing_ipfs_hashes

    # pin content that belongs to a listing and isn't already pinned
    logging.info("hashes to pin: %s", hashes_to_pin)
    if hashes_to_pin and not dry_run:
        pinned_hashes = set(ipfs_helper.pin_hashes(*hashes_to_pin)['Pins'])
        # anything we asked to pin that the API did not report back failed
        failed_hashes = hashes_to_pin - pinned_hashes
        if failed_hashes:
            logging.warning("failed to pin hashes %s", failed_hashes)

    # unpin content that doesn't belong to a listing
    #
    # TODO(cuongdo): Add a grace period for unpinning, so that we don't
    # potentially unpin content that's associated with new listings. Note that
    # unpinning allows GC to *potentially* happen. Once that happens, it's a
    # race between the IPFS GC and the next run of this tool.
    logging.info("hashes to unpin: %s", hashes_to_unpin)
    if hashes_to_unpin and not dry_run:
        unpinned_hashes = set(
            ipfs_helper.unpin_hashes(*hashes_to_unpin)['Pins'])
        # anything we asked to unpin that the API did not report back failed
        failed_hashes = hashes_to_unpin - unpinned_hashes
        if failed_hashes:
            logging.warning("failed to unpin hashes %s", failed_hashes)

    logging.info("finished")
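
# Hypothetical sketch of the _ipfs_hashes_for_listings() helper referenced
# above, assuming it reads the ipfs_hash column of the Listing model used by
# create_or_update_listing(); the project's real query may differ.
def _ipfs_hashes_for_listings():
    return {row.ipfs_hash for row in db.session.query(Listing.ipfs_hash)}
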
def verify_airbnb(eth_address, airbnbUserId):
    ipfs_helper = IPFSHelper()
    validate_airbnb_user_id(airbnbUserId)
    code = get_airbnb_verification_code(eth_address, airbnbUserId)

    try:
        # TODO: determine if this user agent is acceptable.
        # We need to set a user agent, otherwise Airbnb returns a 403.
        response = urlopen(
            Request(
                url='https://www.airbnb.com/users/show/' + airbnbUserId,
                headers={'User-Agent': 'Origin Protocol client-0.1.0'}))
    except HTTPError as e:
        if e.code == 404:
            raise AirbnbVerificationError(
                'Airbnb user id: ' + airbnbUserId + ' not found.')
        else:
            raise AirbnbVerificationError(
                "Cannot fetch user's Airbnb profile.")
    except URLError:
        raise AirbnbVerificationError(
            "Cannot fetch user's Airbnb profile.")

    if code not in response.read().decode('utf-8'):
        raise AirbnbVerificationError(
            "Origin verification code: " + code +
            " has not been found in user's Airbnb profile.")

    ipfs_hash = ipfs_helper.add_json({
        'schemaId': 'https://schema.originprotocol.com/airbnb-attestation_1.0.0.json',
        'airbnb_user_id': airbnbUserId
    })

    # - The IPFS hash is a base58 encoded string.
    # - We store IPFS hashes in Solidity claims in bytes32 binary format to
    #   minimise gas cost.
    # - bytes32 is not string serialisable, so it cannot be transmitted in that
    #   form from the bridge to the DApp.
    # - The bridge needs to transform the IPFS hash to bytes32 format (that is
    #   how it is going to be stored in the contract) before signing the claim,
    #   and then send the IPFS hash to the DApp in base58 string encoding.
    # - This way the claim has a correct signature if the IPFS hash has bytes32
    #   hex encoding.
    # - The DApp takes the signature and other claim info and transforms the
    #   base58 encoded IPFS hash to bytes32 hex before submitting the claim to
    #   web3.
    signature = attestations.generate_signature(signing_key, eth_address,
                                                TOPICS['airbnb'],
                                                base58_to_hex(ipfs_hash))

    attestation = Attestation(method=AttestationTypes.AIRBNB,
                              eth_address=eth_address,
                              value=airbnbUserId,
                              signature=signature,
                              remote_ip_address=request.remote_addr)
    db.session.add(attestation)
    db.session.commit()

    return VerificationServiceResponse({
        'signature': signature,
        'claim_type': TOPICS['airbnb'],
        'data': ipfs_hash
    })
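
# Hedged sketch of the base58 <-> bytes32 conversion described in the comments
# above, using the third-party `base58` package; the project's actual
# base58_to_hex helper may be implemented differently.
import base58

def base58_to_hex(ipfs_hash):
    # A CIDv0 IPFS hash is a base58-encoded multihash: two prefix bytes
    # (0x12 = sha2-256, 0x20 = digest length of 32) followed by the 32-byte
    # digest, which is what fits into a Solidity bytes32 slot.
    raw = base58.b58decode(ipfs_hash)
    return '0x' + raw[2:].hex()

def hex_to_base58(hex_str):
    # Inverse operation: re-attach the multihash prefix and base58-encode.
    digest = bytes.fromhex(hex_str[2:] if hex_str.startswith('0x') else hex_str)
    return base58.b58encode(b'\x12\x20' + digest).decode('utf-8')
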
def get_listing_picture(listing_obj, index=0):
    """ Returns the picture at `index` from a listing's IPFS data, or None. """
    data = IPFSHelper().file_from_hash(listing_obj.ipfs_hash, root_attr='data')
    if data:
        pictures = data.get('pictures')
        if isinstance(pictures, list) and len(pictures) > index:
            return pictures[index]
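
# Usage sketch: fetch the first picture for a listing, getting None back when
# the IPFS data is unavailable or has no pictures (both cases fall through to
# an implicit None above). The query is illustrative only.
listing = Listing.query.first()
if listing:
    first_picture = get_listing_picture(listing, index=0)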