def sync_listings(profile):
    """Pull the peer's listing index, save each listing, and deep-sync it.

    Network timeouts/connection errors and malformed JSON are logged and
    swallowed; a non-200 response is logged at debug level.
    """
    listing_url = OB_HOST + 'listings/' + profile.peerID
    try:
        # Imported lazily, as in the original, to avoid a circular import
        # at module load time — TODO confirm that is the reason.
        from ob.tasks.sync_listing import sync_listing as sync_listing_deep
        response = get(listing_url)
        if response.status_code != 200:
            logger.debug('{} fetching {}'.format(response.status_code,
                                                 listing_url))
            return
        try:
            for entry in json.loads(response.content.decode('utf-8')):
                fresh = parse_listing_fast(entry, profile)
                fresh.save()
                sync_listing_deep(fresh)
        except json.decoder.JSONDecodeError:
            logger.info(
                "Problem decoding json for listings of peer: "
                + profile.peerID
            )
    except (ReadTimeout, ConnectionError):
        logger.info("listing peerID " + profile.peerID + " timeout")
def sync_one_rating(rating_url, profile):
    """Fetch one rating document and record it against its listing.

    Ratings for listings we do not hold, duplicate ratings (integrity
    errors) and malformed payloads are logged and ignored.
    """
    resp = get(rating_url)
    if resp.status_code == 200:
        logger.debug('BEGIN rating sync ' + profile.peerID)
        try:
            rating_data = json.loads(resp.content.decode('utf-8'))
            listing_slug, rating_pk = get_listing_slug(rating_data)
            try:
                target = Listing.objects.get(profile=profile,
                                             slug=listing_slug)
                try:
                    update_rating(profile, target, rating_pk, rating_data)
                except IntegrityError as err:
                    # Most likely a rating we already stored.
                    logger.debug("DB Integrity Error: {0}".format(err))
            except Listing.DoesNotExist:
                logger.debug(
                    "Ignoring rating for listing we don't have:"
                    "{peer}{slug}".format(peer=profile.peerID,
                                          slug=listing_slug))
            except TypeError:
                # get_listing_slug result did not unpack / bad shape.
                logger.debug("ignoring rating")
        except json.decoder.JSONDecodeError:
            logger.debug(
                "Problem decoding json for listings of peer: "
                + profile.peerID)
def get_profile_connection(profile):
    """Look up the peer's info endpoint and record its address type."""
    info_url = OB_INFO_URL + profile.peerID
    info_response = get(info_url)
    if info_response.status_code != 200:
        logger.debug("{} fetching {}".format(info_response.status_code,
                                             info_url))
        return
    payload = json.loads(info_response.content.decode('utf-8'))
    get_profile_address_type(payload, profile)
def get_neighbors(self):
    """Return the decoded ``closestpeers`` payload for this peer.

    Returns None implicitly on a non-200 response or on the (logged)
    IndexError path.
    """
    try:
        url = OB_HOST + 'closestpeers/' + self.peerID
        resp = get(url)
        if resp.status_code == 200:
            return json.loads(resp.content.decode('utf-8'))
    except IndexError:
        logger.info('index error getting address')
def ping(self, timeout=5):
    """Return True if the peer's status endpoint reports it online.

    Any failure — network error, bad JSON, missing 'status' key — is
    treated as offline.

    Args:
        timeout: seconds to wait for the status request (default 5,
            matching the previously hard-coded value).

    Returns:
        bool: True only when the decoded payload has status == 'online'.
    """
    try:
        health_url = OB_HOST + 'status/' + self.peerID
        peer_response = get(health_url, timeout=timeout)
        return json.loads(
            peer_response.content.decode('utf-8'))['status'] == 'online'
    except Exception:
        # Was a bare ``except:``, which also swallows SystemExit and
        # KeyboardInterrupt; Exception keeps the intended
        # "anything went wrong == offline" contract without that hazard.
        return False
def get_user_agent(peer_id):
    """Fetch the user-agent string published at the peer's IPNS path.

    Returns the raw user agent on success, otherwise an 'Error : ...'
    marker describing the failure (HTTP code, timeout, or SSL error).
    """
    url = IPNS_HOST + peer_id + '/user_agent'
    try:
        resp = get(url)
        if resp.status_code != 200:
            return 'Error : {}'.format(resp.status_code)
        return resp.content.decode('utf-8')
    except (ReadTimeout, ConnectionError):
        return 'Error : timeout'
    except ObNodeSSLError:
        return 'Error : ob cert error'
def get_seralized_record(self):
    """Fetch this peer's profile and return its 'serializedRecord' value.

    Logs and returns None when the request fails. (Name typo kept for
    caller compatibility.)
    """
    try:
        profile_url = OB_HOST + self.peerID
        resp = get(profile_url)
        if resp.status_code != 200:
            logger.info(
                'Error getting seralized record {}'.format(resp))
            return None
        payload = json.loads(resp.content.decode('utf-8'))
        return payload['serializedRecord']
    except IndexError:
        logger.info('index error getting serialized record')
def sync_listing(listing, force=True):
    """Refresh a single listing from its remote detail URL.

    Args:
        listing: Listing model instance to refresh in place.
        force: accepted for backward compatibility; currently unused.

    On timeout, stamps ``attempt`` on the matching Listing rows so the
    crawler can track the failed attempt.
    """
    listing_detail_url = listing.get_sync_url()
    logger.debug(listing.profile_id + " sync : " + listing.slug)
    try:
        r = get(listing_detail_url)
        if r.status_code == 200:
            data = json.loads(r.content.decode('utf-8'))
            parse_listing(listing, data)
        else:
            logger.debug('Error: {}'.format(r.status_code))
    except json.decoder.JSONDecodeError:
        logger.debug("bad json")
    except requests.exceptions.ReadTimeout:
        # The queryset .update() is the side effect we need; its return
        # value (row count) was previously bound to an unused local.
        Listing.objects.filter(
            slug=listing.slug, profile=listing.profile).update(attempt=now())
        logger.debug("timeout")
def sync_ratings(profile):
    """Scrape the peer's ratings directory listing and sync each rating.

    Parses the directory's HTML index for rating file links (IPFS hashes
    start with "Qm"), syncs each one, then re-saves every listing.
    """
    parser = lxmletree.HTMLParser()
    base_url = IPNS_HOST + profile.peerID + '/ratings/'
    try:
        page = get(base_url)
        tree = lxmletree.fromstring(page.content, parser)
        file_names = tree.xpath('//table/tr/td[contains(.,"Qm")]/a/text()')
        for rating_url in (base_url + name for name in file_names):
            try:
                sync_one_rating(rating_url, profile)
            except (ConnectionError, ReadTimeout):
                logger.debug("listing peerID " + profile.peerID + " timeout")
        # Re-save every listing; presumably Listing.save() refreshes
        # rating-derived fields — TODO confirm against the model.
        for item in profile.listing_set.all():
            item.save()
    except (ConnectionError, ReadTimeout):
        logger.info("read timeout on ratings")
def sync_profile(profile):
    """Refresh a peer's profile; for vendors, also sync listings and ratings.

    The request's elapsed time feeds ``profile.moving_average_speed``;
    non-200 responses and read timeouts are scored with the configured
    crawl timeout instead. Non-vendor peers get all listings deactivated.
    """
    profile_url = OB_HOST + 'profile/' + profile.peerID
    try:
        resp = get(profile_url)
        if resp.status_code != 200:
            logger.debug('{} fetching {}'.format(resp.status_code,
                                                 profile_url))
            profile.moving_average_speed(settings.CRAWL_TIMEOUT * 1e6)
            return
        payload = json.loads(resp.content.decode('utf-8'))
        profile.moving_average_speed(resp.elapsed.microseconds)
        profile = parse_profile(profile, payload)
        if profile.vendor:
            sync_listings(profile)
            sync_ratings(profile)
        else:
            # Non-vendor peers keep no active listings.
            profile.listing_set.update(active=False)
    except json.decoder.JSONDecodeError:
        logger.warning("Problem decoding json for peer: " + profile.peerID)
    except ReadTimeout:
        profile.moving_average_speed(settings.CRAWL_TIMEOUT * 1e6)