def modify_extended_asset_info(asset, description):
    """adds an asset to asset_extended_info collection if the description is a valid json link. or, if the link
    is not a valid json link, will remove the asset entry from the table if it exists"""
    if not util.is_valid_url(description, suffix='.json', allow_no_protocol=True):
        # Not a JSON info link: drop any stored record for this asset.
        config.mongo_db.asset_extended_info.remove({'asset': asset})
        # remove any saved asset image data
        image_file = os.path.join(config.data_dir, config.SUBDIR_ASSET_IMAGES, asset + '.png')
        if os.path.exists(image_file):
            os.remove(image_file)
        return

    # Upsert a record marked for (re)fetching.
    # ^ valid info_status settings: needfetch, valid, invalid, error
    # additional fields will be added later in events, once the asset info is pulled
    fields = {
        'info_url': description,
        'info_status': 'needfetch',
        'fetch_info_retry': 0,  # retry ASSET_MAX_RETRY times to fetch info from info_url
        'info_data': {},
        'errors': [],
    }
    config.mongo_db.asset_extended_info.update(
        {'asset': asset}, {'$set': fields}, upsert=True)
def parse_broadcast(msg, msg_data):
    """Update the feeds collection from a broadcast message.

    A broadcast whose text is a URL and whose value is -1.0 (re)declares the
    feed; any other broadcast either locks the feed or records its latest
    text/value. Returns True if a feed document was saved, False otherwise
    (None for non-broadcast categories).
    """
    if msg["category"] != "broadcasts":
        return

    dirty = False
    feed = config.mongo_db.feeds.find_one({"source": msg_data["source"]})
    declares_feed = (util.is_valid_url(msg_data["text"], allow_no_protocol=True)
                     and msg_data["value"] == -1.0)
    if declares_feed:
        if feed is None:
            feed = {}
        feed["source"] = msg_data["source"]
        feed["info_url"] = msg_data["text"]
        feed["info_status"] = "needfetch"  # needfetch, valid (included in CW feed directory), invalid, error
        feed["fetch_info_retry"] = 0  # retry FEED_MAX_RETRY times to fetch info from info_url
        feed["info_data"] = {}
        feed["fee_fraction_int"] = msg_data["fee_fraction_int"]
        feed["locked"] = False
        feed["last_broadcast"] = {}
        feed["errors"] = []
        dirty = True
    elif feed is not None:
        if msg_data["locked"]:
            feed["locked"] = True
        else:
            feed["last_broadcast"] = {"text": msg_data["text"], "value": msg_data["value"]}
            feed["fee_fraction_int"] = msg_data["fee_fraction_int"]
        dirty = True

    if dirty:
        config.mongo_db.feeds.save(feed)
    return dirty
def process_asset_info(asset, info_data):
    """Validate fetched asset info JSON, sanitize it, and persist it.

    Returns (True, None) on success, or (False, errors) when validation
    fails (in which case the fetch-retry counter is incremented).
    """
    def sanitize_json_data(data):
        # Strip disallowed content from the user-supplied string fields.
        data['asset'] = util.sanitize_eliteness(data['asset'])
        for key in ('description', 'website', 'pgpsig'):
            if key in data:
                data[key] = util.sanitize_eliteness(data[key])
        return data

    # sanity check
    assert asset['info_status'] == 'needfetch'
    assert 'info_url' in asset
    assert util.is_valid_url(asset['info_url'], allow_no_protocol=True)  # already validated in the fetch

    errors = util.is_valid_json(info_data, config.ASSET_SCHEMA)
    if not isinstance(info_data, dict) or 'asset' not in info_data:
        errors.append('Invalid data format')
    elif asset['asset'] != info_data['asset']:
        errors.append('asset field does not match asset name')
    if errors:
        inc_fetch_retry(asset, new_status='invalid', errors=errors)
        return (False, errors)

    asset['info_status'] = 'valid'
    # fetch any associated images...
    # TODO: parallelize this 2nd level asset image fetching ... (e.g. just compose a list here, and process it in later on)
    if 'image' in info_data:
        info_data['valid_image'] = util.fetch_image(
            info_data['image'], config.SUBDIR_ASSET_IMAGES, asset['asset'], fetch_timeout=5)

    asset['info_data'] = sanitize_json_data(info_data)
    config.mongo_db.asset_extended_info.save(asset)
    return (True, None)
def parse_broadcast(db, message):
    """Apply a broadcast to the feeds collection of ``db``.

    A broadcast whose text is a URL and whose value is -1.0 (re)declares the
    feed; otherwise an existing feed is locked or its latest broadcast is
    recorded. Returns True if a feed document was saved, False otherwise.
    """
    changed = False
    feed = db.feeds.find_one({'source': message['source']})
    if util.is_valid_url(message['text'], allow_no_protocol=True) and message['value'] == -1.0:
        if feed is None:
            feed = {}
        feed['source'] = message['source']
        feed['info_url'] = message['text']
        # needfetch, valid (included in CW feed directory), invalid, error
        feed['info_status'] = 'needfetch'
        # retry FEED_MAX_RETRY times to fetch info from info_url
        feed['fetch_info_retry'] = 0
        feed['info_data'] = {}
        feed['fee_fraction_int'] = message['fee_fraction_int']
        feed['locked'] = False
        feed['last_broadcast'] = {}
        feed['errors'] = []
        changed = True
    elif feed is not None:
        if message['locked']:
            feed['locked'] = True
        else:
            feed['last_broadcast'] = {'text': message['text'], 'value': message['value']}
            feed['fee_fraction_int'] = message['fee_fraction_int']
        changed = True

    if changed:
        db.feeds.save(feed)
        return True
    return False
def parse_broadcast(msg, msg_data):
    """Process one broadcast message against the feeds collection.

    Feed declarations (URL text, value == -1.0) create or reset the feed
    record; other broadcasts lock an existing feed or store its latest
    text/value. Returns whether a document was saved (None if the message
    is not a broadcast).
    """
    if msg['category'] != 'broadcasts':
        return

    modified = False
    feed = config.mongo_db.feeds.find_one({'source': msg_data['source']})
    if util.is_valid_url(msg_data['text'], allow_no_protocol=True) and msg_data['value'] == -1.0:
        feed = feed if feed is not None else {}
        feed.update({
            'source': msg_data['source'],
            'info_url': msg_data['text'],
            'info_status': 'needfetch',  # needfetch, valid (included in CW feed directory), invalid, error
            'fetch_info_retry': 0,  # retry FEED_MAX_RETRY times to fetch info from info_url
            'info_data': {},
            'fee_fraction_int': msg_data['fee_fraction_int'],
            'locked': False,
            'last_broadcast': {},
            'errors': [],
        })
        modified = True
    elif feed is not None:
        if msg_data['locked']:
            feed['locked'] = True
        else:
            feed['last_broadcast'] = {
                'text': msg_data['text'],
                'value': msg_data['value']
            }
            feed['fee_fraction_int'] = msg_data['fee_fraction_int']
        modified = True

    if modified:
        config.mongo_db.feeds.save(feed)
    return modified
def process_feed_info(feed, info_data):
    """Validate fetched feed info JSON, pull its images, sanitize, persist.

    Returns (True, None) on success, or (False, errors) when validation
    fails (in which case the fetch-retry counter is incremented).
    """
    # sanity check
    assert feed['info_status'] == 'needfetch'
    assert 'info_url' in feed
    assert util.is_valid_url(feed['info_url'], allow_no_protocol=True)  # already validated in the fetch

    errors = util.is_valid_json(info_data, config.FEED_SCHEMA)
    if not isinstance(info_data, dict) or 'address' not in info_data:
        errors.append('Invalid data format')
    elif feed['source'] != info_data['address']:
        errors.append('Invalid address')
    if errors:
        inc_fetch_retry(feed, new_status='invalid', errors=errors)
        return (False, errors)

    feed['info_status'] = 'valid'
    # fetch any associated images...
    # TODO: parallelize this 2nd level feed image fetching ... (e.g. just compose a list here, and process it in later on)
    if 'image' in info_data:
        info_data['valid_image'] = util.fetch_image(
            info_data['image'], config.SUBDIR_FEED_IMAGES,
            feed['source'] + '_topic', fetch_timeout=5)
    if 'operator' in info_data and 'image' in info_data['operator']:
        info_data['operator']['valid_image'] = util.fetch_image(
            info_data['operator']['image'], config.SUBDIR_FEED_IMAGES,
            feed['source'] + '_owner', fetch_timeout=5)
    # Targets are mutated in place, so iterating the dicts directly is
    # equivalent to indexing into info_data['targets'].
    for target in info_data.get('targets', []):
        if 'image' in target:
            image_name = feed['source'] + '_tv_' + str(target['value'])
            target['valid_image'] = util.fetch_image(
                target['image'], config.SUBDIR_FEED_IMAGES, image_name,
                fetch_timeout=5)

    feed['info_data'] = sanitize_json_data(info_data)
    config.mongo_db.feeds.save(feed)
    return (True, None)
def process_asset_info(asset, info_data):
    """Validate and store fetched asset info.

    Checks the fetched JSON against the asset schema, sanitizes its string
    fields, fetches the optional asset image, and saves the record.
    Returns (True, None) on success or (False, errors) on failure.
    """
    def sanitize_json_data(data):
        # Clean all user-supplied string fields before storage.
        data['asset'] = util.sanitize_eliteness(data['asset'])
        if 'description' in data:
            data['description'] = util.sanitize_eliteness(data['description'])
        if 'website' in data:
            data['website'] = util.sanitize_eliteness(data['website'])
        if 'pgpsig' in data:
            data['pgpsig'] = util.sanitize_eliteness(data['pgpsig'])
        return data

    # sanity check
    assert asset['info_status'] == 'needfetch'
    assert 'info_url' in asset
    # already validated in the fetch
    assert util.is_valid_url(asset['info_url'], allow_no_protocol=True)

    errors = util.is_valid_json(info_data, config.ASSET_SCHEMA)
    if not isinstance(info_data, dict) or 'asset' not in info_data:
        errors.append('Invalid data format')
    elif asset['asset'] != info_data['asset']:
        errors.append('asset field does not match asset name')

    if len(errors) > 0:
        inc_fetch_retry(asset, new_status='invalid', errors=errors)
        return (False, errors)

    asset['info_status'] = 'valid'

    # fetch any associated images...
    # TODO: parallelize this 2nd level asset image fetching ... (e.g. just compose a list here, and process it in later on)
    if 'image' in info_data:
        info_data['valid_image'] = util.fetch_image(info_data['image'],
                                                    config.SUBDIR_ASSET_IMAGES,
                                                    asset['asset'],
                                                    fetch_timeout=5)

    asset['info_data'] = sanitize_json_data(info_data)
    config.mongo_db.asset_extended_info.save(asset)
    return (True, None)
def modify_extended_asset_info(asset, description):
    """adds an asset to asset_extended_info collection if the description is a valid json link. or, if the link
    is not a valid json link, will remove the asset entry from the table if it exists"""
    valid_json_link = util.is_valid_url(description, suffix='.json', allow_no_protocol=True)
    if valid_json_link:
        # ^ valid info_status settings: needfetch, valid, invalid, error
        # additional fields will be added later in events, once the asset info is pulled
        config.mongo_db.asset_extended_info.update(
            {'asset': asset},
            {'$set': {
                'info_url': description,
                'info_status': 'needfetch',
                'fetch_info_retry': 0,  # retry ASSET_MAX_RETRY times to fetch info from info_url
                'info_data': {},
                'errors': []
            }},
            upsert=True)
    else:
        config.mongo_db.asset_extended_info.remove({'asset': asset})
        # remove any saved asset image data
        image_path = os.path.join(
            config.data_dir, config.SUBDIR_ASSET_IMAGES, asset + '.png')
        if os.path.exists(image_path):
            os.remove(image_path)
def process_feed_info(feed, info_data):
    """Validate fetched feed JSON against the feed schema and persist it.

    On success fetches the topic/owner/target images, sanitizes the data,
    saves the feed, and returns (True, None); on validation failure bumps
    the retry counter and returns (False, errors).
    """
    # sanity check
    assert feed["info_status"] == "needfetch"
    assert "info_url" in feed
    # already validated in the fetch
    assert util.is_valid_url(feed["info_url"], allow_no_protocol=True)

    errors = util.is_valid_json(info_data, config.FEED_SCHEMA)
    if not isinstance(info_data, dict) or "address" not in info_data:
        errors.append("Invalid data format")
    elif feed["source"] != info_data["address"]:
        errors.append("Invalid address")

    if len(errors) > 0:
        inc_fetch_retry(feed, new_status="invalid", errors=errors)
        return (False, errors)

    feed["info_status"] = "valid"

    # fetch any associated images...
    # TODO: parallelize this 2nd level feed image fetching ... (e.g. just compose a list here, and process it in later on)
    if "image" in info_data:
        info_data["valid_image"] = util.fetch_image(
            info_data["image"],
            config.SUBDIR_FEED_IMAGES,
            feed["source"] + "_topic",
            fetch_timeout=5,
        )
    if "operator" in info_data and "image" in info_data["operator"]:
        info_data["operator"]["valid_image"] = util.fetch_image(
            info_data["operator"]["image"],
            config.SUBDIR_FEED_IMAGES,
            feed["source"] + "_owner",
            fetch_timeout=5,
        )
    if "targets" in info_data:
        # The target dicts are mutated in place.
        for target in info_data["targets"]:
            if "image" in target:
                image_name = feed["source"] + "_tv_" + str(target["value"])
                target["valid_image"] = util.fetch_image(
                    target["image"],
                    config.SUBDIR_FEED_IMAGES,
                    image_name,
                    fetch_timeout=5,
                )

    feed["info_data"] = sanitize_json_data(info_data)
    config.mongo_db.feeds.save(feed)
    return (True, None)
def process_feed_info(feed, info_data):
    """Check fetched feed info against the schema and store it.

    Returns a (success, errors) pair: (True, None) when the feed record was
    validated, image-fetched, sanitized and saved; (False, errors) when
    validation failed and the retry counter was incremented.
    """
    # sanity check
    assert feed['info_status'] == 'needfetch'
    assert 'info_url' in feed
    assert util.is_valid_url(feed['info_url'], allow_no_protocol=True)  # already validated in the fetch

    errors = util.is_valid_json(info_data, config.FEED_SCHEMA)
    if not isinstance(info_data, dict) or 'address' not in info_data:
        errors.append('Invalid data format')
    elif feed['source'] != info_data['address']:
        errors.append('Invalid address')
    if errors:
        inc_fetch_retry(feed, new_status='invalid', errors=errors)
        return (False, errors)

    feed['info_status'] = 'valid'

    # fetch any associated images...
    # TODO: parallelize this 2nd level feed image fetching ... (e.g. just compose a list here, and process it in later on)
    has_image = 'image' in info_data
    if has_image:
        feed_image = util.fetch_image(info_data['image'], config.SUBDIR_FEED_IMAGES,
                                      feed['source'] + '_topic', fetch_timeout=5)
        info_data['valid_image'] = feed_image
    if 'operator' in info_data and 'image' in info_data['operator']:
        owner_image = util.fetch_image(info_data['operator']['image'],
                                       config.SUBDIR_FEED_IMAGES,
                                       feed['source'] + '_owner', fetch_timeout=5)
        info_data['operator']['valid_image'] = owner_image
    if 'targets' in info_data:
        targets = info_data['targets']
        for idx in range(len(targets)):
            if 'image' not in targets[idx]:
                continue
            image_name = feed['source'] + '_tv_' + str(targets[idx]['value'])
            targets[idx]['valid_image'] = util.fetch_image(
                targets[idx]['image'], config.SUBDIR_FEED_IMAGES, image_name,
                fetch_timeout=5)

    feed['info_data'] = sanitize_json_data(info_data)
    config.mongo_db.feeds.save(feed)
    return (True, None)