def sync_with_db_new(feed_path):
    """Load every IP found in a newly added feed file into the database.

    Skips the file entirely when it does not pass feed validation; when it
    does, any discovered IPs are logged and written via the DB layer.
    """
    if not SyncGit.validate_feed(feed_path):
        return
    feed_data = SyncGit.parse_feed_file(feed_path)
    new_ip_total = len(feed_data.get("added_ip"))
    # Only touch the database when the file actually contributed addresses.
    if new_ip_total:
        SyncGit.logger.info("Found %d new IP(s) in new file %s" % (new_ip_total, feed_path))
        FeedsAlchemy.db_update_metatable(feed_data)
        FeedsAlchemy.db_update_added(feed_data)
def search_get(v: hug.types.text):
    """Search for an IP object in all available feeds. Input: HTTP GET with parameter containing a single IP address"""
    # Reject anything that is not a well-formed request value up front.
    if not General.validate_request(v):
        return {"errors": "Data validation error in '%s'" % v}
    return FeedsAlchemy.db_search([v])
def search(body):
    """Search for a list of IP objects in all available feeds. Input: a string containing IP addresses separated by commas in HTTP POST body"""
    # hug may hand the body over as a file-like object, a str, or raw bytes.
    try:
        payload = body.read().decode("utf-8")
    except AttributeError:
        payload = body
    if isinstance(payload, bytes):
        # Raw bytes body (no .read attribute): decode before splitting;
        # previously this crashed with TypeError at payload.split(",").
        payload = payload.decode("utf-8")
    if not isinstance(payload, str):
        # Structure check must happen BEFORE split: str.split always returns
        # a list, so the old post-split isinstance test could never fail.
        return {"errors": "Got an unrecognized structure"}
    payload = payload.split(",")
    for item in payload:
        if not General.validate_request(item):
            return {"errors": "Data validation error in '%s'" % item}
    # Deduplicate before querying the database.
    return FeedsAlchemy.db_search(list(set(payload)))
def sync_with_db_diff(diff_serialized):
    """Apply one pickled git diff of a feed file to the database.

    Unpickles the diff, resolves the touched file inside the repo, and —
    if the file is a valid feed — records added and removed IPs.
    """
    # NOTE(review): pickle.loads is only safe if diff_serialized comes from
    # this application's own serializer — confirm it is never user-supplied.
    diff = pickle.loads(diff_serialized)
    # diff.target_file is prefixed with "b/" (git diff notation); strip it.
    feed_path = "%s/%s" % (SyncGit.repo_path, diff.target_file[2:])
    if not SyncGit.validate_feed(feed_path):
        return
    diff_data = SyncGit.get_diff_data(diff, feed_path)
    added = len(diff_data.get("added_ip"))
    removed = len(diff_data.get("removed_ip"))
    if not (added or removed):
        return
    SyncGit.logger.info("Found %d new IP(s) and %d removed data item(s) in diff for file %s" % (added, removed, feed_path))
    FeedsAlchemy.db_update_metatable(diff_data)
    if added:
        FeedsAlchemy.db_update_added(diff_data)
    if removed:
        FeedsAlchemy.db_update_removed(diff_data)
def maintainer_info(maintainer: hug.types.text):
    """Retrieve all available information about the maintainer by its name"""
    # Lookups are case-insensitive: normalize before hitting the DB layer.
    return FeedsAlchemy.db_maintainer_info(maintainer.lower())
def feed_info(feed_name: hug.types.text):
    """Retrieve all available information about the feed by its name"""
    # Feed names are stored lower-case; normalize the incoming value.
    return FeedsAlchemy.db_feed_info(feed_name.lower())
def feeds_maintainers():
    """Retrieve all feed maintainers"""
    maintainers = FeedsAlchemy.db_feeds_maintainers()
    return maintainers
def feeds_categories():
    """Retrieve all feed categories"""
    categories = FeedsAlchemy.db_feeds_categories()
    return categories
def feeds():
    """Retrieve all information about feeds"""
    all_feeds = FeedsAlchemy.db_feeds()
    return all_feeds
def ip_bulk_by_category(category: hug.types.text):
    """Retrieve all IP addresses that are in feeds by feed category"""
    # Category matching is case-insensitive.
    return FeedsAlchemy.db_ip_bulk_by_category(category.lower())
def maintainers_by_category(category: hug.types.text):
    """Retrieve all maintainers by category"""
    normalized_category = category.lower()
    return FeedsAlchemy.db_maintainers_by_category(normalized_category)
# Module bootstrap: API dependencies and shared singletons.
import hug
from modules.db_sync import FeedsAlchemy
from modules.general import General

# NOTE(review): the imported class names are deliberately shadowed by their
# singleton instances here — every later reference uses the instance.
FeedsAlchemy = FeedsAlchemy()
General = General()

General.logger.info("API instance successfully started")


@hug.post("/search", output=hug.output_format.json, version=1)
def search(body):
    """Search for a list of IP objects in all available feeds. Input: a string containing IP addresses separated by commas in HTTP POST body"""
    # hug may pass a file-like object or an already-decoded string.
    try:
        payload = body.read().decode("utf-8")
    except AttributeError:
        payload = body
    payload = payload.split(",")
    # NOTE(review): this copy of `search` appears truncated — on fully valid
    # input it falls through and returns None instead of calling db_search.
    # A complete variant of the same function exists elsewhere in this file;
    # confirm which definition hug actually registers and remove the other.
    if isinstance(payload, list):
        for item in payload:
            if General.validate_request(item):
                pass
            else:
                return {"errors": "Data validation error in '%s'" % item}
def refresh_aggregated():
    """Rebuild the aggregated feeds table: wipe it, then repopulate it."""
    # Order matters: the table must be emptied before being refilled.
    FeedsAlchemy.db_clear_aggregated()
    FeedsAlchemy.db_fill_aggregated()