Example #1
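Round-trip test: writes a metadata JSON fixture, adds it to a local IPFS node through ipfshttpclient, then verifies that IPFSClient.get_metadata returns the same object.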
def test_ipfs(app):
    json_file = "tests/res/test_ipfs.json"
    ipfs_peer_host = app.config["ipfs"]["host"]
    ipfs_peer_port = app.config["ipfs"]["port"]

    # Instantiate IPFS client from src lib
    ipfsclient = IPFSClient(ipfs_peer_host, ipfs_peer_port, [])

    remove_test_file(json_file)
    api = ipfshttpclient.connect(
        f"/dns/{ipfs_peer_host}/tcp/{ipfs_peer_port}/http")

    # Build a generic metadata object from the track metadata format template
    test_metadata_object = dict(track_metadata_format)
    test_metadata_object["title"] = chance.name()
    test_metadata_object["release_date"] = str(chance.date())
    test_metadata_object["file_type"] = "mp3"
    test_metadata_object["license"] = "HN"
    with open(json_file, "w") as f:
        json.dump(test_metadata_object, f)

    # Add metadata object to ipfs node
    json_res = api.add(json_file)
    metadata_hash = json_res["Hash"]

    # Invoke audius-ipfs
    ipfs_metadata = ipfsclient.get_metadata(metadata_hash,
                                            track_metadata_format)
    remove_test_file(json_file)

    # Confirm retrieved metadata object state
    for key in test_metadata_object:
        assert key in ipfs_metadata
        assert test_metadata_object[key] == ipfs_metadata[key]
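remove_test_file is a helper not shown above; a minimal sketch, assuming it simply deletes the fixture if present:

import os

def remove_test_file(filepath):
    # Hypothetical helper: clean up the JSON fixture between runs
    if os.path.exists(filepath):
        os.remove(filepath)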
Example #2
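Verifies that multihash_is_directory raises an 'invalid multihash' error for a malformed CID.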
    def test_invalid_cid(self, app):
        ipfs_peer_host = app.config["ipfs"]["host"]
        ipfs_peer_port = app.config["ipfs"]["port"]

        # Instantiate IPFS client from src lib
        ipfsclient = IPFSClient(ipfs_peer_host, ipfs_peer_port, [])

        # An invalid multihash should raise; pytest.raises fails the test
        # if no exception is thrown at all
        with pytest.raises(Exception, match='invalid multihash'):
            ipfsclient.multihash_is_directory('Qmfake')
Example #3
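Verifies that multihash_is_directory returns False for a valid CID whose content is a file rather than a directory.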
    def test_valid_cid_not_directory(self, app):
        ipfs_peer_host = app.config["ipfs"]["host"]
        ipfs_peer_port = app.config["ipfs"]["port"]

        # Instantiate IPFS client from src lib
        ipfsclient = IPFSClient(ipfs_peer_host, ipfs_peer_port, [])

        # Stub the cat function on the IPFS API so it returns file content
        # as a plain string (cat on a directory CID raises an error instead)
        def cat_handler_not_dir(cid, s, e, timeout):
            return 'not a directory'

        ipfsclient._api.cat = cat_handler_not_dir

        # Invoke multihash_is_directory with a valid CID that points to a
        # file, and verify that it reports the CID is not a directory
        is_directory = ipfsclient.multihash_is_directory(
            'QmVmEZnQr49gDtd7xpcsNdmgrtRTT5Te2x27KbTRPNapqy')
        assert is_directory is False
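A companion case for a directory CID would stub cat to raise instead. This is a sketch, assuming multihash_is_directory treats a cat error as the directory signal (go-ipfs raises 'this dag node is a directory' when catting a directory CID):

    def test_valid_cid_is_directory(self, app):
        ipfsclient = IPFSClient(app.config["ipfs"]["host"],
                                app.config["ipfs"]["port"], [])

        # Hypothetical stub: simulate go-ipfs's error for directory CIDs
        def cat_handler_dir(cid, s, e, timeout):
            raise Exception('this dag node is a directory')

        ipfsclient._api.cat = cat_handler_dir

        is_directory = ipfsclient.multihash_is_directory(
            'QmVmEZnQr49gDtd7xpcsNdmgrtRTT5Te2x27KbTRPNapqy')
        assert is_directory is True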
Example #4
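Celery bootstrap for the discovery provider: registers task modules, defines the beat schedule, clears stale Redis locks, and exposes shared DB, web3, IPFS, Redis, and Solana handles to tasks through the DatabaseTask base class.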
def configure_celery(flask_app, celery, test_config=None):
    database_url = shared_config["db"]["url"]
    engine_args_literal = ast.literal_eval(
        shared_config["db"]["engine_args_literal"])
    redis_url = shared_config["redis"]["url"]

    if test_config is not None:
        if "db" in test_config:
            if "url" in test_config["db"]:
                database_url = test_config["db"]["url"]

    ipld_interval = int(
        shared_config["discprov"]["blacklist_block_indexing_interval"])
    # default is 5 seconds
    indexing_interval_sec = int(
        shared_config["discprov"]["block_processing_interval_sec"])

    # Update celery configuration
    celery.conf.update(
        imports=[
            "src.tasks.index", "src.tasks.index_blacklist",
            "src.tasks.index_plays", "src.tasks.index_metrics",
            "src.tasks.index_materialized_views",
            "src.tasks.index_network_peers", "src.tasks.index_trending",
            "src.tasks.cache_user_balance", "src.monitors.monitoring_queue",
            "src.tasks.cache_trending_playlists",
            "src.tasks.index_solana_plays", "src.tasks.index_aggregate_views"
        ],
        beat_schedule={
            "update_discovery_provider": {
                "task": "update_discovery_provider",
                "schedule": timedelta(seconds=indexing_interval_sec),
            },
            "update_ipld_blacklist": {
                "task": "update_ipld_blacklist",
                "schedule": timedelta(seconds=ipld_interval),
            },
            "update_play_count": {
                "task": "update_play_count",
                "schedule": timedelta(seconds=60)
            },
            "update_metrics": {
                "task": "update_metrics",
                "schedule": crontab(minute=0, hour="*")
            },
            "aggregate_metrics": {
                "task": "aggregate_metrics",
                "schedule": timedelta(minutes=METRICS_INTERVAL)
            },
            "synchronize_metrics": {
                "task": "synchronize_metrics",
                "schedule": crontab(minute=0, hour=1)
            },
            "update_materialized_views": {
                "task": "update_materialized_views",
                "schedule": timedelta(seconds=300)
            },
            "update_network_peers": {
                "task": "update_network_peers",
                "schedule": timedelta(seconds=30)
            },
            "index_trending": {
                "task": "index_trending",
                "schedule": crontab(minute=15, hour="*")
            },
            "update_user_balances": {
                "task": "update_user_balances",
                "schedule": timedelta(seconds=60)
            },
            "monitoring_queue": {
                "task": "monitoring_queue",
                "schedule": timedelta(seconds=60)
            },
            "cache_trending_playlists": {
                "task": "cache_trending_playlists",
                "schedule": timedelta(minutes=30)
            },
            "index_solana_plays": {
                "task": "index_solana_plays",
                "schedule": timedelta(seconds=5)
            },
            "update_aggregate_user": {
                "task": "update_aggregate_user",
                "schedule": timedelta(seconds=30)
            },
            "update_aggregate_track": {
                "task": "update_aggregate_track",
                "schedule": timedelta(seconds=30)
            },
            "update_aggregate_playlist": {
                "task": "update_aggregate_playlist",
                "schedule": timedelta(seconds=30)
            }
        },
        task_serializer="json",
        accept_content=["json"],
        broker_url=redis_url,
    )

    # Initialize DB object for celery task context
    db = SessionManager(database_url, engine_args_literal)
    logger.info('Database instance initialized!')
    # Initialize IPFS client for celery task context
    ipfs_client = IPFSClient(shared_config["ipfs"]["host"],
                             shared_config["ipfs"]["port"])

    # Initialize Redis connection
    redis_inst = redis.Redis.from_url(url=redis_url)
    # Clear existing locks used in tasks if present
    redis_inst.delete("disc_prov_lock")
    redis_inst.delete("network_peers_lock")
    redis_inst.delete("materialized_view_lock")
    redis_inst.delete("update_metrics_lock")
    redis_inst.delete("update_play_count_lock")
    redis_inst.delete("ipld_blacklist_lock")
    redis_inst.delete("update_discovery_lock")
    redis_inst.delete("aggregate_metrics_lock")
    redis_inst.delete("synchronize_metrics_lock")
    logger.info('Redis instance initialized!')

    # Initialize custom task context with database object
    class DatabaseTask(Task):
        def __init__(self, *args, **kwargs):
            self._db = db
            self._web3_provider = web3
            self._abi_values = abi_values
            self._shared_config = shared_config
            self._ipfs_client = ipfs_client
            self._redis = redis_inst
            self._eth_web3_provider = eth_web3
            self._solana_client = solana_client

        @property
        def abi_values(self):
            return self._abi_values

        @property
        def web3(self):
            return self._web3_provider

        @property
        def db(self):
            return self._db

        @property
        def shared_config(self):
            return self._shared_config

        @property
        def ipfs_client(self):
            return self._ipfs_client

        @property
        def redis(self):
            return self._redis

        @property
        def eth_web3(self):
            return self._eth_web3_provider

        @property
        def solana_client(self):
            return self._solana_client

    celery.autodiscover_tasks(["src.tasks"], "index", True)

    # Subclassing celery task with discovery provider context
    # Provided through properties defined in 'DatabaseTask'
    celery.Task = DatabaseTask

    celery.finalize()
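Tasks consume this context through self once celery.Task is replaced. A minimal sketch of a consumer, assuming SessionManager exposes a scoped_session() context manager (the task body here is hypothetical):

@celery.task(name="update_network_peers", bind=True)
def update_network_peers(self):
    # self is a DatabaseTask, so the shared context is available as properties
    redis = self.redis
    db = self.db
    have_lock = False
    update_lock = redis.lock("network_peers_lock", timeout=7200)
    try:
        # Only one worker should run this task at a time
        have_lock = update_lock.acquire(blocking=False)
        if have_lock:
            with db.scoped_session() as session:  # assumed SessionManager API
                pass  # peer indexing work would go here
    finally:
        if have_lock:
            update_lock.release()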
Example #5
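A slimmer variant of configure_celery with only three scheduled tasks; here the IPFSClient is also given a list of public gateway hosts plus the user metadata service URL.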
def configure_celery(flask_app, celery, test_config=None):
    database_url = shared_config["db"]["url"]
    engine_args_literal = ast.literal_eval(shared_config["db"]["engine_args_literal"])
    redis_url = shared_config["redis"]["url"]

    if test_config is not None:
        if "db" in test_config:
            if "url" in test_config["db"]:
                database_url = test_config["db"]["url"]

    # Update celery configuration
    celery.conf.update(
        imports=["src.tasks.index", "src.tasks.index_blacklist", "src.tasks.index_cache"],
        beat_schedule={
            "update_discovery_provider": {
                "task": "update_discovery_provider",
                "schedule": timedelta(seconds=5),
            },
            "update_ipld_blacklist": {
                "task": "update_ipld_blacklist",
                "schedule": timedelta(seconds=60),
            },
            "update_cache": {
                "task": "update_discovery_cache",
                "schedule": timedelta(seconds=60)
            }
        },
        task_serializer="json",
        accept_content=["json"],
        broker_url=redis_url,
    )

    # Initialize DB object for celery task context
    db = SessionManager(database_url, engine_args_literal)

    # Initialize IPFS client for celery task context
    gateway_addrs = shared_config["ipfs"]["gateway_hosts"].split(',')
    gateway_addrs.append(shared_config["discprov"]["user_metadata_service_url"])
    logger.warning(f"__init__.py | {gateway_addrs}")
    ipfs_client = IPFSClient(
        shared_config["ipfs"]["host"], shared_config["ipfs"]["port"], gateway_addrs
    )

    # Initialize Redis connection
    redis_inst = redis.Redis.from_url(url=redis_url)

    # Clear existing lock if present
    redis_inst.delete("disc_prov_lock")

    # Initialize custom task context with database object
    class DatabaseTask(Task):
        def __init__(self, *args, **kwargs):
            self._db = db
            self._web3_provider = web3
            self._abi_values = abi_values
            self._shared_config = shared_config
            self._ipfs_client = ipfs_client
            self._redis = redis_inst

        @property
        def abi_values(self):
            return self._abi_values

        @property
        def web3(self):
            return self._web3_provider

        @property
        def db(self):
            return self._db

        @property
        def shared_config(self):
            return self._shared_config

        @property
        def ipfs_client(self):
            return self._ipfs_client

        @property
        def redis(self):
            return self._redis

    celery.autodiscover_tasks(["src.tasks"], "index", True)

    # Subclassing celery task with discovery provider context
    # Provided through properties defined in 'DatabaseTask'
    celery.Task = DatabaseTask

    celery.finalize()
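Example #6
Module-level utility that queries the connected IPFS node's peer info and converts its camelCase keys to snake_case for JSON serialization.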
import re

from src.utils.ipfs_lib import IPFSClient
from src.utils.config import shared_config

# Regex that inserts an underscore before each interior capital letter,
# used to convert camelCase keys to snake_case
pattern = re.compile(r'(?<!^)(?=[A-Z])')

ipfs_client = IPFSClient(shared_config["ipfs"]["host"],
                         shared_config["ipfs"]["port"])


def convert_to_snake_case(value):
    if value == 'ID':
        return 'id'

    return pattern.sub('_', value).lower()


def get_ipfs_peer_info():
    # Convert IPFS object into serializable json object
    ipfs_peer_info = ipfs_client.get_peer_info()
    ipfs_peer_info_json = {}
    for key, value in ipfs_peer_info.items():
        ipfs_peer_info_json[convert_to_snake_case(key)] = value

    return ipfs_peer_info_json
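For reference, the regex inserts an underscore before each interior capital, so typical go-ipfs peer-info keys map like this (illustrative check):

assert convert_to_snake_case('ProtocolVersion') == 'protocol_version'
assert convert_to_snake_case('AgentVersion') == 'agent_version'
assert convert_to_snake_case('ID') == 'id'  # special-cased; the regex alone would yield 'i_d'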