Example #1
0
def parse_track_event(self, session, update_task, entry, event_type,
                      track_record, block_timestamp):
    event_args = entry["args"]
    # block_timestamp is a unix timestamp in seconds; convert it to a UTC datetime
    block_datetime = datetime.utcfromtimestamp(block_timestamp)

    ipfs = update_task.ipfs_client._api

    if event_type == track_event_types_lookup["new_track"]:
        track_record.created_at = block_datetime

        track_metadata_digest = event_args._multihashDigest.hex()
        track_metadata_hash_fn = event_args._multihashHashFn
        buf = multihash.encode(bytes.fromhex(track_metadata_digest),
                               track_metadata_hash_fn)
        track_metadata_multihash = multihash.to_b58_string(buf)
        logger.info(f"track metadata ipld : {track_metadata_multihash}")

        # If the IPLD is blacklisted, do not keep processing the current entry
        # continue with the next entry in the update_track_events list
        if is_blacklisted_ipld(session, track_metadata_multihash):
            return track_record

        track_record.owner_id = event_args._trackOwnerId

        # Reconnect to creator nodes for this user
        refresh_track_owner_ipfs_conn(track_record.owner_id, session,
                                      update_task)

        track_record.is_delete = False
        track_metadata = update_task.ipfs_client.get_metadata(
            track_metadata_multihash, track_metadata_format)

        track_record = populate_track_record_metadata(track_record,
                                                      track_metadata)
        track_record.metadata_multihash = track_metadata_multihash

        # if cover_art CID is of a dir, store under _sizes field instead
        if track_record.cover_art:
            logger.info(f"catting track cover_art {track_record.cover_art}")
            try:
                # attempt to cat single byte from CID to determine if dir or file
                ipfs.cat(track_record.cover_art, 0, 1)
            except Exception as e:  # pylint: disable=W0703
                if "this dag node is a directory" in str(e):
                    track_record.cover_art_sizes = track_record.cover_art
                    track_record.cover_art = None
                    logger.info('Successfully processed CID')
                else:
                    # re-raise unexpected errors unwrapped, preserving the traceback
                    raise

    if event_type == track_event_types_lookup["update_track"]:
        upd_track_metadata_digest = event_args._multihashDigest.hex()
        upd_track_metadata_hash_fn = event_args._multihashHashFn
        update_buf = multihash.encode(bytes.fromhex(upd_track_metadata_digest),
                                      upd_track_metadata_hash_fn)
        upd_track_metadata_multihash = multihash.to_b58_string(update_buf)
        logger.info(
            f"update track metadata ipld : {upd_track_metadata_multihash}")

        # If the IPLD is blacklisted, do not keep processing the current entry
        # continue with the next entry in the update_track_events list
        if is_blacklisted_ipld(session, upd_track_metadata_multihash):
            return track_record

        track_record.owner_id = event_args._trackOwnerId
        track_record.is_delete = False

        # Reconnect to creator nodes for this user
        refresh_track_owner_ipfs_conn(track_record.owner_id, session,
                                      update_task)

        track_metadata = update_task.ipfs_client.get_metadata(
            upd_track_metadata_multihash, track_metadata_format)

        track_record = populate_track_record_metadata(track_record,
                                                      track_metadata)
        track_record.metadata_multihash = upd_track_metadata_multihash

        # if cover_art CID is of a dir, store under _sizes field instead
        if track_record.cover_art:
            logger.info(f"catting track cover_art {track_record.cover_art}")
            try:
                # attempt to cat single byte from CID to determine if dir or file
                ipfs.cat(track_record.cover_art, 0, 1)
            except Exception as e:  # pylint: disable=W0703
                if "this dag node is a directory" in str(e):
                    track_record.cover_art_sizes = track_record.cover_art
                    track_record.cover_art = None
                    logger.info('Successfully processed CID')
                else:
                    # re-raise unexpected errors unwrapped, preserving the traceback
                    raise

    if event_type == track_event_types_lookup["delete_track"]:
        track_record.is_delete = True
        logger.info(f"Removing track : {track_record.track_id}")

    track_record.updated_at = block_datetime

    return track_record
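The digest-to-CID step near the top of parse_track_event recurs throughout these examples: the chain event carries only the raw multihash digest and the hash-function code, and py-multihash re-wraps them into the base58 CID used to fetch metadata. A minimal, self-contained sketch of that conversion (assuming the py-multihash package that provides encode / to_b58_string, as used above; the sample digest and the 0x12 sha2-256 code are illustrative):

import multihash

def digest_to_cid(digest_hex: str, hash_fn: int) -> str:
    # prepend the multihash header (hash-fn code + digest length) to the raw digest,
    # then base58-encode to get the familiar "Qm..." CID string
    buf = multihash.encode(bytes.fromhex(digest_hex), hash_fn)
    return multihash.to_b58_string(buf)

print(digest_to_cid("ab" * 32, 0x12))  # 32-byte digest, sha2-256 -> "Qm..."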
Example #2
0
def parse_track_event(self, session, update_task, entry, event_type,
                      track_record, block_timestamp):
    event_args = entry["args"]
    # block_timestamp is a unix timestamp in seconds; convert it to a UTC datetime
    block_datetime = datetime.utcfromtimestamp(block_timestamp)

    if event_type == track_event_types_lookup["new_track"]:
        track_record.created_at = block_datetime

        track_metadata_digest = event_args._multihashDigest.hex()
        track_metadata_hash_fn = event_args._multihashHashFn
        buf = multihash.encode(bytes.fromhex(track_metadata_digest),
                               track_metadata_hash_fn)
        track_metadata_multihash = multihash.to_b58_string(buf)
        logger.info(f"track metadata ipld : {track_metadata_multihash}")

        # If the IPLD is blacklisted, do not keep processing the current entry
        # continue with the next entry in the update_track_events list
        if is_blacklisted_ipld(session, track_metadata_multihash):
            logger.info(
                f"Encountered blacklisted metadata CID {track_metadata_multihash} in indexing new track"
            )
            return None

        owner_id = event_args._trackOwnerId
        handle = (session.query(User.handle).filter(
            User.user_id == owner_id, User.is_current == True).first())[0]
        track_record.owner_id = owner_id

        track_record.is_delete = False

        track_metadata = update_task.ipfs_client.get_metadata(
            track_metadata_multihash, track_metadata_format)

        track_record = populate_track_record_metadata(track_record,
                                                      track_metadata, handle)
        track_record.metadata_multihash = track_metadata_multihash

        # if cover_art CID is of a dir, store under _sizes field instead
        if track_record.cover_art:
            # If CID is in IPLD blacklist table, do not continue with indexing
            if is_blacklisted_ipld(session, track_record.cover_art):
                logger.info(
                    f"Encountered blacklisted cover art CID {track_record.cover_art} in indexing new track"
                )
                return None

            logger.warning(
                f"tracks.py | Processing track cover art {track_record.cover_art}"
            )
            track_record.cover_art_sizes = track_record.cover_art
            track_record.cover_art = None

        update_stems_table(session, track_record, track_metadata)
        update_remixes_table(session, track_record, track_metadata)

    if event_type == track_event_types_lookup["update_track"]:
        upd_track_metadata_digest = event_args._multihashDigest.hex()
        upd_track_metadata_hash_fn = event_args._multihashHashFn
        update_buf = multihash.encode(bytes.fromhex(upd_track_metadata_digest),
                                      upd_track_metadata_hash_fn)
        upd_track_metadata_multihash = multihash.to_b58_string(update_buf)
        logger.info(
            f"update track metadata ipld : {upd_track_metadata_multihash}")

        # If the IPLD is blacklisted, do not keep processing the current entry
        # continue with the next entry in the update_track_events list
        if is_blacklisted_ipld(session, upd_track_metadata_multihash):
            logger.info(
                f"Encountered blacklisted metadata CID {upd_track_metadata_multihash} in indexing update track"
            )
            return None

        owner_id = event_args._trackOwnerId
        handle = (session.query(User.handle).filter(
            User.user_id == owner_id, User.is_current == True).first())[0]
        track_record.owner_id = owner_id
        track_record.is_delete = False

        track_metadata = update_task.ipfs_client.get_metadata(
            upd_track_metadata_multihash, track_metadata_format)

        track_record = populate_track_record_metadata(track_record,
                                                      track_metadata, handle)
        track_record.metadata_multihash = upd_track_metadata_multihash

        # All incoming cover art is intended to be a directory
        # Any write to cover_art field is replaced by cover_art_sizes
        if track_record.cover_art:
            # If CID is in IPLD blacklist table, do not continue with indexing
            if is_blacklisted_ipld(session, track_record.cover_art):
                logger.info(
                    f"Encountered blacklisted cover art CID {track_record.cover_art} in indexing update track"
                )
                return None

            logger.info(
                f"tracks.py | Processing track cover art {track_record.cover_art}"
            )
            track_record.cover_art_sizes = track_record.cover_art
            track_record.cover_art = None

        update_remixes_table(session, track_record, track_metadata)

    if event_type == track_event_types_lookup["delete_track"]:
        track_record.is_delete = True
        track_record.stem_of = null()
        track_record.remix_of = null()
        logger.info(f"Removing track : {track_record.track_id}")

    track_record.updated_at = block_datetime

    return track_record
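Compared with Example #1, this version returns None rather than the partially populated track_record when a blacklisted CID is encountered, so the caller can drop the entry entirely. A rough sketch of the caller-side handling, mirroring the pattern that appears later in Example #5 (variable names are illustrative):

parsed_track = parse_track_event(self, session, update_task, entry,
                                 event_type, existing_track_record,
                                 block_timestamp)
# None signals a blacklisted metadata or cover-art CID; skip the entry entirely
if parsed_track is not None:
    track_events[track_id] = {"track": parsed_track, "events": [event_type]}
    track_ids.add(track_id)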
Example #3
0
def parse_track_event(
    self,
    session,
    update_task: DatabaseTask,
    entry,
    event_type,
    track_record,
    block_number,
    block_timestamp,
    track_metadata,
    pending_track_routes,
):
    challenge_bus = update_task.challenge_event_bus
    # block_timestamp is a unix timestamp in seconds; convert it to a UTC datetime
    block_datetime = datetime.utcfromtimestamp(block_timestamp)

    if event_type == track_event_types_lookup["new_track"]:
        track_record.created_at = block_datetime

        track_metadata_digest = helpers.get_tx_arg(entry,
                                                   "_multihashDigest").hex()
        track_metadata_hash_fn = helpers.get_tx_arg(entry, "_multihashHashFn")
        buf = multihash.encode(bytes.fromhex(track_metadata_digest),
                               track_metadata_hash_fn)
        track_metadata_multihash = multihash.to_b58_string(buf)
        logger.info(
            f"index.py | tracks.py | track metadata ipld : {track_metadata_multihash}"
        )

        owner_id = helpers.get_tx_arg(entry, "_trackOwnerId")
        track_record.owner_id = owner_id
        track_record.is_delete = False

        handle_row = (session.query(User.handle).filter(
            User.user_id == owner_id, User.is_current == True).first())
        handle = handle_row[0] if handle_row else None
        if not handle:
            raise EntityMissingRequiredFieldError(
                "track",
                track_record,
                f"No user found for {track_record}",
            )

        update_track_routes_table(session, track_record, track_metadata,
                                  pending_track_routes)
        track_record = populate_track_record_metadata(track_record,
                                                      track_metadata, handle)
        track_record.metadata_multihash = track_metadata_multihash

        # if cover_art CID is of a dir, store under _sizes field instead
        if track_record.cover_art:
            # If CID is in IPLD blacklist table, do not continue with indexing
            if is_blacklisted_ipld(session, track_record.cover_art):
                logger.info(
                    f"index.py | tracks.py | Encountered blacklisted cover art CID:"
                    f"{track_record.cover_art} in indexing new track")
                return None

            logger.warning(
                f"index.py | tracks.py | Processing track cover art {track_record.cover_art}"
            )
            track_record.cover_art_sizes = track_record.cover_art
            track_record.cover_art = None

        update_stems_table(session, track_record, track_metadata)
        update_remixes_table(session, track_record, track_metadata)
        dispatch_challenge_track_upload(challenge_bus, block_number,
                                        track_record)

    if event_type == track_event_types_lookup["update_track"]:
        upd_track_metadata_digest = helpers.get_tx_arg(
            entry, "_multihashDigest").hex()
        upd_track_metadata_hash_fn = helpers.get_tx_arg(
            entry, "_multihashHashFn")
        update_buf = multihash.encode(bytes.fromhex(upd_track_metadata_digest),
                                      upd_track_metadata_hash_fn)
        upd_track_metadata_multihash = multihash.to_b58_string(update_buf)
        logger.info(
            f"index.py | tracks.py | update track metadata ipld : {upd_track_metadata_multihash}"
        )

        owner_id = helpers.get_tx_arg(entry, "_trackOwnerId")
        track_record.owner_id = owner_id
        track_record.is_delete = False

        handle_row = (session.query(User.handle).filter(
            User.user_id == owner_id, User.is_current == True).first())
        handle = handle_row[0] if handle_row else None
        if not handle:
            raise EntityMissingRequiredFieldError(
                "track",
                track_record,
                f"No user found for {track_record}",
            )

        update_track_routes_table(session, track_record, track_metadata,
                                  pending_track_routes)
        track_record = populate_track_record_metadata(track_record,
                                                      track_metadata, handle)
        track_record.metadata_multihash = upd_track_metadata_multihash

        # All incoming cover art is intended to be a directory
        # Any write to cover_art field is replaced by cover_art_sizes
        if track_record.cover_art:
            # If CID is in IPLD blacklist table, do not continue with indexing
            if is_blacklisted_ipld(session, track_record.cover_art):
                logger.info(
                    f"index.py | tracks.py | Encountered blacklisted cover art CID:"
                    f"{track_record.cover_art} in indexing update track")
                return None

            logger.info(
                f"index.py | tracks.py | Processing track cover art {track_record.cover_art}"
            )
            track_record.cover_art_sizes = track_record.cover_art
            track_record.cover_art = None

        update_remixes_table(session, track_record, track_metadata)

    if event_type == track_event_types_lookup["delete_track"]:
        track_record.is_delete = True
        track_record.stem_of = null()
        track_record.remix_of = null()
        logger.info(
            f"index.py | tracks.py | Removing track : {track_record.track_id}")

    track_record.updated_at = block_datetime

    if not all_required_fields_present(Track, track_record):
        raise EntityMissingRequiredFieldError(
            "track",
            track_record,
            f"Error parsing track {track_record} with entity missing required field(s)",
        )

    return track_record
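Example #3 swaps the direct entry["args"]._field access used in the earlier examples for helpers.get_tx_arg. The helper's implementation is not shown here; a plausible thin wrapper consistent with the attribute-style access above would be (hypothetical sketch, not the actual helper):

def get_tx_arg(tx_event, arg_name):
    # web3 event entries expose their decoded arguments under "args"
    return getattr(tx_event["args"], arg_name)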
Example #4
0
def parse_track_event(self, session, update_task, entry, event_type,
                      track_record, block_timestamp):
    event_args = entry["args"]
    # block_timestamp is a unix timestamp in seconds; convert it to a UTC datetime
    block_datetime = datetime.utcfromtimestamp(block_timestamp)

    if event_type == track_event_types_lookup["new_track"]:
        track_record.created_at = block_datetime

        track_metadata_digest = event_args._multihashDigest.hex()
        track_metadata_hash_fn = event_args._multihashHashFn
        buf = multihash.encode(bytes.fromhex(track_metadata_digest),
                               track_metadata_hash_fn)
        track_metadata_multihash = multihash.to_b58_string(buf)
        logger.info(f"track metadata ipld : {track_metadata_multihash}")

        # If the IPLD is blacklisted, do not keep processing the current entry
        # continue with the next entry in the update_track_events list
        if is_blacklisted_ipld(session, track_metadata_multihash):
            return track_record

        owner_id = event_args._trackOwnerId
        handle = (session.query(User.handle).filter(
            User.user_id == owner_id, User.is_current == True).first())[0]
        track_record.owner_id = owner_id

        # Reconnect to creator nodes for this user
        refresh_track_owner_ipfs_conn(track_record.owner_id, session,
                                      update_task)

        track_record.is_delete = False
        track_metadata = update_task.ipfs_client.get_metadata(
            track_metadata_multihash, track_metadata_format)

        track_record = populate_track_record_metadata(track_record,
                                                      track_metadata, handle)
        track_record.metadata_multihash = track_metadata_multihash

        # if cover_art CID is of a dir, store under _sizes field instead
        if track_record.cover_art:
            logger.warning(
                f"tracks.py | Processing track cover art {track_record.cover_art}"
            )
            is_directory = update_task.ipfs_client.multihash_is_directory(
                track_record.cover_art)
            if is_directory:
                track_record.cover_art_sizes = track_record.cover_art
                track_record.cover_art = None

        update_stems_table(session, track_record, track_metadata)
        update_remixes_table(session, track_record, track_metadata)

    if event_type == track_event_types_lookup["update_track"]:
        upd_track_metadata_digest = event_args._multihashDigest.hex()
        upd_track_metadata_hash_fn = event_args._multihashHashFn
        update_buf = multihash.encode(bytes.fromhex(upd_track_metadata_digest),
                                      upd_track_metadata_hash_fn)
        upd_track_metadata_multihash = multihash.to_b58_string(update_buf)
        logger.info(
            f"update track metadata ipld : {upd_track_metadata_multihash}")

        # If the IPLD is blacklisted, do not keep processing the current entry
        # continue with the next entry in the update_track_events list
        if is_blacklisted_ipld(session, upd_track_metadata_multihash):
            return track_record

        owner_id = event_args._trackOwnerId
        handle = (session.query(User.handle).filter(
            User.user_id == owner_id, User.is_current == True).first())[0]
        track_record.owner_id = owner_id
        track_record.is_delete = False

        # Reconnect to creator nodes for this user
        refresh_track_owner_ipfs_conn(track_record.owner_id, session,
                                      update_task)

        track_metadata = update_task.ipfs_client.get_metadata(
            upd_track_metadata_multihash, track_metadata_format)

        track_record = populate_track_record_metadata(track_record,
                                                      track_metadata, handle)
        track_record.metadata_multihash = upd_track_metadata_multihash

        # if cover_art CID is of a dir, store under _sizes field instead
        if track_record.cover_art:
            logger.warning(
                f"tracks.py | Processing track cover art {track_record.cover_art}"
            )
            try:
                is_directory = update_task.ipfs_client.multihash_is_directory(
                    track_record.cover_art)
                if is_directory:
                    track_record.cover_art_sizes = track_record.cover_art
                    track_record.cover_art = None
            except Exception as e:
                # we are unable to get the cover art
                if 'invalid multihash' in str(e):
                    track_record.cover_art_sizes = None
                    track_record.cover_art = None
                else:
                    raise e

        update_remixes_table(session, track_record, track_metadata)

    if event_type == track_event_types_lookup["delete_track"]:
        track_record.is_delete = True
        if not track_record.stem_of:
            track_record.stem_of = null()
        if not track_record.remix_of:
            track_record.remix_of = null()
        logger.info(f"Removing track : {track_record.track_id}")

    track_record.updated_at = block_datetime

    return track_record
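Example #4 hides the directory check behind update_task.ipfs_client.multihash_is_directory. One plausible implementation, built from the same one-byte cat probe that Example #1 performs inline (a sketch under that assumption, not the canonical client code):

def multihash_is_directory(self, multihash_cid):
    try:
        # catting a single byte succeeds for plain files
        self._api.cat(multihash_cid, 0, 1)
        return False
    except Exception as e:  # pylint: disable=W0703
        # directories surface this error from the IPFS API (see Example #1)
        if "this dag node is a directory" in str(e):
            return True
        raise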
Example #5
0
def track_state_update(
    self,
    update_task: DatabaseTask,
    session: Session,
    track_factory_txs,
    block_number,
    block_timestamp,
    block_hash,
    ipfs_metadata,
    blacklisted_cids,
) -> Tuple[int, Set]:
    """Return tuple containing int representing number of Track model state changes found in transaction and set of processed track IDs."""
    begin_track_state_update = datetime.now()
    metric = PrometheusMetric(
        "track_state_update_runtime_seconds",
        "Runtimes for src.task.tracks:track_state_update()",
        ("scope", ),
    )

    blockhash = update_task.web3.toHex(block_hash)
    num_total_changes = 0
    skipped_tx_count = 0
    # This stores the track_ids created or updated in the set of transactions
    track_ids: Set[int] = set()

    if not track_factory_txs:
        return num_total_changes, track_ids

    pending_track_routes: List[TrackRoute] = []
    track_events: Dict[int, Dict[str, Any]] = {}
    for tx_receipt in track_factory_txs:
        txhash = update_task.web3.toHex(tx_receipt.transactionHash)
        for event_type in track_event_types_arr:
            track_events_tx = get_track_events_tx(update_task, event_type,
                                                  tx_receipt)
            processed_entries = 0  # if a record does not get added, do not count it towards num_total_changes
            for entry in track_events_tx:
                track_event_start_time = time()
                event_args = entry["args"]
                track_id = (helpers.get_tx_arg(entry, "_trackId") if "_trackId"
                            in event_args else helpers.get_tx_arg(
                                entry, "_id"))
                existing_track_record = None
                track_metadata = None
                try:
                    # look up or populate existing record
                    if track_id in track_events:
                        existing_track_record = track_events[track_id]["track"]
                    else:
                        existing_track_record = lookup_track_record(
                            update_task,
                            session,
                            entry,
                            track_id,
                            block_number,
                            blockhash,
                            txhash,
                        )
                    # parse track event to add metadata to record
                    if event_type in [
                            track_event_types_lookup["new_track"],
                            track_event_types_lookup["update_track"],
                    ]:
                        track_metadata_digest = event_args._multihashDigest.hex()
                        track_metadata_hash_fn = event_args._multihashHashFn
                        buf = multihash.encode(
                            bytes.fromhex(track_metadata_digest),
                            track_metadata_hash_fn)
                        cid = multihash.to_b58_string(buf)
                        # do not process entry if cid is blacklisted
                        if cid in blacklisted_cids:
                            continue
                        track_metadata = ipfs_metadata[cid]

                    parsed_track = parse_track_event(
                        self,
                        session,
                        update_task,
                        entry,
                        event_type,
                        existing_track_record,
                        block_number,
                        block_timestamp,
                        track_metadata,
                        pending_track_routes,
                    )

                    # If track record object is None, it has a blacklisted metadata CID
                    if parsed_track is not None:
                        if track_id not in track_events:
                            track_events[track_id] = {
                                "track": parsed_track,
                                "events": [],
                            }
                        else:
                            track_events[track_id]["track"] = parsed_track
                        track_events[track_id]["events"].append(event_type)
                        track_ids.add(track_id)
                        processed_entries += 1
                except EntityMissingRequiredFieldError as e:
                    logger.warning(f"Skipping tx {txhash} with error {e}")
                    skipped_tx_count += 1
                    add_node_level_skipped_transaction(session, block_number,
                                                       blockhash, txhash)
                except Exception as e:
                    logger.info("Error in parse track transaction")
                    raise IndexingError("track", block_number, blockhash,
                                        txhash, str(e)) from e
                metric.save_time({"scope": "track_event"},
                                 start_time=track_event_start_time)

            num_total_changes += processed_entries

    logger.info(
        f"index.py | tracks.py | [track indexing] There are {num_total_changes} events processed and {skipped_tx_count} skipped transactions."
    )

    for track_id, value_obj in track_events.items():
        if value_obj["events"]:
            logger.info(f"index.py | tracks.py | Adding {value_obj['track']}")
            invalidate_old_track(session, track_id)
            session.add(value_obj["track"])

    if num_total_changes:
        metric.save_time({"scope": "full"})
        logger.info(
            f"index.py | tracks.py | track_state_update | finished track_state_update in {datetime.now() - begin_track_state_update} // per event: {(datetime.now() - begin_track_state_update) / num_total_changes} secs"
        )
    return num_total_changes, track_ids
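A note on Example #5's accumulator: track_events keeps one entry per track_id, with the most recently parsed record under "track" and every event type seen in the block under "events", so each track is invalidated and re-added exactly once per block. Roughly (values are illustrative):

track_events = {
    42: {
        "track": parsed_track,                    # latest Track model instance for track 42
        "events": ["new_track", "update_track"],  # every event type hit this block
    },
}
for track_id, value_obj in track_events.items():
    if value_obj["events"]:
        invalidate_old_track(session, track_id)  # presumably marks the prior row as no longer current
        session.add(value_obj["track"])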