Example #1
def get_names_for_ids(id_list: List[int], *args) -> Dict[int, NameItem]:
    """
    Get item info for a list of ids
    :throws ApiException when something went wrong with the api
    """
    # split the request into chunks of 1000 ids
    ep = UniverseEndpoint()
    data: Dict[int, NameItem] = dict()
    for id_chunk in chunks(id_list, 1000):
        chunk_data = __get_names_for_ids(ep, id_chunk, args)
        data.update(chunk_data)

    return data
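
All four examples rely on a chunks() helper that is not shown here. A minimal sketch, assuming it simply splits a sequence into slices of at most size elements (the signature is inferred from the call sites):

from typing import Iterator, List, Sequence, TypeVar

T = TypeVar('T')


def chunks(seq: Sequence[T], size: int) -> Iterator[List[T]]:
    """Yield successive slices of seq with at most size elements each."""
    for start in range(0, len(seq), size):
        yield list(seq[start:start + size])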
Example #2
def update_market_groups():
    """This updates all MarketGroups
    No Commit is done
    """
    logger.debug('update_market_groups')
    ep: MarketEndpoint = MarketEndpoint()
    groups_resp: MarketGroupsResponse = ep.get_groups()

    upstream_group_ids = set(groups_resp.data)
    db_marketgroup_ids = {
        gid
        for gid, in db.session.query(MarketGroup.marketGroupID)
    }
    not_in_upstream = db_marketgroup_ids - upstream_group_ids
    not_in_db = upstream_group_ids - db_marketgroup_ids
    in_db_and_upstream = upstream_group_ids.intersection(db_marketgroup_ids)
    logger.debug('upstream: %r db: %r', upstream_group_ids, db_marketgroup_ids)
    logger.debug('not upstream: %r not_db: %r both: %r', not_in_upstream,
                 not_in_db, in_db_and_upstream)
    # delete market groups that no longer exist upstream first
    logger.info("Deleting market groups: %r", not_in_upstream)
    # an in_() filter can't be synchronized with 'evaluate', so use 'fetch'
    db.session.query(MarketGroup)\
        .filter(MarketGroup.marketGroupID.in_(not_in_upstream))\
        .delete(synchronize_session='fetch')

    market_group_responses: List[MarketGroupResponse] = []
    ids_that_need_checking = list(not_in_db)
    ids_that_need_checking.extend(in_db_and_upstream)
    for mg_id_chunk in chunks(ids_that_need_checking, 1000):
        market_group_responses.extend(ep.get_group_multi(mg_id_chunk))

    # collect the base (parentless) groups first
    base_groups: List[MarketGroupResponse] = []
    for market_group_resp in market_group_responses:
        if market_group_resp.parent_id is None:
            base_groups.append(market_group_resp)

    # now we can walk the whole tree from the base groups
    mg_add_list = []
    mg_update_list = []
    stack = []
    for base_market_group in base_groups:
        stack.append(base_market_group)
        while stack:
            current = stack.pop()
            mapping = dict(marketGroupID=current.id,
                           parentGroupID=current.parent_id,
                           marketGroupName=current.name,
                           description=current.description,
                           iconID=0,
                           hasTypes=(len(current.types) > 0))
            if current.id in not_in_db:
                mg_add_list.append(mapping)
            elif current.id in in_db_and_upstream:
                mg_update_list.append(mapping)

            for desc in get_descendents(market_group_responses, current.id):
                stack.append(desc)

    logger.debug('Inserting MarketGroups: %r', mg_add_list)
    logger.debug('Updating MarketGroups: %r', mg_update_list)
    db.session.bulk_insert_mappings(MarketGroup, mg_add_list)
    db.session.bulk_update_mappings(MarketGroup, mg_update_list)
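
The tree walk above depends on a get_descendents helper that is not shown. Judging from its use, it returns the direct children of the given group id, and the explicit stack handles the recursion. A minimal sketch under that assumption:

def get_descendents(responses: List[MarketGroupResponse],
                    parent_id: int) -> List[MarketGroupResponse]:
    """Assumed helper: return the responses whose parent_id matches
    the given id, i.e. the direct children of that market group."""
    return [resp for resp in responses if resp.parent_id == parent_id]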
Example #3
def update_invtypes():
    """ Updates Inventory Types and their Categories and Groups
    :throws ApiException is thrown if an error exists
    """
    all_start = time.time()
    ep: UniverseEndpoint = UniverseEndpoint()
    update_market_groups()
    update_categories_and_groups()

    types_start = time.time()

    type_responses: List[TypesResponse] = ep.get_types()

    existing_inv_ids: List[int] = []
    for resp in type_responses:
        for type_id in resp.data:
            existing_inv_ids.append(type_id)
    existing_inv_ids.sort()

    logger.debug('InvTypeIDs Upstream: %r', existing_inv_ids)

    invtype_ids_db: List[int] = [
        row[0] for row in db.session.query(InvType.typeID).order_by(
            InvType.typeID).all()
    ]
    logger.debug('InvTypeIDs in database: %r', invtype_ids_db)

    invtype_ids_to_remove: List[int] = [
        db_id for db_id in invtype_ids_db if db_id not in existing_inv_ids
    ]
    # remove invtypes from the db if they no longer exist upstream
    logger.info('InvtypeIDs to remove: %r', invtype_ids_to_remove)
    for invtype_id in invtype_ids_to_remove:
        # special case caused by the auto increment:
        # if the name of the invtype with id 1 is '#System'
        # we should reset its id to 0
        if invtype_id == 1:
            invtype: InvType = db.session.query(InvType).get(1)
            if invtype.typeName == '#System':
                invtype.typeID = 0
                invtype_ids_db.remove(1)
                if 0 not in invtype_ids_db:
                    invtype_ids_db.append(0)
                    invtype_ids_db.sort()
                continue

        db.session.query(InvType).filter(InvType.typeID == invtype_id).delete(
            synchronize_session='evaluate')

    # update these in chunks of 5000 at a time
    for typeid_chunk in chunks(existing_inv_ids, 5000):
        logger.debug('Updating InvTypes: %r', typeid_chunk)
        # load the whole chunk from the API first
        update_type: List[Dict[str, Any]] = []
        insert_type: List[Dict[str, Any]] = []
        insert_attribute: List[Dict[str, Any]] = []
        insert_effect: List[Dict[str, Any]] = []
        t_start = time.time()
        responses = ep.get_type_multi(typeid_chunk)
        t_end = time.time()
        logger.info('Loading chunk of size %d took %s', len(typeid_chunk),
                    t_end - t_start)
        # now go over the types in the responses
        for resp in responses:
            logger.debug('Working on InvType: %d', resp.type_id)
            data = dict(typeID=resp.type_id,
                        groupID=resp.group_id,
                        typeName=resp.name,
                        description=resp.description,
                        marketGroupID=resp.market_group_id)

            if resp.type_id in invtype_ids_db:
                logger.debug('Needs Update')
                update_type.append(data)
                # remove all existing attrs and effects;
                # they are re-inserted below
                db.session.query(InvTypeDogmaAttribute).filter(
                    InvTypeDogmaAttribute.typeID == resp.type_id).delete(
                        synchronize_session='evaluate')
                db.session.query(InvTypeDogmaEffect).filter(
                    InvTypeDogmaEffect.typeID == resp.type_id).delete(
                        synchronize_session='evaluate')
            else:
                logger.debug('Needs Insert')
                insert_type.append(data)

            if resp.dogma_attributes is not None:
                for attr_info in resp.dogma_attributes:
                    attr_data = dict(typeID=resp.type_id,
                                     attributeID=attr_info['attribute_id'],
                                     value=attr_info['value'])
                    insert_attribute.append(attr_data)

            if resp.dogma_effects is not None:
                for effect in resp.dogma_effects:
                    effect_data = dict(typeID=resp.type_id,
                                       effectID=effect['effect_id'],
                                       isDefault=effect['is_default'])
                    insert_effect.append(effect_data)

        logger.debug('Insert: %r', insert_type)
        logger.debug('Update: %r', update_type)
        db.session.bulk_update_mappings(InvType, update_type)
        db.session.bulk_insert_mappings(InvType, insert_type)
        db.session.bulk_insert_mappings(InvTypeDogmaAttribute,
                                        insert_attribute)
        db.session.bulk_insert_mappings(InvTypeDogmaEffect, insert_effect)
        db.session.commit()

    all_end = time.time()
    logger.info('InvTypes updated in %s', all_end - types_start)
    logger.info('Categories, Groups and InvTypes updated in %s',
                all_end - all_start)
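
Note that update_invtypes commits after every chunk, so a failed run can leave the database partially updated. A minimal caller sketch, assuming a Flask application object named app owns the db session used above (app is a hypothetical name, not part of the original code):

with app.app_context():
    try:
        update_invtypes()
    except ApiException:
        # update_invtypes commits per chunk; roll back only the
        # uncommitted remainder and surface the error to the caller
        db.session.rollback()
        raise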
Example #4
def update_categories_and_groups():
    """This updates Inventory Categories and Groups
    No Commit is done
    """
    ep: UniverseEndpoint = UniverseEndpoint()

    categories_start = time.time()

    categories_resp: CategoriesResponse = ep.get_categories()
    upstream_cat_ids = categories_resp.data
    logger.debug('CategoryIDs Upstream: %r', upstream_cat_ids)
    # collect all group ids referenced by these categories;
    # using them later saves requesting every group upstream.
    # groups without a category are missed this way,
    # but those are of no interest to us anyway
    group_ids: Set[int] = set()
    category_ids_db: List[int] = [
        row[0] for row in db.session.query(InvCategory.categoryID).all()
    ]
    logger.debug('CategoryIDs in database: %r', category_ids_db)
    # find categories that don't exist anymore
    category_ids_to_remove: List[int] = [
        db_id for db_id in category_ids_db if db_id not in upstream_cat_ids
    ]
    # remove categories from db if they don't exist anymore
    # this removes their groups by cascading
    logger.info('Categories to remove: %r', category_ids_to_remove)
    for cat_id in category_ids_to_remove:
        db.session.query(InvCategory).filter(
            InvCategory.categoryID == cat_id).delete(
                synchronize_session='evaluate')

    update_categories: List[Dict[str, Any]] = []
    insert_categories: List[Dict[str, Any]] = []
    for cat_chunk in chunks(upstream_cat_ids, 1000):
        logger.debug('Updating: %r', cat_chunk)
        cat_info_responses: List[CategoryResponse] = ep.get_category_multi(
            cat_chunk)
        for cat_data_resp in cat_info_responses:
            for group_id in cat_data_resp.groups:
                group_ids.add(group_id)

            data = dict(categoryID=cat_data_resp.id,
                        categoryName=cat_data_resp.name,
                        published=cat_data_resp.published)

            if cat_data_resp.id in category_ids_db:
                update_categories.append(data)
            else:
                insert_categories.append(data)

    logger.debug('Insert: %r', insert_categories)
    logger.debug('Update: %r', update_categories)
    db.session.bulk_update_mappings(InvCategory, update_categories)
    db.session.bulk_insert_mappings(InvCategory, insert_categories)

    categories_end = time.time()
    logger.info('Categories updated in %s', categories_end - categories_start)

    group_ids = sorted(group_ids)

    logger.debug('GroupIDs Upstream: %r', group_ids)
    # all categories should now be up to date,
    # so let's update the groups

    groups_start = time.time()

    group_ids_db: List[int] = [
        row[0] for row in db.session.query(InvGroup.groupID).all()
    ]
    logger.debug('GroupIDs in database: %r', group_ids_db)
    # find groups that don't exist anymore
    group_ids_to_remove: List[int] = [
        db_id for db_id in group_ids_db if db_id not in group_ids
    ]
    logger.info('GroupIDs to remove: %r', group_ids_to_remove)
    # remove groups from the db if they no longer exist upstream
    for group_id in group_ids_to_remove:
        db.session.query(InvGroup).filter(InvGroup.groupID == group_id).delete(
            synchronize_session='evaluate')

    update_groups: List[Dict[str, Any]] = []
    insert_groups: List[Dict[str, Any]] = []
    for group_chunk in chunks(group_ids, 1000):
        logger.debug('Updating: %r', group_chunk)
        group_info_responses: List[GroupResponse] = ep.get_group_multi(
            group_chunk)
        for group_data_resp in group_info_responses:
            data = dict(groupID=group_data_resp.id,
                        groupName=group_data_resp.name,
                        published=group_data_resp.published,
                        categoryID=group_data_resp.category_id)
            if int(group_data_resp.id) in group_ids_db:
                update_groups.append(data)
            else:
                insert_groups.append(data)

    logger.debug('Insert: %r', insert_groups)
    logger.debug('Update: %r', update_groups)
    db.session.bulk_update_mappings(InvGroup, update_groups)
    db.session.bulk_insert_mappings(InvGroup, insert_groups)

    groups_end = time.time()
    logger.info('Groups updated in %s', groups_end - groups_start)
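
update_market_groups and update_categories_and_groups both leave the commit to the caller; update_invtypes invokes them and commits inside its own loop. When either is run on its own, the commit has to happen at the call site. A sketch, reusing the hypothetical app object from the previous example:

with app.app_context():
    try:
        update_market_groups()
        update_categories_and_groups()
        db.session.commit()  # neither helper commits on its own
    except ApiException:
        db.session.rollback()
        raise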