Example 1
def import_universe_systems():
    client = EsiClient()
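    # fetch the full list of solar system IDs from ESI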
    systems, _ = client.get("/v1/universe/systems/")
    logger.info("dispatching system fetches")

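    # get_multiple fans each ID into the URL template; from the usage here and
    # in the other examples it appears to return an {id: response} mapping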
    system_data = client.get_multiple("/v4/universe/systems/{}/", systems)
    system_objs = []
    for r in system_data.values():
        system_objs.append(
            System(
                ccp_id=r["system_id"],
                constellation_id=r["constellation_id"],
                name=r["name"],
                security_status=r["security_status"]
            )
        )
    System.objects.bulk_create(system_objs)
    logger.info("systems created & committed")

    # generate a Location row for each solar system
    location_objs = []
    for system in System.objects.all():
        location_objs.append(
            Location(
                ccp_id=system.pk,
                system=system,
                constellation=system.constellation,
                region=system.constellation.region,
                root_location_id=system.pk,
                is_in_space=True
            )
        )
    Location.objects.bulk_create(location_objs)
    logger.info("system locations created & committed")
Example 2
def import_all_item_types():
    """
    Imports all eve item types. This should only be called once when the database is initially set up.
    :param self:
    :return:
    """
    client = EsiClient()
    page_count = client.get_page_count("/v1/universe/types/")

    logger.info("{} pages of items to download".format(page_count))

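    # download every page of the type-ID listing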
    data = client.get_multiple("/v1/universe/types/?page={}", [p+1 for p in range(page_count)])

    logger.info("all pages downloaded")

    item_ids = []
    for page_data in data.values():
        item_ids.extend(page_data)

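    # fetch the full type record for each collected ID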
    item_data = client.get_multiple("/v3/universe/types/{}/", item_ids)
    logger.info("all item data downloaded")
    for item in item_data.values():
        # update_or_create persists each row as it goes, so the returned
        # objects must not be passed to bulk_create afterwards
        ObjectType.objects.update_or_create(
            ccp_id=item["type_id"],
            defaults={
                "name": item["name"],
                "volume": item.get("volume"),
                "packaged_volume": item.get("packaged_volume"),
                "group_id": item["group_id"],
                "icon_id": item.get("icon_id") or item["type_id"],
                "market_group": MarketGroup.get_object(item.get("market_group_id")),
                "published": item["published"],
            },
        )

    logger.info("all item data committed")
Example 3
def import_universe_constellations():
    client = EsiClient()
    constellations, _ = client.get("/v1/universe/constellations/")
    logger.info("dispatching constellation fetches")

    constellations_data = client.get_multiple(
        "/v1/universe/constellations/{}/", constellations)
    constellation_objs = []
    for r in constellations_data.values():
        constellation_objs.append(
            Constellation(ccp_id=r["constellation_id"],
                          region_id=r["region_id"],
                          name=r["name"]))
    Constellation.objects.bulk_create(constellation_objs)
    logger.info("constellations created & committed")
Example 4
def import_universe_regions():
    client = EsiClient()
    regions, _ = client.get("/v1/universe/regions/")
    logger.info("dispatching region fetches")

    regions_data = client.get_multiple("/v1/universe/regions/{}/", regions)
    regions_objs = []
    for r in regions_data.values():
        regions_objs.append(
            Region(
                ccp_id=r["region_id"],
                name=r["name"]
            )
        )
    Region.objects.bulk_create(regions_objs)
    logger.info("regions created & committed")
Example 5
def import_all_type_categories():
    client = EsiClient()
    category_ids, _ = client.get("/v1/universe/categories/")

    category_data = client.get_multiple("/v1/universe/categories/{}/",
                                        category_ids)

    logger.info("all category data downloaded")

    category_objects = []
    for data in category_data.values():
        m = TypeCategory(ccp_id=data["category_id"], name=data["name"])
        category_objects.append(m)
    logger.info("category data objects created")
    TypeCategory.objects.bulk_create(category_objects)
    logger.info("category data objects committed")
Example 6
def import_all_market_groups():
    client = EsiClient()
    group_ids, _ = client.get("/v1/markets/groups/")

    group_data = client.get_multiple("/v1/markets/groups/{}/", group_ids)

    logger.info("all group data downloaded")

    group_objects = []
    for data in group_data.values():
        # note: parent_id references another MarketGroup by its CCP id; the
        # whole tree is committed in a single bulk_create below
        m = MarketGroup(ccp_id=data["market_group_id"],
                        parent_id=data.get("parent_group_id"),
                        name=data["name"])
        group_objects.append(m)
    logger.info("group data objects created")
    MarketGroup.objects.bulk_create(group_objects)
    logger.info("group data objects committed")
Example 7
def import_all_type_groups():
    client = EsiClient()
    # page through the full group listing instead of hard-coding two pages
    page_count = client.get_page_count("/v1/universe/groups/")
    pages = client.get_multiple("/v1/universe/groups/?page={}",
                                [p + 1 for p in range(page_count)])
    group_ids = []
    for page in pages.values():
        group_ids.extend(page)

    group_data = client.get_multiple("/v1/universe/groups/{}/", group_ids)

    logger.info("all group data downloaded")

    group_objects = []
    for data in group_data.values():
        m = TypeGroup(
            ccp_id=data["group_id"],
            name=data["name"],
            category_id=data["category_id"]
        )
        group_objects.append(m)
    logger.info("group data objects created")
    TypeGroup.objects.bulk_create(group_objects)
    logger.info("group data objects committed")
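
Taken together, the import tasks above have a clear dependency order: systems reference constellations, constellations reference regions, item types reference type groups and market groups, and type groups reference categories. A minimal bootstrap sketch, assuming the tasks are simply called in sequence (the wrapper name bootstrap_universe is hypothetical):

def bootstrap_universe():
    # order inferred from the foreign keys used in the examples above
    import_universe_regions()
    import_universe_constellations()
    import_universe_systems()
    import_all_type_categories()
    import_all_type_groups()
    import_all_market_groups()
    import_all_item_types()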
Example 8
def update_region_market_history(region_id):
    logger.info("LAUNCH_TASK {} {}".format("update_region_market_history",
                                           region_id))
    with history_queue.lock_task(
            'update-region-market-history-{}'.format(region_id)):
        logger.info(
            "update_region_market_history {} LOCK ACQUIRED".format(region_id))

        # reset the django db connection; a long-lived worker may hold a stale one
        logger.info(
            "resetting django db connection for update_region_market_history {}"
            .format(region_id))
        connection.connect()

        scan_log = MarketHistoryScanLog(region=Region.get_object(region_id))
        scan_log.save()

        items = ObjectType.get_all_tradeable_items()

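        # with raise_application_errors=False, ESI error payloads come back
        # in-band as dicts and are inspected below rather than raised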
        client = EsiClient(raise_application_errors=False)
        esi_url = "/v1/markets/{}/history/".format(region_id) + "?type_id={}"

        item_histories = client.get_multiple(esi_url, items)

        # check for items removed from the game/trading
        to_delete = []
        logger.info(
            "Done pulling item market history, pruning unpublished items")
        for object_id, h in item_histories.items():
            if isinstance(h, dict):
                if "error" in h:
                    if h["error"] == 'Type not found!':
                        ObjectType.verify_object_exists(object_id, force=True)
                        obj = ObjectType.get_object(object_id)
                        if not obj.published or not obj.market_group:
                            to_delete.append(object_id)
                            logger.info(
                                "Pruning unpublished object_id {} from the scan"
                                .format(object_id))
                    else:
                        msg = "received unrecognized application error from market query. object id {} error {}".format(
                            object_id, h)
                        logger.error(msg)
                        raise Exception(msg)
                else:
                    logger.warning(
                        "dict-shaped item history without an error key detected {} {}".format(
                            object_id, h))

        for i in to_delete:
            del item_histories[i]

        logger.info(
            "Done pruning unpublished items. Yanking existing market history entries out of the database"
        )
        # grab existing entries
        existing_entries = _get_existing_region_entries(region_id, 30)

        logger.info("Done yanking old data. Building table of new data...")
        new_entries_to_commit = []
        new_entry_count = 0

        for object_id, history in item_histories.items():
            existing = existing_entries.get(object_id, [])
            # only use the last 31 days of history data (ccp sorts for us)
            history = history[-31:]
            new_entries = update_market_history_for_item(
                region_id, object_id, history, existing)
            new_entries_to_commit.extend(new_entries)
            new_entry_count += len(new_entries)

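            # flush in batches of ~10k rows to keep memory bounded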
            if new_entry_count > 10000:
                # early commit
                logger.info("performing early commit of market history data")
                MarketHistory.objects.bulk_create(new_entries_to_commit)
                new_entries_to_commit = []
                new_entry_count = 0

        if new_entry_count > 0:
            logger.info("performing final market history data commit")
            MarketHistory.objects.bulk_create(new_entries_to_commit)

        logger.info("done creating history items, purging cache")
        MarketPriceDAO.purge_region_dao_cache(region_id, items)
        scan_log.scan_complete = timezone.now()
        scan_log.save()
        logger.info("market history cache purged. all done")
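
Since Example 8 takes its own per-region lock, driving it is one call per region. A minimal usage sketch (10000002 is The Forge; how the call is actually dispatched depends on the task framework wrapping history_queue, which is not shown here):

update_region_market_history(10000002)  # The Forge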