Example #1
    def is_new_company_info(self, company_info):
        """
        Checks if the CompanyInfo response has changed since it was last pulled. Usually this is taken care of by the
        LastUpdatedTime filter on the API (only data which has been updated is returned), but the CompanyInfo endpoint
        seems to return data every time.

        Args:
            company_info(dict): response from CompanyInfo endpoint

        Returns:
            bool: True if the company_info has changed since the last pull
        """
        company_info_updated_at = company_info.get('MetaData',
                                                   {}).get('LastUpdatedTime')

        item = Item.query(Item.org_uid == self.org_uid,
                          Item.endpoint == 'CompanyInfo',
                          Item.changeset == -1).get()
        if item:
            item_updated_at = item.data.get('MetaData',
                                            {}).get('LastUpdatedTime')
            if company_info_updated_at and company_info_updated_at == item_updated_at:
                logging.info("CompanyInfo has not been updated, ignoring")
                return False

        return True
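
A minimal usage sketch (not from the source) of how this check might gate a CompanyInfo save during a sync; adapter, fetch_company_info and save_company_info are hypothetical names standing in for the adaptor instance, the API call and the persistence step:

company_info = fetch_company_info(org_uid)    # hypothetical call returning the CompanyInfo response dict
if adapter.is_new_company_info(company_info):
    save_company_info(company_info)           # hypothetical: persist only when LastUpdatedTime has moved
else:
    logging.info("CompanyInfo unchanged, nothing to save")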
    def count_items(self):
        """
        Utility method which returns the number of items in Item datastore kind.

        Returns:
            int: the number of items in Item datastore kind
        """
        return len(Item.query().fetch(keys_only=True))
Example #3
def create_items(org_uid, provider, changeset, endpoint, item_id, data):
    """
    Creates items containing the raw endpoint response, ready to be saved into the Item datastore kind (Item acts as a
    raw endpoint cache and is heavily used by the normalisation part of the data ingestion pipeline; dependent items
    needed for normalisation are resolved from here rather than going back to the provider's API).

    Two instances of Item are created: one with the actual changeset during which the item was ingested, and one with a
    changeset of -1. This allows the latest version of an item to be retrieved easily (an invoice might be ingested as
    part of one changeset, but if it gets updated it will be ingested again as part of another changeset).

    Args:
        org_uid(str): org identifier
        provider(str): data provider (eg. 'qbo', 'xerov2')
        changeset(int): update cycle identifier
        endpoint(str): endpoint which the item came from (eg. 'Invoice', 'Payment')
        item_id(str): id of the item as is in the source system (provider)
        data(object): item payload as provided by the provider's api (eg. output of the invoice endpoint)

    Returns:
        list(ndb.Model): a list of Item instances ready to be saved
    """
    datastore_item_id = "{}_{}_{}_{}".format(org_uid, changeset, endpoint,
                                             item_id)
    changeset_item = Item(id=datastore_item_id,
                          org_uid=org_uid,
                          provider=provider,
                          changeset=changeset,
                          endpoint=endpoint,
                          item_id=item_id,
                          data=data)

    latest_version_changeset = -1
    datastore_item_id = "{}_{}_{}_{}".format(org_uid, latest_version_changeset,
                                             endpoint, item_id)
    latest_version = Item(id=datastore_item_id,
                          org_uid=org_uid,
                          provider=provider,
                          changeset=latest_version_changeset,
                          endpoint=endpoint,
                          item_id=item_id,
                          data=data)

    return [changeset_item, latest_version]
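
A hedged sketch of how the pair of Item instances produced by create_items might be used: both are saved, after which the latest version of the record can be fetched via the -1 changeset regardless of which changeset last touched it. The org identifier, changeset number and invoice payload below are illustrative only:

items = create_items('org1', 'qbo', 5, 'Invoice', '42', {'Id': '42', 'TotalAmt': 100})
sync_utils.save_items(items)

# latest version of the invoice, independent of the changeset it arrived in
latest = Item.query(Item.org_uid == 'org1',
                    Item.endpoint == 'Invoice',
                    Item.item_id == '42',
                    Item.changeset == -1).get()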
    def test_missing_found_in_item(self):
        """
        Verifies that a missing item can be resolved from the Item cache.
        """
        self.create_org(status=CONNECTED)
        stage = MissingItemsStage('test')
        MissingItem(org_uid='test',
                    missing_items=[{
                        'type': 'Account',
                        'id': '1'
                    }]).put()
        Item(org_uid='test',
             changeset=-1,
             endpoint='Account',
             item_id='1',
             data={
                 'Id': '1'
             }).put()
        stage.next(payload={})

        # the missing item should be deleted and the resolved item should be added to Item, ready for the next publish
        self.assertEqual(self.count_missing_items(), 0)
        self.assertEqual(self.count_items(), 2)
    def test_missing_without_payload_id(self):
        """
        Verifies that a missing item without the Id field in the data can be processed.
        """
        item_id = '1000_2010-01-01'
        Item(org_uid='test',
             changeset=-1,
             endpoint='AccountBalance',
             item_id=item_id,
             data={
                 'Balance': '10'
             }).put()
        MissingItem(org_uid='test',
                    missing_items=[{
                        'type': 'AccountBalance',
                        'id': item_id
                    }]).put()
        self.create_org(status=CONNECTED)
        stage = MissingItemsStage('test')
        stage.next(payload={})

        # the missing item should be deleted and the resolved item should be added to Item, ready for the next publish
        self.assertEqual(self.count_missing_items(), 0)
        self.assertEqual(self.count_items(), 2)
Example #6
    def next(self, payload):
        """
        Processes one batch of missing items and saves them for publishing only if all of them can be resolved
        (resolution is attempted in the cache first, then via the API if the item is not in the cache).

        Args:
            payload(dict): the payload which was given to the adaptor the last time this function ran

        Returns:
            (bool, dict): a flag indicating if the sync has finished, and the payload to be passed in on the next call
        """
        results = []
        missing_item = MissingItem.query(
            MissingItem.org_uid == self.org_uid).get()

        if not missing_item:
            logging.info("no missing items, nothing to process")
            return True, {}

        for item in missing_item.missing_items:
            logging.info("processing missing item: {}".format(item))

            # handle items which do not have an ID (CompanyInfo for example)
            if item['type'] in SKIP_ID_IN_API_GET:
                item_cache = Item.query(Item.org_uid == self.org_uid,
                                        Item.endpoint == item['type'],
                                        Item.changeset == -1).get()
            else:
                item_cache = Item.query(Item.org_uid == self.org_uid,
                                        Item.endpoint == item['type'],
                                        Item.item_id == item['id'],
                                        Item.changeset == -1).get()

            if item_cache:
                data = item_cache.data
                item_id = item_cache.item_id
            else:
                logging.info(
                    "could not find {} with id {} in raw endpoint cache".
                    format(item['type'], item.get('id')))
                session = QboApiSession(self.org_uid)
                data = session.get(self._get_url(item['type'], item.get('id')),
                                   headers={'Accept': 'application/json'})
                data = data.get('QueryResponse', {}).get(item['type'], {})

                if data:
                    data = data[0]
                    item_id = data['Id']
                else:
                    message_template = (
                        "could not find {} with id {} in the api either, "
                        "ignoring and deleting this missing item record")
                    logging.warning(
                        message_template.format(item['type'], item.get('id')))
                    missing_item.key.delete()
                    return False, {}

            results.append({
                'endpoint': item['type'],
                'item_id': item_id,
                'data': data
            })

        item_objects = []

        for result in results:
            message = "saving resolved missing item into raw endpoint cache (type: {}, id: {})"
            logging.info(message.format(result['endpoint'], result['item_id']))

            item_objects.extend(
                sync_utils.create_items(self.org_uid, self.org.provider,
                                        self.org.changeset, result['endpoint'],
                                        result['item_id'], result['data']))

        sync_utils.save_items(item_objects)

        logging.info("deleting missing item")
        missing_item.key.delete()

        return False, {}
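
A minimal sketch (assumed, not taken from the source) of how a sync pipeline might drive this stage: next is called repeatedly, feeding back the payload it returned, until the returned flag reports that the sync has finished:

stage = MissingItemsStage(org_uid)
complete, payload = False, {}
while not complete:
    complete, payload = stage.next(payload=payload)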