Beispiel #1
0
def get_all_providers(invalidate=False):
    """
    Return all PACT provider fixture items, serving them from cache when possible.

    The provider list is small, so loading everything and scanning per page
    load is acceptable overhead for now.

    :param invalidate: when True, drop the cached entry and re-query.
    :return: list of FixtureDataItem objects; empty list if the cached JSON
             cannot be parsed.
    """
    if invalidate:
        cache.delete(PACT_PROVIDERS_FIXTURE_CACHE_KEY)

    cached = cache.get(PACT_PROVIDERS_FIXTURE_CACHE_KEY, None)
    if cached is not None:
        # Cache hit: rehydrate the wrapped documents from the stored JSON.
        # (Wrapping is not strictly necessary — raw JSON would do.)
        try:
            return [FixtureDataItem.wrap(item) for item in json.loads(cached)]
        except Exception as ex:
            logging.error("Error loading json from cache key %s: %s" % (PACT_PROVIDERS_FIXTURE_CACHE_KEY, ex))
            return []

    # Cache miss: re-query the fixture items for the PACT HP group and cache them.
    hp_group = Group.by_name(PACT_DOMAIN, PACT_HP_GROUPNAME)
    providers = FixtureDataItem.by_group(hp_group)
    cache.set(PACT_PROVIDERS_FIXTURE_CACHE_KEY, json.dumps([p.to_json() for p in providers]))
    return providers

#    cache.set('%s_casedoc' % self._id, json.dumps(self._case), PACT_CACHE_TIMEOUT)
#        xml_ret = cache.get('%s_schedule_xml' % self._id, None)
        pass
Beispiel #2
0
def get_all_providers(invalidate=False):
    """
    Wrapper function to get all the providers for PACT and cache them.

    Ugly for now - the number of entries is small enough that loading all and
    scanning on checking is small enough overhead on a single page load.

    :param invalidate: when True, delete the cached entry and re-query.
    :return: list of FixtureDataItem objects; empty list if the cached JSON
             cannot be parsed.
    """
    if invalidate:
        cache.delete(PACT_PROVIDERS_FIXTURE_CACHE_KEY)
    raw_cached_fixtures = cache.get(PACT_PROVIDERS_FIXTURE_CACHE_KEY, None)
    if raw_cached_fixtures is None:
        # requery and cache
        pact_hp_group = Group.by_name(PACT_DOMAIN, PACT_HP_GROUPNAME)
        providers = FixtureDataItem.by_group(pact_hp_group)
        cache.set(PACT_PROVIDERS_FIXTURE_CACHE_KEY, json.dumps([x.to_json() for x in providers]))
        return providers
    else:
        try:
            json_data = json.loads(raw_cached_fixtures)
            # not necessary in the grand scheme of things - we could really just use raw JSON
            return [FixtureDataItem.wrap(x) for x in json_data]
        # Fixed Python 2-only `except Exception, ex:` syntax, which is a
        # SyntaxError on Python 3.
        except Exception as ex:
            logging.error("Error loading json from cache key %s: %s" % (PACT_PROVIDERS_FIXTURE_CACHE_KEY, ex))
            return []

#    cache.set('%s_casedoc' % self._id, json.dumps(self._case), PACT_CACHE_TIMEOUT)
#        xml_ret = cache.get('%s_schedule_xml' % self._id, None)
        pass
Beispiel #3
0
def iter_fixture_items_for_data_type(domain, data_type_id):
    """
    Lazily yield wrapped FixtureDataItem documents for one data type.

    Pages through the 'fixtures/data_items_by_domain_type' view in chunks of
    1000 so large tables are never fully materialized in memory.

    :param domain: domain name to scope the view query.
    :param data_type_id: _id of the data type whose items are wanted.
    """
    from corehq.apps.fixtures.models import FixtureDataItem
    view_rows = paginate_view(
        FixtureDataItem.get_db(),
        'fixtures/data_items_by_domain_type',
        chunk_size=1000,
        startkey=[domain, data_type_id],
        # {} sorts after any string/number, so this key range covers all
        # rows keyed [domain, data_type_id, ...].
        endkey=[domain, data_type_id, {}],
        reduce=False,
        include_docs=True,
    )
    for view_row in view_rows:
        yield FixtureDataItem.wrap(view_row['doc'])
Beispiel #4
0
def update_fixture(domain_link, tag):
    """
    Copy a global lookup table (data type + all its items) from a master
    domain to its linked domain.

    :param domain_link: link object relating the master and linked domains;
        its ``is_remote`` flag selects remote vs. local fetching.
    :param tag: tag of the lookup table to sync.
    :raises UnsupportedActionError: if the master table is not global.
    """
    # Fetch the master table over HTTP or locally depending on link type.
    if domain_link.is_remote:
        master_results = remote_fixture(domain_link, tag)
    else:
        master_results = local_fixture(domain_link.master_domain, tag)

    master_data_type = master_results["data_type"]
    if not master_data_type.is_global:
        raise UnsupportedActionError(
            _("Found non-global lookup table '{}'.").format(
                master_data_type.tag))

    # Update data type
    # Strip couch identity fields so the master's _id/_rev never leak into
    # the linked domain's document.
    master_data_type = master_data_type.to_json()
    del master_data_type["_id"]
    del master_data_type["_rev"]

    # Reuse the linked domain's existing data type doc (preserving its own
    # _id/_rev) if one with this tag already exists; otherwise create anew.
    linked_data_type = get_fixture_data_type_by_tag(domain_link.linked_domain,
                                                    master_data_type["tag"])
    if linked_data_type:
        linked_data_type = linked_data_type.to_json()
    else:
        linked_data_type = {}
    linked_data_type.update(master_data_type)
    linked_data_type["domain"] = domain_link.linked_domain
    linked_data_type = FixtureDataType.wrap(linked_data_type)
    linked_data_type.save()
    clear_fixture_quickcache(domain_link.linked_domain, [linked_data_type])

    # Re-create relevant data items
    # Delete-then-recreate rather than diffing: simpler, and the item count
    # is expected to be manageable.
    delete_fixture_items_for_data_type(domain_link.linked_domain,
                                       linked_data_type._id)
    for master_item in master_results["data_items"]:
        doc = master_item.to_json()
        del doc["_id"]
        del doc["_rev"]
        doc["domain"] = domain_link.linked_domain
        doc["data_type_id"] = linked_data_type._id
        FixtureDataItem.wrap(doc).save()

    # Invalidate the linked domain's fixture cache after items change.
    clear_fixture_cache(domain_link.linked_domain)
Beispiel #5
0
 def _get_fixture_element(self, data_type, user_id, items):
     """
     Build the <fixture> XML element for one data type and user.

     :param data_type: data type whose tag names the fixture and item list.
     :param user_id: user id attached to the fixture element.
     :param items: item docs to serialize via ``self.to_xml``.
     :return: ElementTree.Element rooted at <fixture>.
     """
     attrs = {'id': ':'.join((self.id, data_type.tag)), 'user_id': user_id}
     if data_type.is_indexed:
         attrs['indexed'] = 'true'
     root = ElementTree.Element('fixture', attrs)
     container = ElementTree.Element('%s_list' % data_type.tag)
     root.append(container)
     for entry in items:
         try:
             element = self.to_xml(entry)
         except KeyError:
             # Docs missed in prior lazy migrations need a wrap/to_json
             # round trip before serialization.
             element = self.to_xml(FixtureDataItem.wrap(entry).to_json())
         container.append(element)
     return root