Example #1
0
    def update_tcms_from_save(self, json_data, save_counter):
        """Apply a TableColumnMapping save unless a newer one is queued.

        Uses cache keys as a crude lock/queue: only one save runs at a time;
        a save arriving mid-update is queued and replayed afterwards.

        :param json_data: JSON string encoding a list of dicts, each with a
            TableColumnMapping ``pk`` plus field values to persist.
        :param save_counter: increasing marker; a save older than the queued
            one is discarded.
        :return: True if the mappings were saved, False if skipped or queued.
        """
        # Check save_counter vs queued_save_counters.
        queued_save_counter = get_cache_raw(self.QUEUED_TCM_SAVE_COUNTER_KEY,
                                            None)
        if not queued_save_counter or save_counter > queued_save_counter:
            if not get_cache_state(self.UPDATING_TCMS_KEY, None):
                # Take the cache-based "lock" so concurrent saves queue up
                # instead of interleaving.
                set_cache_state(self.UPDATING_TCMS_KEY, True)
                for d in json.loads(json_data):

                    tcm = TableColumnMapping.objects.get(pk=d["pk"])
                    for field_name in TableColumnMapping.fields_to_save:
                        if not field_name == "pk":
                            setattr(tcm, field_name, d[field_name])
                    tcm.was_a_human_decision = True
                    tcm.save()

                # If another save was queued while we were updating, clear
                # the queue slots and replay it recursively.
                if get_cache_raw(self.QUEUED_TCM_SAVE_COUNTER_KEY,
                                 False) is not False:
                    queued_data = get_cache_raw(self.QUEUED_TCM_DATA_KEY)
                    queued_time = get_cache_raw(
                        self.QUEUED_TCM_SAVE_COUNTER_KEY)
                    delete_cache(self.QUEUED_TCM_DATA_KEY)
                    delete_cache(self.QUEUED_TCM_SAVE_COUNTER_KEY)
                    delete_cache(self.UPDATING_TCMS_KEY)
                    self.update_tcms_from_save(queued_data, queued_time)

                # Release the lock and drop any leftover queue state.
                delete_cache(self.UPDATING_TCMS_KEY)
                delete_cache(self.QUEUED_TCM_DATA_KEY)
                delete_cache(self.QUEUED_TCM_SAVE_COUNTER_KEY)
                return True

            else:
                # An update is in flight; queue this save to run afterwards.
                set_cache_raw(self.QUEUED_TCM_SAVE_COUNTER_KEY, save_counter)
                set_cache_raw(self.QUEUED_TCM_DATA_KEY, json_data)
        return False
Example #2
0
    def save_to_cache(self, identifier):
        """
        Save the results to the cache database. The data in the cache are
        stored as a list of dictionaries. The data in this class are stored as
        a dict of dict. This is important to remember because the data from the
        cache cannot be simply loaded into the above structure.

        :param identifier: Import file primary key
        :return: None
        """

        # change the format of the data in the cache. Make this a list of
        # objects instead of object of objects. `or []` covers a cold cache.
        existing_results = get_cache_raw(
            DataQualityCheck.cache_key(identifier)) or []

        # Flatten the dict-of-dicts into the list-of-dicts cache format;
        # dict.values() replaces the manual key/value append loop.
        existing_results += list(self.results.values())

        # Keep the cached list ordered by record id for stable retrieval.
        sorted_results = sorted(existing_results, key=lambda k: k['id'])
        set_cache_raw(DataQualityCheck.cache_key(identifier), sorted_results,
                      86400)  # 24 hours
Example #3
0
    def update_tcms_from_save(self, json_data, save_counter):
        """Persist human TableColumnMapping decisions, one save at a time.

        A cache flag acts as the lock; a counter + data slot act as a
        single-entry queue for saves that arrive while an update runs.

        :param json_data: JSON string: list of dicts keyed by mapping ``pk``.
        :param save_counter: marker used to drop saves older than the queued one.
        :return: True when the mappings were written, otherwise False.
        """
        # Check save_counter vs queued_save_counters.
        queued_save_counter = get_cache_raw(self.QUEUED_TCM_SAVE_COUNTER_KEY, None)
        if not queued_save_counter or save_counter > queued_save_counter:
            if not get_cache_state(self.UPDATING_TCMS_KEY, None):
                # Mark the update as in progress (the "lock").
                set_cache_state(self.UPDATING_TCMS_KEY, True)
                for d in json.loads(json_data):

                    tcm = TableColumnMapping.objects.get(pk=d["pk"])
                    for field_name in TableColumnMapping.fields_to_save:
                        if not field_name == "pk":
                            setattr(tcm, field_name, d[field_name])
                    tcm.was_a_human_decision = True
                    tcm.save()

                # Replay a save that was queued while this one ran; the queue
                # slots are cleared first so the recursive call starts clean.
                if get_cache_raw(self.QUEUED_TCM_SAVE_COUNTER_KEY, False) is not False:
                    queued_data = get_cache_raw(self.QUEUED_TCM_DATA_KEY)
                    queued_time = get_cache_raw(self.QUEUED_TCM_SAVE_COUNTER_KEY)
                    delete_cache(self.QUEUED_TCM_DATA_KEY)
                    delete_cache(self.QUEUED_TCM_SAVE_COUNTER_KEY)
                    delete_cache(self.UPDATING_TCMS_KEY)
                    self.update_tcms_from_save(queued_data, queued_time)

                # Drop the lock and any residual queue entries.
                delete_cache(self.UPDATING_TCMS_KEY)
                delete_cache(self.QUEUED_TCM_DATA_KEY)
                delete_cache(self.QUEUED_TCM_SAVE_COUNTER_KEY)
                return True

            else:
                # Another update holds the lock; queue this save for later.
                set_cache_raw(self.QUEUED_TCM_SAVE_COUNTER_KEY, save_counter)
                set_cache_raw(self.QUEUED_TCM_DATA_KEY, json_data)
        return False
Example #4
0
    def initialize_cache(file_pk):
        """
        Reset the results cache to an empty list. This runs before the celery
        tasks are chunked up.

        :param file_pk: Import file primary key
        :return:
        """
        cache_key = Cleansing.cache_key(file_pk)
        set_cache_raw(cache_key, [])
Example #5
0
    def initialize_cache(file_pk):
        """
        Seed the cache with an empty result list ahead of the chunked-up
        celery tasks.

        :param file_pk: Import file primary key
        :return:
        """
        empty_results = []
        set_cache_raw(Cleansing.cache_key(file_pk), empty_results)
Example #6
0
    def initialize_cache(identifier):
        """
        Write an empty result list into the cache. This runs before the
        celery tasks are chunked up.

        :param identifier: Import file primary key
        :return: string, cache key
        """
        cache_key = DataQualityCheck.cache_key(identifier)
        set_cache_raw(cache_key, [])
        return cache_key
Example #7
0
    def initialize_cache(identifier=None):
        """
        Create an empty result list in the cache before the celery tasks are
        chunked up.

        :param identifier: Identifier for cache, if None, then creates a random one
        :return: string, cache key
        """
        chosen_id = randint(100, 100000) if identifier is None else identifier
        key = DataQualityCheck.cache_key(chosen_id)
        set_cache_raw(key, [])
        return key
Example #8
0
    def _start_whole_org_match_merge_link(self, org_id, state_class_name, proposed_columns=None):
        """Kick off an asynchronous whole-org match/merge/link run.

        Seeds an empty result slot in the cache, resets the progress tracker,
        and dispatches the celery task with a completion callback that stores
        the summary under the same identifier.

        :param org_id: organization primary key
        :param state_class_name: state class name forwarded to the task
        :param proposed_columns: optional list of columns forwarded to the
            task; defaults to an empty list
        :return: progress-data cache key the caller can poll
        """
        # `proposed_columns=[]` as a default is the mutable-default pitfall:
        # one shared list across calls. Use None and build a fresh list.
        if proposed_columns is None:
            proposed_columns = []

        identifier = randint(100, 100000)
        result_key = _get_match_merge_link_key(identifier)
        set_cache_raw(result_key, {})

        progress_data = ProgressData(func_name='org_match_merge_link', unique_id=identifier)
        progress_data.delete()

        whole_org_match_merge_link.apply_async(
            args=(org_id, state_class_name, proposed_columns),
            link=cache_match_merge_link_result.s(identifier, progress_data.key)
        )

        return progress_data.key
Example #9
0
    def initialize_cache(identifier=None):
        """
        Set up an empty cache entry for results before the celery tasks are
        chunked up.

        The cache_key differs from the identifier: the cache_key is where the
        data quality results are stored, while the identifier is the random
        (or caller-supplied) number used to track both progress and storage.

        :param identifier: Identifier for cache, if None, then creates a random one
        :return: list, [cache_key and the identifier]
        """
        chosen_id = randint(100, 100000) if identifier is None else identifier
        key = DataQualityCheck.cache_key(chosen_id)
        set_cache_raw(key, [])
        return key, chosen_id
Example #10
0
    def save_to_cache(self, file_pk):
        """
        Save the results to the cache database. The data in the cache are
        stored as a list of dictionaries. The data in this class are stored as
        a dict of dict. This is important to remember because the data from the
        cache cannot be simply loaded into the above structure.

        :param file_pk: Import file primary key
        :return: None
        """

        # change the format of the data in the cache. Make this a list of
        # objects instead of object of objects. `or []` guards against a cold
        # cache (get_cache_raw returning a falsy/None value).
        existing_results = get_cache_raw(Cleansing.cache_key(file_pk)) or []

        # dict.values() works on Python 2 and 3 (unlike iteritems) and
        # replaces the manual append loop with its unused key variable.
        existing_results = existing_results + list(self.results.values())

        # Keep the cached list ordered by record id for stable retrieval.
        sorted_results = sorted(existing_results, key=lambda k: k['id'])
        set_cache_raw(Cleansing.cache_key(file_pk), sorted_results, 3600)  # save the results for 1 hour
Example #11
0
    def save_to_cache(self, file_pk):
        """
        Save the results to the cache database. The data in the cache are
        stored as a list of dictionaries. The data in this class are stored as
        a dict of dict. This is important to remember because the data from the
        cache cannot be simply loaded into the above structure.

        :param file_pk: Import file primary key
        :return: None
        """

        # change the format of the data in the cache. Make this a list of
        # objects instead of object of objects. `or []` guards against a cold
        # cache (get_cache_raw returning a falsy/None value).
        existing_results = get_cache_raw(Cleansing.cache_key(file_pk)) or []

        # dict.values() works on Python 2 and 3 (unlike iteritems) and
        # replaces the manual append loop with its unused key variable.
        existing_results = existing_results + list(self.results.values())

        # Keep the cached list ordered by record id for stable retrieval.
        sorted_results = sorted(existing_results, key=lambda k: k["id"])
        set_cache_raw(Cleansing.cache_key(file_pk), sorted_results, 3600)  # save the results for 1 hour
Example #12
0
 def _row_cb(i):
     # Progress callback: store the current row index under this export's key.
     progress_key = "export_buildings__%s" % export_id
     set_cache_raw(progress_key, i)
Example #13
0
def remove_buildings(project_slug, project_dict, user_pk):
    """removes buildings from a project. if a user has selected all buildings,
       then the search parameters within project_dict are used to determine
       the total set of buildings.

       :param str project_slug: a project's slug used to get the project
       :param dict project_dict: contains search params, and browser state
           information
       :user_pk int or str: the user's pk or id
    """
    project = Project.objects.get(slug=project_slug)
    user = User.objects.get(pk=user_pk)
    # Record who last touched the project before mutating its membership.
    project.last_modified_by = user
    project.save()

    selected_buildings = project_dict.get('selected_buildings', [])

    # Initialize the progress cache entry that the browser polls.
    set_cache_raw(
        project.removing_buildings_status_percentage_cache_key,
        {'percentage_done': 0, 'numerator': 0, 'denominator': 0}
    )
    i = 0
    denominator = 1
    if not project_dict.get('select_all_checkbox', False):
        # Only the explicitly selected buildings are removed.
        for sfid in selected_buildings:
            i += 1
            denominator = len(selected_buildings)
            set_cache_raw(
                project.removing_buildings_status_percentage_cache_key,
                {
                    # max(..., 1) guards the division against an empty list.
                    'percentage_done': (
                        float(i) / max(len(selected_buildings), 1) * 100
                    ),
                    'numerator': i,
                    'denominator': denominator
                }
            )
            ab = BuildingSnapshot.objects.get(pk=sfid)
            ProjectBuilding.objects.get(
                project=project, building_snapshot=ab
            ).delete()
    else:
        # "Select all" checked: remove everything matching the search query,
        # then re-add the buildings the user individually deselected.
        query_buildings = get_search_query(user, project_dict)
        denominator = query_buildings.count() - len(selected_buildings)
        # NOTE(review): this branch writes progress under the *adding*
        # cache key inside a remove operation — presumably a copy/paste
        # from add_buildings; confirm which key the UI polls here.
        set_cache_raw(
            project.adding_buildings_status_percentage_cache_key,
            {
                'percentage_done': 10,
                'numerator': i,
                'denominator': denominator
            }
        )
        for b in query_buildings:
            ProjectBuilding.objects.get(
                project=project, building_snapshot=b
            ).delete()
        set_cache_raw(
            project.adding_buildings_status_percentage_cache_key,
            {
                'percentage_done': 50,
                'numerator': denominator - len(selected_buildings),
                'denominator': denominator
            }
        )
        for building in selected_buildings:
            i += 1
            ab = BuildingSnapshot.objects.get(source_facility_id=building)
            # Re-create memberships for the deselected buildings so they
            # survive the bulk delete above.
            ProjectBuilding.objects.create(
                project=project, building_snapshot=ab
            )
            set_cache_raw(
                project.adding_buildings_status_percentage_cache_key,
                {
                    'percentage_done': (
                        float(denominator - len(selected_buildings) + i) /
                        denominator * 100
                    ),
                    'numerator': denominator - len(selected_buildings) + i,
                    'denominator': denominator
                }
            )

    # Final progress write: mark the removal as complete.
    set_cache_raw(
        project.removing_buildings_status_percentage_cache_key,
        {'percentage_done': 100, 'numerator': i, 'denominator': denominator}
    )
Example #14
0
def add_buildings(project_slug, project_dict, user_pk):
    """adds buildings to a project. if a user has selected all buildings,
       then the search parameters within project_dict are used to determine
       the total set
       of buildings.
       also creates a Compliance inst. if satisfying params are present

       :param str project_slug: a project's slug used to get the project
       :param dict project_dict: contains search params, and browser state
       information
       :user_pk int or str: the user's pk or id

    """
    project = Project.objects.get(slug=project_slug)
    user = User.objects.get(pk=user_pk)
    # Record who last touched the project before mutating its membership.
    project.last_modified_by = user
    project.save()

    selected_buildings = project_dict.get('selected_buildings', [])

    # Initialize the progress cache entry that the browser polls.
    set_cache_raw(
        project.adding_buildings_status_percentage_cache_key,
        {'percentage_done': 0, 'numerator': 0, 'denominator': 0}
    )
    i = 0
    denominator = 1
    if not project_dict.get('select_all_checkbox', False):
        # Only the explicitly selected buildings are added.
        for sfid in selected_buildings:
            i += 1
            denominator = len(selected_buildings)
            # Progress update only; an empty selection would divide by zero,
            # so the write is best-effort.
            try:
                set_cache_raw(
                    project.adding_buildings_status_percentage_cache_key,
                    {
                        'percentage_done': (
                            float(i) / len(selected_buildings) * 100
                        ),
                        'numerator': i, 'denominator': denominator
                    }
                )
            except ZeroDivisionError:
                pass
            ab = BuildingSnapshot.objects.get(pk=sfid)
            ProjectBuilding.objects.get_or_create(
                project=project, building_snapshot=ab
            )
    else:
        # "Select all" checked: add everything matching the search query,
        # then remove the buildings the user individually deselected.
        query_buildings = get_search_query(user, project_dict)
        denominator = query_buildings.count() - len(selected_buildings)
        set_cache_raw(
            project.adding_buildings_status_percentage_cache_key,
            {'percentage_done': 10, 'numerator': i, 'denominator': denominator}
        )
        i = 0
        for b in query_buildings:
            # todo: only get back query_buildings pks as a list, and create
            # using the pk,
            #       not the python object
            i += 1
            ProjectBuilding.objects.get_or_create(
                project=project, building_snapshot=b
            )
            # NOTE(review): unlike the guarded loop above, this division is
            # unprotected — denominator can be 0 when the query count equals
            # the deselected count; confirm whether that case can occur here.
            set_cache_raw(
                project.adding_buildings_status_percentage_cache_key,
                {
                    'percentage_done': float(i) / denominator * 100,
                    'numerator': i, 'denominator': denominator
                }
            )
        for building in selected_buildings:
            i += 1
            # Deselected buildings are pulled back out of the project.
            project.building_snapshots.remove(
                BuildingSnapshot.objects.get(pk=building)
            )
            set_cache_raw(
                project.adding_buildings_status_percentage_cache_key,
                {
                    'percentage_done': (
                        float(denominator - len(selected_buildings) + i) /
                        denominator * 100
                    ),
                    'numerator': denominator - len(selected_buildings) + i,
                    'denominator': denominator
                }
            )

    # Final progress write: mark the add as complete.
    set_cache_raw(
        project.adding_buildings_status_percentage_cache_key,
        {'percentage_done': 100, 'numerator': i, 'denominator': denominator}
    )

    # Normalize deadline/end dates: numeric values are treated as epoch
    # milliseconds, strings are parsed (basestring => Python 2 code).
    deadline_date = project_dict.get('deadline_date')
    if isinstance(deadline_date, (int, float)):
        deadline_date = datetime.datetime.fromtimestamp(deadline_date / 1000)
    elif isinstance(deadline_date, basestring):
        deadline_date = parser.parse(deadline_date)
    else:
        deadline_date = None
    end_date = project_dict.get('end_date')
    if isinstance(end_date, (int, float)):
        end_date = datetime.datetime.fromtimestamp(end_date / 1000)
    elif isinstance(end_date, basestring):
        end_date = parser.parse(end_date)
    else:
        end_date = None
    if end_date:
        # Snap the end date to the last day of its month.
        last_day_of_month = calendar.monthrange(
            end_date.year, end_date.month
        )[1]
        end_date = datetime.datetime(
            end_date.year, end_date.month, last_day_of_month
        )

    # Optionally attach a Compliance record when a type was supplied.
    if project_dict.get('compliance_type'):
        compliance = Compliance.objects.create(
            compliance_type=project_dict.get('compliance_type'),
            end_date=end_date,
            deadline_date=deadline_date,
            project=project
        )
        compliance.save()
Example #15
0
def cache_match_merge_link_result(summary, identifier, progress_key):
    """Store the match/merge/link summary and mark its progress as finished."""
    set_cache_raw(_get_match_merge_link_key(identifier), summary)

    ProgressData.from_key(progress_key).finish_with_success()