Example #1
    def update_tcms_from_save(self, json_data, save_counter):
        # Check save_counter against the queued save_counter: only apply
        # this save if nothing newer has already been queued.
        queued_save_counter = get_cache_raw(self.QUEUED_TCM_SAVE_COUNTER_KEY, None)
        if not queued_save_counter or save_counter > queued_save_counter:
            if not get_cache_state(self.UPDATING_TCMS_KEY, None):
                # No update in progress: take the "updating" flag and apply the data.
                set_cache_state(self.UPDATING_TCMS_KEY, True)
                for d in json.loads(json_data):
                    tcm = TableColumnMapping.objects.get(pk=d["pk"])
                    for field_name in TableColumnMapping.fields_to_save:
                        if field_name != "pk":
                            setattr(tcm, field_name, d[field_name])
                    tcm.was_a_human_decision = True
                    tcm.save()

                # If another save was queued while this one was being applied,
                # replay it now with its own counter.
                if get_cache_raw(self.QUEUED_TCM_SAVE_COUNTER_KEY, False) is not False:
                    queued_data = get_cache_raw(self.QUEUED_TCM_DATA_KEY)
                    queued_time = get_cache_raw(self.QUEUED_TCM_SAVE_COUNTER_KEY)
                    delete_cache(self.QUEUED_TCM_DATA_KEY)
                    delete_cache(self.QUEUED_TCM_SAVE_COUNTER_KEY)
                    delete_cache(self.UPDATING_TCMS_KEY)
                    self.update_tcms_from_save(queued_data, queued_time)

                # Clear the flags before reporting success.
                delete_cache(self.UPDATING_TCMS_KEY)
                delete_cache(self.QUEUED_TCM_DATA_KEY)
                delete_cache(self.QUEUED_TCM_SAVE_COUNTER_KEY)
                return True

            else:
                # An update is already running: queue this save to be applied afterwards.
                set_cache_raw(self.QUEUED_TCM_SAVE_COUNTER_KEY, save_counter)
                set_cache_raw(self.QUEUED_TCM_DATA_KEY, json_data)
        return False
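The cache helpers used throughout these examples (get_cache_raw, set_cache_raw, get_cache_state, set_cache_state, delete_cache) are not part of this listing. A minimal sketch of what such wrappers could look like on top of Django's low-level cache API follows; the exact behaviour of the real helpers is an assumption here.

    # Sketch only: thin wrappers over Django's cache framework. The real
    # helpers behind these examples may differ.
    from django.core.cache import cache

    def get_cache_raw(key, default=None):
        # Return whatever object is stored under `key`, or `default`.
        return cache.get(key, default)

    def set_cache_raw(key, value):
        # Store an arbitrary picklable object under `key`.
        cache.set(key, value)

    def get_cache_state(key, default=None):
        # Read a boolean-style flag such as UPDATING_TCMS_KEY.
        return get_cache_raw(key, default)

    def set_cache_state(key, value):
        # Set a boolean-style flag.
        set_cache_raw(key, value)

    def delete_cache(key):
        # Drop a key so the next read falls back to its default.
        cache.delete(key)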
Example #2
    def worksheet_progress_json(self):
        progresses = []
        some_file_has_mapping_active = not get_cache_state(self.MAPPING_ACTIVE_KEY, False)
        try:
            for f in self.files:
                progresses.append(
                    {
                        "pk": f.pk,
                        "filename": f.filename_only,
                        "delete_url": reverse("%s:delete_file" % self.app_namespace, args=(f.pk,)),
                        "mapping_url": reverse("%s:mapping" % self.app_namespace, args=(f.pk,)),
                        "cleaning_url": reverse("%s:cleaning" % self.app_namespace, args=(f.pk,)),
                        "matching_url": reverse("%s:matching" % self.app_namespace, args=(f.pk,)),
                        "num_columns": f.num_columns,
                        "num_rows": f.num_rows,
                        "num_mapping_complete": f.num_mapping_complete,
                        "num_mapping_total": f.num_mapping_total,
                        "num_mapping_remaining": f.num_mapping_remaining,
                        "mapping_active": f.mapping_active,
                        "some_file_has_mapping_active": some_file_has_mapping_active,
                        "coercion_mapping_active": f.coercion_mapping_active,
                        "cleaning_progress_pct": round(f.cleaning_progress_pct, 1),
                        "num_cleaning_remaining": f.num_cleaning_remaining,
                        "num_cleaning_complete": f.num_cleaning_complete,
                        "num_cleaning_total": f.num_cleaning_total,
                        "export_ready": f.export_ready,
                        "export_generation_pct_complete": int(round(f.export_generation_pct_complete)),
                        "export_url": f.export_url,
                        "worksheet_url": self.worksheet_url,
                        "generate_url": f.generate_url,
                        "premerge_progress_url": f.premerge_progress_url,
                        "merge_progress_url": f.merge_progress_url,
                        "force_restart_cleaning_url": f.force_restart_cleaning_url,
                        "is_espm": f.is_espm,
                    }
                )
        except:
            from traceback import print_exc

            print_exc()
        return json.dumps(progresses)
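The payload built above looks designed to be polled by a progress UI, but how it is served is not shown in this listing. A hedged sketch of a Django view that could expose it (the Worksheet lookup and URL parameter are assumptions) would be:

    # Hypothetical wiring, not part of the original code.
    from django.http import HttpResponse

    def worksheet_progress(request, worksheet_pk):
        worksheet = Worksheet.objects.get(pk=worksheet_pk)  # assumed model
        return HttpResponse(worksheet.worksheet_progress_json(),
                            content_type="application/json")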
Example #3
 def worksheet_progress_json(self):
     progresses = []
     some_file_has_mapping_active = not get_cache_state(self.MAPPING_ACTIVE_KEY, False)
     try:
         for f in self.files:
             progresses.append({
                 'pk': f.pk,
                 'filename': f.filename_only,
                 'delete_url': reverse("%s:delete_file" % self.app_namespace, args=(f.pk,)),
                 'mapping_url': reverse("%s:mapping" % self.app_namespace, args=(f.pk,)),
                 'cleaning_url': reverse("%s:cleaning" % self.app_namespace, args=(f.pk,)),
                 'matching_url': reverse("%s:matching" % self.app_namespace, args=(f.pk,)),
                 'num_columns': f.num_columns,
                 'num_rows': f.num_rows,
                 'num_mapping_complete': f.num_mapping_complete,
                 'num_mapping_total': f.num_mapping_total,
                 'num_mapping_remaining': f.num_mapping_remaining,
                 'mapping_active': f.mapping_active,
                 'some_file_has_mapping_active': some_file_has_mapping_active,
                 'coercion_mapping_active': f.coercion_mapping_active,
                 'cleaning_progress_pct': round(f.cleaning_progress_pct, 1),
                 'num_cleaning_remaining': f.num_cleaning_remaining,
                 'num_cleaning_complete': f.num_cleaning_complete,
                 'num_cleaning_total': f.num_cleaning_total,
                 'export_ready': f.export_ready,
                 'export_generation_pct_complete': int(round(f.export_generation_pct_complete)),
                 'export_url': f.export_url,
                 'worksheet_url': self.worksheet_url,
                 'generate_url': f.generate_url,
                 'premerge_progress_url': f.premerge_progress_url,
                 'merge_progress_url': f.merge_progress_url,
                 'force_restart_cleaning_url': f.force_restart_cleaning_url,
                 'is_espm': f.is_espm,
             })
     except:
         from traceback import print_exc
         print_exc()
     return json.dumps(progresses)
Example #4
 def summary_analysis_queued(self):
     return get_cache_state(self.__class__.SUMMARY_ANALYSIS_QUEUED_KEY(self.pk), False)
Example #5
 def summary_analysis_active(self):
     return get_cache_state(self.__class__.SUMMARY_ANALYSIS_ACTIVE_KEY(self.pk), False)
Example #6
 def export_generation_pct_complete(self):
     return get_cache_state(self.EXPORT_PCT_COMPLETE_CACHE_KEY, False)
Example #7
 def export_ready(self):
     return get_cache_state(self.EXPORT_READY_CACHE_KEY,
                            True) and self.export_file is not None and self.export_file != ""
Example #8
 def coercion_mapping_queued(self):
     return get_cache_state(self.CLEANING_QUEUED_CACHE_KEY, False)
Example #9
 def coercion_mapping_active(self):
     return get_cache_state(self.CLEANING_ACTIVE_CACHE_KEY, False)
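Examples #4 through #9 only read boolean task flags from the cache; the code that sets them is not included in this listing. A rough sketch of that writer side, assuming a long-running cleaning task and the same helpers, might be:

    # Sketch of the writer side of the flag pattern; the method names and
    # task body are assumptions, only the cache-state handling mirrors the
    # accessors shown above.
    def start_coercion_mapping(self):
        set_cache_state(self.CLEANING_QUEUED_CACHE_KEY, False)  # left the queue
        set_cache_state(self.CLEANING_ACTIVE_CACHE_KEY, True)   # now running
        try:
            self._run_cleaning()  # assumed long-running work
        finally:
            delete_cache(self.CLEANING_ACTIVE_CACHE_KEY)  # reads fall back to False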
Example #10
 def export_ready(self):
     return get_cache_state(
         self.EXPORT_READY_CACHE_KEY,
         True) and self.export_file is not None and self.export_file != ""
Example #11
 def coercion_mapping_queued(self):
     return get_cache_state(self.CLEANING_QUEUED_CACHE_KEY, False)
Example #12
 def coercion_mapping_active(self):
     return get_cache_state(self.CLEANING_ACTIVE_CACHE_KEY, False)
Example #13
 def worksheet_progress_json(self):
     progresses = []
     some_file_has_mapping_active = not get_cache_state(
         self.MAPPING_ACTIVE_KEY, False)
     try:
         for f in self.files:
             progresses.append({
                 'pk': f.pk,
                 'filename': f.filename_only,
                 'delete_url': reverse('%s:delete_file' % self.app_namespace, args=(f.pk,)),
                 'mapping_url': reverse('%s:mapping' % self.app_namespace, args=(f.pk,)),
                 'cleaning_url': reverse('%s:cleaning' % self.app_namespace, args=(f.pk,)),
                 'matching_url': reverse('%s:matching' % self.app_namespace, args=(f.pk,)),
                 'num_columns': f.num_columns,
                 'num_rows': f.num_rows,
                 'num_mapping_complete': f.num_mapping_complete,
                 'num_mapping_total': f.num_mapping_total,
                 'num_mapping_remaining': f.num_mapping_remaining,
                 'mapping_active': f.mapping_active,
                 'some_file_has_mapping_active': some_file_has_mapping_active,
                 'coercion_mapping_active': f.coercion_mapping_active,
                 'cleaning_progress_pct': round(f.cleaning_progress_pct, 1),
                 'num_cleaning_remaining': f.num_cleaning_remaining,
                 'num_cleaning_complete': f.num_cleaning_complete,
                 'num_cleaning_total': f.num_cleaning_total,
                 'export_ready': f.export_ready,
                 'export_generation_pct_complete': int(round(f.export_generation_pct_complete)),
                 'export_url': f.export_url,
                 'worksheet_url': self.worksheet_url,
                 'generate_url': f.generate_url,
                 'premerge_progress_url': f.premerge_progress_url,
                 'merge_progress_url': f.merge_progress_url,
                 'force_restart_cleaning_url': f.force_restart_cleaning_url,
             })
     except BaseException:
         from traceback import print_exc
         print_exc()
     return json.dumps(progresses)
Example #14
 def summary_analysis_queued(self):
     return get_cache_state(
         self.__class__.SUMMARY_ANALYSIS_QUEUED_KEY(self.pk), False)
Example #15
 def summary_analysis_active(self):
     return get_cache_state(
         self.__class__.SUMMARY_ANALYSIS_ACTIVE_KEY(self.pk), False)
Example #16
 def export_generation_pct_complete(self):
     return get_cache_state(self.EXPORT_PCT_COMPLETE_CACHE_KEY, False)