def handle_fixture_location_update(sender, doc, diff, backend, **kwargs):
    """Record a form's location_id in redis so its fixtures get rebuilt.

    Only XFormInstance docs from m4change domains whose xmlns is one of
    the m4change forms are considered.  The pending location ids are kept
    as a JSON-encoded list under a per-domain redis key, guarded by a
    redis lock.
    """
    if doc.get('doc_type') != 'XFormInstance':
        return
    if doc.get('domain') not in M4CHANGE_DOMAINS:
        return
    xform = XFormInstance.wrap(doc)
    if not hasattr(xform, "xmlns") or xform.xmlns not in ALL_M4CHANGE_FORMS:
        return
    location_id = xform.form.get("location_id", None)
    if not location_id:
        return

    client = get_redis_client()
    redis_key = REDIS_FIXTURE_KEYS[xform.domain]
    redis_lock_key = REDIS_FIXTURE_LOCK_KEYS[xform.domain]
    lock = get_redis_lock(redis_lock_key, timeout=5, name=redis_lock_key)
    if lock.acquire(blocking=True):
        try:
            queued = client.get(redis_key)
            pending = json.loads(queued) if queued else []
            # Only write back when this location is newly queued.
            if location_id not in pending:
                pending.append(location_id)
                client.set(redis_key, json.dumps(pending))
        finally:
            release_lock(lock, True)
def _store_excel_in_redis(file):
    """Cache the workbook's raw bytes in redis under a fresh UUID key.

    Returns the hex key the caller can later use to retrieve the bytes;
    the entry expires after EXPIRE_TIME seconds.
    """
    cache_key = uuid.uuid4().hex
    client = get_redis_client()
    client.set(cache_key, file.getvalue())
    client.expire(cache_key, EXPIRE_TIME)
    return cache_key
def zip_folder(pdf_files):
    """Bundle cached monthly-register PDFs into one zip stored in redis.

    Each entry of pdf_files is a dict with 'uuid' (redis key holding the
    PDF bytes) and 'location_name' (used in the archive member name).
    The resulting zip is stored under a new UUID hash that expires after
    24 hours; that hash is returned.
    """
    zip_hash = uuid.uuid4().hex
    client = get_redis_client()
    buffer = cStringIO()
    archive = zipfile.ZipFile(buffer, 'w', zipfile.ZIP_DEFLATED)
    for pdf_file in pdf_files:
        pdf_bytes = client.get(pdf_file['uuid'])
        archive.writestr(
            'ICDS_CAS_monthly_register_{}.pdf'.format(pdf_file['location_name']),
            pdf_bytes,
        )
    archive.close()
    client.set(zip_hash, buffer.getvalue())
    client.expire(zip_hash, 24 * 60 * 60)
    return zip_hash
def _store_excel_in_redis(file):
    """Persist the workbook to a temp file and cache its path in redis.

    The redis entry maps a fresh UUID hash to the temp file's path and
    expires after EXPIRE_TIME seconds; a celery task removes the temp
    file on the same schedule.  Returns the hash.

    Bug fix: the NamedTemporaryFile was never closed, so buffered bytes
    could still be unflushed (and the handle leaked) when a reader later
    opened tmp.name.  Close it as soon as the write completes.
    """
    hash_id = uuid.uuid4().hex
    tmp = NamedTemporaryFile(delete=False)
    try:
        tmp.file.write(file.getvalue())
    finally:
        tmp.close()  # flush to disk and release the handle
    r = get_redis_client()
    r.set(hash_id, tmp.name)
    r.expire(hash_id, EXPIRE_TIME)
    # Clean up the temp file once the redis entry has expired.
    _remove_temp_file.apply_async(args=[tmp.name], countdown=EXPIRE_TIME)
    return hash_id
def passes_trial_check(msg):
    """Enforce the per-domain SMS quota for trial domains.

    Non-trial messages always pass.  For a trial domain, the sent count
    (kept in redis for 90 days) is checked under a critical section; once
    MAX_TRIAL_SMS is reached the message is flagged with a system error
    and False is returned, otherwise the count is incremented.
    """
    if not (msg.domain and domain_is_on_trial(msg.domain)):
        return True
    with CriticalSection(['check-sms-sent-on-trial-for-%s' % msg.domain],
                         timeout=60):
        key = 'sms-sent-on-trial-for-%s' % msg.domain
        ninety_days = 90 * 24 * 60 * 60
        client = get_redis_client()
        sent_count = client.get(key) or 0
        if sent_count >= MAX_TRIAL_SMS:
            msg.set_system_error(SMS.ERROR_TRIAL_SMS_EXCEEDED)
            return False
        client.set(key, sent_count + 1, timeout=ninety_days)
    return True
def create_pdf_file(pdf_hash, pdf_context):
    """Render the ISSNIP monthly register to PDF and cache it for 24h.

    The rendered PDF bytes are stored in redis under pdf_hash, which is
    returned.  If template rendering fails, the exception text itself is
    fed to the PDF generator (best effort) instead of propagating.
    """
    template = get_template(
        "icds_reports/icds_app/pdf/issnip_monthly_register.html")
    output = cStringIO()
    client = get_redis_client()
    try:
        html = template.render(pdf_context)
    except Exception as err:
        # Deliberate best-effort: surface the error inside the PDF.
        html = str(err)
    pisa.CreatePDF(html, dest=output, show_error_as_pdf=True)
    client.set(pdf_hash, output.getvalue())
    client.expire(pdf_hash, 24 * 60 * 60)
    output.close()
    return pdf_hash
def __init__(self, domain_object=None):
    """Set up the per-domain, per-day outbound SMS counter key.

    The date component is computed in the domain's default timezone when
    a domain is given, otherwise in UTC.
    """
    self.domain_object = domain_object
    if domain_object:
        local_time = ServerTime(datetime.utcnow()).user_time(
            domain_object.get_default_timezone())
        self.date = local_time.done().date()
    else:
        self.date = datetime.utcnow().date()
    self.key = 'outbound-daily-count-for-%s-%s' % (
        domain_object.name if domain_object else '',
        self.date.strftime('%Y-%m-%d'),
    )
    # We need access to the raw redis client because calling incr on
    # a django_redis RedisCache object raises an error if the key
    # doesn't exist.
    self.client = get_redis_client().client.get_client()
def __init__(self, domain_object=None):
    """Build the daily outbound-count redis key for this domain/date.

    With a domain, "today" is taken in the domain's default timezone;
    without one, UTC is used and the domain part of the key is empty.
    """
    self.domain_object = domain_object
    if not domain_object:
        domain_name = ''
        self.date = datetime.utcnow().date()
    else:
        domain_name = domain_object.name
        self.date = (
            ServerTime(datetime.utcnow())
            .user_time(domain_object.get_default_timezone())
            .done()
            .date()
        )
    self.key = 'outbound-daily-count-for-%s-%s' % (
        domain_name,
        self.date.strftime('%Y-%m-%d'),
    )
    # We need access to the raw redis client because calling incr on
    # a django_redis RedisCache object raises an error if the key
    # doesn't exist.
    self.client = get_redis_client().client.get_client()
def print_response(self):
    """
    Returns the report for printing.
    """
    # Render as an inline (email-style) page without datatables.
    self.is_rendered_as_email = True
    self.use_datatables = False
    self.override_template = "opm/met_print_report.html"
    self.update_report_context()
    # Prefer the pickled rows cached in redis; fall back to recomputing.
    cache = get_redis_client()
    value = cache.get(self.redis_key)
    if value is not None:
        rows = pickle.loads(value)
    else:
        rows = self.rows
    """
    Strip user_id and owner_id columns
    """
    # NOTE(review): the string above says user_id/owner_id, but the loop
    # actually deletes closed_date and case_id — confirm which is intended.
    for row in rows:
        # Translate the status/cash columns to Hindi for the printout.
        with localize('hin'):
            row[self.column_index('readable_status')] = _(
                row[self.column_index('readable_status')])
            row[self.column_index('cash_received_last_month')] = _(
                row[self.column_index('cash_received_last_month')])
        del row[self.column_index('closed_date')]
        del row[self.column_index('case_id')]
        # Replace the name's HTML link with its plain link text.
        link_text = re.search('<a href=.*>(.*)</a>',
                              row[self.column_index('name')])
        if link_text:
            row[self.column_index('name')] = link_text.group(1)
    rows.sort(key=lambda r: r[self.column_index('serial_number')])
    # Localize the total-row label and append it after sorting.
    total_row = self.total_row
    with localize('hin'):
        total_row[0] = _(total_row[0])
    rows.append(total_row)
    self.context['report_table'].update(rows=rows)
    rendered_report = render_to_string(
        self.template_report, self.context,
        context_instance=RequestContext(self.request))
    return HttpResponse(rendered_report)
def print_response(self):
    """
    Returns the report for printing.
    """
    # Switch the report into print mode (no datatables, print template).
    self.is_rendered_as_email = True
    self.use_datatables = False
    self.override_template = "opm/met_print_report.html"
    self.update_report_context()
    # Use cached (pickled) rows from redis when available.
    cache = get_redis_client()
    value = cache.get(self.redis_key)
    if value is not None:
        rows = pickle.loads(value)
    else:
        rows = self.rows
    """
    Strip user_id and owner_id columns
    """
    # NOTE(review): despite the string above, the columns removed below
    # are closed_date and case_id — verify the comment vs. the code.
    for row in rows:
        # Hindi-localize the display columns for the printed register.
        with localize('hin'):
            row[self.column_index('readable_status')] = _(
                row[self.column_index('readable_status')])
            row[self.column_index('cash_received_last_month')] = _(
                row[self.column_index('cash_received_last_month')])
        del row[self.column_index('closed_date')]
        del row[self.column_index('case_id')]
        # Strip the anchor markup from the name column, keeping the text.
        link_text = re.search('<a href=.*>(.*)</a>',
                              row[self.column_index('name')])
        if link_text:
            row[self.column_index('name')] = link_text.group(1)
    rows.sort(key=lambda r: r[self.column_index('serial_number')])
    # Append the (localized) total row after the body rows are sorted.
    total_row = self.total_row
    with localize('hin'):
        total_row[0] = _(total_row[0])
    rows.append(total_row)
    self.context['report_table'].update(
        rows=rows
    )
    rendered_report = render_to_string(
        self.template_report, self.context,
        context_instance=RequestContext(self.request))
    return HttpResponse(rendered_report)
def generate_fixtures_for_locations():
    """Rebuild cached m4change fixture report results for queued locations.

    For each m4change domain, drains the JSON list of pending location
    ids from redis (under a redis lock), then regenerates and saves the
    per-report fixture results for last month, deleting any previously
    cached docs first.
    """
    client = get_redis_client()
    start_date, end_date = get_last_n_months(1)[0]
    db = FixtureReportResult.get_db()
    data_source = M4ChangeReportDataSource()
    for domain in M4CHANGE_DOMAINS:
        redis_key = REDIS_FIXTURE_KEYS[domain]
        redis_lock_key = REDIS_FIXTURE_LOCK_KEYS[domain]
        lock = client.lock(redis_lock_key, timeout=5)
        location_ids = []
        if lock.acquire(blocking=True):
            try:
                # Atomically take the pending ids and reset the queue.
                location_ids_str = client.get(redis_key)
                location_ids = json.loads(
                    location_ids_str if location_ids_str else "[]")
                client.set(redis_key, '[]')
            finally:
                lock.release()
        for location_id in location_ids:
            data_source.configure(
                config={
                    "startdate": start_date,
                    "enddate": end_date,
                    "location_id": location_id,
                    "domain": domain
                })
            report_data = data_source.get_data()
            for report_slug in report_data:
                # Remove cached fixture docs
                db.delete_docs(
                    FixtureReportResult.all_by_composite_key(
                        domain, location_id, start_date.strftime("%Y-%m-%d"),
                        end_date.strftime("%Y-%m-%d"), report_slug))
                rows = dict(report_data[report_slug].get("data", []))
                name = report_data[report_slug].get("name")
                FixtureReportResult.save_result(domain, location_id,
                                                start_date.date(),
                                                end_date.date(),
                                                report_slug, rows, name)
def generate_fixtures_for_locations():
    """Regenerate m4change fixture report results for pending locations.

    Per domain: pops the queued location ids from redis (lock-protected,
    resetting the queue to an empty JSON list), then for each location
    recomputes last month's report data, replaces the previously cached
    FixtureReportResult docs, and saves the fresh results.
    """
    client = get_redis_client()
    start_date, end_date = get_last_n_months(1)[0]
    db = FixtureReportResult.get_db()
    data_source = M4ChangeReportDataSource()
    for domain in M4CHANGE_DOMAINS:
        redis_key = REDIS_FIXTURE_KEYS[domain]
        redis_lock_key = REDIS_FIXTURE_LOCK_KEYS[domain]
        lock = get_redis_lock(redis_lock_key, timeout=5, name=redis_lock_key)
        location_ids = []
        if lock.acquire(blocking=True):
            try:
                # Drain the queue and reset it in one locked section.
                location_ids_str = client.get(redis_key)
                location_ids = json.loads(
                    location_ids_str if location_ids_str else "[]")
                client.set(redis_key, '[]')
            finally:
                release_lock(lock, True)
        for location_id in location_ids:
            data_source.configure(config={
                "startdate": start_date,
                "enddate": end_date,
                "location_id": location_id,
                "domain": domain
            })
            report_data = data_source.get_data()
            for report_slug in report_data:
                # Remove cached fixture docs
                db.delete_docs(
                    FixtureReportResult.all_by_composite_key(
                        domain, location_id, json_format_date(start_date),
                        json_format_date(end_date), report_slug)
                )
                rows = dict(report_data[report_slug].get("data", []))
                name = report_data[report_slug].get("name")
                FixtureReportResult.save_result(domain, location_id,
                                                start_date.date(),
                                                end_date.date(),
                                                report_slug, rows, name)
def handle_fixture_location_update(sender, doc, diff, backend, **kwargs):
    """Queue a form's location_id for fixture regeneration.

    Applies only to XFormInstance docs in m4change domains with an
    m4change xmlns.  The location id is appended to a JSON list kept in
    redis under a per-domain key, protected by a redis lock.
    """
    if doc.get('doc_type') == 'XFormInstance' and doc.get('domain') in M4CHANGE_DOMAINS:
        xform = XFormInstance.wrap(doc)
        if hasattr(xform, "xmlns") and xform.xmlns in ALL_M4CHANGE_FORMS:
            location_id = xform.form.get("location_id", None)
            if not location_id:
                return
            client = get_redis_client()
            redis_key = REDIS_FIXTURE_KEYS[xform.domain]
            redis_lock_key = REDIS_FIXTURE_LOCK_KEYS[xform.domain]
            lock = get_redis_lock(redis_lock_key, timeout=5, name=redis_lock_key)
            if lock.acquire(blocking=True):
                try:
                    # Read the current queue (JSON list) and add this
                    # location if it is not already pending.
                    location_ids_str = client.get(redis_key)
                    location_ids = []
                    if location_ids_str:
                        location_ids = json.loads(location_ids_str)
                    if location_id not in location_ids:
                        location_ids.append(location_id)
                        client.set(redis_key, json.dumps(location_ids))
                finally:
                    release_lock(lock, True)
def _store_rows_in_redis(self, rows):
    """Cache this report's pickled rows in redis for one hour.

    Bug fix: expire() was previously called on self.slug while the rows
    were stored under self.redis_key, so the actual cache entry never
    expired (and a wrong key was given a TTL).  Expire the key that was
    just set.
    """
    r = get_redis_client()
    r.set(self.redis_key, pickle.dumps(rows))
    r.expire(self.redis_key, 60 * 60)
def __init__(self, config):
    """Keep the config, its derived redis key, and a raw redis client."""
    self.config = config
    self._key = get_redis_key_for_config(config)
    # Unwrap to the raw redis client (not the django_redis cache wrapper).
    self._client = get_redis_client().client.get_client()