def save(params, chunk, chunks_info):
    global collection_name, column_keymap
    upload_date = fn.getNestedElement(params, 'date')
    data = File.readChunkData(chunk)
    dbManager = SharedMemoryManager.getInstance()
    db = dbManager.query()
    current_index = fn.getNestedElement(chunks_info, 'current', 0)
    total_index = fn.getNestedElement(chunks_info, 'total', len(data))
    total_length = len(data)
    queue_info = chunks_info['queue']
    # Logger.v('Running Index:', chunks_info['queue']['running'])
    chunks_info['queue']['current'] += 1
    # Logger.v('Saving from... {0}/{1}, current package: {2}'.format(current_index, total_index, total_length))
    fn.printProgressBar(queue_info['current'], queue_info['total'], 'Processing Chunk Insertion')
    for idx in range(0, total_length):
        row = data[idx]
        # Logger.v('row', row)
        obj_ = transformToLowercase(row)
        date_only = obj_['approved_date'].split(' ')[0]
        # Logger.v('date_only', date_only)
        obj_.update({
            'approved_year_month': DateTime.getDateCategoryName(date=date_only, element='year_month_digit'),
            'upload_date': upload_date,
        })
        dbManager.addBulkInsert(collection_name, obj_, batch=True)
        ModelSIIntegrity.update(data=obj_)
        retrieveIssueOption(obj_)
    # Ensure all queued rows are saved properly before returning.
    dbManager.executeBulkOperations(collection_name)
    return chunks_info
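
# Hedged usage sketch (assumed call shape, inferred from the reads above; the real
# caller and the chunk file format live elsewhere in this module):
#
#   chunks_info = {
#       'current': 0,
#       'total': 10,
#       'queue': {'current': 0, 'total': 10, 'running': 1},
#   }
#   chunks_info = save({'date': '2020-01-01'}, chunk, chunks_info)
#
# Each chunk row is lowercased, stamped with 'approved_year_month' and 'upload_date',
# queued via addBulkInsert, and flushed once per chunk by executeBulkOperations.
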
def getMissingDates(data):
    dbManager = SharedMemoryManager.getInstance()
    db = dbManager.query()
    missing_dates = {}
    today = DateTime.now(tzinfo=msia_tz)  # date only
    state_by = 'state_code'
    states = list(db['state'].find({}, {'_id': 0, state_by: 1}))
    current_year = DateTime.getDateCategoryName(date=DateTime.now(tzinfo=msia_tz), element='year')
    for rk in data:
        row = data[rk]
        if rk not in missing_dates:
            missing_dates[rk] = []
        dates = groupDates(params={'states': states, 'state_by': state_by}, data=row)
        for date in dates['missing']:
            end_date_of_month = DateTime.getDaysAgo(days_to_crawl=1, datefrom=DateTime.getNextMonth(DateTime.convertDateTimeFromString(date)))
            day_diff = DateTime.getDifferenceBetweenDuration([today, end_date_of_month])
            if day_diff >= 0:
                date_str = DateTime.toString(today)
            else:
                date_str = DateTime.toString(end_date_of_month)
            if date_str not in dates['crawled']:
                missing_dates[rk].append(date_str)
            # Logger.v('day_diff', day_diff)
            # Logger.v('date', DateTime.getDaysAgo(days_to_crawl=1, datefrom=DateTime.getNextMonth(DateTime.convertDateTimeFromString(ed))))
        missing_dates[rk] = sorted(list(set(missing_dates[rk])), reverse=True)
    return missing_dates
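
# Hedged shape sketch (inferred from the loop above, not a verified example):
# 'data' is keyed by report key, and each value is passed to groupDates(), which is
# expected to return {'missing': [...dates...], 'crawled': [...dates...]}. The result
# maps each key to a de-duplicated, descending list of date strings, e.g.:
#
#   missing = getMissingDates(data)
#   # {'some_report_key': ['2020-03-31', '2020-02-29'], 'another_key': []}
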
def getMonthRange(params):
    start_month = fn.getNestedElement(params, 'start_month')
    number_of_month = fn.getNestedElement(params, 'number_of_month', 1)
    month_range = [start_month]
    new_month = '{0}-01'.format(start_month)
    for month_count in range(0, number_of_month - 1):  # start_month is already included, so loop one month fewer
        new_month = DateTime.getNextMonth(DateTime.convertDateTimeFromString(new_month))
        year_month = DateTime.getDateCategoryName(new_month, element='year_month_digit')
        month_range.append(year_month)
    return month_range
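
# Hedged usage sketch (assumes DateTime.getDateCategoryName(..., element='year_month_digit')
# returns a 'YYYY-MM' string, as save() above also relies on):
#
#   getMonthRange({'start_month': '2020-01', 'number_of_month': 3})
#   # -> ['2020-01', '2020-02', '2020-03']
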
def extractYear(data):
    year = []
    for past_duration in data:
        # Logger.v('len(past_duration)', len(past_duration))
        for idx in range(len(past_duration) - 1, -1, -1):
            pd = past_duration[idx]
            # Logger.v('pd', pd)
            y = DateTime.getDateCategoryName(date=pd, element='year', offset=8)
            if y not in year:
                year.append(y)
    return year
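
# Hedged usage sketch (assumes each entry of 'data' is a list of date strings and that
# DateTime.getDateCategoryName(..., element='year', offset=8) extracts the year part):
#
#   extractYear([['2019-11-01', '2019-12-01'], ['2020-01-01']])
#   # -> ['2019', '2020']  (assuming the helper returns year strings; duplicates are skipped)
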
def generateExcelStructure(params, data):
    result = []
    metadata = {}
    group_by_key = fn.getNestedElement(params, 'group_by_key')
    column_to_add = {
        'all': ['state_name', 'drug_nondrug_name', 'code', 'packaging', 'quantity_by_month', 'sku'],
        'state': ['facility_name', 'drug_nondrug_name', 'code', 'packaging', 'quantity_by_month', 'sku'],
        'facility': ['requester_group_name', 'drug_nondrug_name', 'code', 'packaging', 'quantity_by_month', 'sku'],
        'requester': ['drug_nondrug_name', 'code', 'packaging', 'batch_no', 'expiry_date', 'quantity_by_month', 'sku'],
    }
    name_mapping = {
        'state_name': 'state',
        'facility_name': 'facility',
        'requester_group_name': 'requester group',
        'code': 'item code',
        'drug_nondrug_name': 'drug/non-drug name',
        'sku': 'sku',
        'quantity': 'quantity',
        'batch_no': 'batch no',
        'expiry_date': 'expiry date',
        'packaging': 'packaging description',
    }
    new_data = []
    # Logger.v('asd', data)
    # Flatten every drug-code detail list into a single row list.
    for idx in range(0, len(data)):
        row = data[idx]
        for drug_code in row['summary']['detail']:
            new_data += row['summary']['detail'][drug_code]
    for idx in range(0, len(new_data)):
        row = new_data[idx]
        # Logger.v('row', row)
        state_name = row['id'].split('|')[0].replace('_', ' ')
        obj_ = {}
        for col in column_to_add[group_by_key]:
            if col == 'state_name':
                obj_.update({
                    name_mapping[col]: state_name,
                })
            elif col == 'quantity_by_month':
                # Expand monthly quantities into one column per month, e.g. 'quantity (jan)'.
                for month in row[col]:
                    date = '{0}-01'.format(month)
                    month_string = DateTime.getDateCategoryName(date=date, element='month')[:3].lower()
                    # Logger.v('month_string', month_string)
                    key_name = 'quantity ({0})'.format(month_string)
                    obj_.update({
                        key_name: row[col][month],
                    })
            else:
                obj_.update({
                    name_mapping[col]: row[col],
                })
        # Logger.v('group_by_key', group_by_key)
        result.append(obj_)
        metadata = generateExportMeta(params=params, data={
            'state_name': state_name,
            'row': row,
        })
    # Logger.v('result', result)
    # Logger.v('metadata', metadata)
    return {
        'data': result,
        'metadata': metadata,
    }
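
# Hedged shape sketch (inferred from the dictionary reads above; all field values are
# illustrative, not taken from real data): 'params' carries 'group_by_key' in
# {'all', 'state', 'facility', 'requester'}, and each element of 'data' exposes
# row['summary']['detail'][drug_code] as a list of records with an 'id' like
# 'STATE_NAME|...', the mapped columns, and 'quantity_by_month' keyed by 'YYYY-MM':
#
#   generateExcelStructure(
#       params={'group_by_key': 'state'},
#       data=[{'summary': {'detail': {'d001': [{
#           'id': 'selangor|h001',                 # illustrative id
#           'facility_name': 'hospital x',         # illustrative value
#           'drug_nondrug_name': 'paracetamol',    # illustrative value
#           'code': 'd001', 'packaging': 'box', 'sku': 'tablet',
#           'quantity_by_month': {'2020-01': 10},
#       }]}}}],
#   )
#   # -> {'data': [...flattened export rows...], 'metadata': generateExportMeta(...)}
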