Esempio n. 1
0
def generateChildren(params, data):
    """Build the list of child rows for a report from raw data rows.

    For each row in *data*, copies the keys listed in
    global_children_key[report_name].  For the 'budget' report, missing
    derived totals are computed from the allocation/amount fields.  For
    the 'procurement' report, a row is kept only when its
    'purchase_amount' exceeds params['filter']['min_purchase_amount']
    (default 0).

    Args:
        params: dict read via Report.getReportName and
            'filter.min_purchase_amount'.
        data: iterable of dict-like rows.

    Returns:
        list of dicts, one per retained input row.
    """
    report_name = Report.getReportName(params)
    min_purchase_amount = fn.getNestedElement(params,
                                              'filter.min_purchase_amount', 0)
    # isinstance() is the idiomatic type check (was `type(x) == str`);
    # empty strings fall through and keep the default of 0.
    if isinstance(min_purchase_amount, str) and min_purchase_amount:
        min_purchase_amount = int(min_purchase_amount)
    children = []
    for d in data:
        obj_ = {}
        for gck in global_children_key[report_name]:
            value = fn.getNestedElement(d, gck)
            if value or value == 0:
                obj_[gck] = value
            elif report_name == 'budget':
                # Value missing after data cleaning: derive it from the
                # allocation/amount columns instead.
                if gck == 'total_allocation':
                    obj_[gck] = d['first_allocation'] + d['additional_allocation']
                elif gck == 'balance_amount':
                    # 'liablity' spelling follows the source data keys.
                    obj_[gck] = (d['first_allocation']
                                 + d['additional_allocation']
                                 - d['pending_amount']
                                 - d['liablity_amount']
                                 - d['utilized_amount'])

        if report_name == 'procurement':
            # assumes 'purchase_amount' is always present on procurement
            # rows -- TODO confirm upstream cleaning guarantees this
            if obj_['purchase_amount'] > min_purchase_amount:
                children.append(obj_)
        else:
            children.append(obj_)
    return children
Esempio n. 2
0
def update(data):
    """Queue a facility record for bulk insert the first time it is seen.

    Keys records by '<state>_<facility_code>' in the module-level
    ``unique_facility`` list; duplicates are skipped.  Always flushes
    pending bulk operations for ``collection_name``.  Side effects only.
    """
    global msia_tz, column_keymap, collection_name
    dbManager = SharedMemoryManager.getInstance()
    db = dbManager.query()
    state_facility_code = '_'.join(
        [str(data['state']), str(data['facility_code'])])
    # Direct membership test: the original's `not in list(set(...))`
    # built two throwaway containers per call for an equivalent check.
    if state_facility_code not in unique_facility:
        state_name = fn.getNestedElement(data, 'state')
        # NOTE(review): state_code also reads 'state' (same as state_name)
        # -- looks deliberate (no separate code in source data); confirm.
        state_code = fn.getNestedElement(data, 'state')
        facility_name = fn.getNestedElement(data, 'facility_name')
        facility_code = fn.getNestedElement(data, 'facility_code')
        date = fn.getNestedElement(data, 'date')
        date_string = DateTime.toString(date)
        values = {
            'state_name': state_name,
            'state_code': state_code,
            'facility_name': facility_name,
            'facility_code': facility_code,
            'state_updated_at': date_string,
            'facility_updated_at': date_string,
            'date': date_string,
        }
        dbManager.addBulkInsert(collection_name, values, batch=True)
        unique_facility.append(state_facility_code)
    dbManager.executeBulkOperations(collection_name)
Esempio n. 3
0
def insertNthChild(params, data, is_last=False):
    """Build one tree-node dict for the group value params['po.po']/['po.gk'].

    Filters *data* (a pandas-style frame -- assumed from the boolean-mask
    indexing; TODO confirm) down to rows where column `po` equals `gk`.
    Leaf nodes (is_last=True) carry item description/code/quantity/pku;
    intermediate nodes carry the mapped display name and code for the
    grouping column.

    Returns:
        dict with 'obj_' (the node dict) and 'info' (the filtered rows).
    """
    po = fn.getNestedElement(params, 'po.po')
    gk = fn.getNestedElement(params, 'po.gk')
    naming_keymap = fn.getNestedElement(params, 'po.naming_keymap')
    # Boolean-mask row selection; assumes at least one matching row,
    # otherwise the [0] lookups below raise -- TODO confirm callers.
    info = data[data[po] == gk]

    if is_last:
        obj_ = {
            'id': fn.convertToSnakecase(gk),
            'name': info['item_desc'].values[0],
            'code': info['item_code'].values[0],
            'quantity': int(info['available_quantity'].values[0]),
            'pku': round(float(info['pku'].values[0]), 2),
        }

    else:
        obj_ = {
            'id': fn.convertToSnakecase(gk),
            'name': info[naming_keymap[po]].unique().tolist()[0],
            'code': info[po].unique().tolist()[0],
        }

    result = {
        'obj_': obj_,
        'info': info,
    }
    return result
Esempio n. 4
0
def stockIntegrity(params, data):
    """Compute stock-integrity entries for the last `date_retrieve_limit` days.

    Builds the date range to check, merges state/facility integrity data
    via getIntegrity, applies updateStateData, and returns the entries
    sorted by 'name' ascending.

    Args:
        params: unused here beyond being part of the call signature.
        data: dict with 'state' and 'state_facility' sub-structures.

    Returns:
        list of integrity entry dicts, sorted by name.
    """
    Debug = DebugManager.DebugManager()
    Debug.start()
    global msia_tz, date_retrieve_limit
    result = []
    today = DateTime.now(tzinfo=msia_tz)
    start_date = DateTime.getDaysAgo(date_retrieve_limit, datefrom=today)
    durations = DateTime.getBetween([start_date, today],
                                    element='date',
                                    offset=24)['order']
    # offset 24 hour to include today
    state_data = fn.getNestedElement(data, 'state')
    facility_data_by_state = fn.getNestedElement(data, 'state_facility')

    check_data = combinedFacilityList(data=facility_data_by_state)
    result = getIntegrity(params={
        'durations': durations,
    },
                          data={
                              'facility': facility_data_by_state,
                              'state': state_data,
                              'to_update': result,
                              'check_data': check_data,
                          })
    # presumably mutates the entries in place before sorting -- confirm
    updateStateData(result)
    result = list(sorted(result, key=lambda k: k['name'], reverse=False))
    Debug.end()
    Debug.show('Model.Structure.stockIntegrity')
    return result
Esempio n. 5
0
def generateItemDetail(params, data):
    """Derive the id/name/code of data[0] at the deepest grouping level.

    NOTE(review): the for-loop only computes key-name strings (id_,
    name_, name_2, code_); the values from its LAST iteration are what
    the lookups below use.  If split_uk is empty the loop never runs and
    those names are undefined (NameError) -- presumably callers always
    pass a non-empty split_uk; confirm.
    """
    result = {}
    report_name = Report.getReportName(params)
    split_uk = fn.getNestedElement(params, 'split_uk')

    for sm in global_group_order[report_name][:len(split_uk)]:
        # 'facility_type' uses the literal '<facility>_type' key for all
        # four fields; other levels use seq_no/name/desc/code suffixes.
        if sm == 'facility_type':
            smk = 'facility'
        else:
            smk = sm
        if sm == 'facility_type':
            id_ = '{0}_type'.format(smk)
            name_ = '{0}_type'.format(smk)
            name_2 = '{0}_type'.format(smk)
            code_ = '{0}_type'.format(smk)
        else:
            id_ = '{0}_seq_no'.format(smk)
            name_ = '{0}_name'.format(smk)
            name_2 = '{0}_desc'.format(smk)
            code_ = '{0}_code'.format(smk)

    # Fall back to the description (or, failing that, the code) when the
    # name key is absent from the first row.
    try:
        result['name'] = data[0][name_]
    except Exception as e:
        result['name'] = fn.getNestedElement(
            data[0], name_2, fn.getNestedElement(data[0], code_))

    result['id'] = data[0][id_]
    result['code'] = data[0][code_]

    return result
Esempio n. 6
0
def upload(params):
	"""Convert uploaded Excel files to CSV, bulk-insert them, and trigger callbacks.

	Reads 'date' and 'path' from params.  May reset the stock_issue and
	stock_issue_datalog collections once before processing the first file
	when ModelUpload.getPath signals a reset.  Rebuilds indexes, saves
	issue options, flushes all pending bulk jobs, then notifies
	ReportStock of completion.  Side effects only; returns None.
	"""
	Debug = DebugManager.DebugManager();
	Debug.start();
	Debug.trace('start');
	dbManager = SharedMemoryManager.getInstance();
	db = dbManager.query();
	date = fn.getNestedElement(params, 'date');
	path = fn.getNestedElement(params, 'path');
	# url = fn.getNestedElement(params, 'callback_url'); # required params to handle callback_url
	paths, should_reset = ModelUpload.getPath(params);
	for idx in range(0, len(paths)):
		p = paths[idx];
		processed_filename = File.converExcelFileToCsv(p, ignore_index=True);
		Logger.v('processed_filename', processed_filename);
		Debug.trace('convert to json : path {0}'.format( processed_filename ) );
		if idx == 0 and should_reset: #reset once at the beginning
			Logger.v('Reset Database.');
			reset(date); #reset stock_issue collection
			ModelSIIntegrity.reset(date); #reset stock_issue_datalog by date given
		# Stream the CSV through save() chunk by chunk to bound memory use.
		File.readCsvFileInChunks(processed_filename, save, params, chunksize=chunksize);
		Debug.trace('uploaded to mongo.');
	generateIndex();
	ModelSIIntegrity.generateIndex();
	Debug.trace('indexing mongo collection.');
	saveIssueOption();
	Debug.trace('save option to json.');
	trigger_params = copy.deepcopy(params);
	trigger_params['result'] = 'data count: {0}'.format(params['data_count'][path]);
	# Logger.v('trigger_params', trigger_params);
	dbManager.executeBulkOperations(None); # Insert all the remaining job at once.
	ReportStock.triggerOnComplete(trigger_params);
	Debug.trace('trigger api on complete.');
	Debug.end();
	Debug.show('Stock.upload');
Esempio n. 7
0
def save(params, chunk, chunks_info):
	"""Persist one CSV chunk of stock-issue rows into Mongo.

	Lowercases each row, derives 'approved_year_month' from the approved
	date, stamps the upload date, queues bulk inserts, updates the
	stock-issue integrity log, and records dropdown options.  Flushes the
	bulk queue for collection_name before returning.

	Args:
		params: upload parameters; 'date' is read.
		chunk: raw chunk handle consumed by File.readChunkData.
		chunks_info: progress bookkeeping; 'queue.current' is incremented.

	Returns:
		the (mutated) chunks_info dict.
	"""
	global collection_name, column_keymap;
	upload_date = fn.getNestedElement(params, 'date');
	data = File.readChunkData(chunk);
	dbManager = SharedMemoryManager.getInstance();
	db = dbManager.query();
	current_index = fn.getNestedElement(chunks_info, 'current', 0);
	total_index = fn.getNestedElement(chunks_info, 'total', len(data));

	total_length = len(data);
	queue_info = chunks_info['queue']
	# Logger.v('Running Index:', chunks_info['queue']['running']);
	chunks_info['queue']['current']+=1;
	# Logger.v('Saving from... {0}/{1}, current package: {2}'.format(current_index, total_index, total_length) );
	fn.printProgressBar(queue_info['current'], queue_info['total'], 'Processing Chunk Insertion');
	for idx in range(0, total_length):
		row = data[idx];
		# Logger.v('row', row);
		obj_ = transformToLowercase(row);
		# Date portion only; assumes 'approved_date' is '<date> <time>' --
		# TODO confirm source format.
		date_only = obj_['approved_date'].split(' ')[0];
		# Logger.v('date_only', date_only);
		obj_.update({
			'approved_year_month': DateTime.getDateCategoryName(date=date_only, element='year_month_digit'),
			'upload_date': upload_date,
		});
		dbManager.addBulkInsert(collection_name, obj_, batch=True);
		ModelSIIntegrity.update(data=obj_);
		retrieveIssueOption(obj_);
	#ensure all data is save properly
	dbManager.executeBulkOperations(collection_name);
	return chunks_info;
Esempio n. 8
0
def generateTemplate(params):
    """Build a nested {report: {date: {'date': ..., state: 0}}} skeleton.

    One entry per date between 'first_date' and 'last_date'; future
    month-ends are clamped to today.  Every state listed in
    params['states'] starts with a zero count.
    """
    report_keys = fn.getNestedElement(params, 'keys.report',
                                      ['procurement', 'budget'])
    first_date = fn.getNestedElement(params, 'first_date')
    last_date = fn.getNestedElement(params, 'last_date')
    state_by = fn.getNestedElement(params, 'state_by')
    states = fn.getNestedElement(params, 'states')
    today = DateTime.now(tzinfo=msia_tz)  # date only
    result = {}
    for rk in report_keys:
        report_slot = result.setdefault(rk, {})
        for date in DateTime.getBetween([first_date, last_date],
                                        element='date')['order']:
            # Last day of the month containing `date`.
            end_date_of_month = DateTime.getDaysAgo(
                days_to_crawl=1,
                datefrom=DateTime.getNextMonth(
                    DateTime.convertDateTimeFromString(date)))
            day_diff = DateTime.getDifferenceBetweenDuration(
                [today, end_date_of_month])
            # Month-ends not yet reached are represented by today instead.
            if day_diff >= 0:
                date_str = DateTime.toString(today)
            else:
                date_str = DateTime.toString(end_date_of_month)
            day_slot = report_slot.setdefault(date_str, {})
            day_slot['date'] = date_str
            for state in states:
                day_slot[state[state_by]] = 0
    return result
Esempio n. 9
0
def recordCrawledFile(data):
    """Register one crawled report file into global_check_data.

    Parses the file path to extract date, year and state segments; removes
    empty '.json' artifacts from disk.  A '<date>_<state_code>' entry is
    appended to global_check_data[report] when the path belongs to an
    'all_facility' folder and (for 'budget') the year folder matches the
    file's own year.  Side effects only.
    """
    global global_check_data
    path = fn.getNestedElement(data, 'path')
    report = fn.getNestedElement(data, 'report')
    split_path = path.split('/')
    file = split_path[-1]
    date = file.split('.')[0]
    year = date.split('-')[0]
    if file == '.json':
        # Nameless json artifact: delete it so it is not re-processed.
        os.remove(path)
        Logger.v('Removed', path)
    # Path-segment positions (from the end) for each report layout.
    indexes = {
        'budget': {
            'year': -4,
            'state': -3,
            'all_facility': -2,
        },
        'procurement': {
            'year': -5,
            'state': -4,
            'all_facility': -2,
        },
    }
    state_idx = indexes[report]['state']
    year_idx = indexes[report]['year']
    all_facility_idx = indexes[report]['all_facility']

    conditions = {
        'budget': split_path[year_idx] == 'year_{0}'.format(year),
        'procurement': True,
    }

    # `file not in (...)` replaces the non-idiomatic `not file in [...]`.
    if (file not in ('.DS_Store', '.json')
            and split_path[all_facility_idx] == 'all_facility'
            and conditions[report]):
        state_code = split_path[state_idx].replace('state_', '')
        global_check_data[report].append('_'.join([date, state_code]))
Esempio n. 10
0
def save(params, chunk, chunks_info):
    """Persist one CSV chunk of stock rows into the latest-stock collection.

    Lowercases each row (stamped with the parsed upload datetime), updates
    stock-integrity tracking, queues bulk inserts, and saves item records.
    History-collection inserts are intentionally disabled (7-day window
    only).  Flushes the bulk queue before returning.

    Args:
        params: upload parameters; 'date' is read.
        chunk: raw chunk handle consumed by File.readChunkData.
        chunks_info: progress bookkeeping; 'queue.current' is incremented.

    Returns:
        the (mutated) chunks_info dict.
    """
    global latest_collection_name, history_collection_name

    data = File.readChunkData(chunk)
    dbManager = SharedMemoryManager.getInstance()
    db = dbManager.query()
    current_index = fn.getNestedElement(chunks_info, 'current', 0)
    total_index = fn.getNestedElement(chunks_info, 'total', len(data))

    date = fn.getNestedElement(params, 'date')
    datetime = DateTime.convertDateTimeFromString(date)
    total_length = len(data)
    queue_info = chunks_info['queue']
    # Logger.v('Running Index:', chunks_info['queue']['running']);
    chunks_info['queue']['current'] += 1
    # Logger.v('Saving from... {0}/{1}, current package: {2}'.format(current_index, total_index, total_length) );
    fn.printProgressBar(queue_info['current'], queue_info['total'],
                        'Processing Chunk Insertion')
    for idx in range(0, total_length):
        # insert stock_latest
        row = data[idx]
        obj_ = transformToLowercase(data=row, datetime=datetime)
        ModelStockIntegrity.update(data=obj_)
        dbManager.addBulkInsert(latest_collection_name, obj_, batch=True)
        # dbManager.addBulkInsert(history_collection_name, obj_, batch=True); # temporary off (need 7 day data only)

        # insert items
        # d = data[idx];
        ModelItem.saveItem(row)
        # fn.printProgressBar(current_index+idx, total_index, 'Processing Item Insertion');

    #ensure all data is save properly
    # dbManager.executeBulkOperations(history_collection_name); # temporary off (need 7 day data only)
    dbManager.executeBulkOperations(latest_collection_name)
    return chunks_info
Esempio n. 11
0
def get(params):
    """Fetch stock-issue documents matching the given filters.

    Builds a $match on state (underscores mapped back to spaces), plus
    optional drug codes, requester group and issue type; a value of
    'all' (or an empty value) skips that filter.  Internal fields are
    stripped via $project.

    Returns:
        list of matching documents.
    """
    drug_codes = fn.getNestedElement(params, 'drug_nondrug_code', [])
    state = fn.getNestedElement(params, 'state')
    requester_group = fn.getNestedElement(params, 'requester_group')
    issue_type = fn.getNestedElement(params, 'issue_type')
    dbManager = SharedMemoryManager.getInstance()
    db = dbManager.query()
    match_query = {
        'state': state.replace('_', ' '),
    }
    if drug_codes:
        match_query['drug_nondrug_code'] = {'$in': drug_codes}

    # `x != 'all'` reads clearer than the original `not x == 'all'`.
    if requester_group != 'all' and requester_group:
        match_query['requester_group_name'] = requester_group.replace('_', ' ')

    if issue_type != 'all' and issue_type:
        match_query['issue_type'] = issue_type.replace('_', ' ')

    # Unused `data_length` local removed.
    return list(db[collection_name].aggregate([
        {
            '$match': match_query,
        },
        {
            '$project': {'_id': 0, 'inserted_at': 0, 'updated_at': 0}
        }
    ]))
Esempio n. 12
0
def getTotalCount(params, data):
    """Count facility entries matching the requested state/facility key.

    With facility data present: when 'filter_key' is set it is the state
    and 'key' is the facility code; otherwise 'key' is the state.  With
    no facility data, falls back to row['count'] (default 0).
    """
    filter_key = fn.getNestedElement(params, 'filter_key')
    key = fn.getNestedElement(params, 'key')
    row = fn.getNestedElement(data, 'row')
    facility = fn.getNestedElement(data, 'facility')
    if not facility:
        return fn.getNestedElement(row, 'count', 0)
    #XXX Temporary use state_name as state_code, follow stock data
    if filter_key:
        return sum(1 for entry in facility
                   if entry['state_name'] == filter_key
                   and entry['facility_code'] == key)
    return sum(1 for entry in facility if entry['state_name'] == key)
Esempio n. 13
0
def updateDropdownOptions(params):
    """Refresh dropdown option collections from previously crawled JSON files.

    For each option key: reads the latest crawl JSON, normalizes keys to
    snake_case and string values to lowercase (None becomes the string
    'null'), writes the normalized data to a dated JSON file under
    crawl_folder, then refreshes the Mongo collection if the latest file
    differs from what is stored.  Side effects only.
    """
    option_keys = fn.getNestedElement(params, 'keys.option', ['state'])
    today = fn.getNestedElement(
        params, 'schedule_params.today',
        DateTime.toString(DateTime.now(tzinfo=msia_tz)))
    data = {}
    crawled_data = {}
    # crawl from API URL (get options from API)
    # for key in keys:
    # 	url = api_links[key];
    # 	# url = generateUrl(api_links[key]);
    # 	response = requests.get(url);
    # 	json_response = json.loads(response.text);
    # 	Logger.v('json_response', json_response);
    # 	crawled_data[key] = json_response;
    # 	Logger.v('Crawled', url);
    # Logger.v('Done crawling.');
    # save(data);

    # read from file
    for key in option_keys:
        filename = api_files[key]
        crawled_data[key] = File.readJson(filename)

    # convert key to snakecase, value to lower
    for key in crawled_data:
        if key not in data:
            data[key] = []
        for idx in range(0, len(crawled_data[key])):
            row = crawled_data[key][idx]
            obj_ = {}
            for row_key in row:
                row_value = row[row_key]
                new_key = fn.camelToSnakecase(str=row_key)
                if type(row_value) == str:
                    new_value = row_value.lower()
                elif row_value is None:
                    # None is stored as the literal string 'null'.
                    new_value = 'null'
                else:
                    new_value = row_value
                obj_[new_key] = new_value

            data[key].append(obj_)

    # Persist the normalized snapshot, one dated file per option key.
    for key in data:
        folder_path = '/'.join([crawl_folder, key])
        if not os.path.exists(folder_path):
            os.makedirs(folder_path)
        filename = '{0}/{1}'.format(folder_path, today)
        Logger.v('Saving', filename)
        fn.writeJSONFile(filename='{0}.json'.format(filename), data=data[key])

    # Re-read the newest snapshot and refresh Mongo only when it changed.
    for key in option_keys:
        directory = '/'.join([crawl_folder, key])
        raw = File.readLatestFile(directory=directory)
        refresh_collection = refreshIsRequired(data=raw, collection_name=key)
        if refresh_collection:
            refreshCollection(data=raw, collection_name=key)
            Logger.v('refreshed', key)
Esempio n. 14
0
def filterQuantity(params, data):
    """Append data['sub'] to data['main'] unless the quantity filter rejects it.

    When no 'filter.quantity' param is set, the entry is always kept.
    Mutates data['main'] in place; returns None.
    """
    quantity_filter = fn.getNestedElement(params, 'filter.quantity', None)
    target = fn.getNestedElement(data, 'main')
    candidate = fn.getNestedElement(data, 'sub')
    in_range = ModelStock.quantityWithinRange(params=params,
                                              quantity=candidate['quantity'])
    if in_range or not quantity_filter:
        target.append(candidate)
Esempio n. 15
0
def getMonthRange(params):
    """Return a list of 'YYYY-MM' strings of length number_of_month.

    Starts at params['start_month'] (included) and steps forward one
    month at a time via DateTime.getNextMonth.
    """
    start_month = fn.getNestedElement(params, 'start_month')
    number_of_month = fn.getNestedElement(params, 'number_of_month', 1)
    month_range = [start_month]
    cursor = '{0}-01'.format(start_month)
    # start_month already counts as the first month, hence n - 1 steps.
    for _ in range(number_of_month - 1):
        cursor = DateTime.getNextMonth(DateTime.convertDateTimeFromString(cursor))
        month_range.append(
            DateTime.getDateCategoryName(cursor, element='year_month_digit'))
    return month_range
Esempio n. 16
0
def getCodeColumn(params):
    """Assemble the column list used for code lookups of a grouping key.

    Takes all but the last entry of 'process_order', adds the group's
    extra columns, guarantees 'item_code' is present, and drops 'state'.
    """
    global extra_columns
    process_order = fn.getNestedElement(params, 'process_order')
    group_by_key = fn.getNestedElement(params, 'group_by_key')
    columns = copy.deepcopy(process_order[:-1]) + extra_columns[group_by_key]
    if 'item_code' not in columns:
        columns.append('item_code')
    if 'state' in columns:
        columns.remove('state')
    return columns
Esempio n. 17
0
def createSchedules(args={}):  #upid, page_type
    """Create crawl queue entries for every requested platform/page.

    NOTE(review): the mutable default `args={}` is never mutated here, so
    it is currently harmless, but `args=None` + a guard would be safer.

    Reads 'page_type', 'pages.<platform>', 'priority' and 'crawl_comment'
    from args.  Counts incomplete tasks; the warning branch is currently
    disabled (Mail.send commented out).

    Returns:
        Params.generate(True, {'pending_count', 'incomplete_count'}).
    """
    global filter_page_type
    Debug = DebugManager.DebugManager()
    Debug.start()
    dbManager = SharedMemoryManager.getInstance()
    # crawl_duration = fn.getNestedElement(fn.config,'CRAWL_DURATION', 12);
    incomplete_task, incomplete_task_count = checkRemaining()
    new_queue_count = 0
    # Logger.v(incomplete_task_count, incomplete_task, filter_page_type);
    extra_params = {
        'crawl_comment': fn.getNestedElement(args, 'crawl_comment', None)
    }
    # Drop unset extras so the queue entry only carries real values.
    extra_params = {k: v for k, v in extra_params.items() if v is not None}

    for platform in filter_page_type:
        if args and not platform in fn.getNestedElement(
                args, 'page_type', platform).split(','):
            Logger.v('Skip Platform:%s' % (platform))
            continue
            # skip when page_type appear and not same
        pages = fn.getNestedElement(args, 'pages.{0}'.format(platform), [])
        Logger.v('platform', platform)
        # Logger.v('page', args['pages']['budget']);
        for page in pages:  #Create queue for each
            # Logger.v('page', page);
            Queue.create(page,
                         extra_params=extra_params,
                         priority=fn.getNestedElement(args, 'priority',
                                                      'daily'),
                         batch=True)
            new_queue_count += 1
            Logger.v('new_queue_count', new_queue_count)
        # Debug.trace();

    Logger.v('Incomplete:%s, New Queue: %s' %
             (incomplete_task_count, new_queue_count))
    # Warning threshold check; the alert mail itself is disabled.
    if incomplete_task_count > (new_queue_count *
                                int(fn.config['DEBUG_CRAWL_WARNING']) /
                                100) or incomplete_task_count > int(
                                    fn.config['DEBUG_CRAWL_WARNING']):
        # Mail.send('[%s]Incomplete Crawl [%s], Current Schedule: [%s]'%(DateTime.getReadableDate(DateTime.now()),
        # 	incomplete_task_count, new_queue_count),
        # 		 fn.dumps(incomplete_task, encode=False)
        # );
        pass

    result = {
        'pending_count': new_queue_count,
        'incomplete_count': incomplete_task_count
    }
    dbManager.executeBulkOperations(None)
    # Debug.show('Create Schedule');
    return Params.generate(True, result)
Esempio n. 18
0
def checkEmpty(params):
	"""Audit crawled report files against a 12-month template and export gaps.

	Builds a zero-filled {report: {date: {state: 0}}} template, scans the
	crawl folders (via openDir, which populates global_check_data), bumps
	the counter for every crawled date/state pair in range, then writes
	one '<report>_check_moh_empty' Excel file per report.  Resets
	global_check_data at the end.

	Returns:
		dict mapping report key -> list of per-date count rows.
	"""
	global global_check_data;
	dbManager = SharedMemoryManager.getInstance();
	db = dbManager.query();
	custom_params = copy.deepcopy(params);
	report_keys = fn.getNestedElement(params, 'keys.report', ['procurement', 'budget']);
	interval = fn.getNestedElement(params, 'interval', 1);
	past_dates = DateTime.getPastDate(count=12, duration=interval); # check previous 12 month data
	year = Crawl.extractYear(data=past_dates[0]);
	first_date = past_dates[0][-1][0];
	last_date = past_dates[0][0][1];
	# Logger.v('first_date', first_date, 'last_date', last_date);
	state_by = 'state_code';
	states = list(db['state'].find({},{'_id': 0, state_by: 1}));
	result = {};
	# datetime kept for the alternate (commented) filename scheme below.
	datetime = DateTime.toString(DateTime.now(tzinfo=msia_tz), date_format='%Y-%m-%d-%H-%M-%S');

	custom_params['first_date'] = first_date;
	custom_params['last_date'] = last_date;
	custom_params['state_by'] = state_by;
	custom_params['states'] = states;
	temp_result = generateTemplate(params=custom_params);

	for rk in report_keys:
		if rk not in global_check_data:
			global_check_data[rk] = [];

		for y in year:
			root_path = '{0}/{1}/year_{2}'.format(crawl_folder, rk, y);
			# openDir appends '<date>_<state>' entries to global_check_data[rk].
			openDir(root_path, rk);
			for gcd in global_check_data[rk]:
				date = gcd.split('_')[0];
				state = gcd.split('_')[1];
				if DateTime.inrange(date, [first_date, last_date]):
					try:
						temp_result[rk][date][state] += 1;
					except Exception as e:
						# Dates/states outside the template are ignored.
						# Logger.v('Main.checkEmpty:', e);
						pass;

	# Flatten the per-date dicts into row lists and export to Excel.
	for rk in temp_result:
		if rk not in result:
			result[rk] = [];
		for date in temp_result[rk]:
			result[rk].append(temp_result[rk][date]);

		filename = '{0}/{1}_check_moh_empty'.format(test_folder, rk);
		# filename = 'tests/{0}_{1}_check_moh_empty'.format(rk, datetime);
		fn.writeExcelFile(filename=filename, data=result[rk]);
	global_check_data = {};
	return result;
Esempio n. 19
0
def updateLog(params):
    """Upsert the upload log entry for a (date, collection-group) pair.

    Merges the new part-of-day path into any existing log document.  When
    no log exists yet for a 'stock' upload, triggers
    ModelStock.backdateCollection() first.  Side effects only (queues a
    bulk update, not batched).
    """
    global upload_log_collection_name
    dbManager = SharedMemoryManager.getInstance()
    db = dbManager.query()
    date = fn.getNestedElement(params, 'date')
    path = fn.getNestedElement(params, 'path')
    group = fn.getNestedElement(params, 'group')
    data_part = fn.getNestedElement(params, 'data_part', 'default')
    # Normalize to lowercase only when the values are strings.
    if type(group) == str:
        group = group.lower()
    if type(data_part) == str:
        data_part = data_part.lower()
    query = {
        'date': date,
        'collection': group,
    }
    stock_upload_log = list(db[upload_log_collection_name].find(
        query, {
            '_id': 0,
            'inserted_at': 0,
            'updated_at': 0
        }))
    # Logger.v('stock_upload_log', stock_upload_log);
    if not stock_upload_log and group == 'stock':
        # First stock upload of the day: rotate yesterday's collection.
        ModelStock.backdateCollection()

    # Logger.v('update upload_log collection');
    values = {}
    if stock_upload_log:
        if 'part_of_the_day' not in values:
            values['part_of_the_day'] = []

        # Carry over previously recorded parts, then append the new one
        # only if it is not already present.
        for part in stock_upload_log[0]['part_of_the_day']:
            # Logger.v('part', part);
            values['part_of_the_day'].append(part)
            values[part] = stock_upload_log[0][part]
        if data_part not in stock_upload_log[0]['part_of_the_day']:
            values['part_of_the_day'].append(data_part)
            values[data_part] = path

    else:
        values['part_of_the_day'] = [data_part]
        values[data_part] = path
    # Logger.v('query', query, values)
    # exit();
    dbManager.addBulkUpdate(upload_log_collection_name,
                            query,
                            values,
                            upsert=True,
                            batch=False)
Esempio n. 20
0
def generateUniqueKeys(params, data):
    """Build sorted, distinct hierarchical keys for every row in *data*.

    For each row, walks global_group_order[report_name] and appends one
    key per depth level -- e.g. 'a', then 'a_b', then 'a_b_c' -- so both
    parent and child levels get entries (presumably intentional, to seed
    every tree level; confirm).  Field names are mapped through
    global_group_order_kepmap.

    Returns:
        sorted list of distinct '_'-joined key strings.
    """
    report_name = Report.getReportName(params)
    structure_keymap = fn.getNestedElement(params, 'structure_keymap')
    unique_keys = []

    for idx in range(0, len(data)):
        row = data[idx]
        key = {}
        key['facility_type'] = fn.getNestedElement(row, 'facility_type')
        key['facility_code'] = fn.getNestedElement(row, 'facility_code')
        key['budget_type_code'] = fn.getNestedElement(row, 'budget_type_code')
        key['object_code'] = fn.getNestedElement(row, 'object_code')
        key['drug_code'] = fn.getNestedElement(row, 'drug_code')
        key['item_group_code'] = fn.getNestedElement(row, 'item_group_code')
        key['state_code'] = fn.getNestedElement(row, 'state_code')

        push_data = []
        for sm in global_group_order[report_name]:
            mapped_key = global_group_order_kepmap[sm]
            push_data.append(key[mapped_key])
            # Appends the prefix at EVERY level, not just the deepest.
            unique_keys.append('_'.join(push_data))

    # Logger.v('unique_keys', len(unique_keys));
    unique_keys = sorted(list(set(unique_keys)))
    # Logger.v('unique_keys', len(unique_keys));
    return unique_keys
Esempio n. 21
0
def addData(params, data):
    """Append one structured entry to data['main'] when data['sub'] is non-empty.

    The entry carries id/name/code from data['misc'] plus the sub-data
    under a key derived by renameDictKey.  Mutates data['main'] in place.
    """
    key = fn.getNestedElement(params, 'key')
    process_order = fn.getNestedElement(params, 'process_order')
    main = fn.getNestedElement(data, 'main')
    sub = fn.getNestedElement(data, 'sub')
    misc = fn.getNestedElement(data, 'misc')
    if not sub:
        return
    portion_key = renameDictKey(key=key, process_order=process_order)
    entry = {field: misc[field] for field in ('id', 'name', 'code')}
    entry[portion_key] = sub
    main.append(entry)
Esempio n. 22
0
def preprocessData(params, data):
    """Load report rows into a DataFrame and add progressive key columns.

    Coerces all amount/allocation columns to numeric, then for the
    report's key_to_join list creates cumulative '|'-joined columns
    ('a_b', 'a_b_c', ...) used for grouping downstream.

    Returns:
        the prepared pandas DataFrame.
    """
    report_name = Report.getReportName(params)
    key_to_join = fn.getNestedElement(global_key_to_join, report_name)
    df = pd.DataFrame(data, dtype=str)
    # 'liablity' spelling follows the source data columns.
    numeric_columns = [
        'first_allocation', 'additional_allocation', 'pending_amount',
        'utilized_amount', 'liablity_amount', 'trans_in_amount',
        'trans_out_amount', 'deduction_amount', 'current_actual_amount',
        'total_allocation', 'balance_amount',
    ]
    for column in numeric_columns:
        df[column] = pd.to_numeric(df[column])
    joined_key = []
    joined_ = []
    joined_columns_list = [key_to_join[0]]
    for idx, ktj in enumerate(key_to_join):
        joined_key.append(ktj)
        if idx > 0:
            # Pair the accumulated prefix with the current key, then
            # concatenate the two source columns with '|'.
            joined_.append(['_'.join(joined_key[:-1]), ktj])
            left, right = joined_[idx - 1]
            joined_columns = '_'.join([left, right])
            joined_columns_list.append(joined_columns)
            df[joined_columns] = df[left].str.cat(df[right], sep="|")
    return df
Esempio n. 23
0
def check(params):
    """Collect stock-issue integrity data for the recent date window.

    Queries documents whose state/facility update dates fall inside the
    last `date_retrieve_limit` days, then folds them into per-state and
    per-state-per-facility integrity structures via addIntegrityData.

    Returns:
        dict with 'state' and 'state_facility' aggregates.
    """
    global msia_tz, date_retrieve_limit, date_count, collection_name
    dbManager = SharedMemoryManager.getInstance()
    db = dbManager.query()
    today = DateTime.now(tzinfo=msia_tz)
    start_date = DateTime.getDaysAgo(date_retrieve_limit, datefrom=today)
    durations = DateTime.getBetween([start_date, today],
                                    element='date',
                                    offset=24)['order']
    # offset 24 to include today
    Logger.v('durations', durations)
    data = db[collection_name].aggregate([{
        '$match': {
            'state_updated_at': {
                '$in': durations
            },
            'facility_updated_at': {
                '$in': durations
            }
        }
    }, {
        '$project': {
            '_id': 0,
            'inserted_at': 0,
            'updated_at': 0
        }
    }])
    data = list(data)
    Logger.v('Total stock issue integrity in', date_retrieve_limit, 'days:',
             len(data))
    state_data = {}
    facility_data_by_state = {}

    for idx in range(0, len(data)):
        row = data[idx]
        state_code = fn.getNestedElement(row, 'state_code')
        if state_code not in facility_data_by_state:
            facility_data_by_state[state_code] = {}

        state_data = addIntegrityData(data={
            'row': row,
            'to_update': state_data
        },
                                      category='state')
        facility_data_by_state[state_code] = addIntegrityData(
            data={
                'row': row,
                'to_update': facility_data_by_state[state_code]
            },
            category='facility')

        # NOTE(review): date_count is compared and then immediately reset
        # to 0 on every iteration, so this break can only fire if the
        # module-level date_count was already above the limit before the
        # loop -- confirm whether an increment is missing.
        if date_count > date_retrieve_limit:  # limit loop data/ show data in N days
            break
        date_count = 0
        # reset to 0th day
    return {
        'state': state_data,
        'state_facility': facility_data_by_state,
    }
Esempio n. 24
0
def generateQuery(params):
    """Translate UI filter params into a Mongo '$in' query for the report.

    Filter names are mapped to collection field names per report; comma
    separated values are split, and month values are added both
    zero-padded and raw (original behavior, so both storage formats
    match).

    Returns:
        dict of {field: {'$in': [values]}} for every non-empty filter.
    """
    report_name = Report.getReportName(params)
    filter_keymap = {
        'procurement': {
            'year': 'txn_year',
            'months': 'txn_month',
            'facility_type': 'facility_type',
            'facility': 'facility_code',
            'ptj': 'ptj_code',
            'state': 'state_code',
            'procurement_type': 'procurement_type',
        },
        'budget': {
            'year': 'financial_year',
            'facility_type': 'facility_type',
            'facility': 'facility_code',
            'ptj': 'ptj_code',
            'state': 'state_code',
            'budget_type': 'budget_type_code',
        },
    }
    filters = fn.getNestedElement(params, 'filter', {})
    filter_params = {}
    for f in list(filters.keys()):
        key = fn.getNestedElement(filter_keymap,
                                  '{0}.{1}'.format(report_name, f), None)
        if not key:
            continue
        bucket = filter_params.setdefault(key, [])
        for val in fn.getNestedElement(filters, f, '').split(','):
            if not val:
                continue
            if key == 'txn_month':
                # Keep the zero-padded form alongside the raw value.
                bucket.append(val.zfill(2))
            bucket.append(val)

    query = {}
    for field, values in filter_params.items():
        if values:
            query[field] = {
                '$in': values
            }

    return query
Esempio n. 25
0
def check(params):
    """Run the stock-issue report: fetch, group, structure, maybe export.

    Reads filter/grouping options from params, pulls matching documents
    via ModelStockIssue.get, aggregates them per the deepest group-by key
    (defaulting to 'state'), and returns either the structured result or
    an Excel-ready export structure.

    Returns:
        Params.generate(True, <result or export structure>).
    """
    Debug = DebugManager.DebugManager()
    Debug.start()
    Debug.trace('start')
    global process_order
    global key_to_join
    global group_by
    group_by_list = fn.getNestedElement(params, 'group_by', [])
    filter_quantity = fn.getNestedElement(params, 'filter.quantity', [])
    export = fn.getNestedElement(params, 'export', None)
    custom_params = copy.deepcopy(params)
    result = {}

    # filter and read mongo db
    data = ModelStockIssue.get(params)
    Debug.trace('read mongo')

    # filtering by group: deepest requested level wins, default 'state'
    if group_by_list:
        group_by = group_by_list[-1]['id']
    else:
        group_by = 'state'

    custom_params['group_by_key'] = group_by
    custom_params['process_order'] = process_order[group_by]
    custom_params['key_to_join'] = key_to_join[group_by]
    custom_params['item_key_to_show'] = item_key_to_show[group_by]

    # processing data
    if data:
        temp_result = ModelStockIssue.calculateData(params=custom_params,
                                                    data=data)
        Debug.trace('calculate data')
        result = toOutputStructure(params=custom_params, data=temp_result)
        Debug.trace('structure data')

    # Empty dict means no data: normalize to an empty list for callers.
    if result == {}:
        result = []
    Debug.end()
    Debug.show('StockIssue.run')
    if export:
        export_result = generateExcelStructure(params=custom_params,
                                               data=result)
        return Params.generate(True, export_result)
    else:
        return Params.generate(True, result)
Esempio n. 26
0
def groupDates(params, data):
	"""Split per-date records into 'crawled' and 'missing' date lists.

	For each state, a record whose value under that state's key is 0 is
	treated as missing; anything else counts as crawled. Returns the
	de-duplicated 'date' values of each bucket, sorted newest-first.
	"""
	states = fn.getNestedElement(params, 'states');
	state_by = fn.getNestedElement(params, 'state_by');
	missing_list = [];
	crawled_list = [];
	for state in states:
		state_key = state[state_by];
		missing_list += [row for row in data if row[state_key] == 0];
		crawled_list += [row for row in data if not row[state_key] == 0];

	return {
		'crawled': sorted({row['date'] for row in crawled_list}, reverse=True),
		'missing': sorted({row['date'] for row in missing_list}, reverse=True),
	};
Esempio n. 27
0
def organiseStructure(data, key):
	"""Build up to 10 unique {'id', 'name'} dropdown options from data rows.

	Only acts for the known option keys ('state', 'ptj', 'facility',
	'facility_type'); for anything else — or empty data — returns [].
	The field names to read from each row come from the module-level
	structure_map_name mapping ('<key>.code' feeds 'id', '<key>.name'
	feeds 'name').
	"""
	limit = 10;
	result = [];
	if data and key in ['state', 'ptj', 'facility', 'facility_type']:
		# The mapped field names are the same for every row —
		# hoist the lookups out of the loop (originally recomputed
		# per iteration, along with an unused 'seq_no' lookup).
		name = fn.getNestedElement(structure_map_name, '{0}.name'.format(key), '');
		code = fn.getNestedElement(structure_map_name, '{0}.code'.format(key), '');
		for row in data:
			obj_ = {
				'id': fn.getNestedElement(row, code),
				'name': fn.getNestedElement(row, name),
			}
			if obj_ not in result:  # keep options unique, preserve order
				result.append(obj_);

	return result[:limit];
Esempio n. 28
0
def getQuery(params):
    """Build a mongo query dict from request parameters.

    Params:
        params (dict): reads 'item_codes', 'item_desc', 'group_by'
            (list of {'id', 'value'}) and 'facility_group'.

    Returns:
        dict: mongo query with an optional case-normalised regex match on
        item_desc, $in filters for item codes / facility codes, and one
        exact-match entry per group_by level (string values lowercased;
        'state' values additionally have underscores replaced by spaces).
    """
    # NOTE(review): the original also fetched 'duration' and computed
    # DateTime.getBetween(duration) into an unused local; dropped as dead
    # code (both are pure lookups) — confirm no side effects were intended.
    item_codes = fn.getNestedElement(params, 'item_codes', [])
    item_desc = fn.getNestedElement(params, 'item_desc')
    group_by_list = fn.getNestedElement(params, 'group_by', [])
    facility_group = fn.getNestedElement(params, 'facility_group', [])

    query = {}
    if item_desc:  # TEST wildcard search
        query.update({'item_desc': {
            '$regex': item_desc.lower()
        }})
    if item_codes:
        query.update({
            'item_code': {
                '$in': [c.lower() for c in item_codes]
            },
        })
    if facility_group:
        facility_code_list = ModelFacility.getFacilityCodeList(
            facility_group=facility_group)
        query.update({
            'facility_code': {
                '$in': facility_code_list
            },
        })

    for gbl in group_by_list:
        gbl_id = gbl['id']
        val = gbl['value']
        if isinstance(val, str):
            val = val.lower()
            if gbl_id == 'state':
                val = val.replace('_', ' ')
        query.update({
            query_key[gbl_id]: val,
        })
    Logger.v('query', query)
    return query
Esempio n. 29
0
def generateExportMeta(params, data):
    """Return export metadata for the current grouping level.

    Each deeper level ('state' -> 'facility' -> 'requester') includes all
    fields of the shallower ones; 'all' maps to the 'combine' label, and
    any unknown group key yields an empty dict.
    """
    group_by_key = fn.getNestedElement(params, 'group_by_key')
    state_name = fn.getNestedElement(data, 'state_name')
    row = fn.getNestedElement(data, 'row')
    metadata = {}
    if group_by_key == 'all':
        metadata['group_by'] = 'combine'
    elif group_by_key in ('state', 'facility', 'requester'):
        metadata['group_by'] = group_by_key
        metadata['state_name'] = state_name
        if group_by_key in ('facility', 'requester'):
            metadata['facility_name'] = row['facility_name']
        if group_by_key == 'requester':
            metadata['requester_unit_desc'] = row['requester_unit_desc']
    return metadata
Esempio n. 30
0
def retrieveIssueOption(data):
	"""Accumulate dropdown option values from one data record.

	Appends the record's state, requester group name and issue type (when
	present) into the module-level stock_issue_options buckets, creating
	each bucket on first use, and returns the shared dict.
	"""
	global stock_issue_options;
	# map option bucket -> field name to read from the record
	option_sources = [
		('state', 'state'),
		('requester_group', 'requester_group_name'),
		('issue_type', 'issue_type'),
	];
	for option_key, _ in option_sources:
		if option_key not in stock_issue_options:
			stock_issue_options[option_key] = [];

	for option_key, source_key in option_sources:
		value = fn.getNestedElement(data, source_key);
		if value:
			stock_issue_options[option_key].append(value);

	return stock_issue_options;