Example 1
def upload(params):
	Debug = DebugManager.DebugManager();
	Debug.start();
	Debug.trace('start');
	dbManager = SharedMemoryManager.getInstance();
	db = dbManager.query();
	date = fn.getNestedElement(params, 'date');
	path = fn.getNestedElement(params, 'path');
	# url = fn.getNestedElement(params, 'callback_url'); # required params to handle callback_url
	paths, should_reset = ModelUpload.getPath(params);
	for idx in range(0, len(paths)):
		p = paths[idx];
		processed_filename = File.converExcelFileToCsv(p, ignore_index=True);
		Logger.v('processed_filename', processed_filename);
		Debug.trace('convert to json : path {0}'.format( processed_filename ) );
		if idx == 0 and should_reset: #reset once at the beginning
			Logger.v('Reset Database.');
			reset(date); #reset stock_issue collection
			ModelSIIntegrity.reset(date); #reset stock_issue_datalog by date given
		File.readCsvFileInChunks(processed_filename, save, params, chunksize=chunksize);
		Debug.trace('uploaded to mongo.');
	generateIndex();
	ModelSIIntegrity.generateIndex();
	Debug.trace('indexing mongo collection.');
	saveIssueOption();
	Debug.trace('save option to json.');
	trigger_params = copy.deepcopy(params);
	trigger_params['result'] = 'data count: {0}'.format(params['data_count'][path]);
	# Logger.v('trigger_params', trigger_params);
	dbManager.executeBulkOperations(None); # Insert all the remaining job at once.
	ReportStock.triggerOnComplete(trigger_params);
	Debug.trace('trigger api on complete.');
	Debug.end();
	Debug.show('Stock.upload');
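The File.readCsvFileInChunks(processed_filename, save, params, chunksize=chunksize) call above drives the save(params, chunk, chunks_info) callbacks shown in the next two examples. A minimal sketch of that contract, assuming the helper simply wraps pandas.read_csv with a chunksize (the real lib.File implementation may differ):

import pandas as pd

def readCsvFileInChunks(filename, callback, params, chunksize=1000):
    # Hypothetical stand-in for lib.File.readCsvFileInChunks.
    chunks = list(pd.read_csv(filename, chunksize=chunksize))
    chunks_info = {'queue': {'current': 0, 'total': len(chunks)}}
    for chunk in chunks:
        # Each chunk goes through the same save() callback, which returns
        # the updated bookkeeping dict.
        chunks_info = callback(params, chunk, chunks_info)
    return chunks_info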
Example 2
def save(params, chunk, chunks_info):
	global collection_name, column_keymap;
	upload_date = fn.getNestedElement(params, 'date');
	data = File.readChunkData(chunk);
	dbManager = SharedMemoryManager.getInstance();
	db = dbManager.query();
	current_index = fn.getNestedElement(chunks_info, 'current', 0);
	total_index = fn.getNestedElement(chunks_info, 'total', len(data));

	total_length = len(data);
	queue_info = chunks_info['queue']
	# Logger.v('Running Index:', chunks_info['queue']['running']);
	chunks_info['queue']['current']+=1;
	# Logger.v('Saving from... {0}/{1}, current package: {2}'.format(current_index, total_index, total_length) );
	fn.printProgressBar(queue_info['current'], queue_info['total'], 'Processing Chunk Insertion');
	for idx in range(0, total_length):
		row = data[idx];
		# Logger.v('row', row);
		obj_ = transformToLowercase(row);
		date_only = obj_['approved_date'].split(' ')[0];
		# Logger.v('date_only', date_only);
		obj_.update({
			'approved_year_month': DateTime.getDateCategoryName(date=date_only, element='year_month_digit'),
			'upload_date': upload_date,
		});
		dbManager.addBulkInsert(collection_name, obj_, batch=True);
		ModelSIIntegrity.update(data=obj_);
		retrieveIssueOption(obj_);
	# ensure all data is saved properly
	dbManager.executeBulkOperations(collection_name);
	return chunks_info;
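transformToLowercase is defined elsewhere in the same module. Judging from how it is used above, it normalises one row to lowercase keys and values, roughly like this sketch (not the project's actual helper):

def transformToLowercase(row):
    # Sketch: lowercase every key and every string value of a row dict.
    obj_ = {}
    for key, value in dict(row).items():
        obj_[str(key).lower()] = value.lower() if isinstance(value, str) else value
    return obj_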
Example 3
def save(params, chunk, chunks_info):
    global latest_collection_name, history_collection_name

    data = File.readChunkData(chunk)
    dbManager = SharedMemoryManager.getInstance()
    db = dbManager.query()
    current_index = fn.getNestedElement(chunks_info, 'current', 0)
    total_index = fn.getNestedElement(chunks_info, 'total', len(data))

    date = fn.getNestedElement(params, 'date')
    datetime = DateTime.convertDateTimeFromString(date)
    total_length = len(data)
    queue_info = chunks_info['queue']
    # Logger.v('Running Index:', chunks_info['queue']['running']);
    chunks_info['queue']['current'] += 1
    # Logger.v('Saving from... {0}/{1}, current package: {2}'.format(current_index, total_index, total_length) );
    fn.printProgressBar(queue_info['current'], queue_info['total'],
                        'Processing Chunk Insertion')
    for idx in range(0, total_length):
        # insert stock_latest
        row = data[idx]
        obj_ = transformToLowercase(data=row, datetime=datetime)
        ModelStockIntegrity.update(data=obj_)
        dbManager.addBulkInsert(latest_collection_name, obj_, batch=True)
        # dbManager.addBulkInsert(history_collection_name, obj_, batch=True); # temporarily off (only 7 days of data needed)

        # insert items
        # d = data[idx];
        ModelItem.saveItem(row)
        # fn.printProgressBar(current_index+idx, total_index, 'Processing Item Insertion');

    # ensure all data is saved properly
    # dbManager.executeBulkOperations(history_collection_name); # temporarily off (only 7 days of data needed)
    dbManager.executeBulkOperations(latest_collection_name)
    return chunks_info
Example 4
def server_run_segment(config: dict):
    input_dir = config["input_dir"]
    output_dir = config["output_dir"]
    file_pattern = config["files"]
    threshold_method = config["threshold_method"]
    threshold_params = config["threshold_params"]
    utils.check_dir_exists(input_dir)
    utils.check_dir_exists(output_dir)
    _save_config(config)

    files_to_segment = glob.glob(f"{input_dir}/{file_pattern}")
    logger.info(f"{files_to_segment}")
    for i, file_name in enumerate(files_to_segment):
        logger.info(f"{i+1}/{len(files_to_segment)}")
        channel_file = File.from_tiff(file_name)
        segmented_file = segmentation.segment_stack(channel_file,
                                                    threshold_method, 3, 90000,
                                                    **threshold_params)
        segmented_file.save_to_tiff(output_dir)
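A minimal invocation of server_run_segment could look like the following; the paths and threshold value are placeholders, and the "fixed" method with a threshold parameter mirrors the segment_stack call in Example 9:

config = {
    "input_dir": "data/raw",          # placeholder directories
    "output_dir": "data/segmented",
    "files": "*.tif",                 # glob pattern for the input TIFFs
    "threshold_method": "fixed",
    "threshold_params": {"threshold": 500},  # placeholder threshold
}
server_run_segment(config)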
Example 5
import json
import sys

from lib import File

File = File()

secret_filename = raw_input('''
Enter the path of your secret json key file.
Example : /Users/Desktop/credentials.json

$: ''')

json_file = File.read(secret_filename)
data = json.loads(json_file)["web"]

client_id = data["client_id"]
project_id = data["project_id"]
auth_uri = data["auth_uri"]
token_uri = data["token_uri"]
auth_provider_x509_cert_url = data["auth_provider_x509_cert_url"]
client_secret = data["client_secret"]
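The keys read above correspond to the standard Google OAuth "web" client-secret layout, so the expected file looks roughly like this placeholder (all values fake):

sample_credentials = {
    "web": {
        "client_id": "1234567890-abc.apps.googleusercontent.com",
        "project_id": "my-project",
        "auth_uri": "https://accounts.google.com/o/oauth2/auth",
        "token_uri": "https://oauth2.googleapis.com/token",
        "auth_provider_x509_cert_url": "https://www.googleapis.com/oauth2/v1/certs",
        "client_secret": "fake-client-secret",
    }
}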
Example 6
from facebook_business.api import FacebookAdsApi

from facebook_business.adobjects.adaccount import AdAccount
from facebook_business.adobjects.ad import Ad

import json
import sys

from lib import File

File = File()


def arrayToTSVLine(array):
    line = ""
    for elem in array:
        line += "\"" + elem + "\"" + "\t"
    return line[:-1]
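
# e.g. arrayToTSVLine(["id", "clicks"]) returns '"id"\t"clicks"':
# each element is quoted and tab-separated, with the trailing tab stripped.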


FORMAT_TEXT = '''

The format must be the following:
{
    "app_id" : "<YOUR APP_ID HERE>",
    "app_secret" : "<YOUR APP_SECRET HERE>",
    "access_token" : "<YOUR ACCESS_TOKEN HERE>",
    "ad_account" : "<YOUR_AD_ACCOUNT_HERE>"
}
'''
Example 7
        with open(stats_file) as f:
            for line in f:
                print(line, end='')
    else:
        print(stats)
    print(useless_bar + "\n")


#################################
##            MAIN             ##
#################################
if __name__ == '__main__':
    print("\n")
    define_arguments()

    contentsFile = File.File(cmdargs)
    print(" Architecture was set to " + contentsFile.get_arch())
    print(" Repository URL is: " + contentsFile.get_url())

    # clear all artifacts
    if cmdargs.clear:
        print(" Cleaning old files..")
        plot_list = Path(CONFIG["statistics"]["plot_folder"]).glob("*.png")
        csv_list  = Path(CONFIG["statistics"]["plot_folder"]).glob("*.csv")
        stat_list = Path(CONFIG["statistics"]["folder"]).glob("statistics_*")
        down_list = Path(CONFIG["downloadFolder"]).glob("*")
        for file in itertools.chain(plot_list, csv_list, stat_list, down_list):
            file.unlink()

    # check repo metadata to avoid downloading the same file twice
    stats_file = file_utils.check_remote_file(contentsFile)
Example 8
def getOption(params):
    filename = '{0}/stock_issue_options.json'.format(reference_folder)
    data = File.readJSONFile(filename)
    return data
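File.readJSONFile is the project's own helper; a stand-in with the same effect might look like this sketch:

import json

def readJSONFile(filename):
    # Sketch of a stand-in for lib.File.readJSONFile; the real helper may
    # handle missing files and encodings differently.
    with open(filename) as f:
        return json.load(f)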
Example 9
import sys

# Greens7/OrRd3, figure and Slider are Bokeh names (palettes, plotting, models)
from bokeh.models import Slider
from bokeh.palettes import Greens7, OrRd3
from bokeh.plotting import figure

from lib import File, segmentation, image_adjustments


def reverse_palette(palette):
    list_palette = [x for x in palette]
    list_palette.reverse()
    return list_palette


file_name = sys.argv[1]

RGreens7 = reverse_palette(Greens7)
ROrRd3 = reverse_palette(OrRd3)

channel_file = File.from_tiff(file_name)
segmented_image_stack = segmentation.segment_stack(
    channel_file, "fixed", threshold=channel_file.image.mean())
segmented_image_stack = segmented_image_stack.labelled_image > 1

nominal_image_stack = image_adjustments.rescale_intensity(channel_file)

main_fig = figure(plot_height=500,
                  plot_width=500,
                  title="Segmentation",
                  sizing_mode="scale_both")
print(nominal_image_stack.max())
layer = Slider(start=0,
               end=len(nominal_image_stack),
               value=0,
               step=1,