def run(): """ Change the password for a user and set up a cron job to change it again based on the frequency """ # Since this is mostly a fire and forget script it needs # broad exception handling so whatever traceback gets generated # is sent out in the email msg = 'General Failure' success = 'Failure' db_info = get_cfg()['config'] reciever, sender = get_addresses(db_info) try: username, freq = arg_parser() old_pass = current_pass(db_info) new_pass = change_pass(old_pass) update_db(new_pass, db_info) update_cron(username, freq) msg = 'User: {0} password has been updated'.format(username) success = 'Successful' except Exception: msg = str(traceback.format_exc()) finally: send_email(sender, reciever, EMAIL_SUBJECT.format(success), msg)
def run():
    opts = arg_parser()
    cfg = get_cfg()['config']

    if opts.cron:
        setup_cron()

    if opts.prev:
        proc_prevmonth(cfg)
def main():
    logging.basicConfig(
        level=__LOGLEVEL__,
        format="%(asctime)s : " +
        "%(module)s (%(lineno)s) - %(levelname)s - %(message)s")
    cfg_file = sys.argv[1] if len(sys.argv) > 1 else None
    cfg = get_cfg(cfg_file)
    qa = QuestionAnswerer(cfg)
    qa.run()
def test():
    import sys
    from utils import get_cfg
    cfg = get_cfg(sys.argv[1])
    m = Magyarlanc(cfg)
    # Hungarian: "a frame fitted to the window on some table or bench"
    test_sens = ["valamely asztalon vagy padon az ablakra illesztett keret"]
    # test_sens = ["Egy", "Hat", "Nyolc"]
    for sen in test_sens:
        for line in m.tag(sen):
            print line
def main():
    logging.basicConfig(
        level=__LOGLEVEL__,
        format="%(asctime)s : " +
        "%(module)s (%(lineno)s) - %(levelname)s - %(message)s")
    cfg_file = sys.argv[1] if len(sys.argv) > 1 else None
    cfg = get_cfg(cfg_file)
    text_to_4lang = TextTo4lang(cfg)
    text_to_4lang.process()
def initialize(cfg=None):
    cfg = get_cfg(parse_args()) if cfg is None else cfg

    # launch multi-process for DDP
    #   - processes will be branched off at this point
    #   - subprocess ignores launching process and returns None
    if cfg.num_machines * cfg.num_gpus > 1:
        log.info(C.green(f"[!] Launching Multiprocessing.."))
        cfg.spawn_ctx = launch(main_func=initialize,
                               num_gpus_per_machine=cfg.num_gpus,
                               num_machines=cfg.num_machines,
                               machine_rank=cfg.machine_rank,
                               dist_url=cfg.dist_url,
                               args=(cfg, ))
    else:
        cfg.spawn_ctx = None

    # scatter save_dir to all of the non-main ranks
    cfg.save_dir = comm.scatter(cfg.save_dir)

    # finalize config
    C.set_enabled(not cfg.no_color)  # for sub-processes
    cfg.device = comm.get_local_rank()
    cfg.freeze()

    # file logging on the local ranks
    set_stream_handler('comm', cfg.log_level)  # for sub-processes
    log_rank_file = f"log_rank_{comm.get_rank()}.txt"
    set_file_handler('main', cfg.log_level, cfg.save_dir, log_rank_file)
    set_stream_handler('error', cfg.log_level)
    set_file_handler('error', cfg.log_level, cfg.save_dir, "log_error.txt")
    if comm.is_main_process():
        set_file_handler('result', cfg.log_level, "./", "log_result.txt")

    # log distributed learning
    if comm.get_world_size() > 1:
        log.info(f"[DDP] dist_url: {cfg.dist_url}")
        log.info(f"[DDP] global_world_size = {comm.get_world_size()}")
        log.info(f"[DDP] num_gpus_per_machine = {torch.cuda.device_count()}")
        log.info(f"[DDP] machine_rank {cfg.machine_rank} / "
                 f"num_machines = {cfg.num_machines}")
        comm.synchronize()
        log_comm.info(f"[DDP] rank (local: {comm.get_local_rank()}, "
                      f"global: {comm.get_rank()}) has been spawned.")
        comm.synchronize()
        log.info(f"[DDP] Synchronized across all the ranks.")

    if not cfg.spawn_ctx:
        # This structure (including the customized launch.py) is for
        # compatibility with our internal API. There is no functional
        # difference from the typical usage of the distributed package.
        # Please don't mind this peculiarity and focus on the main algorithm.
        for _ in train(cfg):
            pass

    return cfg
def get_crops(self):
    """Handles the request of the API and input of the image."""
    cfg = get_cfg()
    valid_augs_list = [
        load_obj(i['class_name'])(**i['params'])
        for i in cfg['augmentation']['valid']['augs']
    ]
    valid_bbox_params = OmegaConf.to_container(
        (cfg['augmentation']['valid']['bbox_params']))
    valid_augs = A.Compose(valid_augs_list, bbox_params=valid_bbox_params)

    test_dataset = ImgDataset(None, 'test', self.imageDir, cfg, valid_augs)
    test_loader = DataLoader(test_dataset,
                             batch_size=cfg.data.batch_size,
                             num_workers=cfg.data.num_workers,
                             shuffle=False,
                             collate_fn=collate_fn)

    device = torch.device('cuda:0' if torch.cuda.is_available() else 'cpu')
    model = torch.load(
        os.path.dirname(os.path.abspath(__file__)) +
        f"/{str(self.imageDir).lower().split('/')[-1]}/model.pth",
        map_location=device)
    detection_threshold = 0.5
    results = []
    model.eval()

    hparams = flatten_omegaconf(cfg)
    lit_model = LitImg(hparams=hparams, cfg=cfg, model=model)
    self.results = eval_model(test_loader, results, detection_threshold,
                              device, lit_model)

    for i in range(len(self.results)):
        if self.results[i]['image_id'] + '.JPG' == \
                self.imageList[self.cur - 1].split('/')[-1]:
            self.mainPanel.create_rectangle(
                int(int(self.results[i]['x1']) * self.scale),
                int(int(self.results[i]['y1']) * self.scale),
                int(int(self.results[i]['x2']) * self.scale),
                int(int(self.results[i]['y2']) * self.scale),
                width=2,
                outline='red')

    self.text_label.config(text='Crop: \n' + str(self.imageDir)[40:] +
                           '\nTotal: \n' + str(len(self.results)))
    self.sub_button.config(state='disabled')
def main():
    logging.basicConfig(
        level=__LOGLEVEL__,
        format="%(asctime)s : " +
        "%(module)s (%(lineno)s) - %(levelname)s - %(message)s")
    cfg_file = sys.argv[1] if len(sys.argv) > 1 else None
    cfg = get_cfg(cfg_file)
    # test_build_bulk(cfg)
    # test_build(cfg)
    test_lookup(cfg)
def main():
    logging.basicConfig(
        level=logging.INFO,
        format="%(asctime)s : " +
        "%(module)s (%(lineno)s) - %(levelname)s - %(message)s")
    cfg_file = sys.argv[1] if len(sys.argv) > 1 else None
    cfg = get_cfg(cfg_file)
    lemmatizer = Lemmatizer(cfg)
    while True:
        word = raw_input('> ')
        print lemmatizer.lemmatize(word)
def main():
    logging.basicConfig(
        level=logging.INFO,
        format="%(asctime)s : " +
        "%(module)s (%(lineno)s) - %(levelname)s - %(message)s")
    cfg_file = sys.argv[1] if len(sys.argv) > 1 else None
    cfg = get_cfg(cfg_file)
    dep_to_4lang = DepTo4lang(cfg)
    dep_to_4lang.dep_to_4lang()
    dep_to_4lang.save_machines()
    dep_to_4lang.print_graphs()
def run():
    opts = arg_parser()
    cfg = utils.get_cfg()

    if not opts.environment:
        raise ValueError('You must set the -e variable')

    env = opts.environment

    if opts.cron:
        setup_cron(env)

    if opts.prev:
        proc_prevmonth(cfg['db'], env)
def main():
    logging.basicConfig(
        level=logging.INFO,
        format="%(asctime)s : " +
        "%(module)s (%(lineno)s) - %(levelname)s - %(message)s"
    )
    cfg_file = sys.argv[1] if len(sys.argv) > 1 else None
    cfg = get_cfg(cfg_file)
    sim_type = cfg.get("sim", "similarity_type")
    if sim_type == "sentence":
        main_sen_sim(cfg)
    elif sim_type == "word":
        raise Exception("main function for word sim not implemented yet")
    elif sim_type == "word_test":
        main_word_test(cfg)
    else:
        raise Exception("unknown similarity type: {0}".format(sim_type))
def main():
    logging.basicConfig(
        level=logging.INFO,
        format="%(asctime)s : " +
        "%(module)s (%(lineno)s) - %(levelname)s - %(message)s")
    cfg_file = sys.argv[1] if len(sys.argv) > 1 else None
    cfg = get_cfg(cfg_file)
    sim_type = cfg.get('sim', 'similarity_type')
    if sim_type == 'sentence':
        main_sen_sim(cfg)
    elif sim_type == 'word':
        raise Exception("main function for word sim not implemented yet")
    elif sim_type == 'word_test':
        main_word_test(cfg)
    else:
        raise Exception('unknown similarity type: {0}'.format(sim_type))
def main():
    logging.basicConfig(
        level=__LOGLEVEL__,
        format="%(asctime)s : " +
        "%(module)s (%(lineno)s) - %(levelname)s - %(message)s")
    cfg_file = sys.argv[1] if len(sys.argv) > 1 else None
    max_sens = int(sys.argv[2]) if len(sys.argv) > 2 else None
    cfg = get_cfg(cfg_file)
    text_to_4lang = TextTo4lang(cfg)
    input_fn = cfg.get('data', 'input_sens')
    sens = [line.strip() for line in open(input_fn)]
    if max_sens is not None:
        sens = sens[:max_sens]
    words_to_machines = text_to_4lang.process(sens, print_deps=True)
    fn = print_text_graph(words_to_machines, cfg.get('machine', 'graph_dir'))
    logging.info('wrote graph to {0}'.format(fn))
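# Every snippet in this collection calls a project-specific get_cfg() helper,
# and several of them then read values with cfg.get(section, option), i.e. a
# ConfigParser-style interface. The block below is only a minimal sketch of
# such a helper: the default filename and fallback behaviour are assumptions
# for illustration, not any of these projects' actual implementations.
try:
    from ConfigParser import ConfigParser  # Python 2
except ImportError:
    from configparser import ConfigParser  # Python 3

DEFAULT_CFG_FILE = 'default.cfg'  # hypothetical default location


def get_cfg(cfg_file=None):
    """Parse an INI-style config file, falling back to a default path."""
    parser = ConfigParser()
    parser.read(cfg_file if cfg_file is not None else DEFAULT_CFG_FILE)
    return parser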
def run():
    rng = date_range()
    defaults = {
        'begin': rng[0],
        'stop': rng[1],
        'conf_file': utils.CONF_FILE,
        'dir': os.path.join(os.path.expanduser('~'), 'temp-logs'),
        'sensors': 'ALL'
    }

    opts = arg_parser(defaults)
    cfg = utils.get_cfg(opts['conf_file'], section='config')

    if opts['sensors'] == 'ALL':
        opts['sensors'] = [k for k in SENSOR_KEYS if k != 'invalid']
    elif opts['sensors'] == 'MODIS':
        opts['sensors'] = [k for k in SENSOR_KEYS if k.startswith('m')]
    elif opts['sensors'] == 'LANDSAT':
        opts['sensors'] = [
            k for k in SENSOR_KEYS
            if any([s in k for s in ('tm4', 'tm5', 'etm7', 'oli')])
        ]

    msg = ''
    receive, sender, debug = get_addresses(cfg)
    subject = EMAIL_SUBJECT.format(begin=opts['begin'], stop=opts['stop'])

    try:
        msg = process_monthly_metrics(cfg, opts['environment'], opts['dir'],
                                      opts['begin'], opts['stop'],
                                      tuple(opts['sensors']))
    except Exception:
        exc_msg = str(traceback.format_exc()) + '\n\n' + msg

        if not opts['debug']:
            utils.send_email(sender, debug, subject, exc_msg)

        msg = ('There was an error with statistics processing.\n'
               'The following have been notified of the error: {0}.'.format(
                   ', '.join(debug)))
        raise
    finally:
        if not opts['debug']:
            utils.send_email(sender, receive, subject, msg)
def __init__(self, _database):
    super().__init__()
    self.database = _database
    self.iconbitmap(icon)
    self.title('RSA&AES Encryption')
    self.geometry('442x252+200+100')
    self.resizable(0, 0)
    self.getkeylist()
    self.setupUI()
    self.cfg = utils.get_cfg(self.database)
    self.cfg_url_entry.insert('0', self.cfg[0])
    self.dir_save_entry.insert('0', self.cfg[1])
    self.dir_out_entry.insert('0', self.cfg[1])
    if self.thirdkeylist:
        self.select_thirdkey(self.thirdkeylist[0])
        self.thirdkey_ls.current(0)
    if self.userkeylist:
        self.select_userkey(
            self.cfg[2] if self.cfg[2] else self.userkeylist[0])
        self.userkey_ls.current(0)
    self.wm_attributes('-topmost', 1)
def run(): """ Change the password for a user """ # Since this is mostly a fire and forget script it needs # broad exception handling so whatever traceback gets generated # is sent out in the email msg = 'General Failure' status = 'Failure' username, cfg_path = arg_parser() db_info = get_cfg(cfg_path, section='config') reciever, sender = get_addresses(db_info) try: old_pass = current_pass(db_info) new_pass = change_pass(old_pass) update_db(new_pass, db_info) msg = 'User: {0} password has been updated'.format(username) status = 'Successful' except Exception: msg = str(traceback.format_exc()) finally: send_email(sender, reciever, EMAIL_SUBJECT.format(status), msg)
def setUp(self) -> None:
    """Set up the test variables."""
    imageDir = '/home/p1gm1/Documents/DroneSky/test_img/oil_palm'
    cfg = get_cfg()
    valid_augs_list = [
        load_obj(i['class_name'])(**i['params'])
        for i in cfg['augmentation']['valid']['augs']
    ]
    valid_bbox_params = OmegaConf.to_container(
        (cfg['augmentation']['valid']['bbox_params']))
    valid_augs = A.Compose(valid_augs_list, bbox_params=valid_bbox_params)
    self.test_dataset = ImgDataset(None, 'test', imageDir, cfg, valid_augs)
    self.test_loader = DataLoader(self.test_dataset,
                                  batch_size=cfg.data.batch_size,
                                  num_workers=cfg.data.num_workers,
                                  shuffle=False,
                                  collate_fn=collate_fn)
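# A minimal follow-up test method for the setUp above, written as a sketch:
# it only assumes the DataLoader yields non-empty batches, since the exact
# structure produced by ImgDataset/collate_fn is not shown in this snippet.
def test_loader_yields_batches(self) -> None:
    """The test loader should produce at least one non-empty batch."""
    batch = next(iter(self.test_loader))
    self.assertTrue(len(batch) > 0)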
key = word1 + "_" + word2 if key in self.shortest_path_dict.keys(): return self.shortest_path_dict[key] else: return 0 class MachineGraphOptions(): # nodename_option: # 0: all nodes are unique # 1: all nodes are printnames # 2: only: uppercase + 'lack', 'before', 'not', 'have' are unique def __init__(self, fullgraph_options): self.nodename_option = fullgraph_options.nodename_option self.upper_excl = fullgraph_options.upper_excl self.embedding_weighted = fullgraph_options.embedding_weighted if fullgraph_options.embedding_weighted: self.embedding_model = fullgraph_options.embedding_model self.color_based = fullgraph_options.color_based if __name__ == "__main__": logging.basicConfig( level=logging.INFO, format="%(asctime)s : " + "%(module)s (%(lineno)s) - %(levelname)s - %(message)s") cfg_file = sys.argv[1] if len(sys.argv) > 1 else None cfg = get_cfg(cfg_file) lexicon = Lexicon.build_from_4lang(cfg) lexicon.save_to_binary(cfg.get("machine", "definitions_binary"))
def run():
    rng = date_range()
    defaults = {'begin': rng[0],
                'stop': rng[1],
                'conf_file': utils.CONF_FILE,
                'dir': os.path.join(os.path.expanduser('~'), 'temp-logs'),
                'sensors': 'ALL',
                'plotting': False}

    opts = arg_parser(defaults)
    cfg = utils.get_cfg(opts['conf_file'], section='config')

    if opts['sensors'] == 'ALL':
        opts['sensors'] = [k for k in SENSOR_KEYS if k != 'invalid']
    if opts['sensors'] == ['MODIS']:
        opts['sensors'] = [k for k in SENSOR_KEYS
                           if k.lower().startswith('m')]
    if opts['sensors'] == ['VIIRS']:
        opts['sensors'] = [k for k in SENSOR_KEYS
                           if k.lower().startswith('v')]
    if opts['sensors'] == ['LANDSAT']:
        opts['sensors'] = [k for k in SENSOR_KEYS
                           if k != 'invalid' and not k.lower().startswith('m')]

    msg = ''
    receive, sender, debug = get_addresses(cfg)
    subject = EMAIL_SUBJECT.format(begin=opts['begin'], stop=opts['stop'])

    # FIXME: adding cruft to the codebase... time constraints....
    if not opts['plotting']:
        try:
            msg = process_monthly_metrics(cfg, opts['environment'],
                                          opts['dir'], opts['begin'],
                                          opts['stop'],
                                          tuple(opts['sensors']))
        except Exception:
            exc_msg = str(traceback.format_exc()) + '\n\n' + msg
            utils.send_email(sender, debug, subject, exc_msg)
            msg = ('There was an error with statistics processing.\n'
                   'The following have been notified of the error: {0}.'
                   .format(', '.join(debug)))
            raise
        finally:
            utils.send_email(sender, receive, subject, msg)
    else:
        # msg = '⚠ PLOTTING IS STILL UNDER DEVELOPMENT! ⚠'
        files = []
        try:
            files.append(graphics.sensor_barchart(cfg, opts['begin'],
                                                  opts['stop']))
            files.append(graphics.pathrow_heatmap(cfg, opts['begin'],
                                                  opts['stop'], 'ALL'))
            info = db_top10stats(opts['begin'], opts['stop'],
                                 tuple(opts['sensors']), cfg)
            for i, (email, _) in zip(range(3), info):
                files.append(graphics.pathrow_heatmap(cfg, opts['begin'],
                                                      opts['stop'], email))
        except Exception:
            exc_msg = str(traceback.format_exc()) + '\n\n' + msg
            utils.send_email(sender, debug, subject, exc_msg)
            msg = ('There was an error with statistics processing.\n'
                   'The following have been notified of the error: {0}.'
                   .format(', '.join(debug)))
            raise
        finally:
            utils.send_email(sender, receive, subject, msg, files)
__author__ = 'mjrao'
__time__ = '2017/4/27'

import web
import utils
import json
import jwt
import re

urls = (
    '/phonebooks', 'Phonebooks',
)

dbinfo = utils.get_cfg('db.ini', 'mysql')
jwtinfo = utils.get_cfg('db.ini', 'jwt')
db = web.database(dbn='mysql',
                  user=dbinfo.get('user', ''),
                  pw=dbinfo.get('password', ''),
                  db=dbinfo.get('database', ''),
                  host=dbinfo.get('host', ''),
                  port=int(dbinfo.get('port', '')))

auth_operates = (1005, 1006, 10061, 10062, 1007, 1009, 1010)


class Phonebooks:
    def POST(self):
        data = web.data()
        req_json = json.loads(data)
        op = req_json.get('op', 0)
        '''
        1001: register
        1002: forgot password
        1003: login
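# The handler above reads a JSON body and dispatches on the numeric 'op'
# field (1001 register, 1002 forgot password, 1003 login, ...). A sketch of a
# client call using the requests library is shown below; the host/port and
# the payload fields other than 'op' are assumptions for illustration only.
import requests

resp = requests.post('http://localhost:8080/phonebooks',
                     json={'op': 1003,              # login, per the op list above
                           'phone': '13800000000',  # hypothetical field
                           'password': 'secret'})   # hypothetical field
print(resp.status_code, resp.text)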
def __init__(self):
    cfg = get_cfg(None)
    self.textto4lang = TextTo4lang(cfg)
    self.textto4lang.graphs_dir = '/home/adaamko/AACS18/4lang/data'
    self.textto4lang.abstract = False
    self.textto4lang.expand = False
            content = file_list[j]
            npos = content.index(',')
            path = content[:npos]
            classid = content[npos + 1:len(content) - 1]
            image = utils.load_image(path, w, h, rgb2gray)
            image = image / 255.0
            images.append(image)
        i += batchsize
        images = np.array(images, dtype=np.float32)
        yield epoch, images


if __name__ == '__main__':
    # Training Data
    cfg = utils.get_cfg(sys.argv[1])
    file_list = utils.load_data(cfg['trainpath'])
    batch = minibatch(file_list, cfg['batchsize'], cfg['width'],
                      cfg['height'], True)

    # Training Parameters
    learning_rate = cfg['learningrate']
    num_steps = cfg['maxepoch']
    batch_size = cfg['batchsize']

    # Network Parameters
    num_hidden_1 = cfg['hidden1num']  # 1st layer num features
    num_hidden_2 = cfg['hidden2num']  # 2nd layer num features (the latent dim)
    num_input = cfg['width'] * cfg['height'] * 1  # data input

    network = ae.Autoencoder(num_input, num_hidden_1, num_hidden_2)
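# The training script above indexes cfg like a dict. As a sketch only, the
# config it expects could look like the following; the key names are taken
# from the code, but every value here is invented for illustration.
example_cfg = {
    'trainpath': 'data/train_list.txt',  # hypothetical path to the file list
    'batchsize': 64,
    'width': 28,
    'height': 28,
    'learningrate': 0.01,
    'maxepoch': 1000,
    'hidden1num': 256,
    'hidden2num': 128,
}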
def _generate_mismatches_pairs(self):
    """Generate all mismatches pairs."""
    for i, name in enumerate(os.listdir(self.data_dir)):
        if name == ".DS_Store":
            continue

        remaining = os.listdir(self.data_dir)
        remaining = [f_n for f_n in remaining if f_n != ".DS_Store"]
        # del remaining[i]  # deletes the file from the list, so that it is not chosen again
        other_dir = random.choice(remaining)
        with open(self.pairs_filepath, "a") as f:
            for i in range(3):
                file1 = random.choice(os.listdir(self.data_dir + name))
                file2 = random.choice(os.listdir(self.data_dir + other_dir))
                f.write(name + "\t" + file1 + '\t' + other_dir + "\t" +
                        file2 + '\n')
            f.write("\n")


if __name__ == '__main__':
    print('load all config')
    model_config = Map(yaml.safe_load(open('config/face_base.yaml')))
    cfg = _merge_a_into_b(model_config, get_cfg(), get_cfg(), [])
    data_dir = cfg.DATASETS.FOLDER + 'test/'
    pairs_filepath = cfg.DATASETS.FOLDER + "pairs.txt"
    img_ext = ".png"
    generatePairs = GeneratePairs(data_dir, pairs_filepath, img_ext)
    generatePairs.generate()
from utils import get_cfg, pi_camera
from PIL import Image
import cv2
import numpy as np


def show(x):
    cv2.namedWindow('image', cv2.WINDOW_NORMAL)
    cv2.resizeWindow('image', 640, 480)
    cv2.imshow('image', x)
    cv2.waitKey(0)
    cv2.destroyAllWindows()


if __name__ == "__main__":
    cfg = get_cfg("/home/pi/Desktop/iris/cfg/cfg.yaml")
    cam = pi_camera(cfg)
    cam.capture_images('left')
        if key in self.shortest_path_dict.keys():
            return self.shortest_path_dict[key]
        else:
            return 0


class MachineGraphOptions():
    # nodename_option:
    #   0: all nodes are unique
    #   1: all nodes are printnames
    #   2: only: uppercase + 'lack', 'before', 'not', 'have' are unique
    def __init__(self, fullgraph_options):
        self.nodename_option = fullgraph_options.nodename_option
        self.upper_excl = fullgraph_options.upper_excl
        self.weighted = fullgraph_options.weighted
        self.embedding_model = fullgraph_options.embedding_model
        self.color_based = fullgraph_options.color_based


if __name__ == "__main__":
    logging.basicConfig(
        level=logging.INFO,
        format="%(asctime)s : " +
        "%(module)s (%(lineno)s) - %(levelname)s - %(message)s")
    cfg_file = sys.argv[1] if len(sys.argv) > 1 else None
    cfg = get_cfg(cfg_file)
    lexicon = Lexicon.build_from_4lang(cfg)
    fourlang_fn = cfg.get("machine", "definitions_binary")
    ensure_dir(os.path.dirname(fourlang_fn))
    lexicon.save_to_binary(fourlang_fn)
from pymongo import MongoClient

from utils import get_cfg

db_name = get_cfg()['db_data']['test_db_name']

client = MongoClient(
    "mongodb+srv://main_exterminated:[email protected]/partymakers_test?retryWrites=true&w=majority"
)
data = client.get_database('partymakers_test').get_collection('parties')


def get_last_id():
    return get_all_parties()[-1]['id']


def create_party(**party_data):
    try:
        insert = data.insert_one({
            "id": get_last_id() + 1,
            "name": party_data['name'],
            "desc": party_data['desc'],
            "date": party_data['date'],
            'coordinates': [*party_data['coordinates']],
            'location': {
                'main': party_data['location_main'],
                'add': party_data['location_add'],
            },
            'price': party_data['price'],
            'age': party_data['age']
        })
        if not insert:
            raise Exception('')
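# A sketch of calling create_party() with the keyword arguments the insert
# above expects; the concrete values are made up purely for illustration.
create_party(name='Rooftop party',
             desc='Open-air party with a live DJ',
             date='2021-07-17',
             coordinates=(59.93, 30.33),
             location_main='Main hall',
             location_add='2nd floor',
             price=500,
             age=18)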