def main():
    """Main olm entry point: generate the static site plus any subsites.

    BUG FIX: the docstring previously appeared *after* the first statement,
    making it a useless expression statement rather than a docstring.
    """
    time_all = time.time()
    logging.basicConfig(stream=sys.stdout, level=logging.INFO)
    logging.info("Beginning static site generation")
    settings_file_path = os.path.abspath(
        os.path.join(sys.argv[1], 'settings.py'))
    global CONTEXT
    if os.path.isfile(settings_file_path):
        CONTEXT = load_settings(sys.argv[1], settings_file_path)
    else:
        CONTEXT = load_settings(sys.argv[1])
    load_plugins(CONTEXT)
    signal_sender = signal(Signals.INITIALISED)
    signal_sender.send((CONTEXT))
    subsites = generateSite()
    # Snapshot the top-level folders: the loop below rebinds the CONTEXT
    # folders per subsite, so they must not be re-read from CONTEXT.
    base_folder = CONTEXT.BASE_FOLDER
    source_folder = CONTEXT.SOURCE_FOLDER
    for subsite in subsites:
        logging.info("Found subsite '%s'", subsite[1:])
        CONTEXT.OUTPUT_FOLDER = os.path.abspath(
            os.path.join(base_folder, 'dist', subsite[1:]))
        CONTEXT.BASE_FOLDER = os.path.join(source_folder, subsite)
        CONTEXT.SOURCE_FOLDER = os.path.join(source_folder, subsite)
        generateSite()
    logging.info("Completed everything in %f seconds",
                 (time.time() - time_all))
def main():
    """Start the worker threads and babysit them until they all exit.

    On Ctrl-C/SystemExit the global EXIT_FLAG is raised so threads can
    wind down; settings are saved once every thread has finished.
    """
    GPIO.cleanup()
    settings.load_settings()
    threads = [Logic.Logic(),
               Display.DisplayThread(),
               threading.Thread(target=RestThread.run_server)]
    # threads.append(WatchdogThread.WatchdogThread())
    # threads.append(ReminderThread.ReminderThread())
    for th in threads:
        th.start()
    time.sleep(0.5)
    while len(threads) > 0:
        try:
            temp_threads = []
            for t in threads:
                # is_alive() replaces isAlive(), which was removed in
                # Python 3.9.
                if t is not None and t.is_alive():
                    t.join(0.2)
                    temp_threads.append(t)
            threads = temp_threads
            time.sleep(0.1)
        except (KeyboardInterrupt, SystemExit):
            Register.EXIT_FLAG = True
    Helpers.log("ZAPISYWANIE KONFIGURACJI")
    settings.save_settings()
    Helpers.log("ZAMYKANIE APLIKACJI")
def main():
    """Start the worker threads and babysit them until they all exit.

    On Ctrl-C/SystemExit the global EXIT_FLAG is raised so threads can
    wind down; settings are saved once every thread has finished.
    """
    GPIO.cleanup()
    settings.load_settings()
    threads = [
        Logic.Logic(),
        Display.DisplayThread(),
        threading.Thread(target=RestThread.run_server)
    ]
    # threads.append(WatchdogThread.WatchdogThread())
    # threads.append(ReminderThread.ReminderThread())
    for th in threads:
        th.start()
    time.sleep(0.5)
    while len(threads) > 0:
        try:
            temp_threads = []
            for t in threads:
                # is_alive() replaces isAlive(), which was removed in
                # Python 3.9.
                if t is not None and t.is_alive():
                    t.join(0.2)
                    temp_threads.append(t)
            threads = temp_threads
            time.sleep(0.1)
        except (KeyboardInterrupt, SystemExit):
            Register.EXIT_FLAG = True
    Helpers.log("ZAPISYWANIE KONFIGURACJI")
    settings.save_settings()
    Helpers.log("ZAMYKANIE APLIKACJI")
def at_start_open_file_from_settings_if_option_is_selected():
    """On startup, open the newest file of the server selected in settings,
    if that option is enabled in settings.csv.

    FIX: the settings row was previously loaded twice; it is now read once
    and indexed for both values.
    """
    loaded = st.load_settings(1, st.set_file_name)
    # Index 3: "open newest file at start" flag; index 2: the selected
    # server id (stored as a string int) — per the original comments.
    check_option = loaded[3]
    if check_option:
        natural_number_server_id = int(loaded[2])
        file_tuple = sfd.newest_file(
            sfd.get_files_by_id(natural_number_server_id))
        newest_file = file_tuple[1]
        join_path = os.path.join(sfd.make_path(natural_number_server_id),
                                 newest_file[3])
        print(join_path)
        root.filename = (join_path)
        my_json = root.filename
        print('Load selected server from settings ' + newest_file[3])
        root.opened_json_object = raf.AhFile(my_json)
        root.opened_json_object.create_dependency()
def make_bash_script(filename, cmd, crate_dir=None, settings=None):
    """
    Make an executable bash script out of the given command.

    Args:
        filename: path of the script file to write.
        cmd: shell command to embed in the script.
        crate_dir: directory added to PATH and cd'd into; defaults to the
            directory containing this module.
        settings: optional settings file; when given, it is loaded before
            the cluster submit command is read.

    The two previous branches were identical except for an "srun " prefix
    under SLURM; they are collapsed into one write path.
    """
    if crate_dir is None:
        crate_dir = os.path.dirname(os.path.abspath(os.path.expanduser(__file__)))
    if settings is not None:
        load_settings(settings)
    cmd_name = Settings.get_cluster_command()
    # SLURM (sbatch) requires the command itself to be launched via srun.
    prefix = "srun " if cmd_name == "sbatch" else ""
    with open(filename, 'w') as f:
        f.write("#!/bin/bash\n")
        f.write("export PATH=$PATH:%s\n" % (crate_dir))
        f.write("source ~/.bash_profile\n")
        f.write("cd %s\n" % (crate_dir))
        #write_cluster_preface(f)
        f.write(prefix + cmd + "\n")
    os.system('chmod +x \"%s\"' % (filename))
def main():
    """Load settings and run the Discord bot until the process stops."""
    load_settings()
    token = getenv("BOT_TOKEN")
    client = discord.Client()

    @client.event
    async def on_message(message):
        # Never answer the bot's own messages.
        if message.author == client.user:
            return
        if message.content.startswith('!hello'):
            reply = 'Hello {0.author.mention}'.format(message)
            await client.send_message(message.channel, reply)

    @client.event
    async def on_ready():
        print('Logged in as')
        print(client.user.name)
        print(client.user.id)
        print('------')

    client.run(token)
def run():
    """Initialise the runtime environment, build the Flask app, and serve it."""
    # Prepare Flask configuration before the app is constructed.
    load_settings()
    # Import lazily so settings are in place when the app is created.
    from web_site.documents import create_app
    create_app().run()
def event(self):
    """Poll for a LIRC remote event and react to it.

    KEY_0 reloads the settings; KEY_LEFT steps the selection index one
    position backwards, wrapping over the 18 entries.
    """
    if not Register.LIRC_EVENTS:
        return
    lirc_event = LircEvents.get_event()
    if lirc_event == LircEvents.KEY_0:
        settings.load_settings()
    if lirc_event == LircEvents.KEY_LEFT:
        # BUG FIX: `self.i - 1 % 18` parses as `self.i - (1 % 18)`, i.e. a
        # plain decrement with no wrap-around; the modulus clearly intends
        # a wrapping index over 18 entries.
        self.i = (self.i - 1) % 18
def run_on_cluster(cmd, job_name, cluster_output_dir,
                   cluster_scripts_dir=None,
                   queue_type=None,
                   cmd_name="qsub",
                   settings_fname=None):
    # NOTE: Python 2 module (print statements).
    # Submit `cmd` to the cluster scheduler as a job named `job_name`,
    # writing stdout/stderr and the generated submit script under
    # `cluster_output_dir`.  Returns the job id reported by launch_job().
    print "Submitting job: %s" % (job_name)
    queue_name = None
    # Load command name from settings file
    if settings_fname != None:
        load_settings(settings_fname)
        cmd_name = Settings.get_cluster_command()
        if queue_type == "long":
            queue_name = Settings.get_long_queue_name()
        elif queue_type == "short":
            queue_name = Settings.get_short_queue_name()
        else:
            print "Warning: Unknown queue type: %s" % (queue_type)
            queue_name = queue_type
    if queue_type is None:
        print " - queue type: unspecified"
    else:
        print " - queue type: %s" % (queue_type)
    if queue_name is None:
        print " - queue name unspecified"
    else:
        print " - queue name: %s" % (queue_name)
    misc_utils.make_dir(cluster_output_dir)
    if cluster_scripts_dir == None:
        cluster_scripts_dir = os.path.join(cluster_output_dir,
                                           'cluster_scripts')
    misc_utils.make_dir(cluster_scripts_dir)
    scripts_output_dir = os.path.join(cluster_output_dir, 'scripts_output')
    misc_utils.make_dir(scripts_output_dir)
    scripts_output_dir = os.path.abspath(scripts_output_dir)
    # Base submit call: route stdout and stderr to scripts_output_dir.
    cluster_call = '%s -o \"%s\" -e \"%s\"' % (cmd_name,
                                               scripts_output_dir,
                                               scripts_output_dir)
    # Add queue type if given one
    if queue_name != None:
        cluster_call += ' -q \"%s\"' % (queue_name)
    # Script name embeds the submission timestamp so reruns don't collide.
    script_name = \
        valid_cluster_name(os.path.join(cluster_scripts_dir,
                                        '%s_time_%s.sh' \
                                        % (job_name,
                                           time.strftime("%m-%d-%y_%H:%M:%S"))))
    make_bash_script(script_name, cmd)
    cluster_cmd = cluster_call + ' \"%s\"' % (script_name)
    job_id = launch_job(cluster_cmd, cmd_name)
    return job_id
def init_run(m_app):
    """Register the URL routes, force the port setting, and start the app."""
    settings.load_settings()
    settings.update_setting('port', 8080)
    # (rule, endpoint, handler) triples registered on the Flask app.
    routes = [
        ('/', 'index', requests.index),
        ('/t', 'test', test),
        ('/get_picture', 'get_picture', requests.get_picture),
        ('/get_preview', 'get_preview', requests.get_preview),
        ('/take_picture', 'take_picture', requests.take_picture),
    ]
    for rule, endpoint, handler in routes:
        m_app.add_url_rule(rule, endpoint, handler)
    print_info(m_app)
    m_app.run(host="0.0.0.0", port=settings.g_port)
def run_on_cluster(cmd, job_name, cluster_output_dir,
                   cluster_scripts_dir=None,
                   queue_type=None,
                   cmd_name="qsub",
                   settings=None):
    # NOTE: Python 2 module (print statements).
    # Build a qsub-style submit script for `cmd` and hand it to the
    # scheduler via os.system; script/output dirs live under
    # `cluster_output_dir`.
    print "Submitting job: %s" %(job_name)
    queue_name = None
    # Load command name from settings file
    if settings != None:
        load_settings(settings)
        cmd_name = Settings.get_cluster_command()
        if queue_type == "long":
            queue_name = Settings.get_long_queue_name()
        elif queue_type == "short":
            queue_name = Settings.get_short_queue_name()
        else:
            print "Warning: Unknown queue type: %s" %(queue_type)
            queue_name = queue_type
    if queue_type == None:
        print " - queue: unspecified"
    else:
        print " - queue: %s, using queue name %s" %(queue_type, queue_name)
    #print " - cmd: %s" %(cmd)
    if cluster_scripts_dir == None:
        cluster_scripts_dir = os.path.join(cluster_output_dir,
                                           'cluster_scripts')
    if not os.path.isdir(cluster_scripts_dir):
        os.mkdir(cluster_scripts_dir)
    scripts_output_dir = os.path.join(cluster_output_dir, 'scripts_output')
    if not os.path.isdir(scripts_output_dir):
        os.mkdir(scripts_output_dir)
    scripts_output_dir = os.path.abspath(scripts_output_dir)
    #qsub_call = 'qsub -V -q \"%s\" -o \"%s\" -e \"%s\"' %(queue_type, scripts_output_dir, scripts_output_dir)
    qsub_call = '%s -o \"%s\" -e \"%s\"' %(cmd_name, scripts_output_dir,
                                           scripts_output_dir)
    # Add queue type if given one
    if queue_name != None:
        qsub_call += ' -q \"%s\"' %(queue_name)
    # Script name embeds the submission timestamp so reruns don't collide.
    script_name = valid_qsub_name(os.path.join(cluster_scripts_dir,
                                               '%s_time_%s.sh' %(job_name, time.strftime("%m-%d-%y_%H:%M:%S"))))
    make_bash_script(script_name, cmd)
    qsub_cmd = qsub_call + ' \"%s\"' %(script_name)
    os.system(qsub_cmd)
async def schedule_submenu(message, attachments, env):
    """Show the schedule submenu, or ask the user to pick a group first."""
    user_setting = settings.load_settings(message.from_id)
    if not user_setting:
        # No group configured yet — point the user at the settings menu.
        text = 'Для начала установите группу через пункт меню настройки.'
        await env.reply(text,
                        keyboard=json.dumps(keyboards.keyboard_main,
                                            ensure_ascii=False))
        return
    await env.reply('Расписание (меню)',
                    keyboard=json.dumps(keyboards.schedule_menu,
                                        ensure_ascii=False))
def main(args=None):
    """Download a database backup from S3 and stage it as downloaded.dump."""
    parser = argparse.ArgumentParser('Download backup')
    parser.add_argument('--meta',
                        help='The meta file describing what to download')
    args = parser.parse_args(args=args)
    cfg = settings.load_settings()
    s3 = aws_utils.connect_to_s3(cfg)
    if args.meta is None:
        print('Finding the most recent backup...')
        key = get_most_recent(s3, cfg)
    else:
        print('Loading the meta file...')
        with open(args.meta, 'r') as infile:
            key = json.load(infile)['key']
    local_name = key.split('/')[-1]
    print(f'Downloading {key} to {local_name}')
    s3.download_file(cfg['AWS_S3_BUCKET'], key, local_name)
    print('Done!')
    # Remove any previously staged download before renaming the new one.
    for stale in ('downloaded.dump', 'downloaded.json'):
        if os.path.exists(stale):
            print(f'Deleting {stale}')
            os.remove(stale)
    print(f'Moving {local_name} to downloaded.dump')
    os.rename(local_name, 'downloaded.dump')
    print('Storing meta info in downloaded.json')
    with open('downloaded.json', 'w') as outfile:
        json.dump({'key': key}, outfile)
def main(args):
    """Start the background service threads and block until the tray exits.

    (A block of commented-out experimental keyboard-sequence code was
    removed: dead code should be deleted, not kept in comments.)
    """
    global settings
    # Load Settings
    settings = SETTINGS.load_settings()
    # Start threads
    bl_args = (settings[SETTINGS.BROADCAST_PORT], settings[SETTINGS.TCP_PORT],)
    broadcast_listener_thread = threading.Thread(target=broadcast_listener,
                                                 args=bl_args)
    broadcast_listener_thread.start()
    window_monitor_thread = threading.Thread(target=window_monitor)
    window_monitor_thread.start()
    client_server_thread = threading.Thread(target=client_server)
    client_server_thread.start()
    # Set Tray Icon — the tray thread owns the UI loop, so its exit means
    # the user asked to quit.
    tray_thread = threading.Thread(target=tray_handler)
    tray_thread.start()
    tray_thread.join()
    # Signal all workers to stop, then wait for each of them.
    event_shutdown.set()
    client_server_thread.join()
    broadcast_listener_thread.join()
    window_monitor_thread.join()
def _show_time(self):
    """Look up the commit for the SHA typed into the entry and display it."""
    builder = self._builder
    commit_label = builder.get_object('CommitLabel')
    sha_entry = builder.get_object('ShaEntry')
    # The git directory comes from the persisted settings.
    git_dir = settings.load_settings()['directory']
    commit_text = git.show(git_dir, sha_entry.get_text())
    commit_label.set_text(commit_text)
def __init__(self):
    """Read the settings file and start a configured selenium webdriver."""
    super().__init__()
    welcome()
    settings = load_settings()
    self._upload = settings['upload']         # directory of images to upload
    self._download = settings['download']     # downloaded data files
    self.sleep_time = settings["sleep_time"]  # wait after fetching page source
    self.separate = settings["separate"]      # keep data files apart from images
    self.extention = settings["extention"]
    self.mirror = settings["mirror"]          # whether to use the mirror site
    brower = settings["brower"]
    profile = settings["profile_path"]
    # Custom profile setup reference:
    # https://blog.csdn.net/weixin_44676081/article/details/106322068
    if brower == "firefox":
        try:
            self.driver = webdriver.Firefox(
                firefox_profile=FirefoxProfile(profile),
                executable_path=settings["webdriver_path"])
        except Exception as e:
            print(e)
    else:
        try:
            options = webdriver.ChromeOptions()
            options.add_argument("--user-data-dir=" + profile)
            # BUG FIX: `options` was built but never passed to Chrome, so
            # the configured profile was silently ignored.
            self.driver = webdriver.Chrome(
                options=options,
                executable_path=settings["webdriver_path"])
        except Exception as e:
            print(e)
def backup_database(local_file):
    """Backs up the database to the given local file using pg_dump.

    Exits the process with status 1 if pg_dump reports failure.
    """
    cfg = settings.load_settings()
    db_host = cfg['DATABASE_HOST']
    db_port = int(cfg['DATABASE_PORT'])
    db_user = cfg['DATABASE_USER']
    db_pass = cfg['DATABASE_PASSWORD']
    db_name = cfg['DATABASE_DBNAME']
    # check_output returns bytes; decode so the log line doesn't read b'...'.
    pg_dump_version = subprocess.check_output(
        'pg_dump --version', shell=True).decode().strip()
    print(
        f'Initiating database backup to {local_file} using {pg_dump_version}')
    old_pg_pass = os.environ.get('PGPASSWORD')
    os.environ['PGPASSWORD'] = db_pass
    try:
        status = os.system(f'pg_dump -Fc {db_name} -h {db_host} -p {db_port} '
                           f'-U {db_user} > {local_file}')
    finally:
        # Restore (or drop) PGPASSWORD even if pg_dump raises.
        if old_pg_pass is not None:
            os.environ['PGPASSWORD'] = old_pg_pass
        else:
            del os.environ['PGPASSWORD']
    if status == 0:
        print('Backup finished')
    else:
        print(f'Backup failed with status {status}')
        sys.exit(1)
def main():
    """Print a psql `\\d+` description of every public base table."""
    conn = setup_connection()
    cursor = conn.cursor()
    tbls = Schema('information_schema').tables
    cursor.execute(
        Query.from_(tbls).where(tbls.table_type == 'BASE TABLE').where(
            tbls.table_schema == 'public').select(tbls.table_name).get_sql(),
    )
    table_names = [row[0] for row in cursor.fetchall()]
    cursor.close()
    conn.close()
    cfg = settings.load_settings()
    db_host = cfg['DATABASE_HOST']
    db_port = int(cfg['DATABASE_PORT'])
    db_user = cfg['DATABASE_USER']
    db_pass = cfg['DATABASE_PASSWORD']
    db_name = cfg['DATABASE_DBNAME']
    old_pg_pass = os.environ.get('PGPASSWORD')
    os.environ['PGPASSWORD'] = db_pass
    for tbl in table_names:
        print('=' * 50)
        print(f'DESCRIBE {tbl}')
        os.system(f'psql -d {db_name} -h {db_host} -p {db_port} '
                  f'-U {db_user} -c "\\d+ {tbl}"')
        print()
    # Restore (or drop) the previous PGPASSWORD value.
    if old_pg_pass is not None:
        os.environ['PGPASSWORD'] = old_pg_pass
    else:
        del os.environ['PGPASSWORD']
def restore_database(local_file):
    """Restores the database from the given local dump file.

    (The old docstring said "Backs up" — a copy/paste error.)
    Exits the process with status 1 if pg_restore reports failure.
    """
    cfg = settings.load_settings()
    db_host = cfg['DATABASE_HOST']
    db_port = int(cfg['DATABASE_PORT'])
    db_user = cfg['DATABASE_USER']
    db_pass = cfg['DATABASE_PASSWORD']
    auth_str = f'-h {db_host} -p {db_port} -U {db_user}'
    old_pg_pass = os.environ.get('PGPASSWORD')
    os.environ['PGPASSWORD'] = db_pass
    # check_output returns bytes; decode so the log line doesn't read b'...'.
    pg_restore_version = subprocess.check_output(
        'pg_restore --version', shell=True).decode().strip()
    print(f'Initiating restore from {local_file} using {pg_restore_version}')
    try:
        status = os.system(
            f'pg_restore -Fc --clean --create --dbname template1 {auth_str} {local_file}'
        )
    finally:
        # Restore (or drop) PGPASSWORD even if pg_restore raises.
        if old_pg_pass is not None:
            os.environ['PGPASSWORD'] = old_pg_pass
        else:
            del os.environ['PGPASSWORD']
    if status == 0:
        print('Restore finished')
    else:
        print(f'Status failed with code {status}')
        sys.exit(1)
def run_on_cluster(
    cmd, job_name, cluster_output_dir, cluster_scripts_dir=None, queue_type=None, cmd_name="qsub", settings_fname=None
):
    # NOTE: Python 2 module (print statements).
    # Submit `cmd` to the cluster scheduler as a job named `job_name`,
    # writing stdout/stderr and the generated submit script under
    # `cluster_output_dir`.  Returns the job id reported by launch_job().
    print "Submitting job: %s" % (job_name)
    queue_name = None
    # Load command name from settings file
    if settings_fname != None:
        load_settings(settings_fname)
        cmd_name = Settings.get_cluster_command()
        if queue_type == "long":
            queue_name = Settings.get_long_queue_name()
        elif queue_type == "short":
            queue_name = Settings.get_short_queue_name()
        else:
            print "Warning: Unknown queue type: %s" % (queue_type)
            queue_name = queue_type
    if queue_type is None:
        print " - queue type: unspecified"
    else:
        print " - queue type: %s" % (queue_type)
    if queue_name is None:
        print " - queue name unspecified"
    else:
        print " - queue name: %s" % (queue_name)
    misc_utils.make_dir(cluster_output_dir)
    if cluster_scripts_dir == None:
        cluster_scripts_dir = os.path.join(cluster_output_dir, "cluster_scripts")
    misc_utils.make_dir(cluster_scripts_dir)
    scripts_output_dir = os.path.join(cluster_output_dir, "scripts_output")
    misc_utils.make_dir(scripts_output_dir)
    scripts_output_dir = os.path.abspath(scripts_output_dir)
    # Base submit call: route stdout and stderr to scripts_output_dir.
    cluster_call = '%s -o "%s" -e "%s"' % (cmd_name, scripts_output_dir, scripts_output_dir)
    # Add queue type if given one
    if queue_name != None:
        cluster_call += ' -q "%s"' % (queue_name)
    # Script name embeds the submission timestamp so reruns don't collide.
    script_name = valid_cluster_name(
        os.path.join(cluster_scripts_dir, "%s_time_%s.sh" % (job_name, time.strftime("%m-%d-%y_%H:%M:%S")))
    )
    make_bash_script(script_name, cmd)
    cluster_cmd = cluster_call + ' "%s"' % (script_name)
    job_id = launch_job(cluster_cmd, cmd_name)
    return job_id
def setup_connection():
    """Create a psycopg2 connection to the postgres database."""
    cfg = settings.load_settings()
    # Connection parameters all come from the loaded settings.
    params = {
        'host': cfg['DATABASE_HOST'],
        'port': int(cfg['DATABASE_PORT']),
        'user': cfg['DATABASE_USER'],
        'password': cfg['DATABASE_PASSWORD'],
        'dbname': cfg['DATABASE_DBNAME'],
    }
    return psycopg2.connect(**params)
def _get_chat_settings(self, chat_id):
    """Return the (rating, character, copyright, general) settings of a chat."""
    s = settings.load_settings(chat_id)
    return tuple(s[key] for key in ("rating", "character",
                                    "copyright", "general"))
def __init__(self):
    """Build the problem-details screen for a problem picked from the
    currently selected chapters."""
    self.title = 'Problem Details'
    data = problem.retrieve_problem_data()
    self.settings = settings.load_settings()
    chosen = problem.get_problem(data, self.settings['Chapters'])
    self.options = ['Problem: {}'.format(chosen['Problem']),
                    'Page: {}'.format(chosen['Page'])]
    self.prompt = '\nPress Enter to return to main menu...'
def __init__(self): """ Constructor for VideoProcessor """ self.prev_frames = [] self.clf = joblib.load("clf.p") self.scaler = pickle.load(open("scaler.p", "rb")) self.settings = load_settings() self.log = dict() return
def _add_settings(self, chat_id, category, tagList):
    """Add the given tags (underscores replaced by spaces) to the chat's
    settings under `category`, creating the category set if missing."""
    # FIX: str.replace works on both Python 2 and 3; the old
    # string.replace() module function was removed in Python 3.
    tl = map(lambda s: s.replace("_", " "), tagList)
    s = settings.load_settings(chat_id)
    try:
        s[category] |= set(tl)
    except KeyError:
        # FIX: narrowed from a bare except — only a missing category
        # should fall back to creating a fresh set.
        s[category] = set(tl)
    settings.save_settings(chat_id, s)
def _add_settings(self, chat_id, category, tagList):
    """Add the given tags (underscores replaced by spaces) to the chat's
    settings under `category`, creating the category set if missing."""
    # FIX: str.replace works on both Python 2 and 3; the old
    # string.replace() module function was removed in Python 3.
    tl = map(lambda s: s.replace("_", " "), tagList)
    s = settings.load_settings(chat_id)
    try:
        s[category] |= set(tl)
    except KeyError:
        # FIX: narrowed from a bare except — only a missing category
        # should fall back to creating a fresh set.
        s[category] = set(tl)
    settings.save_settings(chat_id, s)
def __init__(self):
    """Build the settings menu, listing the currently selected chapters."""
    self.title = 'Settings Menu'
    self.settings = settings.load_settings()
    chaps = self.settings['Chapters']
    if not chaps:
        # An empty selection means every chapter is in play.
        chaps = 'All'
    self.options = [
        'Current chapters selected: {}'.format(chaps),
        '\nEnter a comma separated list of chapters to add:',
    ]
    self.prompt = '\n>> '
def __init__(self, root):
    """Initialise timer state from persisted settings and bind to `root`."""
    cfg = load_settings()
    self.settings = cfg
    self.seconds = cfg['timer_seconds']
    self.pomodoro_work = cfg['pomodoro_work']
    self.pomodoro_break = cfg['pomodoro_break']
    # Tk variables mirroring the persisted mode choices.
    self.display_mode = IntVar(value=cfg['display_mode'])
    self.timer_mode = IntVar(value=cfg['timer_mode'])
    self.state = None
    self.root = root
    self.states = {}
    self.toggle_timer_mode()
def _load_settings(self):
    """Populate the settings-dialog entry widgets from the stored settings."""
    data = settings.load_settings()
    # Map each entry widget to the settings key it displays.
    for widget_name, key in (('DirEntry', 'directory'),
                             ('HandleEntry', 'handle'),
                             ('KeyEntry', 'key'),
                             ('PIDEntry', 'pid')):
        self._builder.get_object(widget_name).set_text(data[key])
def test_init(self):
    """A freshly constructed Model is untrained and empty."""
    settings.update(load_settings("tests", "minimal_with"))
    # Exercise the unconstrained-optimization configuration.
    settings["optimization"]["optimize"] = True
    settings["optimization"]["constrained"] = False
    model = Model()
    self.assertEqual(model.n_const, 0)
    self.assertFalse(model.trained)
    self.assertEqual(model.no_samples, 0)
    self.assertEqual(model.sampling_iterations, 0)
    self.assertFalse(model.optimization_converged)
def last_deploy_update(montagu_version):
    """Record metadata about the current deploy in the last-deploy file."""
    our_settings = settings.load_settings()
    restored_from = None
    # Only meaningful when the initial data came from a restore.
    if our_settings['initial_data_source'] == 'restore':
        restored_from = last_restore_read()
    payload = {
        'time': str(datetime.datetime.now()),
        'versions': versions.as_dict(),
        'settings': our_settings,
        'last_restore': restored_from,
        'montagu': montagu_version,
    }
    with open(path_last_deploy, 'w') as f:
        json.dump(payload, f, indent=4)
def run():
    """Execute one backup run over every configured target."""
    cfg = load_settings()
    logging.info("Beginning backup run for these targets: ")
    for tgt in cfg.targets:
        logging.info(" - " + tgt.id)
    for tgt in cfg.targets:
        # Visual separator between per-target sections in the log.
        logging.info("\n" + ("*" * 79))
        logging.info(tgt.id)
        logging.info("- Doing pre-backup step")
        tgt.before_backup()
        logging.info("- About to backup {paths} to {bucket}".format(
            paths=tgt.paths, bucket=tgt.bucket))
        run_duplicati(tgt, cfg)
def update_setting(option):
    """Persist the state of one settings checkbox to settings.json.

    `option` must be a key of either checkbox map; previously an unknown
    option crashed with a confusing NameError because `group`/`value`
    were never assigned — it now raises a clear ValueError.
    """
    settingsList = settings.load_settings()
    generalCheckboxMap = {'enableChatMode': enableChatModeBox.on,
                          'enableSleep': enableSleepBox.on,
                          'verboseLogging': verboseLoggingBox.on}
    tumblrCheckboxMap = {'publishOutput': publishOutputBox.on,
                         'enablePostPreview': enablePostPreviewBox.on,
                         'enableAskReplies': enableAskRepliesBox.on,
                         'enableAskDeletion': enableAskDeletionBox.on,
                         'fetchRealAsks': fetchRealAsksBox.on,
                         'enableReblogs': enableReblogsBox.on,
                         'enableDreams': enableDreamsBox.on}
    if option in generalCheckboxMap:
        group = 'general'
        value = generalCheckboxMap[option]
    elif option in tumblrCheckboxMap:
        group = 'tumblr'
        value = tumblrCheckboxMap[option]
    else:
        raise ValueError('Unknown setting option: %r' % (option,))
    settingsList[group][option] = value
    with open('settings.json', 'w') as settingsFile:
        json.dump(settingsList, settingsFile)
def do_crawl():
    """Run each enabled spider as a `scrapy crawl` subprocess.

    The many commented-out in-process Scrapy approaches (Crawler,
    CrawlerProcess, CrawlerRunner, reactor, etc.) were removed: dead code
    should be deleted, not kept in comments.
    """
    settings = load_settings()
    for spider in Spiders.spiders:
        if not settings[spider]["enabled"]:
            continue
        script = [
            "scrapy", "crawl", spider,
            "-a", "sync_length=%s" % settings[spider]["sync_length"]
        ]
        try:
            p = Popen(script, cwd='%s/crawler/agg' % os.getcwd())
            p.wait()
            print('Crawl Finished!')
        except subprocess.CalledProcessError:
            pass
        except OSError:
            # scrapy binary missing / not executable — skip this spider.
            pass
def options_init(option):
    # NOTE: Python 2 module (print statements).
    # Dispatch on the user's menu choice: '1' compose+send an email,
    # '2' reset settings, '3' PGP placeholder, '4' quit.
    if option == '1':
        #New Email
        message = client.create_email()
        if message == False:
            # Composition failed/cancelled; re-prompt the same option.
            options_init(option)
        else:
            #this will need to populate email object and pass detailsobj and message in tp email class and the pass full email to be sent
            to_addr = message[0]
            subj = message[1]
            msg_body = message[2]
            #This be of type detailsobj with futher developement
            detailsobj = settings.load_settings()
            host = detailsobj[0]
            port = detailsobj[1]
            sender = detailsobj[2]
            username = detailsobj[3]
            email = client.email(host,port,sender,to_addr,username,subj,msg_body)
            client.send_email(email)
            time.sleep(2)
            # Record the sent message before returning to the menu.
            logger = secure.log(email)
            logger.write_log()
            clearscr()
            # NOTE(review): `start` is presumably a banner string defined
            # elsewhere in this module — confirm.
            print start
            enter_options()
    elif option == '2':
        #Reset settings
        success_of_record = dump_settings()
        if success_of_record == '1':
            option = enter_options()
        else:
            exit("Error writing settings, please try again")
    else:
        if option == '3':
            print "PGP support coming soon"
        elif option == '4':
            exit("Bye!")
        else:
            print "Option must be either 1,2,3 or 4"
def __init__(self, config_filename, hands):
    """
    Constructor

    Args:
        config_filename (string): Name of the file to read
        hands (int): The current hand position as seconds from 12:00:00
    """
    # Read the pulse-clock configuration and drive the physical clock
    # with it; the pulse clock only cares about the seconds hand (mod 60).
    clock_settings = settings.load_settings(config_filename)
    self.pc = pulseclock.PulseClock(clock_settings, hands % 60)
    # Remember where the hands currently point (seconds past 12:00:00).
    self.hands = hands
    self.mode = "Wait"
def setUp(self):
    """Resolve paths to the MISO scripts and test fixtures."""
    # Directory containing this test module.
    here = os.path.dirname(os.path.abspath(os.path.expanduser(__file__)))
    self.miso_path = here
    self.tests_data_dir = os.path.join(here, "test-data")
    # The python executable name comes from the settings module.
    self.settings = load_settings(None)
    self.python_executable = Settings.get_python_executable()
    self.events_analysis_cmd = "%s %s " % (
        self.python_executable,
        os.path.join(here, "run_events_analysis.py"))
    self.tests_output_dir = os.path.join(here, "test-output")
    self.test_sam_filename = os.path.join(self.tests_data_dir,
                                          "sam-data", "c2c12.Atp2b1.sam")
    self.gff_events_dir = os.path.join(here, "gff-events")
    self.sam_to_bam_script = os.path.join(here, "sam_to_bam.py")
    self.index_gff_script = os.path.join(here, "index_gff.py")
def __init__(self):
    """Build the settings menu with its chapter-management choices."""
    self.settings = settings.load_settings()
    chapters = self.settings['Chapters']
    self.title = 'Settings Menu'
    # Menu choices dispatch to the matching handler; 0 is a no-op "back".
    self.menu = {
        1: self.add_chapter,
        2: self.rm_chapter,
        3: self.reset,
        0: lambda: None,
    }
    self.options = [
        'Current chapters selected: {}'.format(chapters),
        '1) Add chapters',
        '2) Remove chapters',
        '3) Reset chapters',
        '\n0) Return to main menu',
    ]
    self.prompt = '\nEnter choice >> '
def do_activate(self):
    """Plugin entry point: wire up the ListenBrainz client, restore the
    offline listen queue, and subscribe to shell-player signals."""
    logger.debug("activating ListenBrainz plugin")
    self.settings = load_settings()
    self.__client = ListenBrainzClient(logger=logger)
    # Re-read the API token whenever the user changes it in preferences,
    # and apply the current value immediately.
    self.settings.connect("changed::user-token", self.on_user_token_changed)
    self.on_user_token_changed(self.settings)
    # State for the currently playing track (entry, start time, elapsed).
    self.__current_entry = None
    self.__current_start_time = 0
    self.__current_elapsed = 0
    self.__queue = ListenBrainzQueue(self.__client)
    # Load the persisted queue under the lock; errors are routed to the
    # shared exception handler rather than aborting activation.
    with self.__lock:
        try:
            self.__queue.load()
        except Exception as e:
            _handle_exception(e)
    # NOTE(review): assuming activate() sits outside the lock — confirm
    # against the original formatting.
    self.__queue.activate()
    shell_player = self.object.props.shell_player
    shell_player.connect("playing-song-changed",
                         self.on_playing_song_changed)
    shell_player.connect("elapsed-changed", self.on_elapsed_changed)
def main(args=None):
    """Dump the database, push the dump to S3, and stage it locally as
    uploaded.dump / uploaded.json."""
    cfg = settings.load_settings()
    # Fail fast on any missing required configuration value.
    for k in REQUIRED_CFG:
        if not cfg[k]:
            raise Exception(
                f'Environment variable {k} is required but not set')
    localf = f'{time.time()}.dump'
    key = os.path.join(cfg['AWS_S3_FOLDER'], localf)
    backup_database(localf)
    upload_to_aws(localf, cfg['AWS_S3_BUCKET'], key, cfg)
    # Remove any previously staged upload before renaming the new one.
    for stale in ('uploaded.dump', 'uploaded.json'):
        if os.path.exists(stale):
            print(f'Deleting {stale}')
            os.remove(stale)
    print(f'Moving local file {localf} to uploaded.dump')
    os.rename(localf, 'uploaded.dump')
    print('Saving meta info to uploaded.json')
    with open('uploaded.json', 'w') as outfile:
        json.dump({'key': key}, outfile)
async def schedule(message, attachments, env):
    # TODO: language settings
    """Reply with the timetable for the day named (or dated) in env.body.

    Accepts "today"/"tomorrow" (RU/EN), a weekday name (RU/EN), or a
    DD.MM.YYYY date.  Requires the user to have configured a group.
    """
    user_setting = settings.load_settings(message.from_id)
    if user_setting:
        now = datetime.now(tz=pytz.timezone('Europe/Moscow'))
        week_day = now.weekday() + 1  # 1 = Monday ... 7 = Sunday
        week_days = {"ru": ["понедельник", "вторник", "среду", "четверг",
                            "пятницу", "субботу", "воскресенье"],
                     "en": ["monday", "tuesday", "wednesday", "thursday",
                            "friday", "saturday", "sunday"]}
        # "today"/"tomorrow" map to a dict holding the weekday number AND
        # a day delta; plain weekday names (added below) map to a bare int.
        days = {"en": {"today": {"day": week_day, "delta": 0},
                       "tomorrow": {"day": week_day + 1 if week_day in range(6) else 1,
                                    "delta": 1}},
                "ru": {"сегодня": {"day": week_day, "delta": 0},
                       "завтра": {"day": week_day + 1 if week_day in range(6) else 1,
                                  "delta": 1}}}
        for i in range(1, 8):
            days["ru"][week_days["ru"][i - 1]] = i
            days["en"][week_days["en"][i - 1]] = i
        if env.body in days['ru'].keys():
            day = days["ru"][env.body]
        elif env.body in days['en'].keys():
            day = days["en"][env.body]
        else:
            # Fall back to an explicit DD.MM.YYYY date.
            day = datetime.strptime(env.body, '%d.%m.%Y').weekday() + 1
        tmp = ["today", "tomorrow", "сегодня", "завтра"]
        if env.body in tmp:
            # Here `day` is the dict form, so day["delta"]/day["day"] apply.
            res = get_schedule(delta=day["delta"], group=user_setting['group'])
            text = "{}({}/{}):\n{}".format(message.text,
                                           week_days['ru'][day["day"] - 1],
                                           res[0], res[1])
        elif (env.body in days['ru'].keys() or env.body in days['en'].keys()) and env.body not in tmp:
            # A weekday name: show the entry for this week and next week.
            res = get_schedule(day=day, w=1, group=user_setting['group'])
            text = "{}({}):\n{}".format(message.text, res[0], res[1])
            # next week
            res = get_schedule(day=day, w=2, group=user_setting['group'])
            text += "\n{}({}):\n{}".format(message.text, res[0], res[1])
        else:
            date = datetime.strptime(env.body, '%d.%m.%Y')
            res = get_schedule(date=date, group=user_setting['group'])
            text = "{}({}/{}):\n{}".format(message.text,
                                           week_days['ru'][day - 1],
                                           res[0], res[1])
    else:
        text = 'Для начала установите группу через пункт меню настройки.'
    await env.reply(text)
def load_pipeline_settings(self):
    """
    Load the settings filename
    """
    # NOTE: Python 2 module (print statements).
    # Parses self.settings_filename, caches the parsed settings on self,
    # and loads the dependent sequence-file and group information.
    if not os.path.isfile(self.settings_filename):
        print "Error: %s is not a settings filename." % (self.settings_filename)
        sys.exit(1)
    self.settings = settings.load_settings(self.settings_filename)
    self.settings_info, self.parsed_settings = self.settings
    # Reference genome used for mapping.
    self.genome = self.settings_info["mapping"]["genome"]
    # Determine if we're in paired-end mode
    self.is_paired_end = False
    if self.settings_info["mapping"]["paired"]:
        self.is_paired_end = True
    # Load the sequence files
    self.load_sequence_files()
    # Load the directory where pipeline output should go
    self.output_dir = utils.pathify(self.settings_info["data"]["outdir"])
    print "Loaded pipeline settings (source: %s)." % (self.settings_filename)
    # Pipeline init directory
    self.init_dir = os.path.join(self.settings_info["pipeline-files"]["init_dir"])
    # Loading group information if there is any
    self.load_groups()
#!/usr/bin/python # This Python file uses the following encoding: utf-8 import yaml from dateutil.parser import parse import datetime import dateutil.tz as tz import sys import os import os.path import subprocess import settings from mako.template import Template config=settings.load_settings() START = datetime.datetime.strptime(config['start_date'],"%Y/%m/%d") HERE = os.path.dirname(__file__) def get_balance(acct): p = subprocess.Popen(['ledger', '-f', os.path.join(HERE,'ledger'), '-n', 'balance', acct], stdout=subprocess.PIPE) (out, _) = p.communicate() try: return int(out.split()[0][1:]) except: return 0 def get_debts(): p = subprocess.Popen(['ledger', '-f', os.path.join(HERE, 'ledger'), '-n', 'balance', 'Pool:Owed:'],
args = parser.parse_args() import matplotlib as mpl mpl.use('Agg') import gv import numpy as np import amitgroup as ag import glob import matplotlib.pylab as plt import os from settings import load_settings import itertools as itr settings = load_settings(args.settings) #d = gv.Detector.load('uiuc-supermodel01.npy') parts_file = settings[settings['detector']['descriptor']]['file'] descriptor = gv.BinaryDescriptor.getclass('polarity-parts').load(parts_file) esett = descriptor.bedges_settings() #files = sorted(glob.glob(os.path.expandvars('$UIUC_DIR/TestImages/*.pgm'))) files = sorted(glob.glob(os.path.expandvars('$VOC_DIR/JPEGImages/*.jpg'))) #files = [np.random.uniform(size=(200, 200))] #esett['minimum_contrast'] = 0.2 all_ors = []
from helpers import scanserial from settings import load_settings settings = load_settings() def build_device_list(): device_settings = settings.core.printer.devices match_includes = len(device_settings.includes) > 0 raw_devices = scanserial() device_dict = {} index = 0 for device in raw_devices: if match_includes: if not device in device_settings.includes: continue if device in device_settings.excludes: continue device_dict[index] = device index = index + 1 return device_dict devices = build_device_list() device_occupations = {} sessions = {} ws_handles = {} def delete_expired_objects():
args = parser.parse_args() settings_file = args.settings model_file = args.model output_file = args.output import matplotlib matplotlib.use("Agg") import matplotlib.pylab as plt import numpy as np import gv import amitgroup as ag import glob from skimage.transform import pyramid_reduce, pyramid_expand from settings import load_settings settings = load_settings(settings_file) detector = gv.Detector.load(model_file) descriptor = detector.descriptor def create_bkg_generator(size, files): i = 0 prng = np.random.RandomState(0) yielded = 0 while True: im = gv.img.asgray(gv.img.load_image(files[i])) to = [im.shape[i]-size[i]+1 for i in xrange(2)] #print min(to) if min(to) > 0: x, y = [prng.randint(0, to[i]) for i in xrange(2)] yielded += 1 yield im[x:x+size[0],y:y+size[1]]
def execute_files(settings_files):
    """Run execute() once per settings object loaded from the given files."""
    for cfg in load_settings(settings_files):
        execute(cfg)
import os
import time
import datetime
import parse_movies
import find_torrent
import settings
from gmail import Gmail

# Load persisted settings if present; fall back to interactive setup when the
# settings file is missing or unreadable (EOFError => corrupt pickle-style file).
if os.path.isfile("KickassMoviesSettings.data"):
    try:
        downloaded_movies_location, users, sleep_time, email_address, plex, filebot_location = settings.load_settings()
    except EOFError:
        # Corrupt settings file: remove it and run first-time setup instead.
        os.remove("KickassMoviesSettings.data")
        downloaded_movies_location, users, sleep_time, email_address, plex, filebot_location = settings.initial_setup()
else:
    downloaded_movies_location, users, sleep_time, email_address, plex, filebot_location = settings.initial_setup()

# Optionally let the user edit settings before the main loop starts.
if input("Would you like to change the settings? (y/n): ").lower() == "y":
    downloaded_movies_location, users, sleep_time, email_address, plex, filebot_location = settings.change_settings()

# When Plex mode is on, watch the filebot output folder instead.
if plex:
    downloaded_movies_location = filebot_location

print_length = 50
new_movies_location = downloaded_movies_location
# Snapshot of existing downloads so new arrivals can be detected later.
old_downloaded = os.listdir(downloaded_movies_location)
gmail = Gmail(email_address)
gmail.login_yagmail()
# Tail of an upload routine (its definition starts above this excerpt):
# dry runs only print the instructions, otherwise upload them.
if options.dry_run:
    talks_api.print_dry_run_output(all_instructions)
else:
    try:
        talks_api.upload(all_instructions)
    # NOTE(review): bare except hides KeyboardInterrupt/SystemExit too —
    # consider `except Exception`.
    except:
        log_exception(logger, "Failed to upload events to OxTalks")

# Entry point: parse options, load settings/logging, then either list the
# available trawlers or pull events. Any error is logged, not raised.
try:
    if __name__ == "__main__":
        parser = OptionParser()
        parser.add_option("-t", "--trawler", dest="trawler",
                          help="name of trawler to run")
        parser.add_option("-l", "--list", dest="list_trawlers", action='store_true',
                          help="list names of available trawlers")
        parser.add_option("-d", "--dry_run", dest="dry_run", action='store_true',
                          help="Dry run, don't upload to talks website")
        (options, positional_args) = parser.parse_args()
        # Exactly one positional argument (the settings file) is expected;
        # the trailing comma unpacks it and raises ValueError otherwise.
        options.settings_filename, = positional_args
        settings = load_settings(options.settings_filename)
        load_main_logging(settings.logging_config_filename)
        if options.list_trawlers:
            list_trawlers(options, settings)
        else:
            pull_events(options, settings)
except:
    log_exception(logger, "Unrecoverable Error")
def main(settings_filename):
    """Load settings, then initialise and stop the runner for the configured device."""
    settings.load_settings(settings_filename)
    device_runner = get_runner(settings.device_type)
    device_runner.init()
    device_runner.stop()
def on_chat_message(self, msg):
    """Handle an incoming Telegram message.

    Photos: infer tags and warn when they intersect this chat's warned-tag
    settings. Text: dispatch /tagmgr subcommands (add/rm/clear/show) and
    /showtags (reply-to-photo). `mt` accumulates the reply text; the last
    matching check wins since each assignment overwrites it.
    """
    content_type, chat_type, chat_id = telepot.glance(msg)
    mt = None
    if content_type == "photo":
        (rating, character, copyright, general) = self._get_tag_list(msg)
        (warn_rating, warn_character, warn_copyright, warn_general) = self._get_chat_settings(chat_id)
        # Set intersection (&) of inferred tags vs. warned tags per category.
        if character & warn_character:
            mt = messages.tag_error(character & warn_character)
        if copyright & warn_copyright:
            mt = messages.tag_error(copyright & warn_copyright)
        if general & warn_general:
            mt = messages.tag_error(general & warn_general)
        if rating in warn_rating:
            mt = messages.rating_error
        # Only reply when some warned tag/rating actually matched.
        if mt:
            self.sendMessage(chat_id, mt, reply_to_message_id = msg["message_id"])
    if content_type == "text":
        command = msg['text'].strip().lower()
        # /tagmgr (optionally suffixed with @botusername) manages warned tags.
        if re.match(r"^/tagmgr(" + self.username + r")?\b", command):
            command = re.sub(r"^/tagmgr(" + self.username + r")?\s", "", command)
            mt = messages.tagmgr_usage  # default reply when no subcommand matches
            if re.match(r"^add.+$", command):
                params = re.sub(r"^add\s", "", command).split()
                if len(params) == 0 or params[0] not in ['rating', 'character', 'copyright', 'general']:
                    mt = "Please specify tag category and tags to add."
                else:
                    self._add_settings(chat_id, params[0], params[1:])
                    mt = messages.okay
            if re.match(r"^rm.+$", command):
                params = re.sub(r"^rm\s", "", command).split()
                if len(params) == 0 or params[0] not in ['rating', 'character', 'copyright', 'general']:
                    mt = "Please specify tag category and tags to remove."
                else:
                    self._rm_settings(chat_id, params[0], params[1:])
                    mt = messages.okay
            if re.match(r"^clear.+$", command):
                params = re.sub(r"^clear\s", "", command).split()
                if len(params) != 1 or params[0] not in ['rating', 'character', 'copyright', 'general']:
                    mt = "Please specify tag category to clear."
                else:
                    # Clear = remove every stored entry for that category.
                    all_entries = settings.load_settings(chat_id)[params[0]]
                    self._rm_settings(chat_id, params[0], all_entries)
                    mt = messages.okay
            if re.match(r"^show$", command):
                (rating, character, copyright, general) = self._get_chat_settings(chat_id)
                mt = "Warned tag list\n"
                mt += "Rating: " + ", ".join(rating) + "\n"
                mt += "Character: " + ", ".join(character) + "\n"
                mt += "Copyright: " + ", ".join(copyright) + "\n"
                mt += "General: " + ", ".join(general)
            self.sendMessage(chat_id, mt, reply_to_message_id = msg["message_id"])
        # /showtags replies with inferred tags for the photo being replied to.
        if re.match(r"^/showtags(" + self.username + r")?$", command):
            mt = "Please use this command as a reply to an image message."
            reply_id = msg["message_id"]
            if "reply_to_message" in msg:
                rmsg = msg["reply_to_message"]
                rcontent_type, rchat_type, rchat_id = telepot.glance(rmsg)
                if rcontent_type == "photo":
                    (rating, character, copyright, general) = self._get_tag_list(rmsg)
                    mt = "Inferred tags for this image\n"
                    mt += "Rating: " + rating + "\n"
                    mt += "Character: " + ", ".join(character) + "\n"
                    mt += "Copyright: " + ", ".join(copyright) + "\n"
                    mt += "General: " + ", ".join(general)
                    reply_id = rmsg["message_id"]
            self.sendMessage(chat_id, mt, reply_to_message_id = reply_id, disable_notification = True)
def run_SGEarray_cluster(
    arg_list,
    argfile,
    cluster_output_dir,
    queue_type="long",
    cluster_scripts_dir=None,
    chunk=2500,
    settings=None,
    cmd_name="qsub",
    job_name="miso_job",
):
    """
    Run MISO jobs on cluster using SGE.

    Function contributed by Michael Lovci, UCSD.

    Writes one command per line of `argfile`, emits an SGE array-job shell
    script that executes `chunk` lines per task, makes it executable, and
    submits it with `cmd_name`. Python 2 code: integer division computes
    `njobs`, and `raise Exception, ...` / `print` statements are 2.x syntax.
    """
    misc_utils.make_dir(cluster_output_dir)
    # Create arguments file to pass on to job: one shell command per line.
    f = open(argfile, "w")
    nargs = len(arg_list)
    # Number of array tasks, rounding up when nargs is not a multiple of chunk
    # (relies on Python 2 integer `/`).
    if nargs % chunk == 0:
        njobs = nargs / chunk
    else:
        njobs = 1 + (nargs / chunk)
    for args in arg_list:
        f.write(args[0] + "\n")
    f.close()
    if cluster_scripts_dir == None:
        cluster_scripts_dir = os.path.join(cluster_output_dir, "cluster_scripts")
    misc_utils.make_dir(cluster_scripts_dir)
    scripts_output_dir = os.path.join(cluster_output_dir, "scripts_output")
    misc_utils.make_dir(scripts_output_dir)
    scripts_output_dir = os.path.abspath(scripts_output_dir)
    # stderr/stdout targets for the array job (string.join is the 2.x stdlib helper).
    script_error = os.path.join(scripts_output_dir, string.join([job_name, "err"], "."))
    script_out = os.path.join(scripts_output_dir, string.join([job_name, "out"], "."))
    cluster_script = os.path.join(cluster_scripts_dir, "run_miso.sh")
    # An explicit settings file overrides the submit command from Settings.
    if settings != None:
        load_settings(settings)
        cmd_name = Settings.get_cluster_command()
    if queue_type == "long":
        queue_name = Settings.get_long_queue_name()
    elif queue_type == "short":
        queue_name = Settings.get_short_queue_name()
    else:
        raise Exception, "Unknown queue type: %s" % (queue_type)
    # NOTE(review): this branch is unreachable — the raise above fires for any
    # queue_type other than "long"/"short", including None. Verify intent.
    if queue_type == None:
        print " - queue: unspecified"
    else:
        print " - queue: %s, using queue name %s" % (queue_type, queue_name)
    # Emit the SGE array-job script (#$ lines are qsub directives).
    cs = open(cluster_script, "w")
    cs.write("#!/bin/sh" + "\n")
    cs.write("#$ -N %s\n" % (job_name))
    cs.write("#$ -S /bin/sh\n")
    cs.write("#$ -p -1023\n")
    cs.write("#$ -o %s\n" % (script_out))
    cs.write("#$ -e %s\n" % (script_error))
    cs.write("#$ -t 1-%s\n" % (njobs))
    ##execute from current working directory
    cs.write("#$ -cwd\n")
    ## import environment variables
    cs.write("#$ -V\n")
    if queue_name:
        cs.write("#$ -l %s\n" % (queue_name))
    cs.write('echo "hostname is:"\n')
    cs.write("hostname\n")
    # Each task derives its own line range of ARGFILE from $SGE_TASK_ID.
    cs.write("ARGFILE=%s\n" % argfile)
    cs.write("SEQ=/usr/bin/seq\n")
    cs.write("index=0\n")
    cs.write("lastindex=0\n")
    cs.write('let "index = $SGE_TASK_ID * %s"\n' % (chunk))
    chunk2 = chunk - 1
    cs.write('let "lastindex = $index - %s"\n' % (chunk2))
    if chunk2 > 0:
        cs.write("for i in `$SEQ $lastindex $index`\n")
    else:
        cs.write("for i in $index\n")
        # if user chooses 1 for chunk size
    cs.write("do\n")
    # head/tail picks line $i; eval runs it as a shell command.
    cs.write(" line=$(cat $ARGFILE | head -n $i | tail -n 1)\n")
    cs.write(" eval $line\n")
    cs.write("done\n")
    cs.close()
    # Make script executable
    os.system('chmod +x "%s"' % (cluster_script))
    # NOTE(review): paths are interpolated into shell commands unquoted beyond
    # these double quotes — assumes trusted, space-free paths; verify.
    qsub_cmd = cmd_name + ' "%s"' % (cluster_script)
    os.system(qsub_cmd)
# Tail of an options-dispatch routine whose earlier branches (options '1'/'2')
# start above this excerpt; indentation below is a best-effort reconstruction.
else:
    exit("Error writing settings, please try again")
# Outer branch: remaining menu options.
else:
    if option == '3':
        print "PGP support coming soon"
    elif option == '4':
        exit("Bye!")
    else:
        print "Option must be either 1,2,3 or 4"

# NOTE(review): `result` is set above this excerpt — presumably whether a
# settings file was loaded successfully; verify.
if result == False:
    print "Please complete the following details"
    dump_settings()
else:
    settings_package = settings.load_settings()
    #details for sending email
    host = settings_package[0]
    port = settings_package[1]
    sender = settings_package[2]
    username = settings_package[3]

def enter_options():
    # Show the main menu, apply the chosen option, and return the raw choice.
    option = raw_input("\n\tOptions\n\t1: New Email\t\t2: Reset Settings\n\t3: Encrpyted PGP Email\t4: Quit\n\nPlease enter an option @> ")
    options_init(option)
    return option

enter_options()
'''
import random
import json
import time
import argparse
from GUI import Window, Label, CheckBox, Button, application
from GUI.StdColors import grey
import settings
import emma
import tumblrclient
import utilities

settingsList = settings.load_settings()

def run_emma():
    # If we aren't in chat mode, every 15 minutes, try to make a post. Replying to asks is most likely, followed by dreams, and reblogging a post is the least likely
    if settings.option('general', 'enableChatMode'):
        emma.chat()
    else:
        # Use real asks from tumblr, or canned fakes, per configuration.
        if settings.option('tumblr', 'fetchRealAsks'):
            askList = tumblrclient.get_asks()
        else:
            askList = utilities.fakeAsks
        print "Choosing activity..."
        # Weighted pool: 1x reblog, 2x dream, 3x ask-reply (when asks exist).
        activities = []
        if settings.option('tumblr', 'enableReblogs'):
            activities.append('reblogPost')
        if settings.option('tumblr', 'enableDreams'):
            activities.extend(['dream'] * 2)
        if settings.option('tumblr', 'enableAskReplies') and askList != []:
            activities.extend(['replyToAsks'] * 3)
        # Function body continues beyond this excerpt.
        activity = random.choice(activities)
def __init__(self):
    """Build the AppSalesGraph main window: load settings, construct all
    panels/sizers, wire events, then load sales data.

    Statement order matters: the Configure* calls create the widgets that the
    sizer code below references.
    """
    super(MainFrame, self).__init__(None, -1, "AppSalesGraph")
    self.version = 1.0
    self.sales_downloader = None
    self.updater = None
    icon = wx.Icon("images/key.ico", wx.BITMAP_TYPE_ICO)
    self.SetIcon(icon)
    settings.load_settings()
    self.Bind(wx.EVT_CLOSE, self.OnCloseWindow)

    # Set structures
    EVT_RESULT(self, self.OnResult)  # custom worker-thread result event
    self.loaded_dates = []
    self.selected_products = []
    self.event_levels = []
    self.delayed = False

    # Build up GUI
    self.ConfigureMenus()
    # self.ConfigureToolbars()
    self.ConfigureSizers()
    self.ConfigureBottomBar()
    self.ConfigureDatePicker()
    self.ConfigureOtherPanels()
    self.ConfigureListCtrls()
    self.ConfigureTopPanel()
    self.ConfigureBottomPanel()

    # Lay out the graph area (top bar over the notebook), then the main
    # product-list/graph split, then stack the bottom box underneath.
    self.graphics_sizer.AddMany(((self.top_bar_area, 0, wx.ALIGN_LEFT|wx.EXPAND|wx.LEFT, 5),
                                 (self.notebook_frame, 4, wx.EXPAND, 5)))
    self.main_sizer.AddMany(((self.products, 1, wx.EXPAND|wx.ALL, 0),
                             (self.graphics_sizer, 3, wx.EXPAND)))
    # label_to.Add(wx.StaticText(self, label="To:"), 1, wx.EXPAND|wx.ALL, 2)
    self.uber_sizer.Add(self.main_sizer, 5, wx.EXPAND|wx.ALL, 0)
    # wx.ALL is for sides that border applies to
    self.uber_sizer.Add(item=self.bottom_box, proportion=0,
                        flag=wx.EXPAND|wx.ALL|wx.FIXED_MINSIZE|wx.ALIGN_BOTTOM, border=0)
    # self.graph_renderer = operator.attrgetter('paid_downloads')
    self.SetSizerAndFit(self.uber_sizer)
    self.CentreOnScreen()

    # Pre-select the first product if any exist; ignore failures (empty list).
    try:
        self.products.Select(0)
        self.OnProductSelected(None)
    except:
        pass
    self.Update()

    # Attach sales data model to the panels that display it.
    self.sales_period = SalesPeriod(self)
    self.notebook.sales_period = self.sales_period
    self.popularity_list.SetData(self.sales_period)
    if not self.sales_panel == None:
        self.sales_panel.sales_period = self.sales_period
    self.LoadSalesFiles()
    # wx.CallLater(500, self.CheckForUpdateFile, None)
    self.OnProfit(None)
import tornado.ioloop import tornado.autoreload import tornado.web import settings import email_sender import urls import logging import datetime import traceback from tornado.options import options if __name__ == "__main__": # Load settings settings.load_settings() application = tornado.web.Application(urls.urls, **options.as_dict()) application.listen(options.port) server_instance = tornado.ioloop.IOLoop.instance() # tornado.autoreload.add_reload_hook(database.release) try: server_instance.start() except KeyboardInterrupt: logging.error("Existing") exit_error = u'Keyboard Exit' except Exception, e: logging.exception(e) exit_error = traceback.format_exec() finally: