def test_save_config():
    """Saving a value via save_config must round-trip through read_config."""
    with TemporaryDirectory() as tmp:
        target = Path(tmp) / "settings.json"
        save_config(values={"settings_slic3r_exe": "now"}, path=target)
        loaded = read_config(target)
        assert loaded.slic3r_exe == "now"
def run(args):
    """Dispatch parsed CLI arguments to the matching plugin operation.

    ``args`` must carry ``base_url``/``base_authentication`` plus the
    attributes set by the chosen sub-command (``list`` or ``show``).
    Results are pretty-printed to stdout.
    """
    client = Client(base_url=args.base_url, base_auth=args.base_authentication)
    pp = pprint.PrettyPrinter()
    if hasattr(args, 'list'):
        if args.user:
            plugins = list_plugins(client, key='userInstalled', value='boolean', pattern='true')
        elif args.system:
            # BUG FIX: the key was misspelled 'userINstalled', so the
            # system-plugins filter could never match the real
            # 'userInstalled' attribute (compare the user branch above).
            plugins = list_plugins(client, key='userInstalled', value='boolean', pattern='false')
        elif args.key:
            plugins = list_plugins(client, key='key', value='regex', pattern=args.key)
        elif args.key_configuration_file:
            config = read_config(args.key_configuration_file)
            key = get_key_config(config)
            plugins = list_plugins(client, key='key', value='regex', pattern=key)
        else:
            # No filter given: list everything.
            plugins = list_plugins(client)
        pp.pprint(plugins)
    elif hasattr(args, 'show'):
        plugin = show_plugin(client, args.key)
        pp.pprint(plugin)
def load_unitsets(cls):
    """Populate the Simulator's car-unit and unit-set registries from the
    CSV files named in the configuration."""
    config = configuration.read_config()
    car_unit = config['datafile']['car_unit']
    unit_set = config['datafile']['unit_set']

    # Car units: one CarUnit per CSV row, indexed by unitid.
    with open(car_unit, 'rt') as csvfile:
        for row in csv.DictReader(csvfile):
            unit = CarUnit(
                unitid=row['unitid'],
                cars=int(row['cars']),
                max_speed=float(row['max_speed']),
                acceleration=float(row['acceleration']),
                deceleration=float(row['deceleration']),
                emergency_factor=float(row['emergency_factor']),
            )
            Simulator.carunit_list.append(unit)
            Simulator.carunit_dict[unit.unitid] = unit

    # Unit sets: 'assigned_unit' is a stringified list (e.g. "['u1', 'u2']");
    # strip the brackets/quotes and split it back into unit ids.
    with open(unit_set, 'rt') as csvfile:
        for row in csv.DictReader(csvfile):
            unit_ids = row['assigned_unit'].strip("[']").split("', '")
            members = [Simulator.carunit_dict[uid] for uid in unit_ids]
            uset = UnitSet(unitsetid=row['unitsetid'],
                           location=str(row['location']),
                           assigned_unit=members)
            Simulator.unitset_list.append(uset)
            Simulator.unitset_dict[uset.unitsetid] = uset
def _run_command():
    """Parse the command line, load configuration, and invoke the selected
    sub-command handler."""
    opts = create_parser().parse_args()
    cfg = configuration.read_config()
    opts.func(opts, cfg)
def check_mod_versions(verbose=True):
    '''
    Check for mod updates
    :param verbose: In non verbose mode will simply return True or False when it finds first mod to be updated.
    In verbose mode will output modids_to_update, mod_names, memory, data required for updater function to work.
    :return: See above
    '''
    # NOTE(review): the 'verbose' parameter is not referenced in the visible
    # body — the non-verbose short-circuit described above may live elsewhere
    # or be unimplemented; confirm before relying on it.
    log.info("Checking for mod updates...")
    modids = get_active_mods()
    memory = read_config("mod_updater_data")
    if not memory:
        # First run (or empty store): seed every mod with an ancient date so
        # all of them are treated as needing an update.
        memory = {}
        for modid in modids:
            memory[modid] = {'last_update': datetime(year=1991, month=1, day=1)}
            # write_config("mod_updater_data", memory)
    for modid in modids:
        # Mods activated since the last run get the same ancient seed date.
        if modid not in memory:
            memory[modid] = {'last_update': datetime(year=1991, month=1, day=1)}
            write_config("mod_updater_data", memory)
    modids_to_update = []
    mod_names = []
    for modid in modids:
        log.debug(f"Querying info on mod {modid}")
        # Scrape the Steam Workshop page: the third 'detailsStatRight' div
        # holds the last-updated timestamp. Brittle if Valve changes markup.
        mod_info_html = requests.get(f"https://steamcommunity.com/sharedfiles/filedetails/?id={modid}").text
        soup = BeautifulSoup(mod_info_html, features="html.parser")
        date_string = \
            soup.find("div", {"id": "mainContents"}).find("div", {"class": "workshopItemPreviewArea"}).find_all(
                "div", {"class": "detailsStatRight"})[2].text
        try:
            # Steam includes the year only for updates from previous years.
            workshop_updated_date = datetime.strptime(date_string, "%d %b, %Y @ %H:%M%p")
        except ValueError:
            # Year-less format means "this year".
            workshop_updated_date = datetime.strptime(date_string, "%d %b @ %H:%M%p")
            workshop_updated_date = workshop_updated_date.replace(year=datetime.now().year)
        if workshop_updated_date > memory[modid]['last_update']:
            log.debug(
                f"Update required for mod: {modid}; Last updated date: {memory[modid]['last_update']}; Workshop date: {workshop_updated_date}")
            modids_to_update.append(modid)
            mod_name = soup.find("div", {"class": "workshopItemTitle"}).text
            mod_names.append(mod_name)
            # memory[modid]['last_update'] = datetime.now()
            # write_config("mod_updater_data", memory)
    if len(modids_to_update) > 0:
        log.info(f"Update required for mods: {modids_to_update}")
        # 'memory' is intentionally not persisted here; the updater records
        # the new timestamps after a successful update (see update_mods).
        data = {
            'mod_ids': modids_to_update,
            'mod_names': mod_names,
        }
        return data
    else:
        log.info("All mods are up to date.")
        return None
def background_tasks():
    """Run every background task named in the updater configuration.

    A failing task is reported via Discord instead of aborting the loop,
    so the remaining tasks still get a chance to run.
    """
    for task in read_config('updater_config')['background_tasks']:
        try:
            runner = getattr(am, task)
            runner()
        except Exception as e:
            am.discord_message(f"Background task failed: {task}; {e}")
def __init__(self):
    """Initialise the editor window: build the UI, load (or create) the
    configuration, and reset all editing state to 'nothing open'."""
    super().__init__()
    self.res_file = None
    self.setup_ui()
    self.texture_archive = None
    try:
        self.configuration = read_config()
        print("Config file loaded")
    except FileNotFoundError as e:
        # No config on disk yet — fall back to a generated default.
        print("No config file found, creating default config...")
        self.configuration = make_default_config()
    self.pathsconfig = self.configuration["default paths"]
    #self.editorconfig = self.configuration["gen editor"]
    # Editing state: nothing loaded or selected yet.
    self.current_gen_path = None
    self.current_coordinates = None
    self.editing_windows = {}
    self.add_object_window = None
    self.object_to_be_added = None
    self.edit_spawn_window = None
    self._window_title = ""
    self._user_made_change = False
    self._justupdatingselectedobject = False
    self.addobjectwindow_last_selected = None
    self.lastshow = None
    self.modelindices = {}
def load_unitsets(cls):
    """Load CarUnit and UnitSet records from the configured CSV files into
    the Simulator's class-level lists and lookup dicts."""
    config = configuration.read_config()
    car_unit = config['datafile']['car_unit']
    unit_set = config['datafile']['unit_set']
    # read car unit information from csv file
    with open(car_unit, 'rt') as csvfile:
        reader = csv.DictReader(csvfile)
        for row in reader:
            cn = CarUnit(unitid=row['unitid'],
                         cars=int(row['cars']),
                         max_speed=float(row['max_speed']),
                         acceleration=float(row['acceleration']),
                         deceleration=float(row['deceleration']),
                         emergency_factor=float(row['emergency_factor']))
            Simulator.carunit_list.append(cn)
            Simulator.carunit_dict[cn.unitid] = cn
    # read unit set information from csv file
    with open(unit_set, 'rt') as csvfile:
        reader = csv.DictReader(csvfile)
        for row in reader:
            # convert str to list for assigned units — the column stores a
            # stringified Python list such as "['u1', 'u2']"
            lrange = row['assigned_unit'].strip("[']").split("', '")
            assigned_unit = list([Simulator.carunit_dict[x] for x in lrange])
            us = UnitSet(unitsetid=row['unitsetid'],
                        location=str(row['location']),
                        assigned_unit=assigned_unit)
            Simulator.unitset_list.append(us)
            Simulator.unitset_dict[us.unitsetid] = us
def setup_communication(cls):
    """Connect the Simulator's client socket to the configured simulation
    server (tcp://host:port).

    Connection failures are reported to stdout instead of raised, so the
    caller keeps running (best-effort by design).
    """
    config = configuration.read_config()
    print('Simulation mode')
    Simulator.host = config['sim_server']['host_sim']
    Simulator.port = config['sim_server']['port_sim']
    try:
        Simulator.client.connect("tcp://%s:%s" % (Simulator.host, Simulator.port))
    except Exception:
        # FIX: narrowed from a bare 'except:' so KeyboardInterrupt and
        # SystemExit are no longer swallowed.
        print('Connection to server failed at %s' % datetime.now())
def __init__(self):
    """Create the module-global work/result queues and load the node
    configuration for this harvester instance."""
    # Queues are module-global so worker processes spawned later can share them.
    global in_queue
    in_queue = multiprocessing.JoinableQueue()
    global out_queue
    out_queue = multiprocessing.Queue()
    global catalog_ns
    # XML namespace prefix for THREDDS InvCatalog 1.0 elements.
    catalog_ns = '{http://www.unidata.ucar.edu/namespaces/thredds/InvCatalog/v1.0}'
    self.config = config.read_config()
    self.data_node = self.config['nodes']['data_node']
def layout():
    """Build the settings form: a Slic3r executable picker row and a Save
    button row, pre-filled from the stored configuration."""
    from configuration import read_config
    settings = read_config()
    slicer_row = [
        sg.T("Slic3r:"),
        sg.I(default_text=settings.slic3r_exe, key="settings_slic3r_exe"),
        sg.FileBrowse(),
    ]
    save_row = [sg.Button("Save", key="settings_save_config")]
    return [slicer_row, save_row]
def perform_checks(checks=None, auto_update=True):
    """Run the configured update checks, queue detected updates behind a
    lock file, broadcast a warning, and trigger the restart once the
    scheduled time arrives.

    :param checks: list of check dicts ('check_function', 'update_name');
        defaults to the 'updater_checks' section of the updater config.
    :param auto_update: when True, actually run update() once the
        scheduled restart time is reached.
    """
    if not checks:
        checks = read_config('updater_config')['updater_checks']
    # The lock doubles as persistent storage: its message holds the YAML
    # queue state between invocations.
    queue = Lock("queue")
    if queue.is_locked:
        # HACK(review): yaml.load without an explicit Loader is deprecated
        # and unsafe on untrusted input; the message is self-produced below,
        # but consider yaml.safe_load.
        queue_data = yaml.load(queue.message)
        first_detection = False
    else:
        queue_data = {"items": {}}
        first_detection = True
    notification = ""
    if first_detection:
        notification += "Restart scheduled ~{} minutes from now.\n".format(
            RESTART_DELAY)
    # Go through all checks and run functions from arm_manager, storing returned dict values
    # Return values must be dicts that will later be passed down to notification strings and action functions
    detected = False
    for check_id, check in enumerate(checks):
        # Skip checks already queued on a previous pass.
        if check_id not in queue_data['items']:
            check_response = getattr(am, check['check_function'])()
            if check_response:
                queue_data['items'][check_id] = check
                checks[check_id]['check_response'] = check_response
                log.debug("Adding to queue: {}; {};".format(
                    check['update_name'], checks[check_id]['check_response']))
                notification += "{} detected, adding to queue.\n".format(
                    check['update_name'].format(
                        **checks[check_id]['check_response']))
                detected = True
    # Send notification if new update was detected regardless of whether it's first update or not.
    # If no updates detected it's not first detection, there's no queue and no notification will be sent.
    if detected and notification.strip():
        queue_data['update_time'] = datetime.now() + timedelta(
            minutes=RESTART_DELAY)
        queue.lock(yaml.dump(queue_data, default_flow_style=False))
        am.broadcast(notification, True)
    # Now check if datetime for scheduled restart is near. If not warn about upcoming update. If it is then start
    # performing restart and required actions.
    if queue.is_locked:
        if datetime.now() >= queue_data['update_time']:
            if auto_update:
                update(queue, queue_data)
def post_data(process: str, data: dict) -> None:
    """Best-effort telemetry POST of *data* tagged with *process*.

    Failures (network errors, timeouts) are deliberately swallowed so
    telemetry can never break the caller.

    :param process: label identifying what kind of event this is.
    :param data: JSON-serialisable payload.
    """
    config = read_config()
    payload = {
        "psk": "gerberman",
        "uuid": config.uuid,
        "type": process,
        "data": data,
    }
    try:
        # FIX: added a timeout — without one, a dead endpoint would hang
        # the caller indefinitely. The response is intentionally ignored.
        requests.post(
            "https://gerbil.cld.vogelcc.com/",
            json=payload,
            timeout=10,
        )
    except Exception:
        # Intentional best-effort swallow (telemetry only).
        pass
def _run_command():
    """Parse the command line, lazily load schema metadata and/or the
    configuration as requested, then dispatch to the sub-command handler."""
    args = create_parser().parse_args()
    metadata = sqlplus.get_metadata(args.schema_dir) if args.need_metadata else None
    config = configuration.read_config(args.config) if args.need_config else None
    args.func(args, metadata, config)
def update_mods(mod_ids, **kwargs):
    """Update the given Steam Workshop mods via ModDodo and, on success,
    record each mod's new update timestamp in the persistent store.

    :param mod_ids: iterable of workshop mod ids to update.
    :return: True if ModDodo completed without raising, else False.
    """
    success = True
    try:
        ModDodo(os.path.dirname(STEAMCMD), mod_ids, ARK_SERVER_DIR, False, False)
    except Exception:
        # FIX: narrowed from a bare 'except:' so Ctrl-C / SystemExit still
        # propagate; the traceback is preserved via exc_info.
        log.error("Unable to update mods.", exc_info=True)
        success = False
    if success:
        memory = read_config("mod_updater_data")
        for modid in mod_ids:
            memory[modid]['last_update'] = datetime.now()
        # Persist once after the loop instead of rewriting per mod.
        write_config("mod_updater_data", memory)
    return success
def write_script(values: dict):
    """Render *values* into an optimization job script and write it under
    the configured project's script directory, creating it if needed."""
    settings = read_config()
    post_data("optimization_raw", values)
    script_dir = (
        Path(settings.project_dir)
        / settings.optimization_project
        / settings.script_dir
    )
    script_file = script_dir / "optimization_script.txt"
    script_dir.absolute().mkdir(parents=True, exist_ok=True)
    with open(script_file.absolute(), "w") as f:
        f.write("\n".join(format_optimization_job_lines(values)))
    logger.info(f"Exported optimization file {script_file.absolute()}")
def __init__(self):
    """Prepare a web-fe user-creation test: load account config, verify the
    front end is reachable, start a browser, and map account fields onto
    the registration form's element names."""
    self.config = config.read_config()
    self.account = self.config['account']
    self.idp_server = self.config['nodes']['idp_node']
    # Abort test if esgf-web-fe is not reachable
    # (verify=False: the test nodes use self-signed certificates — TODO confirm)
    r = requests.get("https://{0}/esgf-web-fe".format(self.idp_server), verify=False, timeout=1)
    assert r.status_code == 200
    self.browser = Browser('firefox')
    # Mapping user data to fit to web-fe user creation form
    self.elements = {'firstName' : self.account['firstname'],
                     'lastName' : self.account['lastname'],
                     'email' : self.account['email'],
                     'userName' : self.account['username'],
                     'password1' : self.account['password'],
                     'password2' : self.account['password']}
def run(args):
    """Dispatch parsed CLI arguments to the matching UPM plugin operation
    (list / show / install / delete / activate / deactivate).

    NOTE(review): this is Python 2 code (print statements, 'print >>'),
    unlike other snippets in this file.
    Exits 0 on success, 1 on a ClientError.
    """
    client = Client(base_url=args.base_url, base_auth=args.base_authentication)
    try:
        if hasattr(args, 'list'):
            if args.user:
                plugins = list_plugins(client, key='userInstalled', value='boolean', pattern='true')
            elif args.system:
                plugins = list_plugins(client, key='userInstalled', value='boolean', pattern='false')
            elif args.key:
                plugins = list_plugins(client, key='key', value='regex', pattern=args.key)
            elif args.key_configuration_file:
                config = read_config(args.key_configuration_file)
                key = get_key_config(config)
                plugins = list_plugins(client, key='key', value='regex', pattern=key)
            else:
                plugins = list_plugins(client)
            if plugins is not None:
                print plugins
        elif hasattr(args, 'show'):
            plugin = show_plugin(client, args.key)
            print plugin
        elif hasattr(args, 'install'):
            # Installation needs a fresh UPM token and the base URL restored.
            token = get_upm_token(client)
            client.request.url = args.base_url
            install_plugin(client, token, args.plugin)
        elif hasattr(args, 'delete'):
            delete_plugin(client, args.key)
        elif hasattr(args, 'activate'):
            activate_plugin(client, args.key)
        elif hasattr(args, 'deactivate'):
            deactivate_plugin(client, args.key)
    except ClientError as e:
        print >> sys.stderr, "%s: %s" % ('upmctl', e)
        sys.exit(1)
    sys.exit(0)
def extract_slicer_command(file_path: Path, params: PrintParams):
    """Build the Slic3r command line that slices *file_path* to G-code
    using the print parameters in *params*.

    :param file_path: model file to slice.
    :param params: print parameters (width, speeds, travel height).
    :return: the full command string, ready for a shell/subprocess call.
    """
    settings = read_config()
    slicer = Path(settings.slic3r_exe)
    # NOTE(review): extrusion widths use params.width + .001 — presumably to
    # nudge Slic3r's rounding behaviour; confirm before changing.
    cmd = (f"{slicer.absolute()} --export-gcode --dont-arrange "
           f"--nozzle-diameter {params.width} "
           f"--first-layer-height {params.width} "
           f"--layer-height {params.width} "
           f"--filament-retract-lift {params.travel_height} "
           f"--retract-speed {params.approach_speed} "
           f"--travel-speed {params.travel_speed} "
           "--infill-only-where-needed --infill-overlap 30% "
           f"--first-layer-extrusion-width {params.width} "
           "--perimeters 2 "
           # FIX: '--external-perimeter-extrusion-width' was passed twice
           # with the same value; once is sufficient.
           f"--external-perimeter-extrusion-width {params.width + .001} "
           f"--perimeter-extrusion-width {params.width + .001} "
           f"--infill-extrusion-width {params.width + .001} "
           f"--first-layer-speed {params.print_speed:.3f} "
           # FIX: dropped a duplicated '--infill-only-where-needed' (already
           # set above) and a stray extra space literal.
           f"--infill-first --skirts 0 "
           f"{file_path.absolute()}")
    return cmd
def __init__(self):
    """Configure a MyProxy client against the configured IdP node, using
    the CA certificates and credential file under ~/.esg."""
    self.config = config.read_config()
    self.cacertdir = os.path.expanduser("~/.esg/certificates")
    self.credsfile = os.path.expanduser("~/.esg/credentials.pem")
    self.myproxy = MyProxyClient(hostname=self.config['nodes']['idp_node'])
    # NOTE(review): _setCACertDir is a private MyProxyClient API — verify it
    # still exists when upgrading the library.
    self.myproxy._setCACertDir(self.cacertdir)
def __init__(self):
    """Initialise the routes-editor window: build the UI, wire buttons,
    shortcuts, and screen signals to their handlers, then load (or create)
    the configuration."""
    super().__init__()
    self.setupUi(self)
    self.retranslateUi(self)
    self.pikmin_routes = RouteTxt()
    self.pikminroutes_screen.pikmin_routes = self.pikmin_routes
    self.collision = None
    self.current_coordinates = None
    # Editing buttons -> their actions.
    self.button_delete_waypoints.pressed.connect(
        self.action_button_delete_wp)
    self.button_ground_waypoints.pressed.connect(
        self.action_button_ground_wp)
    self.button_move_waypoints.pressed.connect(self.action_button_move_wp)
    self.button_add_waypoint.pressed.connect(self.action_button_add_wp)
    self.button_connect_waypoints.pressed.connect(
        self.action_button_connect_wp)
    self.pikminroutes_screen.customContextMenuRequested.connect(
        self.mapview_showcontextmenu)
    # Keyboard shortcuts mirroring the buttons (see tooltips below).
    QtWidgets.QShortcut(Qt.Key_M, self).activated.connect(self.action_button_move_wp)
    QtWidgets.QShortcut(Qt.Key_G, self).activated.connect(
        self.action_button_ground_wp)
    QtWidgets.QShortcut(Qt.CTRL + Qt.Key_A, self).activated.connect(self.action_button_add_wp)
    QtWidgets.QShortcut(Qt.Key_C, self).activated.connect(
        self.action_button_connect_wp)
    QtWidgets.QShortcut(Qt.Key_Delete, self).activated.connect(
        self.action_button_delete_wp)
    self.button_delete_waypoints.setToolTip("Shortcut: Delete")
    self.button_move_waypoints.setToolTip("Shortcut: M")
    self.button_ground_waypoints.setToolTip("Shortcut: G")
    self.button_add_waypoint.setToolTip("Shortcut: Ctrl+A")
    self.button_connect_waypoints.setToolTip("Shortcut: C")
    # Coordinate/radius line edits push edited values back to the selection.
    self.lineedit_xcoordinate.editingFinished.connect(
        self.action_lineedit_change_x)
    self.lineedit_ycoordinate.editingFinished.connect(
        self.action_lineedit_change_y)
    self.lineedit_zcoordinate.editingFinished.connect(
        self.action_lineedit_change_z)
    self.lineedit_radius.editingFinished.connect(
        self.action_lineedit_change_radius)
    # Signals emitted by the map screen itself.
    self.pikminroutes_screen.connect_update.connect(
        self.action_connect_waypoints)
    self.pikminroutes_screen.move_points.connect(
        self.action_move_waypoints)
    self.pikminroutes_screen.create_waypoint.connect(
        self.action_create_waypoint)
    self.disable_lineedits()
    self.last_render = None
    self.current_route_path = None
    try:
        self.configuration = read_config()
        print("config loaded")
    except FileNotFoundError as e:
        # No config on disk yet — generate the default one.
        print(e)
        print("creating file...")
        self.configuration = make_default_config()
    #self.ground_wp_when_moving = self.configuration["ROUTES EDITOR"].getboolean("groundwaypointswhenmoving")
    self.pathsconfig = self.configuration["default paths"]
    self.editorconfig = self.configuration["routes editor"]
    self.pikminroutes_screen.editorconfig = self.editorconfig
    print("We are now ready!")
# Viewer entry script: load a scene from the config file given on the
# command line and render it in a pyglet window.
import pyglet, time, argparse
import render, dot, configuration, renderqueue, scene

parse = argparse.ArgumentParser()
parse.add_argument("configfile")
args = parse.parse_args()
config = configuration.read_config(configuration.open_config(args.configfile))

# 4x multisampling for antialiased rendering.
conf = pyglet.gl.Config(sample_buffers=1, samples=4)
window = pyglet.window.Window(width=config['window']['width'], height=config['window']['height'], config=conf)
pyglet.gl.glClearColor(*config['window']['bgcolor'], 1)
rqueue = renderqueue.RenderQueue(window)
scene.display_scene(window, config, "fast", rqueue, True)

def update(dt):
    # No per-tick simulation; scheduling this no-op keeps pyglet redrawing
    # at up to 240 Hz instead of only on window events.
    pass

pyglet.clock.schedule_interval(update, 1 / 240)

@window.event
def on_draw():
    # Clear and replay the queued render commands every frame.
    window.clear()
    rqueue.render()
def create(self, **query): log.debug("Creating new Entry: " + " ".join([self.title])) ret = super(Entry, self).create(**query) return ret def save(self, *args, **kwargs): log.debug("Updating Entry info: " + str("") + " - " + str(self.title)) self.date_updated = datetime.now() ret = super(Entry, self).save(*args, **kwargs) return ret # region Migration config = read_config() if config['database_migrate']: log.debug("=====================") log.debug("Migration stuff...") try: from playhouse.migrate import * migrator = SqliteMigrator(db) open_count = IntegerField(default=0) migrate(migrator.add_column('Entry', 'open_count', open_count)) log.debug("Migration success") log.debug("=====================") config['database_migrate'] = False
import configuration import sqlite3 import csv import pickle config = configuration.read_config() file = config['database']['file'] def migrage_station_name(): conn = sqlite3.connect('toms.sqlite') cursor = conn.cursor() station_name_dict = {} values_to_insert = [] with open('station_list.csv', 'rt') as csvfile: reader = csv.DictReader(csvfile) for row in reader: # List is not required as this only be used to get name text station_name_dict[row['code']] = row['name'] values_to_insert = [] for entry in station_name_dict.items(): values_to_insert.append(entry) cursor.executemany("""INSERT INTO station_name ('code', 'name') VALUES (?, ?)""", values_to_insert) conn.commit() conn.close() def migrate_zone_specific_station(): conn = sqlite3.connect('toms.sqlite') cursor = conn.cursor()
def test_read_settings(tmp_path):
    """A freshly written config file reads back with the default
    optimization project name."""
    settings_file = Path(tmp_path) / "settings.json"
    write_config(settings_file)
    settings = read_config(settings_file)
    assert settings.optimization_project == "optimization"
""" Created on Tue Jan 26 15:48:49 2020 @author: susan Exports telemanagement (telegestao) data to a csv -> one sensor per file """ import mysql.connector import pandas as pd import sys sys.path.append('../Functions') import configuration root = configuration.read_config() path_init = configuration.get_path(root) db_config = configuration.get_db(root) wmes = configuration.get_wmes(root) mydb = mysql.connector.connect( host=db_config['host'], user=db_config['user'], passwd=db_config['pw'] ) print(mydb) print("\nExport initiated") cursor = mydb.cursor(buffered=True)
def process_startup(**kwargs):
    """Return the configuration, reading the existing file when present and
    creating a fresh one otherwise."""
    if not config_path.is_file():
        return write_config()
    return read_config()