def read_config(self, config):
    self.enable_registration = bool(
        strtobool(str(config["enable_registration"]))
    )
    if "disable_registration" in config:
        self.enable_registration = not bool(
            strtobool(str(config["disable_registration"]))
        )

    self.registrations_require_3pid = config.get("registrations_require_3pid", [])
    self.allowed_local_3pids = config.get("allowed_local_3pids", [])
    self.registration_shared_secret = config.get("registration_shared_secret")

    self.bcrypt_rounds = config.get("bcrypt_rounds", 12)
    self.trusted_third_party_id_servers = config["trusted_third_party_id_servers"]
    self.default_identity_server = config.get("default_identity_server")
    self.allow_guest_access = config.get("allow_guest_access", False)

    self.invite_3pid_guest = (
        self.allow_guest_access and config.get("invite_3pid_guest", False)
    )

    self.auto_join_rooms = config.get("auto_join_rooms", [])
    for room_alias in self.auto_join_rooms:
        if not RoomAlias.is_valid(room_alias):
            raise ConfigError('Invalid auto_join_rooms entry %s' % (room_alias,))
    self.autocreate_auto_join_rooms = config.get("autocreate_auto_join_rooms", True)
def no_cache_dir_callback(option, opt, value, parser):
    """
    Process a value provided for the --no-cache-dir option.

    This is an optparse.Option callback for the --no-cache-dir option.
    """
    # The value argument will be None if --no-cache-dir is passed via the
    # command-line, since the option doesn't accept arguments.  However,
    # the value can be non-None if the option is triggered e.g. by an
    # environment variable, like PIP_NO_CACHE_DIR=true.
    if value is not None:
        # Then parse the string value to get argument error-checking.
        try:
            strtobool(value)
        except ValueError as exc:
            raise_option_error(parser, option=option, msg=str(exc))

    # Originally, setting PIP_NO_CACHE_DIR to a value that strtobool()
    # converted to 0 (like "false" or "no") caused cache_dir to be disabled
    # rather than enabled (logic would say the latter).  Thus, we disable
    # the cache directory not just on values that parse to True, but (for
    # backwards compatibility reasons) also on values that parse to False.
    # In other words, always set it to False if the option is provided in
    # some (valid) form.
    parser.values.cache_dir = False
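# A minimal sketch of how a callback like the one above is wired into optparse;
# the option name mirrors pip's --no-cache-dir, but this standalone parser and
# the simplified callback body are illustrative only (pip's raise_option_error
# helper is not reproduced here).
import optparse
from distutils.util import strtobool

def _demo_no_cache_callback(option, opt, value, parser):
    if value is not None:
        strtobool(value)  # raises ValueError on junk like "maybe"
    parser.values.cache_dir = False

_parser = optparse.OptionParser()
_parser.add_option("--no-cache-dir", dest="cache_dir", action="callback",
                   callback=_demo_no_cache_callback, default=True)
_opts, _ = _parser.parse_args(["--no-cache-dir"])
print(_opts.cache_dir)  # False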
def last_tested_repo_GET():
    # max_age: Maximum age in hours, used as base for the search
    # success(optional): find repos with a successful/unsuccessful vote
    # job_id(optional): name of the CI that sent the vote
    # sequential_mode(optional): if set to true, change the search algorithm
    #                            to only use previous_job_id as CI name to
    #                            search for. Defaults to false
    # previous_job_id(optional): CI name to search for, if sequential_mode is
    #                            True
    max_age = request.json.get('max_age', None)
    job_id = request.json.get('job_id', None)
    success = request.json.get('success', None)
    sequential_mode = request.json.get('sequential_mode', None)
    previous_job_id = request.json.get('previous_job_id', None)

    if success is not None:
        success = bool(strtobool(success))
    if sequential_mode is not None:
        sequential_mode = bool(strtobool(sequential_mode))
    if sequential_mode and previous_job_id is None:
        raise InvalidUsage('Missing parameter previous_job_id',
                           status_code=400)
    if max_age is None:
        raise InvalidUsage('Missing parameters', status_code=400)

    # Calculate timestamp as now - max_age
    if int(max_age) == 0:
        timestamp = 0
    else:
        oldest_time = datetime.now() - timedelta(hours=int(max_age))
        timestamp = time.mktime(oldest_time.timetuple())

    session = getSession(app.config['DB_PATH'])
    try:
        if sequential_mode:
            # CI pipeline case
            vote = getVote(session, timestamp, success, previous_job_id,
                           fallback=False)
        else:
            # Normal case
            vote = getVote(session, timestamp, success, job_id)
    except Exception as e:
        raise e

    commit = session.query(Commit).filter(
        Commit.status == 'SUCCESS',
        Commit.id == vote.commit_id).first()

    result = {'commit_hash': commit.commit_hash,
              'distro_hash': commit.distro_hash,
              'timestamp': vote.timestamp,
              'job_id': vote.ci_name,
              'success': vote.ci_vote,
              'in_progress': vote.ci_in_progress,
              'user': vote.user}
    closeSession(session)
    return jsonify(result), 200
def load_engine_credentials(self):
    engine_conf = {
        'host': None,
        'port': None,
        'user': None,
        'password': None,
        'database': None,
        'secured': None,
        'secured_validation': None,
        'driver': None,
        'url': None,
    }
    if not os.path.exists(self.ENGINE_DB_CONF_FILE):
        raise RuntimeError(
            _('Unable to find {0}'.format(self.ENGINE_DB_CONF_FILE))
        )
    with open(self.ENGINE_DB_CONF_FILE) as f:
        for line in f:
            conf_key, conf_value = line.split('=', 1)
            conf_value = conf_value.strip('\n')
            # By default 10-setup-database.conf wraps each value in double
            # quotes, so we strip the first and last character
            conf_value = conf_value[1:-1]
            if 'ENGINE_DB_HOST' == conf_key:
                engine_conf['host'] = conf_value
            elif 'ENGINE_DB_PORT' == conf_key:
                engine_conf['port'] = int(conf_value)
            elif 'ENGINE_DB_USER' == conf_key:
                engine_conf['user'] = conf_value
            elif 'ENGINE_DB_PASSWORD' == conf_key:
                engine_conf['password'] = conf_value
            elif 'ENGINE_DB_DATABASE' == conf_key:
                engine_conf['database'] = conf_value
            elif 'ENGINE_DB_SECURED' == conf_key:
                engine_conf['secured'] = bool(
                    strtobool(conf_value)
                )
            elif 'ENGINE_DB_SECURED_VALIDATION' == conf_key:
                engine_conf['secured_validation'] = bool(
                    strtobool(conf_value)
                )
            elif 'ENGINE_DB_DRIVER' == conf_key:
                engine_conf['driver'] = conf_value
            elif 'ENGINE_DB_URL' == conf_key:
                engine_conf['url'] = conf_value
    return engine_conf
def launch_totem(session):
    """Launch Totem player and play video file.

    Parameters
    ----------
    session : RvSession
        remote-viewer session
    """
    totem_version = session.guest_session.cmd_output("totem --version")
    logging.info("Totem version %s", totem_version)
    # Repeat parameters for totem.
    totem_params = ""
    if session.guest_vm.is_rhel7():
        repeat_cmd = "dconf write /org/gnome/Totem/repeat true"
        norepeat_cmd = "dconf write /org/gnome/Totem/repeat false"
    elif session.guest_vm.is_linux():
        repeat_cmd = "gconftool-2 --set /apps/totem/repeat -t bool true"
        norepeat_cmd = "gconftool-2 --set /apps/totem/repeat -t bool false"
    totem_params += "--display=:0.0 --play"
    if util.strtobool(session.cfg.repeat_video):
        cmd = repeat_cmd
    else:
        cmd = norepeat_cmd
    session.guest_session.cmd(cmd, timeout=120)
    if util.strtobool(session.cfg.fullscreen):
        totem_params += " --fullscreen "
    dst = session.cfg.destination_video_file_path
    cmd = "nohup totem %s %s &> /dev/null &" % (dst, totem_params)
    session.guest_session.cmd(cmd)
def initialize_options(self):
    _build_ext.initialize_options(self)
    self.extra_cmake_args = os.environ.get('PYARROW_CMAKE_OPTIONS', '')
    self.build_type = os.environ.get('PYARROW_BUILD_TYPE', 'debug').lower()
    self.with_parquet = strtobool(
        os.environ.get('PYARROW_WITH_PARQUET', '0'))
    self.with_jemalloc = strtobool(
        os.environ.get('PYARROW_WITH_JEMALLOC', '0'))
    self.bundle_arrow_cpp = strtobool(
        os.environ.get('PYARROW_BUNDLE_ARROW_CPP', '0'))
def __init__(self, thresholds=(4.0,), *args, **kwargs):
    super(MLT, self).__init__(*args, **kwargs)
    self.thresholds = tuple(Threshold(value) for value in sorted(thresholds))
    self.enable_auto_max_query_terms = (
        strtobool(
            self.config.get('partycrasher.bucket',
                            'enable_auto_max_query_terms')))
    self.initial_mlt_max_query_terms = (
        int(
            self.config.get('partycrasher.bucket',
                            'initial_mlt_max_query_terms')))
    self.auto_max_query_term_maximum_documents = (
        int(
            self.config.get('partycrasher.bucket',
                            'auto_max_query_term_maximum_documents')))
    self.auto_max_query_term_minimum_documents = (
        int(
            self.config.get('partycrasher.bucket',
                            'auto_max_query_term_minimum_documents')))
    self.mlt_min_score = (
        float(
            self.config.get('partycrasher.bucket',
                            'mlt_min_score')))
    self.strictly_increasing = (
        strtobool(
            self.config.get('partycrasher.bucket',
                            'strictly_increasing')))
    if self.enable_auto_max_query_terms:
        self.last_max_query_terms = self.initial_mlt_max_query_terms
        self.max_top_match_score = 0
        self.total_top_match_scores = 0
        self.total_matches = 0
def query_yes_no(question, default="yes"):
    """Ask a yes/no question via input() and return their answer.

    question -- is a string that is presented to the user.
    default -- is the presumed answer if the user just hits <Enter>.
        It must be "yes" (the default), "no" or None (meaning
        an answer is required of the user).

    The "answer" return value is one of "yes" or "no".
    """
    if default is None:
        prompt = " [y/n] "
    elif default == "yes":
        prompt = " [Y/n] "
    elif default == "no":
        prompt = " [y/N] "
    else:
        raise ValueError("invalid default answer: '%s'" % default)

    try:
        while True:
            print(question + prompt, end="")
            choice = input().lower()
            try:
                if default is not None and choice == '':
                    return strtobool(default)
                elif choice:
                    return strtobool(choice)
            except ValueError:
                print("Please respond with 'yes' or 'no' (or 'y' or 'n').\n")
    except KeyboardInterrupt:
        sys.exit(2)
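# Typical interactive use of query_yes_no(); strtobool() does the actual
# word-to-bool mapping, so "y", "YES", "t", "on" and "1" are all accepted
# (hypothetical prompt, for illustration):
#
#   if query_yes_no("Overwrite existing files?", default="no"):
#       overwrite()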
def readConfigFile(logger, path):
    '''
    Parses a config file into various dictionaries.
    '''
    c = ConfigParser.ConfigParser()
    c.read(path)

    cfg = {}
    cfg['SIM_CAMERA_ZMX_FILE'] = str(c.get("simulation", "camera_zmx_file"))
    cfg['SIM_COLLIMATOR_ZMX_FILE'] = str(c.get("simulation", "collimator_zmx_file"))
    cfg['SIM_WAVELENGTH_START'] = Decimal(c.get("simulation", "wavelength_start"))
    cfg['SIM_WAVELENGTH_END'] = Decimal(c.get("simulation", "wavelength_end"))
    cfg['SIM_WAVELENGTH_INTERVAL'] = Decimal(c.get("simulation", "wavelength_interval"))
    cfg['SIM_ADD_CAMERA_WFE'] = bool(strtobool(c.get("simulation", "add_camera_WFE")))
    cfg['SIM_ADD_COLLIMATOR_WFE'] = bool(strtobool(c.get("simulation", "add_collimator_WFE")))
    cfg['SIM_INSTRUMENT_CONFIGS_DIR_PATH'] = str(c.get("simulation", "inst_configs_dir"))
    cfg['PUPIL_SAMPLING'] = int(c.get("pupil", "sampling"))
    cfg['PUPIL_WFE_MAP_SAMPLING'] = int(c.get("pupil", "zemax_WFE_map_sampling"))
    cfg['PUPIL_GAMMA'] = int(c.get("pupil", "gamma"))
    cfg['PUPIL_REFERENCE_WAVELENGTH'] = float(c.get("pupil", "reference_wavelength"))
    cfg['PUPIL_RESAMPLE_TO_WAVELENGTH'] = Decimal(c.get("pupil", "resample_to_wavelength"))
    cfg['PREOPTICS_CFG_NAME'] = str(c.get("preoptics", "preoptics_config_name"))
    cfg['IFU_CFG_NAME'] = str(c.get("ifu", "ifu_config_name"))
    cfg['IFU_SLICES_PER_RESEL'] = int(c.get("ifu", "slices_per_resel"))
    cfg['SPECTROGRAPH_CFG_NAME'] = str(c.get("spectrograph", "spectrograph_config_name"))
    cfg['DETECTOR_CFG_NAME'] = str(c.get("detector", "detector_config_name"))

    return cfg
def __init__(self, aws_secret_access_key, aws_access_key_id,
             s3_bucket_region, s3_ssenc, s3_connection_host,
             cassandra_conf_path, use_sudo, nodetool_path,
             cassandra_bin_dir, backup_schema, buffer_size,
             exclude_tables, compress_data, connection_pool_size=12):
    self.aws_secret_access_key = aws_secret_access_key
    self.aws_access_key_id = aws_access_key_id
    self.s3_bucket_region = s3_bucket_region
    self.s3_ssenc = s3_ssenc
    self.s3_connection_host = s3_connection_host
    self.cassandra_conf_path = cassandra_conf_path
    self.nodetool_path = nodetool_path or \
        "{!s}/nodetool".format(cassandra_bin_dir)
    self.cqlsh_path = "{!s}/cqlsh".format(cassandra_bin_dir)
    self.backup_schema = backup_schema
    self.connection_pool_size = connection_pool_size
    self.buffer_size = buffer_size
    if isinstance(use_sudo, basestring):
        self.use_sudo = bool(strtobool(use_sudo))
    else:
        self.use_sudo = use_sudo
    self.exclude_tables = exclude_tables
    if isinstance(compress_data, basestring):
        self.compress_data = bool(strtobool(compress_data))
    else:
        self.compress_data = compress_data
def import_obj(self, obj, data, dry_run):
    guardian_email = data.get('guardian_email')
    obj.first_name = data.get('first_name')
    obj.last_name = data.get('last_name')
    obj.birthday = datetime.strptime(
        data.get('birthday', ''), '%m/%d/%Y'
    )
    obj.gender = data.get('gender', '')
    obj.school_name = data.get('school_name', '')
    obj.school_type = data.get('school_type', '')
    obj.photo_release = strtobool(data.get('photo_release', ''))
    obj.consent = strtobool(data.get('consent', ''))
    obj.is_active = True

    try:
        obj.guardian = Guardian.objects.get(user__email=guardian_email)
    except Guardian.DoesNotExist:
        raise ImportError(
            f'guardian with email {guardian_email} not found'
        )

    if not dry_run:
        obj.save()
def __init__(self, env=os.environ, verbose=False):
    cmake = env.get("CMAKE_COMMAND", "cmake") or which("cmake", env)
    if cmake is None:
        raise RuntimeError("cannot find `cmake` command, "
                           "please populate CMAKE_COMMAND environment variable")
    self.build = ["make"]
    self.env = env
    self.configure = [cmake]
    self.definitions = dict()
    self.generator = None

    rpath = self.env.get("CMAKE_SKIP_RPATH", "")
    try:
        rpath = bool(strtobool(rpath))
    except ValueError:
        rpath = False
    if rpath:
        self.add_definition("CMAKE_SKIP_RPATH", "TRUE")

    verbose = self.env.get("VERBOSE", "{0}".format(verbose))
    try:
        verbose = bool(strtobool(verbose))
    except ValueError:
        verbose = False
    self.verbose = verbose
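# The try/except pattern above is a common way to read boolean-ish environment
# variables without crashing on junk values. Extracted as a standalone helper
# (a sketch; the name env_flag is hypothetical):
from distutils.util import strtobool

def env_flag(env, name, default=False):
    try:
        return bool(strtobool(env.get(name, "")))
    except ValueError:
        return default

print(env_flag({"VERBOSE": "on"}, "VERBOSE"))  # True
print(env_flag({}, "VERBOSE"))                 # False: "" is not a truth value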
def create_key(self):
    """Create an API key."""
    print("Creating key. Please input the following options:")
    name = input("Key name (optional): ")
    print("To make this key more secure, you should restrict the IP addresses that can use it. ")
    print("To use with all IPs, leave blank or use 0.0.0.0/0.")
    print("To use with a single IP, append '/32', such as 207.39.29.22/32. ")
    print("See this reference on CIDR blocks: http://software77.net/cidr-101.html")
    cidr = input("CIDR (optional): ")

    # Set up permissions
    permissions = []
    if strtobool(input("Should this key be able to submit orders? [y/N] ") or 'N'):
        permissions.append('order')
    if strtobool(input("Should this key be able to submit withdrawals? [y/N] ") or 'N'):
        permissions.append('withdraw')

    otpToken = input("OTP Token (If enabled. If not, press <enter>): ")

    key = self._curl_bitmex("/apiKey",
                            postdict={"name": name,
                                      "cidr": cidr,
                                      "enabled": True,
                                      "token": otpToken,
                                      "permissions": ','.join(permissions)})

    print("Key created. Details:\n")
    print("API Key: " + key["id"])
    print("Secret: " + key["secret"])
    print("\nSafeguard your secret key! If somebody gets a hold of your API key and secret,")
    print("your account can be taken over completely.")
    print("\nKey generation complete.")
def test_cts(nosuccess=False, ignore_replication=False, no_color=False):
    """ Test the CTS-Compliancy of our data.

    :param nosuccess: Boolean indicating if we should print Success
    :param ignore_replication: Boolean indicating if we should test for
        replication of CitationMapping in Files
    :param no_color: Boolean indicating if we should have non-styled
        string messages
    """
    if nosuccess is not False:
        nosuccess = bool(strtobool(str(nosuccess)))
    if ignore_replication is not False:
        ignore_replication = bool(strtobool(str(ignore_replication)))
    if no_color is not False:
        no_color = bool(strtobool(str(no_color)))

    _corpora_config(force=True)
    results = []
    for corpus in env.corpora:
        for resource in corpus.resources:
            results = results + shell.documentTestResults(
                resource.inventory.testTextsCitation(
                    ignore_replication=ignore_replication),
                no_color=no_color)

    if nosuccess is True:
        results = [result for result in results
                   if isinstance(result, (shell.Success)) is False]

    shell.run(results, local, input_required=False)
    clean()
def get_transaction_result(tr_hash, timeout, servers, attempts=5, wait=2):
    """
    Get created offer hash and look for offer result after it's processing.

    takes:
        tr_hash - str - ripple transaction hash
        timeout - int - transaction timeout
        servers - list - ripple servers
        attempts - int - number of attempts to check if trade happened
        wait - int - seconds to wait until next check
    returns:
        transaction result if it's happened or None
    """
    transaction = {}
    # wait for ripple path find
    if not strtobool(os.environ.get("TESTING", "no")):
        time.sleep(wait)
    for i in xrange(attempts):
        transaction = tx(tr_hash, timeout=timeout, servers=servers)
        # check if offer happened
        if "AffectedNodes" in transaction:
            break
        # wait for ripple path find a little more
        if not strtobool(os.environ.get("TESTING", "no")):
            time.sleep(wait)
    return transaction
def test_strtobool(self):
    yes = ("y", "Y", "yes", "True", "t", "true", "True", "On", "on", "1")
    no = ("n", "no", "f", "false", "off", "0", "Off", "No", "N")

    for y in yes:
        self.assertTrue(strtobool(y))

    for n in no:
        self.assertFalse(strtobool(n))
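# Worth noting alongside the test above: strtobool() only accepts that fixed,
# case-insensitive vocabulary; anything else raises ValueError instead of
# returning False. A small sketch of the error case:
from distutils.util import strtobool

for bad in ("", "maybe", "2", "yes please"):
    try:
        strtobool(bad)
    except ValueError:
        pass  # unrecognised truth values are errors, not False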
def __init__(self, togglecsv=None, togglechangingsetpoint=None):
    if togglechangingsetpoint is None:
        self.changing = False
    else:
        self.changing = strtobool(togglechangingsetpoint)
    if togglecsv is None:
        self.csvwriting = False
    else:
        self.csvwriting = strtobool(togglecsv)
def inputsToOptions(self, values):
    # Turn the radio values into an option choice value
    if strtobool(values.get('_locationGlobal', 'F')):
        values['location_choice'] = EventOptions.LOCATION_GLOBAL
    elif strtobool(values.get('_locationRange', 'F')):
        values['location_choice'] = EventOptions.LOCATION_BOX
    elif strtobool(values.get('_locationDistanceFromPoint', 'F')):
        values['location_choice'] = EventOptions.LOCATION_POINT
    return values
def add_policy_content_request(uuid, content_type, change_description, schema_version,
                               ips=None, urls=None, categories=None, enabled=None,
                               description=''):
    """
    Add content to a specified policy using the provided arguments

    :param uuid: Policy UUID
    :param content_type: Policy content type
    :param change_description: Policy update change description
    :param schema_version: Policy schema version
    :param ips: IPs to add to the content
    :param urls: URLs to add to the content
    :param categories: Category names to add to the content
    :param enabled: Policy content enabled
    :param description: Policy content description
    :return: Content update response
    """
    path = 'policies/' + uuid + '/content'
    body = {
        'contentType': content_type,
        'changeDescription': change_description
    }

    if schema_version:
        body['schemaVersion'] = schema_version

    content = get_policy_content_request(uuid)
    if not content or 'content' not in content:
        return_error('Could not update policy content - failed retrieving the current content')

    if ips:
        if 'ipAddresses' not in content['content']:
            content['content']['ipAddresses'] = []
        content['content']['ipAddresses'] += [{
            'ipAddress': ip,
            'description': description,
            'enabled': bool(strtobool(enabled))
        } for ip in ips]
    elif urls:
        if 'urls' not in content['content']:
            content['content']['urls'] = []
        content['content']['urls'] += [{
            'url': url,
            'description': description,
            'enabled': bool(strtobool(enabled))
        } for url in urls]
    elif categories:
        if 'categories' not in content['content']:
            content['content']['categories'] = []
        content['content']['categories'] += [{
            'categoryName': category,
        } for category in categories]

    body['content'] = content['content']

    response = http_request('POST', path, data=body)

    return response
def setConfigFile(self, newFile):
    self.CONFIG = newFile
    self.cfg = Config(self.CONFIG)
    option_flags = self.cfg.ConfigSectionMap("Flags")
    self.SEP = option_flags['sep']
    self.DEBUG = bool(util.strtobool(option_flags['debug']))
    self.DEBUGLITE = bool(util.strtobool(option_flags['debuglite']))
    self.DATABASE = bool(util.strtobool(option_flags['database']))
    self.CSV = bool(util.strtobool(option_flags['csv']))
    self.LOGTIME = bool(util.strtobool(option_flags['logtime']))
def read_config(self, config):
    self.disable_registration = not bool(
        strtobool(str(config["enable_registration"]))
    )
    if "disable_registration" in config:
        self.disable_registration = bool(
            strtobool(str(config["disable_registration"]))
        )

    self.registration_shared_secret = config.get("registration_shared_secret")
def analyse_url():
    args = demisto.args()
    url = args.get('url')
    internet_access = bool(strtobool(args.get('internet-access', 'true')))
    comments = args.get('comments')
    systems = args.get('systems')
    should_wait = bool(strtobool(demisto.get(args, 'should_wait')))

    return analyse_url_request(url, should_wait, internet_access, comments, systems)
def parse_config_files(self, filenames=None):
    from configparser import ConfigParser

    # Ignore install directory options if we have a venv
    if sys.prefix != sys.base_prefix:
        ignore_options = [
            'install-base', 'install-platbase', 'install-lib',
            'install-platlib', 'install-purelib', 'install-headers',
            'install-scripts', 'install-data', 'prefix', 'exec-prefix',
            'home', 'user', 'root']
    else:
        ignore_options = []

    ignore_options = frozenset(ignore_options)

    if filenames is None:
        filenames = self.find_config_files()

    if DEBUG:
        self.announce("Distribution.parse_config_files():")

    parser = ConfigParser()
    for filename in filenames:
        if DEBUG:
            self.announce("  reading %s" % filename)
        parser.read(filename)
        for section in parser.sections():
            options = parser.options(section)
            opt_dict = self.get_option_dict(section)

            for opt in options:
                if opt != '__name__' and opt not in ignore_options:
                    val = parser.get(section, opt)
                    opt = opt.replace('-', '_')
                    opt_dict[opt] = (filename, val)

        # Make the ConfigParser forget everything (so we retain
        # the original filenames that options come from)
        parser.__init__()

    # If there was a "global" section in the config file, use it
    # to set Distribution options.
    if 'global' in self.command_options:
        for (opt, (src, val)) in self.command_options['global'].items():
            alias = self.negative_opt.get(opt)
            try:
                if alias:
                    setattr(self, alias, not strtobool(val))
                elif opt in ('verbose', 'dry_run'):  # ugh!
                    setattr(self, opt, strtobool(val))
                else:
                    setattr(self, opt, val)
            except ValueError as msg:
                raise DistutilsOptionError(msg)
def render(shots, frame=None, tractor=True):
    job_key = str(datetime.datetime.now().strftime("%Y%m%d%H%M%S"))
    job_key = raw_input("Enter job key (%s): " % job_key) or job_key
    rm_rman_dir = strtobool(raw_input("Zap renderman directory (Y/n): ") or 'y')
    res_y = int(raw_input("resolution (540, 720, 1080): "))
    res_x, res_y = str(int(res_y * 16 / 9.0)), str(res_y)
    step = raw_input("Frame step size: ") or "1"

    renderman_dir = os.path.join(os.getcwd(), 'renderman')
    if rm_rman_dir:
        answer = raw_input("Are you sure you want to rm %s? (Y/n) " % renderman_dir) or 'y'
        confirmed = strtobool(answer)
        if confirmed:
            shutil.rmtree(renderman_dir)

    for shot in shots:
        name = shot['name']
        camera = shot['camera']
        scene = os.path.join(os.getcwd(), "scenes", shot['file'])
        if frame is None:
            frame0, frameN = shot['startFrame'], shot['endFrame']
        else:
            frame0, frameN = frame, frame
        image_name = "%s.%s" % (name.replace(' ', '_'), camera)
        output_dir = str(os.path.join(os.getcwd(), "autorender_%s" % job_key, image_name))
        cmd = [
            "/Applications/Autodesk/maya2015/Maya.app/Contents/bin/Render",
            "-r", "rman",
            "-proj", str(os.getcwd()),
            "-res", res_x, res_y,
            "-cam", camera,
            "-of", "OpenEXR",
            "-im", image_name,
            "-rd", output_dir,
            "-pad", "3",
            "-s", str(frame0),
            "-e", str(frameN),
            "-b", str(step),
            # "-pre", "hide Emily_Body_Rig:Emily; showHidden Emily_Body_Rig:Emily;",
            "-fnc", "name.#.ext",
        ]
        if tractor:
            cmd += ["-spool", 'remote rib, remote render']
        cmd += [scene]
        cmd_text = " ".join([pipes.quote(token) for token in cmd])
        print "calling: ", cmd_text
        subprocess.call(cmd_text, shell=True)

    print "Render job submitted with id:", "autorender_%s" % job_key
    print "Copy locally with:", "rsync -avz shay:%s ~/Desktop/" % output_dir
def api_pr():
    reverse = strtobool(request.args.get('reverse', 'no'))
    pattern = '*pr_*' if strtobool(request.args.get('all', 'no')) else 'pr_*'
    keys = r.keys(pattern)
    keys.sort(key=lambda x: int(x.split('_')[-1]), reverse=reverse)

    def requests():
        yield '['
        for key in keys[0:-1]:
            yield r.get(key) + ','
        yield r.get(keys[-1]) + ']'

    return Response(requests(), content_type='application/json')
def get_tagset(request, tagset_id, conn=None, **kwargs):
    try:
        fetch_tags = strtobool(request.GET.get('tags'))
    except (ValueError, AttributeError):
        fetch_tags = False
    try:
        fetch_images = strtobool(request.GET.get('images'))
    except (ValueError, AttributeError):
        fetch_images = False

    tagset = tags_data.get_tagset(conn, tagset_id, fetch_tags, fetch_images)
    return HttpResponse(json.dumps(tagset), content_type='application/json')
def user_yes_no_query(question):
    print(question + ' [y/n]: ')
    while True:
        try:
            response = raw_input().lower()
            return bool(strtobool(response))
        except ValueError:
            print('Please respond with \'y\' or \'n\'.\n')
        except KeyboardInterrupt:
            return False
def get_list(self, verbose=False, content=True, list_id="-1",
             user_id=None, access_token=None):
    if type(content) != types.BooleanType:
        content = strtobool(content)
    if type(verbose) != types.BooleanType:
        verbose = strtobool(verbose)
    conn = species_list_service.connect_mongodb()
    service_result = species_list_service.get_list(conn, user_id, int(list_id),
                                                   verbose, content, access_token)
    conn.close()
    return service_result
# General Django development settings
# from django.conf.global_settings import DATETIME_INPUT_FORMATS
from geonode import get_version
from kombu import Queue

# GeoNode Version
VERSION = get_version()

# Defines the directory that contains the settings file as the PROJECT_ROOT
# It is used for relative settings elsewhere.
PROJECT_ROOT = os.path.abspath(os.path.dirname(__file__))

# Setting debug to true makes Django serve static media and
# present pretty error pages.
DEBUG = strtobool(os.getenv('DEBUG', 'True'))

# Set to True to load non-minified versions of (static) client dependencies
# Requires to set-up Node and tools that are required for static development
# otherwise it will raise errors for the missing non-minified dependencies
DEBUG_STATIC = strtobool(os.getenv('DEBUG_STATIC', 'False'))

# Define email service on GeoNode
EMAIL_ENABLE = strtobool(os.getenv('EMAIL_ENABLE', 'False'))

if EMAIL_ENABLE:
    EMAIL_BACKEND = os.getenv(
        'DJANGO_EMAIL_BACKEND',
        default='django.core.mail.backends.smtp.EmailBackend')
    EMAIL_HOST = 'localhost'
    EMAIL_PORT = 25
from unipath import Path

from django.utils.functional import cached_property

from config_manager.config_manager import ConfigManager


def base_directory():
    return Path(__file__).ancestor(3)


ROOT_DIR = base_directory()
DATA_DIR = ROOT_DIR.child('data')
ENV_VARS_DIR = ROOT_DIR.child('polyaxon').child('polyaxon').child('env_vars')
TESTING = bool(strtobool(os.getenv('TESTING', "0")))


class SettingsConfigManager(ConfigManager):
    def __init__(self, **params):
        super().__init__(**params)
        self._env = self.get_string('POLYAXON_ENVIRONMENT')
        self._service = self.get_string('POLYAXON_SERVICE', is_local=True)
        self._is_debug_mode = self.get_boolean('POLYAXON_DEBUG',
                                               is_optional=True,
                                               default=False)
        self._namespace = self.get_string('POLYAXON_K8S_NAMESPACE')
        if self.is_sidecar_service or self.is_dockerizer_service:
            self._node_name = None
        else:
            self._node_name = self.get_string('POLYAXON_K8S_NODE_NAME',
# get features from layer
features = inputLayer.getFeatures()

# fill attribute list with attributes from features (check for NULLs)
for feature in features:
    if feature[field_to_cluster] != NULL:
        attributeValues.append([])
        attributeValues[len(attributeValues) - 1].append(feature[field_to_cluster])

# create array from attribute list
data = array(attributeValues)

# ---------------- Define starting centroids and perform kmeans(). If random
# centroids are disabled, starting centroids are ordered and distributed evenly
# across the value range, otherwise random centroids are used ----------------
random_centroids = strtobool(os.getenv('QGIS_KMEANS_RANDOM_CENTROIDS',
                                       str(random_centroids)))
if random_centroids == False and number_of_clusters >= 2:
    # compute value range and step size for distributing the centroids
    valueRange = np.max(attributeValues) - np.min(attributeValues)
    stepSize = valueRange / (number_of_clusters - 1)

    # create array of centroids to feed into kmeans. Populate array starting
    # with min of value range. Then proceed following stepSize and finish with
    # max of value range. If number of clusters is 2, only min and max are
    # used as starting centroids
    centroidArray = np.array([[np.min(attributeValues)]])
    if number_of_clusters > 2:
        i = 1
        while i < (number_of_clusters - 1):
            centroid = np.min(attributeValues) + (i * stepSize)
            centroidArray = np.append(centroidArray, [[centroid]], axis=0)
            i += 1
    centroidArray = np.append(centroidArray, [[np.max(attributeValues)]], axis=0)

    # perform kmeans with starting centroids (instead of random starting centroids)
from cartoview.log_handler import get_logger

logger = get_logger(__name__)
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
SETTINGS_DIR = os.path.dirname(os.path.abspath(__file__))
BASE_DIR = os.path.dirname(SETTINGS_DIR)

# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/2.1/howto/deployment/checklist/

# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = os.getenv("SECRET_KEY",
                       "c8(50gzg=^s6&m73&801%+@$24+&8duk$^^4ormfkbj!*q86fo")

# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = strtobool(os.getenv("DEBUG", "True"))
ALLOWED_HOSTS = eval(os.getenv("ALLOWED_HOSTS", '["*"]'))

# Application definition
INSTALLED_APPS = [
    "django.contrib.admin",
    "django.contrib.auth",
    "django.contrib.contenttypes",
    "django.contrib.sessions",
    "django.contrib.messages",
    "django.contrib.staticfiles",
    "django.contrib.sites",
    # third-party apps
    "guardian",
class Common(Configuration):
    INSTALLED_APPS = (
        'django.contrib.admin',
        'django.contrib.auth',
        'django.contrib.contenttypes',
        'django.contrib.sessions',
        'django.contrib.messages',
        'django.contrib.staticfiles',

        # Third party apps
        'rest_framework',  # utilities for rest apis

        # Your apps
        'tree.categories')

    # https://docs.djangoproject.com/en/2.0/topics/http/middleware/
    MIDDLEWARE = (
        'django.middleware.security.SecurityMiddleware',
        'django.contrib.sessions.middleware.SessionMiddleware',
        'django.middleware.common.CommonMiddleware',
        'django.middleware.csrf.CsrfViewMiddleware',
        'django.contrib.auth.middleware.AuthenticationMiddleware',
        'django.contrib.messages.middleware.MessageMiddleware',
        'django.middleware.clickjacking.XFrameOptionsMiddleware',
    )

    ALLOWED_HOSTS = ["*"]
    ROOT_URLCONF = 'tree.urls'
    SECRET_KEY = os.getenv('DJANGO_SECRET_KEY')
    WSGI_APPLICATION = 'tree.wsgi.application'

    # Postgres
    DATABASES = {
        'default': dj_database_url.config(
            default='postgres://postgres:@postgres:5432/postgres',
            conn_max_age=int(os.getenv('POSTGRES_CONN_MAX_AGE', 600)))
    }

    # General
    APPEND_SLASH = False
    TIME_ZONE = 'UTC'
    LANGUAGE_CODE = 'en-us'
    # If you set this to False, Django will make some optimizations so as not
    # to load the internationalization machinery.
    USE_I18N = False
    USE_L10N = True
    USE_TZ = True
    LOGIN_REDIRECT_URL = '/'

    # Static files (CSS, JavaScript, Images)
    # https://docs.djangoproject.com/en/2.0/howto/static-files/
    STATIC_ROOT = os.path.normpath(join(os.path.dirname(BASE_DIR), 'static'))
    STATICFILES_DIRS = []
    STATIC_URL = '/static/'
    STATICFILES_FINDERS = (
        'django.contrib.staticfiles.finders.FileSystemFinder',
        'django.contrib.staticfiles.finders.AppDirectoriesFinder',
    )

    # Media files
    MEDIA_ROOT = join(os.path.dirname(BASE_DIR), 'media')
    MEDIA_URL = '/media/'

    TEMPLATES = [
        {
            'BACKEND': 'django.template.backends.django.DjangoTemplates',
            'DIRS': STATICFILES_DIRS,
            'APP_DIRS': True,
            'OPTIONS': {
                'context_processors': [
                    'django.template.context_processors.debug',
                    'django.template.context_processors.request',
                    'django.contrib.auth.context_processors.auth',
                    'django.contrib.messages.context_processors.messages',
                ],
            },
        },
    ]

    # Set DEBUG to False as a default for safety
    # https://docs.djangoproject.com/en/dev/ref/settings/#debug
    DEBUG = strtobool(os.getenv('DJANGO_DEBUG', 'no'))

    # Password Validation
    # https://docs.djangoproject.com/en/2.0/topics/auth/passwords/#module-django.contrib.auth.password_validation
    AUTH_PASSWORD_VALIDATORS = [
        {
            'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
        },
        {
            'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
        },
        {
            'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
        },
        {
            'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
        },
    ]

    # Logging
    LOGGING = {
        'version': 1,
        'disable_existing_loggers': False,
        'formatters': {
            'django.server': {
                '()': 'django.utils.log.ServerFormatter',
                'format': '[%(server_time)s] %(message)s',
            },
            'verbose': {
                'format': '%(levelname)s %(asctime)s %(module)s %(process)d %(thread)d %(message)s'
            },
            'simple': {
                'format': '%(levelname)s %(message)s'
            },
        },
        'filters': {
            'require_debug_true': {
                '()': 'django.utils.log.RequireDebugTrue',
            },
        },
        'handlers': {
            'django.server': {
                'level': 'INFO',
                'class': 'logging.StreamHandler',
                'formatter': 'django.server',
            },
            'console': {
                'level': 'DEBUG',
                'class': 'logging.StreamHandler',
                'formatter': 'simple'
            },
        },
        'loggers': {
            'django': {
                'handlers': ['console'],
                'propagate': True,
            },
            'django.server': {
                'handlers': ['django.server'],
                'level': 'INFO',
                'propagate': False,
            },
            'django.request': {
                'handlers': ['console'],
                'level': 'ERROR',
                'propagate': False,
            },
            'django.db.backends': {
                'handlers': ['console'],
                'level': 'INFO'
            },
        }
    }
import os
from distutils.util import strtobool

import opendp.smartnoise.core as sn
from tests import (TEST_PUMS_PATH, TEST_PUMS_NAMES,
                   TEST_EDUC_PATH, TEST_EDUC_NAMES)

# Used to skip showing plots, etc.
# IS_CI_BUILD = strtobool(os.environ.get('IS_CI_BUILD', 'False'))


def test_multilayer_analysis(run=True):
    with sn.Analysis() as analysis:
        PUMS = sn.Dataset(path=TEST_PUMS_PATH, column_names=TEST_PUMS_NAMES)

        age = sn.to_float(PUMS['age'])
        sex = sn.to_bool(PUMS['sex'], true_label="TRUE")

        age_clamped = sn.clamp(age, lower=0., upper=150.)
        age_resized = sn.resize(age_clamped, number_rows=1000)

        race = sn.to_float(PUMS['race'])
        mean_age = sn.dp_mean(data=race,
                              privacy_usage={'epsilon': .65},
                              data_lower=0.,
                              data_upper=100.,
                              data_rows=500)
        analysis.release()

        sex_plus_22 = sn.add(sn.to_float(sex), 22.,
def __init__(
    self,
    *,
    credentials: Optional[ga_credentials.Credentials] = None,
    transport: Union[str, KeywordViewServiceTransport, None] = None,
    client_options: Optional[client_options_lib.ClientOptions] = None,
    client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
) -> None:
    """Instantiate the keyword view service client.

    Args:
        credentials (Optional[google.auth.credentials.Credentials]): The
            authorization credentials to attach to requests. These
            credentials identify the application to the service; if none
            are specified, the client will attempt to ascertain the
            credentials from the environment.
        transport (Union[str, ~.KeywordViewServiceTransport]): The
            transport to use. If set to None, a transport is chosen
            automatically.
        client_options (google.api_core.client_options.ClientOptions): Custom
            options for the client. It won't take effect if a ``transport``
            instance is provided.
            (1) The ``api_endpoint`` property can be used to override the
            default endpoint provided by the client.
            GOOGLE_API_USE_MTLS_ENDPOINT environment variable can also be used
            to override the endpoint: "always" (always use the default mTLS
            endpoint), "never" (always use the default regular endpoint) and
            "auto" (auto switch to the default mTLS endpoint if client
            certificate is present, this is the default value). However, the
            ``api_endpoint`` property takes precedence if provided.
            (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
            is "true", then the ``client_cert_source`` property can be used
            to provide client certificate for mutual TLS transport. If
            not provided, the default SSL client certificate will be used if
            present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
            set, no client certificate will be used.
        client_info (google.api_core.gapic_v1.client_info.ClientInfo):
            The client info used to send a user-agent string along with
            API requests. If ``None``, then default info will be used.
            Generally, you only need to set this if you're developing
            your own client library.

    Raises:
        google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
            creation failed for any reason.
    """
    if isinstance(client_options, dict):
        client_options = client_options_lib.from_dict(client_options)
    if client_options is None:
        client_options = client_options_lib.ClientOptions()

    # Create SSL credentials for mutual TLS if needed.
    use_client_cert = bool(
        util.strtobool(
            os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")))

    ssl_credentials = None
    is_mtls = False
    if use_client_cert:
        if client_options.client_cert_source:
            import grpc  # type: ignore

            cert, key = client_options.client_cert_source()
            ssl_credentials = grpc.ssl_channel_credentials(
                certificate_chain=cert, private_key=key)
            is_mtls = True
        else:
            creds = SslCredentials()
            is_mtls = creds.is_mtls
            ssl_credentials = creds.ssl_credentials if is_mtls else None

    # Figure out which api endpoint to use.
    if client_options.api_endpoint is not None:
        api_endpoint = client_options.api_endpoint
    else:
        use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
        if use_mtls_env == "never":
            api_endpoint = self.DEFAULT_ENDPOINT
        elif use_mtls_env == "always":
            api_endpoint = self.DEFAULT_MTLS_ENDPOINT
        elif use_mtls_env == "auto":
            api_endpoint = (self.DEFAULT_MTLS_ENDPOINT
                            if is_mtls else self.DEFAULT_ENDPOINT)
        else:
            raise MutualTLSChannelError(
                "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted values: never, auto, always"
            )

    # Save or instantiate the transport.
    # Ordinarily, we provide the transport, but allowing a custom transport
    # instance provides an extensibility point for unusual situations.
    if isinstance(transport, KeywordViewServiceTransport):
        # transport is a KeywordViewServiceTransport instance.
        if credentials:
            raise ValueError("When providing a transport instance, "
                             "provide its credentials directly.")
        self._transport = transport
    elif isinstance(transport, str):
        Transport = type(self).get_transport_class(transport)
        self._transport = Transport(credentials=credentials,
                                    host=self.DEFAULT_ENDPOINT)
    else:
        self._transport = KeywordViewServiceGrpcTransport(
            credentials=credentials,
            host=api_endpoint,
            ssl_channel_credentials=ssl_credentials,
            client_info=client_info,
        )
def s2b(s):
    import distutils.util as u
    b = bool(u.strtobool(s))
    return b
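# strtobool() itself returns the ints 1 and 0 rather than True and False,
# which is why s2b() wraps the call in bool(). A quick check:
from distutils.util import strtobool

print(strtobool("yes"), strtobool("off"))  # 1 0  (plain ints)
print(s2b("yes"), s2b("off"))              # True False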
class Common(Configuration):
    BASE_DIR = BASE_DIR

    INSTALLED_APPS = (
        "django.contrib.admin",
        "django.contrib.auth",
        "django.contrib.contenttypes",
        "django.contrib.sessions",
        "django.contrib.messages",
        "django.contrib.staticfiles",
        "django.contrib.sites",

        # Third party apps
        "rest_framework",            # utilities for rest apis
        "rest_framework.authtoken",  # token authentication
        # 'django_filters',          # for filtering rest endpoints
        "rest_framework_filters",
        "drf_auto_endpoint",
        "field_history",
        # 'address',
        "phone_field",
        "phonenumber_field",
        "webpack_loader",

        # Your apps
        "applyonline.users",
        "applyonline",

        # All-auth
        "allauth",
        "allauth.account",
        "allauth.socialaccount",
        "allauth.socialaccount.providers.google",
        "rest_auth",
        "rest_auth.registration",
    )

    SITE_ID = 1

    # https://docs.djangoproject.com/en/2.0/topics/http/middleware/
    MIDDLEWARE = (
        "django.middleware.security.SecurityMiddleware",
        "django.contrib.sessions.middleware.SessionMiddleware",
        "django.middleware.common.CommonMiddleware",
        "django.middleware.csrf.CsrfViewMiddleware",
        "django.contrib.auth.middleware.AuthenticationMiddleware",
        "django.contrib.messages.middleware.MessageMiddleware",
        "django.middleware.clickjacking.XFrameOptionsMiddleware",
        "field_history.middleware.FieldHistoryMiddleware",
    )

    ALLOWED_HOSTS = ["*"]
    ROOT_URLCONF = "applyonline.urls"
    SECRET_KEY = os.getenv("DJANGO_SECRET_KEY")
    WSGI_APPLICATION = "applyonline.wsgi.application"

    # Email
    EMAIL_BACKEND = "django.core.mail.backends.smtp.EmailBackend"

    ADMINS = (("Author", "*****@*****.**"), )

    # Postgres
    DATABASES = {
        "default": dj_database_url.config(
            default="postgres://postgres:@postgres:5432/postgres",
            conn_max_age=int(os.getenv("POSTGRES_CONN_MAX_AGE", 600)),
        )
    }

    # General
    APPEND_SLASH = False
    TIME_ZONE = "America/Chicago"
    LANGUAGE_CODE = "en-us"
    # If you set this to False, Django will make some optimizations so as not
    # to load the internationalization machinery.
    USE_I18N = False
    USE_L10N = True
    USE_TZ = True
    LOGIN_REDIRECT_URL = "/"

    # Static files (CSS, JavaScript, Images)
    # https://docs.djangoproject.com/en/2.0/howto/static-files/
    STATIC_ROOT = os.path.normpath(join(os.path.dirname(BASE_DIR), "static"))
    STATICFILES_DIRS = [os.path.normpath(join(BASE_DIR, "static"))]
    STATIC_URL = "/static/"
    STATICFILES_FINDERS = (
        "django.contrib.staticfiles.finders.FileSystemFinder",
        "django.contrib.staticfiles.finders.AppDirectoriesFinder",
    )

    # Media files
    MEDIA_ROOT = join(os.path.dirname(BASE_DIR), "media")
    MEDIA_URL = "/media/"

    TEMPLATES = [{
        "BACKEND": "django.template.backends.django.DjangoTemplates",
        "DIRS": STATICFILES_DIRS,
        "APP_DIRS": True,
        "OPTIONS": {
            "context_processors": [
                "django.template.context_processors.debug",
                "django.template.context_processors.request",
                "django.contrib.auth.context_processors.auth",
                "django.contrib.messages.context_processors.messages",
                "django.template.context_processors.static",
                "ws4redis.context_processors.default",
            ]
        },
    }]

    # Set DEBUG to False as a default for safety
    # https://docs.djangoproject.com/en/dev/ref/settings/#debug
    DEBUG = strtobool(os.getenv("DJANGO_DEBUG", "no"))

    # Password Validation
    # https://docs.djangoproject.com/en/2.0/topics/auth/passwords/#module-django.contrib.auth.password_validation
    AUTH_PASSWORD_VALIDATORS = [
        {
            "NAME": "django.contrib.auth.password_validation.UserAttributeSimilarityValidator"
        },
        {
            "NAME": "django.contrib.auth.password_validation.MinimumLengthValidator"
        },
        {
            "NAME": "django.contrib.auth.password_validation.CommonPasswordValidator"
        },
        {
            "NAME": "django.contrib.auth.password_validation.NumericPasswordValidator"
        },
    ]

    # Logging
    LOGGING = {
        "version": 1,
        "disable_existing_loggers": False,
        "formatters": {
            "django.server": {
                "()": "django.utils.log.ServerFormatter",
                "format": "[%(server_time)s] %(message)s",
            },
            "verbose": {
                "format": "%(levelname)s %(asctime)s %(module)s %(process)d %(thread)d %(message)s"
            },
            "simple": {
                "format": "%(levelname)s %(message)s"
            },
        },
        "filters": {
            "require_debug_true": {
                "()": "django.utils.log.RequireDebugTrue"
            }
        },
        "handlers": {
            "django.server": {
                "level": "INFO",
                "class": "logging.StreamHandler",
                "formatter": "django.server",
            },
            "console": {
                "level": "DEBUG",
                "class": "logging.StreamHandler",
                "formatter": "simple",
            },
            "mail_admins": {
                "level": "ERROR",
                "class": "django.utils.log.AdminEmailHandler"
            },
        },
        "loggers": {
            "django": {
                "handlers": ["console"],
                "propagate": True
            },
            "django.server": {
                "handlers": ["django.server"],
                "level": "INFO",
                "propagate": False,
            },
            "django.request": {
                "handlers": ["mail_admins", "console"],
                "level": "ERROR",
                "propagate": False,
            },
            "django.db.backends": {
                "handlers": ["console"],
                "level": "INFO"
            },
        },
    }

    # Custom user app
    AUTH_USER_MODEL = "users.User"

    AUTHENTICATION_BACKENDS = (
        # Needed to login by username in Django admin, regardless of `allauth`
        "django.contrib.auth.backends.ModelBackend",
        # `allauth` specific authentication methods, such as login by e-mail
        "allauth.account.auth_backends.AuthenticationBackend",
    )

    # Django Rest Framework
    REST_FRAMEWORK = {
        "DEFAULT_PAGINATION_CLASS": "rest_framework.pagination.PageNumberPagination",
        "PAGE_SIZE": int(os.getenv("DJANGO_PAGINATION_LIMIT", 1000)),
        "DATETIME_FORMAT": "%Y-%m-%dT%H:%M:%S%z",
        "DEFAULT_RENDERER_CLASSES": (
            "rest_framework.renderers.JSONRenderer",
            "rest_framework.renderers.BrowsableAPIRenderer",
        ),
        "DEFAULT_PERMISSION_CLASSES": ["rest_framework.permissions.IsAuthenticated"],
        "DEFAULT_AUTHENTICATION_CLASSES": (
            "rest_framework_jwt.authentication.JSONWebTokenAuthentication",
            "rest_framework.authentication.SessionAuthentication",
            "rest_framework.authentication.TokenAuthentication",
        ),
        "DEFAULT_METADATA_CLASS": "drf_auto_endpoint.metadata.AutoMetadata",
        "DEFAULT_FILTER_BACKENDS": ("rest_framework_filters.backends.DjangoFilterBackend", ),
    }

    REST_USE_JWT = True
    JWT_AUTH = {
        "JWT_ALLOW_REFRESH": True,
        "JWT_EXPIRATION_DELTA": datetime.timedelta(days=7),
    }

    ACCOUNT_EMAIL_REQUIRED = True
    ACCOUNT_EMAIL_VERIFICATION = "optional"
    ACCOUNT_AUTHENTICATION_METHOD = "username_email"
    SOCIALACCOUNT_PROVIDERS = {
        "google": {
            "SCOPE": ["profile", "email"],
            "AUTH_PARAMS": {
                "access_type": "offline"
            }
        }
    }

    # django-field-history - use Integers
    FIELD_HISTORY_OBJECT_ID_TYPE = models.UUIDField

    GOOGLE_API_KEY = os.getenv("GOOGLE_API_KEY")

    # DRF-schema-adapter export settings
    DRF_AUTO_METADATA_ADAPTER = "drf_auto_endpoint.adapters.AngularFormlyAdapter"
    EXPORTER_ADAPTER = "export_app.adapters.Angular2Adapter"
    EXPORTER_FRONT_APPLICATION_NAME = "applyonline-angular"
    EXPORTER_ROUTER_PATH = "applyonline.urls.router"

    PHONENUMBER_DEFAULT_REGION = "US"
    PHONENUMBER_DB_FORMAT = "NATIONAL"

    WEBPACK_LOADER = {
        "DEFAULT": {
            "BUNDLE_DIR_NAME": "aofront/"  # end with slash
        }
    }
    ClassicRunner,
)


@pytest.fixture
def stitch_mode():
    return StitchMode.CSS


@pytest.fixture
def eyes_config(eyes_config_base, stitch_mode):
    return eyes_config_base.set_stitch_mode(stitch_mode).add_browser(
        700, 460, BrowserType.CHROME)


if strtobool(os.getenv("TEST_RUN_ON_VG", "False")):

    @pytest.fixture(scope="session")
    def eyes_runner_class():
        return lambda: VisualGridRunner(1)

    @pytest.fixture
    def batch_info():
        return BatchInfo("Python SDK Desktop VG")

else:

    @pytest.fixture(scope="session")
    def eyes_runner_class():
        return lambda: ClassicRunner()
    ALL_COMPLETED, FIRST_COMPLETED, FIRST_EXCEPTION, wait)
import traceback

from file_manager.manager import FileManager
from file_sender.sender import DifmetSender
from ack_receiver.ack_receiver import AckReceiver
from utils.log_setup import setup_logging
from settings.settings_manager import SettingsManager, DebugSettingsManager
from utils.tools import Tools
from utils.const import ENV

# debug switches the launcher from a process based multiprocessing to
# a thread implementation to follow more easily the overall process
# in a debugger.
try:
    DEBUG = strtobool(
        os.environ.get(ENV.debug) or DebugSettingsManager.get("debug"))
except ValueError:
    DEBUG = False

LOGGER = None


def launch_named_process(proc, name):
    """
    launch a process and sets its name as displayed by ps -ef | grep name
    """
    if not DEBUG:
        setproctitle(name)
    proc()
import os
from distutils.util import strtobool

from yaml import load, SafeLoader
from yaml.scanner import ScannerError

from django.core.exceptions import ImproperlyConfigured

BASE_DIR = os.path.dirname(os.path.dirname(os.path.dirname(__file__)))

# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = os.environ.get('HASTEXO_GUACAMOLE_SECRET_KEY')

# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = bool(strtobool(os.environ.get('HASTEXO_GUACAMOLE_DEBUG', 'false')))

DJANGO_LOG_LEVEL = os.environ.get('HASTEXO_GUACAMOLE_LOG_LEVEL', 'WARNING')

LOGGING = {
    'version': 1,
    'disable_existing_loggers': False,
    'formatters': {
        'default': {
            'format': '%(levelname)s:%(name)s:%(message)s',
        },
    },
    'handlers': {
        'console': {
            'class': 'logging.StreamHandler',
            'formatter': 'default',
        },
    },
    'root': {
def _str_as_bool(val):
    try:
        val = bool(strtobool(val))
    except (AttributeError, ValueError):
        pass
    return val if isinstance(val, bool) else None
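# _str_as_bool() maps anything strtobool() cannot parse to None: a ValueError
# for unrecognised strings, an AttributeError for non-strings such as None.
# Illustrative calls:
print(_str_as_bool("true"))   # True
print(_str_as_bool("nope"))   # None (ValueError swallowed)
print(_str_as_bool(None))     # None (AttributeError swallowed)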
def make_complex_query_set(self):
    """
    function to search, filter and sort the job catalog with making
    complex queryset for filtering the job catalog
    :return:
    :rtype:
    """
    query = self.request.GET.get("q")
    program_id = self.request.META.get('HTTP_X_SVMS_PROGRAM_ID')
    category = self.request.GET.get("category")
    title = self.request.GET.get("title")
    level = self.request.GET.get("level")
    description = self.request.GET.get("description")
    status = self.request.GET.get("status")
    job_tag = self.request.GET.get("job_tag")

    q_object = Q()
    if query:
        q_object.add(
            (
                Q(program_id=query) |
                Q(category=query) |
                Q(title__icontains=query) |
                # Q(category__category_name__icontains=query) |
                Q(description__icontains=query) |
                Q(job_tag__tag__in=str(query).split(","))),
            Q.OR)
        if query.isnumeric():
            q_object.add(Q(level__icontains=int(query)), Q.OR)
        if query in ["true", "True", "False", "false"]:
            q_object.add(Q(status=strtobool(query)), Q.OR)
    else:
        if program_id:
            q_object.add(Q(program_id=program_id), Q.AND)
        if category:
            q_object.add(Q(category=category), Q.AND)
        if title:
            q_object.add(Q(title__icontains=title), Q.AND)
        if description:
            q_object.add(Q(description__icontains=description), Q.AND)
        if job_tag:
            q_object.add(Q(job_tag__tag__in=str(job_tag).split(",")), Q.AND)
        if level:
            if level.isnumeric():
                q_object.add(Q(level__icontains=int(level)), Q.AND)
            else:
                raise Exception(
                    ErrorMessage.WRONG_FIELD_TYPE.value.format(
                        "level", "numeric"))
        if status in ["true", "True", "False", "false"]:
            q_object.add(Q(status=strtobool(status)), Q.AND)
    return q_object
from common.logging import init_logging, get_logger
from common.failed_cache import FailedCache
from common.utils import on_thread_done, send_msg_to_payload_tracker
from .archive_parser import ArchiveParser

LOGGER = get_logger(__name__)

PROMETHEUS_PORT = os.getenv('PROMETHEUS_PORT', '8086')

# How many times are we willing to try to grab an uploaded archive before moving on?
MAX_GET_RETRIES = int(os.getenv('MAX_GET_RETRIES', '3'))
# number of worker threads
WORKER_THREADS = int(os.getenv('WORKER_THREADS', '30'))
MAX_QUEUE_SIZE = int(os.getenv('MAX_QUEUE_SIZE', '30'))
SYSTEM_DELETION_THRESHOLD = int(os.getenv('SYSTEM_DELETION_THRESHOLD', '24'))  # 24 hours
DISABLE_OPTIMISATION = strtobool(os.getenv('DISABLE_OPTIMISATION', 'False'))
HOST_INVENTORY_PROFILE_URL = "%s%s" % (
    os.getenv('HOST_INVENTORY_HOST', 'http://platform_mock:8000'),
    os.getenv('HOST_INVENTORY_PROFILE_API', '/api/inventory/v1/hosts/%s/system_profile'))
DIRECT_INVENTORY_FETCH = strtobool(os.getenv('DIRECT_INVENTORY_FETCH', 'True'))

# prometheus metrics
NEW_SYSTEM = Counter('ve_listener_upl_new_system', '# of new systems inserted')
UPDATE_SYSTEM = Counter('ve_listener_upl_update_system', '# of systems updated')
UNCHANGED_SYSTEM = Counter('ve_listener_upl_unchanged_system', '# of system-updates with same vmaas info')
DELETED_SYSTEM = Counter('ve_listener_deleted_system', '# of systems deleted')
DELETED_SYSTEM_NOT_FOUND = Counter('ve_listener_deleted_system_nf', '# of systems to delete but not found')
def _configure(self):
    """
    Configure CherryPy and initialize self.url_prefix

    :returns our URI
    """
    server_addr = self.get_localized_config('server_addr', '::')
    ssl = strtobool(self.get_localized_config('ssl', 'True'))
    def_server_port = 8443
    if not ssl:
        def_server_port = 8080
    server_port = self.get_localized_config('server_port', def_server_port)
    if server_addr is None:
        raise ServerConfigException(
            'no server_addr configured; '
            'try "ceph config set mgr mgr/{}/{}/server_addr <ip>"'.format(
                self.module_name, self.get_mgr_id()))
    self.log.info('server_addr: %s server_port: %s', server_addr, server_port)

    # Initialize custom handlers.
    cherrypy.tools.authenticate = AuthManagerTool()
    cherrypy.tools.session_expire_at_browser_close = SessionExpireAtBrowserCloseTool()
    cherrypy.tools.request_logging = RequestLoggingTool()
    cherrypy.tools.dashboard_exception_handler = HandlerWrapperTool(
        dashboard_exception_handler, priority=31)

    # SSL initialization
    cert = self.get_store("crt")
    if cert is not None:
        self.cert_tmp = tempfile.NamedTemporaryFile()
        self.cert_tmp.write(cert.encode('utf-8'))
        self.cert_tmp.flush()  # cert_tmp must not be gc'ed
        cert_fname = self.cert_tmp.name
    else:
        cert_fname = self.get_localized_config('crt_file')

    pkey = self.get_store("key")
    if pkey is not None:
        self.pkey_tmp = tempfile.NamedTemporaryFile()
        self.pkey_tmp.write(pkey.encode('utf-8'))
        self.pkey_tmp.flush()  # pkey_tmp must not be gc'ed
        pkey_fname = self.pkey_tmp.name
    else:
        pkey_fname = self.get_localized_config('key_file')

    if not cert_fname or not pkey_fname:
        raise ServerConfigException('no certificate configured')
    if not os.path.isfile(cert_fname):
        raise ServerConfigException('certificate %s does not exist' % cert_fname)
    if not os.path.isfile(pkey_fname):
        raise ServerConfigException('private key %s does not exist' % pkey_fname)

    # Apply the 'global' CherryPy configuration.
    config = {
        'engine.autoreload.on': False,
        'server.socket_host': server_addr,
        'server.socket_port': int(server_port),
        'error_page.default': json_error_page,
        'tools.request_logging.on': True
    }
    if ssl:
        config['server.ssl_module'] = 'builtin'
        config['server.ssl_certificate'] = cert_fname
        config['server.ssl_private_key'] = pkey_fname
    cherrypy.config.update(config)

    self._url_prefix = prepare_url_prefix(
        self.get_config('url_prefix', default=''))

    uri = "{0}://{1}:{2}{3}/".format(
        'https' if ssl else 'http',
        socket.getfqdn() if server_addr == "::" else server_addr,
        server_port,
        self.url_prefix
    )
    return uri
    fd.write(content)
    fd.close()


if __name__ == "__main__":
    parser = OptionParser()
    parser.add_option("-o", "--output", dest="output_dir",
                      help="The directory to put all the output HTML files.")
    parser.add_option("-f", "--flag", dest="flag",
                      help="true to print out matching statements, "
                           "false to print out mismatching statements")
    parser.add_option("-a", action="store_true", dest="all", default=False,
                      help="Whether or not to report all statements")
    (options, args) = parser.parse_args()
    if len(args) != 1:
        usage(sys.argv[0])
        exit(-1)

    is_matching = False
    fd = open(args[0], "rb")
    data = fd.read()
    fd.close()

    if options.flag is not None:
        __quiet = False
        is_matching = strtobool(options.flag)

    generate_html_reports("suite name", data, options.output_dir,
                          options.all, is_matching)
                        help='the id of the gym environment')
    parser.add_argument('--learning-rate', type=float, default=25e-5,
                        help='the learning rate of the optimizer')
    parser.add_argument('--seed', type=int, default=2,
                        help='seed of the experiment')
    parser.add_argument('--total-timesteps', type=int, default=10000000,
                        help='total timesteps of the experiments')
    parser.add_argument('--torch-deterministic',
                        type=lambda x: bool(strtobool(x)),
                        default=True, nargs='?', const=True,
                        help='if toggled, `torch.backends.cudnn.deterministic=False`')
    parser.add_argument('--cuda',
                        type=lambda x: bool(strtobool(x)),
                        default=True, nargs='?', const=True,
                        help='if toggled, cuda will not be enabled by default')
    parser.add_argument('--prod-mode',
                        type=lambda x: bool(strtobool(x)),
                        default=False, nargs='?',
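# With nargs='?' and const=True, the lambda-typed flags above behave like
# toggles: a bare `--cuda` stores the const (True), while an explicit value is
# routed through strtobool. Assuming the truncated parser above is completed,
# a hypothetical invocation would behave like this:
#
#   args = parser.parse_args(['--cuda', 'false', '--torch-deterministic'])
#   args.cuda                 # False
#   args.torch_deterministic  # True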
# -*- coding: utf-8 -*-
import multiprocessing
import os
from distutils.util import strtobool

bind = os.getenv('WEB_BIND', '0.0.0.0:8000')
accesslog = '-'
access_log_format = "%(h)s %(l)s %(u)s %(t)s '%(r)s' %(s)s %(b)s '%(f)s' '%(a)s' in %(D)sµs"

workers = int(os.getenv('WEB_CONCURRENCY', multiprocessing.cpu_count() * 2))
threads = int(os.getenv('PYTHON_MAX_THREADS', 1))

reload = bool(strtobool(os.getenv('WEB_RELOAD', 'false')))
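# Example: WEB_RELOAD=1 (or "true"/"yes"/"on") switches gunicorn's auto-reload
# on, while an unparseable value such as WEB_RELOAD=maybe makes strtobool()
# raise ValueError when the config module is loaded, so a bad setting fails
# fast instead of being silently treated as false.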
class Common(Configuration):
    INSTALLED_APPS = (
        'django.contrib.admin',
        'django.contrib.auth',
        'django.contrib.contenttypes',
        'django.contrib.sessions',
        'django.contrib.messages',
        'django.contrib.staticfiles',

        # Third party apps
        'rest_framework',
        'corsheaders',
        'django_extensions',            # utilities for rest apis
        # 'rest_framework.authtoken',   # token authentication
        'django_filters',               # for filtering rest endpoints

        # Your apps
        'covidFYI.users',
        'covidFYI.data',
    )

    # https://docs.djangoproject.com/en/2.0/topics/http/middleware/
    MIDDLEWARE = (
        'django.middleware.security.SecurityMiddleware',
        'django.contrib.sessions.middleware.SessionMiddleware',
        'corsheaders.middleware.CorsMiddleware',
        'django.middleware.common.CommonMiddleware',
        'django.middleware.csrf.CsrfViewMiddleware',
        'django.contrib.auth.middleware.AuthenticationMiddleware',
        'django.contrib.messages.middleware.MessageMiddleware',
        'django.middleware.clickjacking.XFrameOptionsMiddleware',
    )

    ALLOWED_HOSTS = ["*"]
    ROOT_URLCONF = 'covidFYI.urls'
    SECRET_KEY = os.getenv('DJANGO_SECRET_KEY')
    WSGI_APPLICATION = 'covidFYI.wsgi.application'
    CORS_ORIGIN_ALLOW_ALL = True
    CORS_ALLOW_CREDENTIALS = True
    CORS_ORIGIN_WHITELIST = [
        'http://*****:*****@gmail.com'), )

    # Postgres
    DATABASES = {
        'default': dj_database_url.config(
            default='postgres://*****:*****@127.0.0.1:5432/covid',
            conn_max_age=int(os.getenv('POSTGRES_CONN_MAX_AGE', 600))
        )
    }

    # General
    APPEND_SLASH = False
    TIME_ZONE = 'UTC'
    LANGUAGE_CODE = 'en-us'
    # If you set this to False, Django will make some optimizations so as not
    # to load the internationalization machinery.
    USE_I18N = False
    USE_L10N = True
    USE_TZ = True
    LOGIN_REDIRECT_URL = '/'

    # Static files (CSS, JavaScript, Images)
    # https://docs.djangoproject.com/en/2.0/howto/static-files/
    STATIC_ROOT = os.path.normpath(join(os.path.dirname(BASE_DIR), 'static'))
    STATICFILES_DIRS = []
    STATIC_URL = '/static/'
    STATICFILES_FINDERS = (
        'django.contrib.staticfiles.finders.FileSystemFinder',
        'django.contrib.staticfiles.finders.AppDirectoriesFinder',
    )

    # Media files
    MEDIA_ROOT = join(os.path.dirname(BASE_DIR), 'media')
    MEDIA_URL = '/media/'

    TEMPLATES = [
        {
            'BACKEND': 'django.template.backends.django.DjangoTemplates',
            'DIRS': STATICFILES_DIRS,
            'APP_DIRS': True,
            'OPTIONS': {
                'context_processors': [
                    'django.template.context_processors.debug',
                    'django.template.context_processors.request',
                    'django.contrib.auth.context_processors.auth',
                    'django.contrib.messages.context_processors.messages',
                ],
            },
        },
    ]

    # Set DEBUG to False as a default for safety
    # https://docs.djangoproject.com/en/dev/ref/settings/#debug
    DEBUG = strtobool(os.getenv('DJANGO_DEBUG', 'no'))

    # Password Validation
    # https://docs.djangoproject.com/en/2.0/topics/auth/passwords/#module-django.contrib.auth.password_validation
    AUTH_PASSWORD_VALIDATORS = [
        {
            'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
        },
        {
            'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
        },
        {
            'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
        },
        {
            'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
        },
    ]

    # Logging
    LOGGING = {
        'version': 1,
        'disable_existing_loggers': False,
        'formatters': {
            'django.server': {
                '()': 'django.utils.log.ServerFormatter',
                'format': '[%(server_time)s] %(message)s',
            },
            'verbose': {
                'format': '%(levelname)s %(asctime)s %(module)s %(process)d %(thread)d %(message)s'
            },
            'simple': {
                'format': '%(levelname)s %(message)s'
            },
        },
        'filters': {
            'require_debug_true': {
                '()': 'django.utils.log.RequireDebugTrue',
            },
        },
        'handlers': {
            'django.server': {
                'level': 'INFO',
                'class': 'logging.StreamHandler',
                'formatter': 'django.server',
            },
            'console': {
                'level': 'DEBUG',
                'class': 'logging.StreamHandler',
                'formatter': 'simple'
            },
            'mail_admins': {
                'level': 'ERROR',
                'class': 'django.utils.log.AdminEmailHandler'
            }
        },
        'loggers': {
            'django': {
                'handlers': ['console'],
                'propagate': True,
            },
            'django.server': {
                'handlers': ['django.server'],
                'level': 'INFO',
                'propagate': False,
            },
            'django.request': {
                'handlers': ['mail_admins', 'console'],
                'level': 'ERROR',
                'propagate': False,
            },
            'django.db.backends': {
                'handlers': ['console'],
                'level': 'INFO'
            },
        }
    }

    # Custom user app
    AUTH_USER_MODEL = 'users.User'

    # Django Rest Framework
    REST_FRAMEWORK = {
        'DEFAULT_PAGINATION_CLASS': 'rest_framework.pagination.PageNumberPagination',
        'PAGE_SIZE': int(os.getenv('DJANGO_PAGINATION_LIMIT', 5)),
        'DATETIME_FORMAT': '%Y-%m-%dT%H:%M:%S%z',
        'DEFAULT_RENDERER_CLASSES': (
            'rest_framework.renderers.JSONRenderer',
            'rest_framework.renderers.BrowsableAPIRenderer',
        ),
        'DEFAULT_PERMISSION_CLASSES': [
            'rest_framework.permissions.IsAuthenticated',
        ],
        'DEFAULT_AUTHENTICATION_CLASSES': (
            'rest_framework.authentication.SessionAuthentication',
            'rest_framework.authentication.TokenAuthentication',
        )
    }
def parse_args():
    parser = argparse.ArgumentParser(
        description="Run the PyTorch unit test suite",
        epilog="where TESTS is any of: {}".format(", ".join(TESTS)),
        formatter_class=argparse.RawTextHelpFormatter,
    )
    parser.add_argument(
        "-v",
        "--verbose",
        action="count",
        default=0,
        help="print verbose information and test-by-test results",
    )
    parser.add_argument("--jit", action="store_true", help="run all jit tests")
    parser.add_argument(
        "--distributed-tests",
        action="store_true",
        help="run all distributed tests",
    )
    parser.add_argument(
        "-core",
        "--core",
        action="store_true",
        help="Only run core tests, or tests that validate PyTorch's ops, modules, "
        "and autograd. They are defined by CORE_TEST_LIST.",
    )
    parser.add_argument(
        "-pt",
        "--pytest",
        action="store_true",
        help="If true, use `pytest` to execute the tests. E.g., this runs "
        "TestTorch with pytest in verbose and coverage mode: "
        "python run_test.py -vci torch -pt",
    )
    parser.add_argument(
        "-c",
        "--coverage",
        action="store_true",
        help="enable coverage",
        default=PYTORCH_COLLECT_COVERAGE,
    )
    parser.add_argument(
        "-i",
        "--include",
        nargs="+",
        choices=TestChoices(TESTS),
        default=TESTS,
        metavar="TESTS",
        help="select a set of tests to include (defaults to ALL tests)."
        " tests must be a part of the TESTS list defined in run_test.py",
    )
    parser.add_argument(
        "-x",
        "--exclude",
        nargs="+",
        choices=TESTS,
        metavar="TESTS",
        default=[],
        help="select a set of tests to exclude",
    )
    parser.add_argument(
        "-f",
        "--first",
        choices=TESTS,
        metavar="TESTS",
        help="select the test to start from (excludes previous tests)",
    )
    parser.add_argument(
        "-l",
        "--last",
        choices=TESTS,
        metavar="TESTS",
        help="select the last test to run (excludes following tests)",
    )
    parser.add_argument(
        "--bring-to-front",
        nargs="+",
        choices=TestChoices(TESTS),
        default=[],
        metavar="TESTS",
        help="select a set of tests to run first. This can be used in situations"
        " where you want to run all tests, but care more about some set, "
        "e.g. after making a change to a specific component",
    )
    parser.add_argument(
        "--ignore-win-blocklist",
        action="store_true",
        help="always run blocklisted windows tests",
    )
    # NS: Disable target determination until it can be made more reliable
    # parser.add_argument(
    #     "--determine-from",
    #     help="File of affected source filenames to determine which tests to run.",
    # )
    parser.add_argument(
        "--continue-through-error",
        action="store_true",
        help="Runs the full test suite despite one of the tests failing",
        default=strtobool(os.environ.get("CONTINUE_THROUGH_ERROR", "False")),
    )
    parser.add_argument(
        "additional_unittest_args",
        nargs="*",
        help="additional arguments passed through to unittest, e.g., "
        "python run_test.py -i sparse -- TestSparse.test_factory_size_check",
    )
    parser.add_argument(
        "--export-past-test-times",
        nargs="?",
        type=str,
        const=TEST_TIMES_FILE,
        help="dumps test times from previous S3 stats into a file, format JSON",
    )
    parser.add_argument(
        "--shard",
        nargs=2,
        type=int,
        help="runs a shard of the tests (taking into account other selections), e.g., "
        "--shard 2 3 will break up the selected tests into 3 shards and run the tests "
        "in the 2nd shard (the first number should not exceed the second)",
    )
    parser.add_argument(
        "--exclude-jit-executor",
        action="store_true",
        help="exclude tests that are run for a specific jit config",
    )
    parser.add_argument(
        "--exclude-distributed-tests",
        action="store_true",
        help="exclude distributed tests",
    )
    parser.add_argument(
        "--run-specified-test-cases",
        nargs="?",
        type=str,
        const=SPECIFIED_TEST_CASES_FILE,
        help="load specified test cases file dumped from previous OSS CI stats, format CSV. "
        " If all test cases should run for a <test_module> please add a single row: \n"
        " test_filename,test_case_name\n"
        " ...\n"
        " <test_module>,__all__\n"
        " ...\n"
        'how we use the stats will be based on option "--use-specified-test-cases-by".',
    )
    parser.add_argument(
        "--use-specified-test-cases-by",
        type=str,
        choices=["include", "bring-to-front"],
        default="include",
        help='used together with option "--run-specified-test-cases". When specified test case '
        "file is set, this option allows the user to control whether to only run the specified test "
        "modules or to simply bring the specified modules to front and also run the remaining "
        "modules. Note: regardless of this option, we will only run the specified test cases "
        " within a specified test module. For unspecified test modules with the bring-to-front "
        "option, all test cases will be run, as one may expect.",
    )
    parser.add_argument(
        "--dry-run",
        action="store_true",
        help="Only list the test that will run.",
    )
    return parser.parse_args()
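# Hedged aside (not from the PyTorch source): --continue-through-error above
# shows a reusable pattern, a store_true flag whose default is preset from an
# environment variable via strtobool. A small illustrative helper; the names
# env_flag and CONTINUE_THROUGH_ERROR usage here are ours, not PyTorch's API.
import argparse
import os
from distutils.util import strtobool


def env_flag(parser, name, env_var, help_text):
    """Add a boolean flag whose default can be preset via an env var."""
    parser.add_argument(
        name,
        action="store_true",
        default=bool(strtobool(os.environ.get(env_var, "False"))),
        help=help_text,
    )


demo = argparse.ArgumentParser()
env_flag(demo, "--continue-through-error", "CONTINUE_THROUGH_ERROR",
         "keep running after a test fails")
print(demo.parse_args([]).continue_through_error)  # False unless env var set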
import pandas as pd
import numpy as np
import math
import glob
from distutils.util import strtobool

# get the file list to be searched
extensions = ['*.xls', '*.xlsx', '*.xlsm']
filenames = []
folder = input('(Example) /Users/sunny/Documents/UROP/\nSearch Directory:\n')
if folder[-1] != '/':
    folder = folder + '/'
# folder = './sample spreadsheets/'
recur = input('Search directory recursively? (True/False)\n')
recursive = bool(strtobool(recur))
if recursive:
    folder = folder + '**/'
for extension in extensions:
    filenames.extend(glob.iglob(folder + extension, recursive=recursive))
filenames.sort()
totalSearch = len(filenames)
searchCount = 0

# clean up / create the output text file
result = open('result.txt', 'w+', encoding='utf-8')
result.close()

# start reading files
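# Side note on the recursive branch above: glob only expands '**' when
# recursive=True is passed, which is why the script both appends '**/' to the
# folder and sets the flag. A minimal check (the /tmp/demo paths are
# hypothetical); '**/' also matches zero directories, so the deep pattern
# includes everything the shallow one finds.
import glob

shallow = glob.glob('/tmp/demo/*.xls')                  # top level only
deep = glob.glob('/tmp/demo/**/*.xls', recursive=True)  # any depth
assert set(shallow) <= set(deep)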
import logging
import os
from distutils.util import strtobool

from default import *
from admins import *
from databases import *
from static import *
from media import *
from middleware import *
from template import *
from apps import *
from cache import *
from debug_toolbar_settings import *
from logging import *
from tests import *

IS_TESTING = strtobool(os.environ.get("TESTING", "no"))

try:
    from local import *
except ImportError:
    print('***bit_hedge/settings/local.py not found***')
def strbool(arg):
    return bool(strtobool(arg))
def fact_ajax(env, node, fact, value):
    """Fetches the specific facts matching (node/fact/value) from PuppetDB
    and returns a JSON table

    :param env: Searches for facts in this environment
    :type env: :obj:`string`
    :param node: Find all facts for this node
    :type node: :obj:`string`
    :param fact: Find all facts with this name
    :type fact: :obj:`string`
    :param value: Filter facts whose value is equal to this
    :type value: :obj:`string`
    """
    draw = int(request.args.get('draw', 0))

    envs = environments()
    check_env(env, envs)

    render_graph = False
    if fact in graph_facts and not value and not node:
        render_graph = True

    query = AndOperator()
    if node:
        query.add(EqualsOperator("certname", node))
    if env != '*':
        query.add(EqualsOperator("environment", env))
    if len(query.operations) == 0:
        query = None

    # Generator needs to be converted (graph / total)
    try:
        value = int(value)
    except ValueError:
        if value is not None and query is not None:
            if is_bool(value):
                query.add(EqualsOperator('value', bool(strtobool(value))))
            else:
                query.add(EqualsOperator('value', unquote_plus(value)))
    except TypeError:
        pass

    facts = [f for f in get_or_abort(puppetdb.facts, name=fact, query=query)]

    total = len(facts)

    counts = {}
    json = {
        'draw': draw,
        'recordsTotal': total,
        'recordsFiltered': total,
        'data': []
    }

    for fact_h in facts:
        line = []
        if not fact:
            line.append(fact_h.name)
        if not node:
            line.append('<a href="{0}">{1}</a>'.format(
                url_for('node', env=env, node_name=fact_h.node),
                fact_h.node))
        if not value:
            fact_value = fact_h.value
            if isinstance(fact_value, str):
                fact_value = quote_plus(fact_h.value)
            line.append('<a href="{0}">{1}</a>'.format(
                url_for('fact', env=env, fact=fact_h.name, value=fact_value),
                fact_h.value))
        json['data'].append(line)

        if render_graph:
            if fact_h.value not in counts:
                counts[fact_h.value] = 0
            counts[fact_h.value] += 1

    if render_graph:
        json['chart'] = [{
            "label": "{0}".format(k).replace('\n', ' '),
            "value": counts[k]
        } for k in sorted(counts, key=lambda k: counts[k], reverse=True)]

    return jsonify(json)
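# fact_ajax above calls an is_bool() helper that is not shown in this excerpt.
# A plausible minimal sketch (an assumption on our part, not necessarily the
# project's actual code): report whether strtobool would accept the string, so
# the caller can choose between a boolean query and a plain string equality.
from distutils.util import strtobool


def is_bool(value):
    """Return True if value parses as a boolean word ('true', 'no', ...)."""
    try:
        strtobool(value)
        return True
    except ValueError:
        return False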
def str2bool(s):
    if is_string(s):
        return strtobool(s)
    return bool(s)
def getSettingAsBoolean(setting):
    return bool(strtobool(str(__addon__.getSetting(setting)).lower()))
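# Context for the small wrappers above: distutils is deprecated by PEP 632
# (deprecated in Python 3.10, removed in 3.12), and distutils.util.strtobool
# goes with it. A minimal drop-in replacement mirroring strtobool's documented
# truth table; the name strtobool_compat is ours.
def strtobool_compat(val):
    """Return 1 for truthy strings, 0 for falsy ones, else raise ValueError."""
    val = val.lower()
    if val in ('y', 'yes', 't', 'true', 'on', '1'):
        return 1
    if val in ('n', 'no', 'f', 'false', 'off', '0'):
        return 0
    raise ValueError('invalid truth value %r' % (val,))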
# The excerpt below uses logging, os, and strtobool without showing their
# imports; they are added here so the snippet stands alone.
import logging
import os
from distutils.util import strtobool

from mapproxy.config.spec import validate_options
from mapproxy.config.validator import validate_references
from mapproxy.config.loader import ProxyConfiguration, ConfigurationError
from mapproxy.wsgiapp import MapProxyApp
from shapely.geometry import box
from six.moves.urllib_parse import urlparse, unquote as url_unquote
from rawes.elastic_exception import ElasticException

LOGGER = logging.getLogger(__name__)

__version__ = 0.1

DEBUG = strtobool(os.getenv('REGISTRY_DEBUG', 'True'))
ROOT_URLCONF = 'registry'
DATABASES = {'default': {}}  # required regardless of actual usage
SECRET_KEY = os.getenv('REGISTRY_SECRET_KEY',
                       'Make sure you create a good secret key.')
REGISTRY_MAPPING_PRECISION = os.getenv('REGISTRY_MAPPING_PRECISION', '500m')
REGISTRY_SEARCH_URL = os.getenv('REGISTRY_SEARCH_URL', 'http://127.0.0.1:9200')
REGISTRY_DATABASE_URL = os.getenv('REGISTRY_DATABASE_URL',
                                  'sqlite:////tmp/registry.db')
VCAP_SERVICES = os.environ.get('VCAP_SERVICES', None)


def vcaps_search_url(VCAP_SERVICES, registry_url):
    """Extract registry_url from VCAP_SERVICES dict"""
def __init__(self, parser: ArgumentParser):
    super(TrainingArgs, self).__init__()

    # Bookkeeping
    parser.add_argument(
        '--save_directory',
        default='',
        type=str,
        help='Location to save model saves and other information.')
    parser.add_argument(
        '--experiment_name',
        default='',
        type=str,
        help='The experiment name. A directory will be created under '
        'save_directory for it.')
    parser.add_argument(
        '--log_with_slack',
        default=False,
        type=lambda x: bool(strtobool(x)),
        help='Whether to log experiment details (starting, epoch accuracies, '
        'and ending) to a Slack channel.')

    # TODO: Neither of the two following are actually used anywhere.
    parser.add_argument(
        '--validation_metrics',
        default=[
            metric.Metric.RELAXED_ENVIRONMENT_ACCURACY,
            metric.Metric.SEQUENCE_ACCURACY, metric.Metric.CARD_ACCURACY,
            metric.Metric.EXACT_ENVIRONMENT_ACCURACY,
            metric.Metric.AGENT_DISTANCE, metric.Metric.SCORE
        ],
        nargs='+',
        type=metric.Metric,
        help='The metrics to compute on the validation set each epoch.')
    parser.add_argument(
        '--training_metrics',
        default=[
            metric.Metric.RELAXED_ENVIRONMENT_ACCURACY,
            metric.Metric.SEQUENCE_ACCURACY, metric.Metric.CARD_ACCURACY,
            metric.Metric.EXACT_ENVIRONMENT_ACCURACY,
            metric.Metric.AGENT_DISTANCE, metric.Metric.SCORE
        ],
        nargs='+',
        type=metric.Metric,
        help='The metrics to compute on the training set each epoch.')

    # Data during training
    parser.add_argument(
        '--proportion_of_train_for_accuracy',
        default=0.1,
        type=float,
        help='The proportion of training games on which to run inference '
        'every epoch to compute an estimate of the accuracy on the '
        'training set.')
    parser.add_argument(
        '--aggregate_examples',
        default=False,
        type=lambda x: bool(strtobool(x)),
        help='Whether to aggregate training examples during training and '
        'validation inference as a way to improve recovery against error '
        'propagation during full game inference.')
    parser.add_argument(
        '--batch_size',
        default=16,
        type=int,
        help='The batch size to use for training.')

    # Training process
    parser.add_argument(
        '--initial_patience',
        default=10.,
        type=float,
        help='Initial patience.')
    parser.add_argument(
        '--patience_update_factor',
        default=1.,
        type=float,
        help='Factor to increase patience by when performance improves.')
    parser.add_argument(
        '--stopping_metric',
        default=metric.Metric.RELAXED_ENVIRONMENT_ACCURACY,
        type=metric.Metric,
        help='Which metric to stop on.')

    # Optimizer
    parser.add_argument(
        '--optimizer',
        default=OptimizerType.ADAM,
        type=OptimizerType,
        help='The optimizer type to use.')
    parser.add_argument(
        '--plan_prediction_learning_rate',
        default=0.0075,
        type=float,
        help='Learning rate to use for hex predictor.')
    parser.add_argument(
        '--plan_prediction_l2_coefficient',
        default=0.000001,
        type=float,
        help='Coefficient of the L2 norm for regularization.')
    parser.add_argument(
        '--action_generation_learning_rate',
        default=0.001,
        type=float,
        help='Learning rate to use for action predictor.')
    parser.add_argument(
        '--action_generation_l2_coefficient',
        default=0.,
        type=float,
        help='Coefficient of the L2 norm for regularization.')
    parser.add_argument(
        '--finetune_learning_rate',
        default=0.001,
        type=float,
        help='Learning rate to use for finetuning models.')
    parser.add_argument(
        '--finetune_l2_coefficient',
        default=0.,
        type=float,
        help='Coefficient of the L2 norm for regularization.')
    parser.add_argument(
        '--max_gradient',
        default=-1,
        type=float,
        help='Maximum gradient (for clipping)')

    # Coefficients for auxiliary losses.
    parser.add_argument(
        '--pretrain_auxiliary_coefficient_intermediate_goal_probabilities',
        default=0.,
        type=float,
        help='The coefficient for the card reaching loss intermediate in '
        'the network.')
    parser.add_argument(
        '--pretrain_auxiliary_coefficient_trajectory_distribution',
        default=0.,
        type=float,
        help='The coefficient for the trajectory distribution loss.')
    parser.add_argument(
        '--pretrain_auxiliary_coefficient_final_goal_probabilities',
        default=0.,
        type=float,
        help='The coefficient for the final card prediction.')
    parser.add_argument(
        '--pretrain_auxiliary_coefficient_obstacle_probabilities',
        default=0.,
        type=float,
        help='The coefficient of the prediction of hexes which cannot be '
        'passed through.')
    parser.add_argument(
        '--pretrain_auxiliary_coefficient_avoid_probabilities',
        default=0.,
        type=float,
        help='The coefficient of the prediction of hexes to avoid (e.g., '
        'card it should not pick up)')
    parser.add_argument(
        '--finetune_auxiliary_coefficient_intermediate_goal_probabilities',
        default=0.,
        type=float,
        help='The coefficient for the card reaching loss intermediate in '
        'the network.')
    parser.add_argument(
        '--finetune_auxiliary_coefficient_trajectory_distribution',
        default=0.,
        type=float,
        help='The coefficient for the trajectory distribution loss.')
    parser.add_argument(
        '--finetune_auxiliary_coefficient_final_goal_probabilities',
        default=0.,
        type=float,
        help='The coefficient for the final card prediction.')
    parser.add_argument(
        '--finetune_auxiliary_coefficient_obstacle_probabilities',
        default=0.,
        type=float,
        help='The coefficient of the prediction of hexes which cannot be '
        'passed through.')
    parser.add_argument(
        '--finetune_auxiliary_coefficient_avoid_probabilities',
        default=0.,
        type=float,
        help='The coefficient of the prediction of hexes to avoid (e.g., '
        'card it should not pick up)')
    parser.add_argument(
        '--finetune_auxiliary_coefficient_implicit_actions',
        default=0.,
        type=float,
        help='The coefficient on the implicit example prediction')

    self._batch_size: int = None
    self._log_with_slack: bool = None
    self._initial_patience: float = None
    self._patience_update_factor: float = None
    self._plan_prediction_learning_rate: float = None
    self._plan_prediction_l2_coefficient: float = None
    self._action_generation_learning_rate: float = None
    self._action_generation_l2_coefficient: float = None
    self._finetune_learning_rate: float = None
    self._finetune_l2_coefficient: float = None
    self._optimizer_type: OptimizerType = None
    self._max_gradient: float = None
    self._proportion_of_train_for_accuracy: float = None
    self._save_directory: str = None
    self._experiment_name: str = None
    self._stopping_metric: metric.Metric = None
    self._validation_metrics: List[metric.Metric] = None
    self._training_metrics: List[metric.Metric] = None

    self._pretrain_auxiliary_coefficient_intermediate_goal_probabilities: float = None
    self._pretrain_auxiliary_coefficient_trajectory_distribution: float = None
    self._pretrain_auxiliary_coefficient_final_goal_probabilities: float = None
    self._pretrain_auxiliary_coefficient_obstacle_probabilities: float = None
    self._pretrain_auxiliary_coefficient_avoid_probabilities: float = None

    self._finetune_auxiliary_coefficient_intermediate_goal_probabilities: float = None
    self._finetune_auxiliary_coefficient_trajectory_distribution: float = None
    self._finetune_auxiliary_coefficient_final_goal_probabilities: float = None
    self._finetune_auxiliary_coefficient_obstacle_probabilities: float = None
    self._finetune_auxiliary_coefficient_avoid_probabilities: float = None
    self._finetune_auxiliary_coefficient_implicit_actions: float = None
    self._aggregate_examples: bool = None
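# Why the lambdas above use type=lambda x: bool(strtobool(x)) rather than
# type=bool: argparse passes the raw command-line string to the type callable,
# and bool('False') is True because any non-empty string is truthy. A small
# standalone demonstration; the flag names here are illustrative only.
import argparse
from distutils.util import strtobool

p = argparse.ArgumentParser()
p.add_argument('--naive', type=bool, default=False)
p.add_argument('--parsed', type=lambda x: bool(strtobool(x)), default=False)

args = p.parse_args(['--naive', 'False', '--parsed', 'False'])
assert args.naive is True      # surprising: the non-empty string is truthy
assert args.parsed is False    # strtobool actually understands 'False'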
import os
from distutils.util import strtobool

# Assumed import: Path here behaves like unipath.Path, which provides the
# .ancestor() and .child() methods used below; the original excerpt does not
# show where Path comes from.
from unipath import Path

from polyaxon_schemas.polyaxonfile import reader


class ConfigurationError(Exception):
    pass


def base_directory():
    return Path(__file__).ancestor(3)


ROOT_DIR = base_directory()
DATA_DIR = ROOT_DIR.child('data')
ENV_VARS_DIR = ROOT_DIR.child('polyaxon').child('polyaxon').child('env_vars')

TESTING = bool(strtobool(os.getenv("TESTING", "0")))


class SettingConfig(object):
    _PASS = '******'

    def __init__(self, **params):
        self._params = params
        self._requested_keys = set()
        self._secret_keys = set()
        self._env = self.get_string('POLYAXON_ENVIRONMENT')
        self._enable_services = self.get_boolean('POLYAXON_ENABLE_SERVICES',
                                                 is_optional=True,
                                                 default=True)

    @property
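# The excerpt cuts off at a @property; the get_string()/get_boolean()
# accessors called in __init__ are defined further down in the original
# module and are not shown here. A hedged sketch of how get_boolean could be
# built on strtobool, assuming params holds raw strings from the environment;
# this is illustrative, not the project's actual implementation.
def get_boolean_sketch(params, key, is_optional=False, default=None):
    """Illustrative only: coerce a stored string param to a bool."""
    if key not in params:
        if is_optional:
            return default
        raise ConfigurationError('Required key `%s` is missing.' % key)
    return bool(strtobool(str(params[key])))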