def namespace_env(path):
    """
    Fetch data from the environment.

    :param Path path: Path to the pipeline definition file.
    :return: A named tuple with information from the environment.
    :rtype: namedtuple
    """
    available = set(environ.keys())
    # Only variable names matching SLUG_REGEX are considered safe to expose.
    safe = {name for name in available if match(SLUG_REGEX, name)}
    ignored = available - safe
    if ignored:
        log.debug('Environment variables unsafe to load: {}'.format(
            sorted(ignored)))
    env_type = namedtuple('env', safe)
    env = env_type(**{name: environ[name] for name in safe})
    # Log a masked copy so values never end up in the debug log.
    masked = env._replace(**{name: '****' for name in safe})
    log.debug('env namespace: {}'.format(masked))
    return env
def build_aws_credentials():
    """
    Set up the environment so boto3 can authenticate against AWS ECR.

    Validates that the region and either a direct or file-based access
    key/secret pair are present in the environment, then writes the
    standard ``credentials`` and ``config`` files for the default profile.

    :raises MissingEnvironmentError: if any required variable is absent.
    """
    missing_envs = []
    if CredBuilder.AWS_REGION_KEY not in environ:
        missing_envs.append(CredBuilder.AWS_REGION_KEY)

    # The key id / secret may be supplied directly or as a path to a file
    # containing the value; either variant satisfies the requirement.
    required_access_key_envs = [CredBuilder.AWS_ACCESS_KEY_ID_KEY,
                                CredBuilder.AWS_ACCESS_KEY_ID_FILE_KEY]
    if not any(k in environ for k in required_access_key_envs):
        missing_envs.extend(required_access_key_envs)

    required_secret_key_envs = [CredBuilder.AWS_SECRET_ACCESS_KEY,
                                CredBuilder.AWS_SECRET_ACCESS_KEY_FILE]
    if not any(k in environ for k in required_secret_key_envs):
        missing_envs.extend(required_secret_key_envs)

    if missing_envs:
        raise MissingEnvironmentError(missing_envs)

    access_key_id, secret_access_key = _extract_aws_credentials()
    makedirs(CredBuilder.AWS_CONFIG_FILE_BASE, exist_ok=True)

    with open(CredBuilder.AWS_CREDENTIALS_FILE_PATH, mode='w+',
              encoding='utf-8') as f:
        f.write('\n'.join(
            ['[default]',
             'aws_access_key_id = {kid}'.format(kid=access_key_id),
             'aws_secret_access_key = {sak}'.format(sak=secret_access_key)]))

    with open(CredBuilder.AWS_CONFIG_FILE_PATH, mode='w+',
              encoding='utf-8') as f:
        # BUG FIX: the region was previously read from the hard-coded name
        # 'AWS_REGION' instead of the key validated above, so the two could
        # disagree and write "region = None".
        f.write('\n'.join(
            ['[default]',
             'region = {reg}'.format(
                 reg=environ.get(CredBuilder.AWS_REGION_KEY))]))
def namespace_env(config, filepath):
    """
    Fetch data from the environment.

    :param dict config: Plugin configuration, if any.
    :param Path filepath: Path file been rendered.
    :return: A named tuple with information from the environment.
    :rtype: namedtuple
    """
    all_names = set(environ.keys())
    # Keep only names that match the safe-slug pattern.
    safe_names = {name for name in all_names if match(SLUG_REGEX, name)}
    unsafe_names = all_names - safe_names
    if unsafe_names:
        log.debug('Environment variables unsafe to load: {}'.format(
            sorted(unsafe_names)))
    env = namedtuple('env', safe_names)(
        **{name: environ[name] for name in safe_names})
    # Never log actual values — mask them all.
    log.debug('env namespace: {}'.format(
        env._replace(**{name: '****' for name in safe_names})))
    return env
def read_slack_args():
    """
    Read Slack logging settings from ``args`` and the environment.

    :return: tuple ``(slack_log, slack_token, slack_url, slack_channel)``;
        the last three are ``None`` when Slack logging is disabled.
    :raises Exception: when Slack logging is enabled but a required
        environment variable is missing.
    """
    if 'slack_log' in args.keys():
        slack_log = args['slack_log'].lower() in ("yes", "true", "t", "1")
    else:
        slack_log = False
    slack_token = None
    slack_url = None
    slack_channel = None
    if slack_log:
        if 'SLACK_TOKEN' not in environ.keys():
            raise Exception(
                "No feide SLACK TOKEN found. Create env var: SLACK_TOKEN")
        if 'SLACK_CHANNEL' not in environ.keys():
            raise Exception(
                "No SLACK CHANNEL found. Create env var: SLACK_CHANNEL")
        # BUG FIX: the key previously contained a trailing space
        # ('SLACK_URL '), so this check failed (and raised) even when
        # SLACK_URL was correctly set.
        if 'SLACK_URL' not in environ.keys():
            raise Exception("No SLACK URL found. Create env var: SLACK_URL")
        slack_token = environ['SLACK_TOKEN']
        slack_url = environ['SLACK_URL']
        slack_channel = environ['SLACK_CHANNEL']
    return slack_log, slack_token, slack_url, slack_channel
def get_paths():
    """
    Build a dictionary of project directory paths.

    Loads the project ``.env`` file and, when present, picks up the
    ``inventory_path`` / ``tools_path`` environment overrides.

    :return: dict of named directories, or ``None`` if resolution fails.
    """
    try:
        _paths = dict()
        _paths['rootDirectory'] = abspath(
            abspath(join(dirname(__file__), pardir, pardir)))
        _paths['dataDirectory'] = join(_paths['rootDirectory'], 'data')
        _paths['processedDirectory'] = join(_paths['dataDirectory'],
                                            'processed')
        _paths['interimDirectory'] = join(_paths['dataDirectory'], 'interim')
        _paths['modelDirectory'] = join(_paths['rootDirectory'], 'models')
        load_env(join(_paths['rootDirectory'], '.env'))
        if 'inventory_path' in environ.keys():
            _paths['inventoryDirectory'] = environ['inventory_path']
        if 'tools_path' in environ.keys():
            _paths['toolsDirectory'] = environ['tools_path']
    except Exception:
        # FIX: was a bare "except:", which also swallowed SystemExit and
        # KeyboardInterrupt. Failure still maps to None for the caller.
        return None
    else:
        return _paths
def env_loader(disable_warnings: bool = False) -> SaliencyWebMapperEnvironment:
    """Load environment variables into a settings object (e.g. USER -> class.user)."""
    args = SaliencyWebMapperEnvironment()
    for key in env.keys():
        key = key.lower()
        if key not in dir(args):
            continue
        data_type: type = type(getattr(args, key))
        if data_type is bool:
            # Anything other than the literal "false" counts as True.
            args.__setattr__(key, env[key.upper()].lower() != "false")
        else:
            # Watch out that str can be converted to target type
            args.__setattr__(key, data_type(env[key.upper()]))
    if not disable_warnings:
        for key in dir(args):
            if not key.upper() in env.keys() and not key.startswith('_'):
                logging.warning(
                    f"environment variable {key} not set, using default setting value {args.__getattribute__(key)} for {key}."
                )
    return args
def get(self, key):
    """Look up a TXTRADER config value from the environment.

    Tries the label-scoped name (TXTRADER<label>_<key>) first, then falls
    back to the plain TXTRADER_<key>; a miss on both is reported but still
    raises KeyError from the final lookup.
    """
    name = 'TXTRADER%s_%s' % (self.label, key)
    if name not in environ.keys():
        # Fall back to the unlabelled variable.
        name = 'TXTRADER_%s' % key
        if name not in environ.keys():
            print('ERROR: Config.get(%s) failed' % key)
    return environ[name]
def RunRman(RCV, ErrChk=True, ConnectString='target /'):
    # Run an RMAN command script (RCV) against the local database and return
    # (rc, stdout, error_list) when ErrChk is on, or just stdout otherwise.
    # NOTE(review): Python 2 code (print statements); behavior preserved.
    if (ConnectString == '/ as sysdba'):
        if (not ('ORACLE_SID' in environ.keys())):
            print 'ORACLE_SID must be set if connect string is:' + ' \'' + ConnectString + '\''
            return (1, '', [])
        if (not ('ORACLE_HOME' in environ.keys())):
            OracleSid, OracleHome = SetOracleEnv(environ['ORACLE_SID'])
    # Set the location of the ORACLE_HOME. If ORACLE_HOME is not set
    # then we'll use the first one we find in the oratab file.
    if ('ORACLE_HOME' in environ.keys()):
        OracleHome = environ['ORACLE_HOME']
        Rman = OracleHome + '/bin/rman'
    else:
        Oratab = LoadOratab()
        if (len(Oratab) >= 1):
            SidList = Oratab.keys()
            OracleSid = SidList[0]
            OracleHome = Oratab[SidList[0]]
            environ['ORACLE_HOME'] = OracleHome
            Rman = OracleHome + '/bin/rman'
        else:
            print 'ORACLE_HOME is not set'
            return (1, '', [])
    # Start Rman and login
    proc = Popen([Rman, 'target', '/'], bufsize=-1, stdin=PIPE, stdout=PIPE, stderr=STDOUT, \
        shell=False, universal_newlines=True, close_fds=True)
    # Execute the Sql and fetch the output -
    # Stderr is just a placeholder. We redirected stderr to stdout as follows 'stderr=STDOUT'.
    (Stdout, Stderr) = proc.communicate(RCV)
    # Check for rman errors
    if (ErrChk):
        from Oracle import ErrorCheck, ErrorCheck, LookupError, LoadFacilities
        # Components are installed applications/components such as sqlplus, import, export, rdbms, network, ...
        # ComponentList contains a list of all components for which the error code will be searched.
        # For example a component of rdbms will result in ORA-nnnnn errors being included in the search.
        # ALL_COMPONENTS is an override in the ErrorCheck function that results in *all* installed components
        # being selected. Searching all component errors is pretty fast so for now we'll just search them all.
        # -------------------------------------------------------------------------------------------------------
        #ComponentList = ['sqlplus','rdbms','network','crs','css','evm','has','oracore','plsql','precomp','racg','srvm','svrmgr']
        ComponentList = ['ALL_COMPONENTS']
        # Brief explanation of what is returned by ErrorCheck()
        # ------------------------------------------------------
        # rc is the return code (0 is good, anything else is bad). ErrorList is a list of list structures
        # (a 2 dimensional arrray in other languages). Each outer element of the array represents 1 error found
        # Sql output. Each inner element has two parts (2 fields), element[0] is the Oracle error code and
        # element[1] is the full line of text in which the error was found.
        # For example an ErrorList might look like this:
        # [['ORA-00001', 'ORA-00001: unique constraint...'],['ORA-00018', 'ORA-00018, 00000, "maximum number of..."']]
        (rc, ErrorList) = ErrorCheck(Stdout, ComponentList)
        return (rc, Stdout, ErrorList)
    else:
        return (Stdout)
def read_feide_user():
    """
    Read Feide credentials from the environment.

    :return: tuple ``(username, password)``.
    :raises Exception: when FUSER or FPASSWORD is not set.
    """
    if 'FUSER' not in environ.keys():
        raise Exception("No feide username found. Create env var: FUSER")
    if 'FPASSWORD' not in environ.keys():
        # BUG FIX: the message previously said "username" for the
        # password variable, which misled users about what was missing.
        raise Exception("No feide password found. Create env var: FPASSWORD")
    username = environ['FUSER']  # Feide username
    password = environ['FPASSWORD']  # Feide password
    return username, password
def get_environ():
    """Return (nc_cli, btc_cli) from the NC_CLI/BTC_CLI env vars, or exit(1)."""
    keys = environ.keys()
    if 'NC_CLI' not in keys or 'BTC_CLI' not in keys:
        print("Read the source Luke! You must set BTC_CLI and NC_CLI enviroinment variables")
        exit(1)
    return (environ['NC_CLI'], environ['BTC_CLI'])
def _extract_aws_credentials() -> (str, str):
    """
    Resolve the AWS access key id and secret, preferring the *_FILE
    variants (path of a file containing the value) over direct env values.

    :return: tuple ``(aws_access_key_id, aws_secret_access_key)``.
    """
    # Get access key id first
    if CredBuilder.AWS_ACCESS_KEY_ID_FILE_KEY in environ.keys():
        with open(environ.get(CredBuilder.AWS_ACCESS_KEY_ID_FILE_KEY),
                  'r') as f:
            # BUG FIX: readline() keeps the trailing newline, which would be
            # written verbatim into the generated credentials file; strip it.
            aws_access_key_id = f.readline().strip()
    else:
        aws_access_key_id = environ.get(CredBuilder.AWS_ACCESS_KEY_ID_KEY)

    if CredBuilder.AWS_SECRET_ACCESS_KEY_FILE in environ.keys():
        with open(environ.get(CredBuilder.AWS_SECRET_ACCESS_KEY_FILE),
                  'r') as f:
            aws_secret_access = f.readline().strip()
    else:
        aws_secret_access = environ.get(CredBuilder.AWS_SECRET_ACCESS_KEY)

    return aws_access_key_id, aws_secret_access
def load_config(env_ini: Optional[str] = None) -> None:
    """Load the [env] section of an ini file into os.environ and verify that
    all REQUIRED_CONFIG_KEYS are present afterwards."""
    if env_ini is None:
        log.warning("not loading env from any config file")
    else:
        log.info("loading from %s", path.abspath(env_ini))
        parser = configparser.ConfigParser()
        # mypy confused by this unusual pattern
        # https://github.com/python/mypy/issues/708
        parser.optionxform = str  # type: ignore
        parser.read(env_ini)
        environ.update(parser["env"].items())
    present = set(environ.keys())
    if not REQUIRED_CONFIG_KEYS.issubset(present):
        raise RuntimeError("incomplete configuration! missing keys: %s"
                           % REQUIRED_CONFIG_KEYS.difference(present))
def start_spark(**kwargs):
    """Create a Spark session, logger and settings dict.

    Keyword arguments are forwarded to ``load_settings``. When a DEBUG
    environment variable is present, S3 credentials are attached to the
    session.

    Returns:
        tuple: (spark_session, spark_logger, settings)
    """
    # detect execution environment
    flag_debug = "DEBUG" in environ.keys()
    settings = load_settings(kwargs)

    builder = SparkSession.builder
    print("getting spark session")
    builder.config(conf=get_spark_app_config(settings["spark_app_configs"]))

    # create session and retrieve Spark logger object
    session = builder.getOrCreate()
    session.sparkContext.setLogLevel(settings.get("logLevel", "INFO"))
    logger = Log4j(session)

    if flag_debug:
        s3_credential(session)
    print("spark session created")
    return session, logger, settings
def validate_input(args) -> ip_address:
    """
    Validates and formulates user input.

    Falls back to the RHOST environment variable when no target host was
    given on the command line (argparse default string "None").

    Arguments:
        args(argparser): Arguments to validate
    Return:
        ip_address: Default target ip address
    """
    ip = None
    try:
        if "None" not in args.target:
            ip = ip_address(args.target)
        elif "RHOST" in environ.keys():
            # No explicit target: use the RHOST environment variable.
            print(Util().msg("Using Environment variable for IP address"))
            ip = ip_address(environ["RHOST"])
    except ValueError:
        print(Util().err_msg(
            "Argument or environment variable was not a valid IP address"))
        sys.exit()
    return ip
def do_GET(self):
    # Serve a TiVo recording: the request path carries a base64url-encoded
    # source URL, which is fetched via curl (digest auth as user "tivo"),
    # piped through tivodecode, and streamed to the HTTP client.
    # NOTE(review): Python 2 code ("except Exception, e" syntax).
    try:
        url = base64.b64decode(string.split(self.path[1:], "/")[0], "-_")
        Log("GET URL: %s" % url)
        if sys.platform != "win32":
            self.send_response(200)
            self.send_header('Content-type', 'video/mpeg2')
            self.end_headers()
            tvd = getTvd()
            curl = getCurl()
            Log.Debug("TVD: %s" % tvd)
            Log.Debug("CMD: %s %s %s %s %s %s %s %s" % (curl, url, "--digest", "-s", "-u", "tivo:"+getMyMAC(), "-c", "/tmp/cookies.txt"))
            Log.Debug(" PIPED to: %s %s %s %s" % (tvd, "-m", getMyMAC(), "-"))
            # NOTE(review): presumably cleared so the spawned curl does not
            # pick up this process's bundled libraries — confirm.
            if "LD_LIBRARY_PATH" in environ.keys():
                del environ["LD_LIBRARY_PATH"]
            curlp = Popen([curl, url, "--digest", "-s", "-u", "tivo:"+getMyMAC(), "-c", "/tmp/cookies.txt"], stdout=PIPE)
            tivodecode = Popen([tvd, "-m", getMyMAC(), "-"],stdin=curlp.stdout, stdout=PIPE)
            Log("Starting decoder")
            # Copy decoded output to the client in 4192-byte chunks until EOF.
            while True:
                data = tivodecode.stdout.read(4192)
                if not data:
                    break
                self.wfile.write(data)
    except Exception, e:
        Log("Unexpected error: %s" % e)
async def find_or_create_container(self) -> None:
    """Ensure the named code-server container exists and is running.

    An existing container is started if stopped; otherwise a new one is
    created (with a bind-mounted home volume when VOLUMEPATH is set).
    """
    existing_names = [c.name for c in self.client.containers.list(all=True)]
    if self.container_name in existing_names:
        container = self.client.containers.get(self.container_name)
        if container.status != "running":
            container.start()
    else:
        volumes_obj = None
        if "VOLUMEPATH" in env.keys():
            home_dir_path = f"{env['VOLUMEPATH']}/{self.container_name}"
            if not os.path.exists(home_dir_path):
                # Create the home dir (and coder subdir) owned by the
                # container user.
                os.mkdir(home_dir_path)
                os.chown(home_dir_path, uid=1000, gid=1024)
                os.mkdir(f"{home_dir_path}/coder")
                os.chown(f"{home_dir_path}/coder", uid=1000, gid=1024)
            volumes_obj = {home_dir_path: {"bind": "/home", "mode": "rw"}}
        self.client.containers.run(
            "codercom/code-server:latest",
            "--auth none",
            detach=True,
            name=self.container_name,
            network="my_network",
            volumes=volumes_obj,
            user="******",
        )
    await self.wait_for_container()
def parse_args():
    """
    Parses the arguments

    Defaults come from the environment: project_root (required),
    camera_stream and model_type.

    Returns:
        argparse Namespace
    """
    # NOTE(review): assert is stripped under python -O; an explicit check
    # would be more robust, but the original contract is kept here.
    assert 'project_root' in environ.keys()
    project_root = getenv('project_root')

    parser = argparse.ArgumentParser()
    parser.add_argument("--input", type=str,
                        default=getenv('camera_stream'))
    parser.add_argument("--model", type=str,
                        default=join(project_root, getenv('model_type')))
    parser.add_argument("--cpu", dest="use_cuda",
                        action="store_false", default=True)
    return parser.parse_args()
def session(self):
    # Lazily detect the running desktop environment, cache the matching
    # entry of Pds.SupportedDesktops in self._session, and return it.
    if not self._session:
        env = getenv('DESKTOP_SESSION')
        if not env:
            # DESKTOP_SESSION unset: look for a *_DESKTOP-style variable
            # (e.g. KDE_DESKTOP) and use its prefix as the session name.
            for var in list(environ.keys()):
                v = var.split('_')
                if len(v) < 2:
                    continue
                elif v[1] == 'DESKTOP':
                    env = v[0].lower()
                    break
        elif env == 'default' or env == 'gnome':
            # Generic session name: read the real one from the system default.
            session = readfile('/etc/default/desktop', DefaultDe.Name)
            env = session.split('=')[1].strip()
        for de in Pds.SupportedDesktops:
            if env:
                if env in de.SessionTypes or env.lower() == de.Name.lower(
                ):
                    self._session = de
            else:
                # No session name found: fall back to matching a
                # version-identifying environment variable.
                if de.VersionKey:
                    if getenv(de.VersionKey) == de.Version:
                        self._session = de
        if not self._session:
            self._session = DefaultDe
        else:
            # Refine: prefer the entry that also matches self.version.
            for de in Pds.SupportedDesktops:
                if de.Version == self.version and (env in de.SessionTypes
                                                   or env == de.Name):
                    self._session = de
    return self._session
def do_GET(self):
    # Serve a TiVo recording: the request path carries a base64url-encoded
    # source URL, which is fetched via curl (digest auth as user "tivo"),
    # piped through tivodecode, and streamed to the HTTP client.
    # NOTE(review): Python 2 code ("except Exception, e" syntax).
    try:
        url = base64.b64decode(string.split(self.path[1:], "/")[0], "-_")
        Log("GET URL: %s" % url)
        if sys.platform != "win32":
            self.send_response(200)
            self.send_header('Content-type', 'video/mpeg2')
            self.end_headers()
            tvd = getTvd()
            curl = getCurl()
            Log.Debug("TVD: %s" % tvd)
            Log.Debug("CMD: %s %s %s %s %s %s %s %s" %
                      (curl, url, "--digest", "-s", "-u",
                       "tivo:" + getMyMAC(), "-c", "/tmp/cookies.txt"))
            Log.Debug(" PIPED to: %s %s %s %s" % (tvd, "-m", getMyMAC(), "-"))
            # NOTE(review): presumably cleared so the spawned curl does not
            # pick up this process's bundled libraries — confirm.
            if "LD_LIBRARY_PATH" in environ.keys():
                del environ["LD_LIBRARY_PATH"]
            curlp = Popen([
                curl, url, "--digest", "-s", "-u", "tivo:" + getMyMAC(), "-c",
                "/tmp/cookies.txt"
            ], stdout=PIPE)
            tivodecode = Popen([tvd, "-m", getMyMAC(), "-"],
                               stdin=curlp.stdout, stdout=PIPE)
            Log("Starting decoder")
            # Copy decoded output to the client in 4192-byte chunks until EOF.
            while True:
                data = tivodecode.stdout.read(4192)
                if not data:
                    break
                self.wfile.write(data)
    except Exception, e:
        Log("Unexpected error: %s" % e)
def start_spark(app_name="spark-app", master="local[*]", jar_packages=None,
                files=None, spark_config=None):
    """
    Create a Spark session, logger and config dict.

    Outside a REPL and without a DEBUG env var, the master is left to the
    spark-submit environment; otherwise ``master`` is used directly.

    :param app_name: Spark application name.
    :param master: master URL used for REPL/debug runs.
    :param jar_packages: Maven coordinates to load (default: none).
    :param files: files shipped to the cluster; ``files[0]`` must be a
        JSON config file.
    :param spark_config: extra key/value Spark config settings.
    :return: tuple (spark_session, spark_logger, config_dict).
    """
    # BUG FIX: the defaults were mutable ([] / {}), which Python shares
    # across calls; use None sentinels instead.
    jar_packages = [] if jar_packages is None else jar_packages
    files = [] if files is None else files
    spark_config = {} if spark_config is None else spark_config

    # detect execution environment
    flag_repl = not (hasattr(__main__, "__file__"))
    flag_debug = "DEBUG" in environ.keys()
    if not (flag_repl or flag_debug):
        # get Spark session factory
        spark_builder = SparkSession.builder.appName(app_name)
    else:
        # get Spark session factory
        spark_builder = SparkSession.builder.master(master).appName(app_name)

    # create Spark JAR packages string
    spark_builder.config("spark.jars.packages", ",".join(list(jar_packages)))
    spark_builder.config("spark.files", ",".join(list(files)))

    # add other config params
    for key, val in spark_config.items():
        spark_builder.config(key, val)

    # create session and retrieve Spark logger object
    spark_sess = spark_builder.getOrCreate()
    spark_logger = logging.Log4j(spark_sess)

    # get config file if sent to cluster with --files
    # NOTE(review): raises IndexError when ``files`` is empty — confirm
    # callers always ship at least one config file.
    with open(files[0], "r") as config_file:
        config_dict = json.load(config_file)

    return spark_sess, spark_logger, config_dict
def backward_compatibility_support():
    """
    Mirror CLEARML_* configuration names to their legacy TRAINS_* aliases.

    When backward compatibility is enabled, every supported CLEARML_*
    variable name gains a TRAINS_* alias in the definitions tables, and
    any CLEARML_* value already present in os.environ is copied to its
    TRAINS_* counterpart.
    """
    from ..definitions import ENVIRONMENT_CONFIG, ENVIRONMENT_SDK_PARAMS, ENVIRONMENT_BACKWARD_COMPATIBLE
    if not ENVIRONMENT_BACKWARD_COMPATIBLE.get():
        return
    # Add TRAINS_ prefix on every CLEARML_ os environment we support
    for k, v in ENVIRONMENT_CONFIG.items():
        try:
            trains_vars = [var for var in v.vars
                           if var.startswith('CLEARML_')]
            if not trains_vars:
                continue
            alg_var = trains_vars[0].replace('CLEARML_', 'TRAINS_', 1)
            if alg_var not in v.vars:
                v.vars = tuple(list(v.vars) + [alg_var])
        except Exception:
            # FIX: was a bare "except:"; keep the best-effort semantics but
            # no longer swallow SystemExit/KeyboardInterrupt.
            continue
    for k, v in ENVIRONMENT_SDK_PARAMS.items():
        try:
            trains_vars = [var for var in v if var.startswith('CLEARML_')]
            if not trains_vars:
                continue
            alg_var = trains_vars[0].replace('CLEARML_', 'TRAINS_', 1)
            if alg_var not in v:
                ENVIRONMENT_SDK_PARAMS[k] = tuple(list(v) + [alg_var])
        except Exception:
            continue
    # set OS environ:
    keys = list(environ.keys())
    for k in keys:
        if not k.startswith('CLEARML_'):
            continue
        backwards_k = k.replace('CLEARML_', 'TRAINS_', 1)
        if backwards_k not in keys:
            environ[backwards_k] = environ[k]
def _forward_blaz_env_vars(self):
    """
    Build a ``--env`` argument string forwarding BLAZ-related variables.

    BLAZ_* variables (minus a control-variable blacklist) are forwarded
    with their values expanded now; _BLAZ_* variables are forwarded as
    shell-style ``$NAME`` references for later expansion. BLAZ_VARS /
    _BLAZ_VARS hold extra variable names (non-word separated) to forward
    the same way.
    """
    self._find_uid_and_guid()
    result = []
    # Control variables that must not be forwarded into the container.
    skip = ('BLAZ_LOCK', 'BLAZ_VERSION', 'BLAZ_CHDIR_REL', 'BLAZ_SKIP')
    for k in environ.keys():
        if k.startswith('BLAZ_') and k not in skip:
            result.append(''' --env={}="{}" '''.format(k, environ[k]))
            if k.startswith('BLAZ_VARS'):
                # FIX: regex patterns are now raw strings — '\W' is an
                # invalid escape in a plain literal (DeprecationWarning).
                for j in re.split(r'\W+', environ[k]):
                    result.append(''' --env={}="{}" '''.format(j, environ[j]))
        elif k.startswith('_BLAZ_'):
            result.append(''' --env={0}="${0}" '''.format(k))
            if k.startswith('_BLAZ_VARS'):
                for j in re.split(r'\W+', environ[k]):
                    result.append(''' --env={0}="${0}" '''.format(j))
    return ''.join(result)
def session(self):
    # Lazily detect the running desktop environment, cache the matching
    # entry of Pds.SupportedDesktops in self._session, and return it.
    if not self._session:
        env = getenv('DESKTOP_SESSION')
        if not env:
            # DESKTOP_SESSION unset: look for a *_DESKTOP-style variable
            # (e.g. KDE_DESKTOP) and use its prefix as the session name.
            for var in list(environ.keys()):
                v = var.split('_')
                if len(v) < 2:
                    continue
                elif v[1] == 'DESKTOP':
                    env = v[0].lower()
                    break
        elif env == 'default' or env == 'gnome':
            # Generic session name: read the real one from the system default.
            session = readfile('/etc/default/desktop', DefaultDe.Name)
            env = session.split('=')[1].strip()
        for de in Pds.SupportedDesktops:
            if env:
                if env in de.SessionTypes or env.lower() == de.Name.lower():
                    self._session = de
            else:
                # No session name found: fall back to matching a
                # version-identifying environment variable.
                if de.VersionKey:
                    if getenv(de.VersionKey) == de.Version:
                        self._session = de
        if not self._session:
            self._session = DefaultDe
        else:
            # Refine: prefer the entry that also matches self.version.
            for de in Pds.SupportedDesktops:
                if de.Version == self.version and (env in de.SessionTypes
                                                   or env == de.Name):
                    self._session = de
    return self._session
def __init__(self):
    """Create the MySQL engine from HBNB_MYSQL_* env vars; drop all tables
    when running in the test environment (HBNB_ENV == "test")."""
    user = environ['HBNB_MYSQL_USER']
    pwd = environ['HBNB_MYSQL_PWD']
    host = environ['HBNB_MYSQL_HOST']
    database = environ['HBNB_MYSQL_DB']
    url = 'mysql+mysqldb://{}:{}@{}/{}'.format(user, pwd, host, database)
    # pool_pre_ping guards against stale MySQL connections.
    self.__engine = create_engine(url, pool_pre_ping=True)
    if environ.get('HBNB_ENV') == "test":
        Base.metadata.drop_all(self.__engine)
def validate_input(args) -> ip_address:
    """Resolve the target IP from args.target (or the RHOST env var) and
    reject a markdown argument that points at a directory; exits on error."""
    ip = None
    try:
        if args.target:
            ip = ip_address(args.target)
        elif "RHOST" in environ.keys():
            # No target supplied: fall back to the RHOST environment variable.
            print(Util().msg("Using Environment variable for IP address"))
            ip = ip_address(environ["RHOST"])
    except ValueError:
        print(Util().err_msg("Argument or environment variable was not a valid IP address"))
        sys.exit()
    # Input check file
    if args.markdown and Path(args.markdown).is_dir():
        print(Util().err_msg("Given argument is a path and not a file"))
        sys.exit()
    return ip
def make_postactivate_text(site_url):
    """
    Generate the text of a shell script to run on virtualenv activation.

    Returns the contents as a tuple containing a string and a dictionary.
    """
    # Seed settings from PREFIX-ed environment variables.
    settings = {}
    for key in environ.keys():
        if not key.startswith(PREFIX):
            continue
        plain = key.replace(PREFIX, '', 1).lower().replace('_', ' ')
        settings[plain] = environ.get(key)

    slug = site_url.replace('.', '_')
    settings.update({
        'source folder': '{root}/{url}/source'.format(
            root=WEBSERVER_ROOT,
            url=site_url,
        ),
        'site url': site_url,
        'settings module': '{module}.{version}'.format(
            module=SETTINGS_MODULE,
            version=site_url.split('.')[0],
        ),
        'secret key': _make_random_sequence(50),
        'db password': _make_random_sequence(50),
        'db user': slug,
        'db name': slug,
        'user': slug,
        'debug toolbar internal ips': _find_my_ip_address(),
    })
    if site_url in OVERRIDES:
        settings.update(OVERRIDES[site_url])

    chunks = [('#!/bin/bash\n'
               '# This hook is run after the virtualenv is activated.\n\n'
               '# Environmental variables for django projects.\n\n')]
    for key in sorted(settings):
        chunks.append('export {prefix}{key}="{value}"\n'.format(
            prefix=PREFIX,
            key=key.replace(' ', '_').upper(),
            value=settings[key],
        ))
    chunks.append(
        '\n'
        'export PYTHONPATH="$DJANGO_SOURCE_FOLDER:$PYTHONPATH"\n'
        'export PATH="$(dirname $DJANGO_SOURCE_FOLDER)/node_modules/.bin:$PATH"\n'
        'export PYTHONWARNINGS=ignore\n'
        'cd $DJANGO_SOURCE_FOLDER\n')
    return ''.join(chunks), settings
def update_normalizer_db(normalizer, prod, db_url, update_all, update_merged):
    """Update selected normalizer source(s) in the gene database."""
    if prod:
        # Production flag is picked up by the Database constructor.
        environ['GENE_NORM_PROD'] = "TRUE"
        db: Database = Database()
    else:
        if db_url:
            endpoint_url = db_url
        elif 'GENE_NORM_DB_URL' in environ.keys():
            endpoint_url = environ['GENE_NORM_DB_URL']
        else:
            endpoint_url = 'http://localhost:8000'
        db: Database = Database(db_url=endpoint_url)

    if update_all:
        CLI()._update_normalizers(list(SOURCES), db, update_merged)
        return
    if not normalizer:
        CLI()._help_msg()
        return

    normalizers = normalizer.lower().split()
    if not normalizers:
        raise Exception("Must enter a normalizer")
    non_sources = set(normalizers) - set(SOURCES)
    if non_sources:
        raise Exception(f"Not valid source(s): {non_sources}")
    CLI()._update_normalizers(normalizers, db, update_merged)
def create_jupyter_anaconda(self):
    """
    Creates a batch file which can be used to launch a jupyter lab/notebook
    instance using the virtual environment created within this repo.

    No-op on non-Windows systems.

    :return: None
    """
    if system() != 'Windows':
        return
    Path('notebooks').mkdir(exist_ok=True)
    if 'CONDA_PREFIX' in list(environ.keys()):
        prefix = environ['CONDA_PREFIX']
    else:
        prefix = ''
        print(
            'Warning: The Anaconda Prefix was not found in the environment variables. It is used to a '
            'You therefore need to adapt the jupyter_launcher script. To do this, locate the "activate.bat" '
            'file in your Anaconda distribution.It is expected to be found via C:\\Users\\`UserName`\\Anaconda3. '
            'Insert this path to the Scripts\\activate.bat.')
    # FIX: the original f-string contained the invalid escape sequence '\S'
    # (DeprecationWarning on modern Python); the backslashes are now escaped
    # explicitly. The generated batch text is unchanged.
    jupyter_cmd = (
        f'CALL {prefix}\\Scripts\\activate.bat\n'
        f'CALL conda activate {self.env_name}\n'
        f'CALL jupyter {self.jupyter}')
    file = self.root_dir.joinpath('notebooks/launch_jupyter.bat')
    with open(file, 'w') as jupyter:
        jupyter.write(jupyter_cmd)
    print(
        f"Successfully create a jupyter {self.jupyter} launcher batch file which can be used to activate"
        f" the '{self.env_name}' environment and work in a jupyter {self.jupyter} environment"
    )
def __init__(self, local=False):
    """Build Google service-account credentials from gc_*-prefixed
    environment variables; ``local`` controls how escaped newlines in the
    values are unescaped."""
    scope = [
        'https://spreadsheets.google.com/feeds',
        'https://www.googleapis.com/auth/drive'
    ]
    cred_var_prefix = "gc_"
    prefix_len = len(cred_var_prefix)
    cred_keys = [
        name for name in list(environ.keys())
        if name[:prefix_len] == cred_var_prefix
    ]
    # Local values carry doubly-escaped newlines; deployed values singly.
    escaped_newline = "\\\\n" if local else "\\n"
    cred_dict = {
        ck[prefix_len:]: getenv(ck).replace(escaped_newline, "\n")
        for ck in cred_keys
    }
    self._credentials = ServiceAccountCredentials.from_json_keyfile_dict(
        cred_dict, scope)
def load_params(self):
    """
    Loads params from environment variables
    """
    params = {}
    for key in environ.keys():
        # Only variables carrying the configured prefix (PGM_PREFIX env
        # var, default "PG") are considered.
        if key.startswith(getenv("PGM_PREFIX", "PG")):
            value = getenv(key)
            try:
                # Expected shape: <prefix><DELIM><parameter><DELIM><postfix>.
                # NOTE(review): maxsplit=3 yields four parts for keys with
                # extra delimiters; that also raises ValueError and falls
                # into the two-part branch below — confirm intended.
                prefix, parameter, postfix = key.split(self.DELIMITER, 3)
            except ValueError:
                # No postfix present: bucket the parameter under "default".
                postfix = "default"
                prefix, parameter = key.split(self.DELIMITER, 2)
            if postfix not in params:
                params[postfix] = {}
            params[postfix][parameter] = value
    for key in params.keys():
        # Layer each postfix bucket on top of the class-level defaults.
        params[key] = {**self.PARAMS_DEFAULTS, **params[key]}
    self.ENV_PARAMS = params
def start_spark(app_name="my_spark_app", master="local[*]",
                files=['etl_conf.json']):
    """
    Create a Spark session and load the shipped JSON config file.

    Outside a REPL and without a DEBUG env var, the master is left to the
    spark-submit environment; otherwise ``master`` is used directly.

    :param app_name: Spark application name.
    :param master: master URL used for REPL/debug runs.
    :param files: files to ship with the job; the first ``*conf.json``
        found in the Spark files dir is parsed as the config.
    :return: tuple (spark_session, config_dict); config_dict is None when
        no config file is found.
    """
    flag_repl = not (hasattr(__main__, '__file__'))
    flag_debug = 'DEBUG' in environ.keys()
    if not (flag_repl or flag_debug):
        spark_builder = (SparkSession.builder.appName(app_name))
    else:
        spark_builder = SparkSession.builder.appName(app_name).master(master)

    # BUG FIX: the file list was joined with '.', which mangles multiple
    # entries; spark.files expects a comma-separated list.
    spark_files = ','.join(list(files))
    spark_builder.config('spark.files', spark_files)
    spark_builder.config(conf=SparkConf())

    spark_sess = spark_builder.getOrCreate()
    #spark_logger=logger.Log4j(spark_sess)

    spark_files_dir = SparkFiles.getRootDirectory()
    config_files = [
        x for x in listdir(spark_files_dir) if x.endswith('conf.json')
    ]
    if config_files:
        path_to_config_file = path.join(spark_files_dir, config_files[0])
        with open(path_to_config_file, 'r') as f:
            config_dict = json.load(f)
    else:
        config_dict = None
    return spark_sess, config_dict
def call_subprocess(command, args):
    """
    Call subprocess from external binaries

    @param: command: command name
    @param: args: command's arguments
    """
    if "./" in command:
        # Explicit path: run it directly.
        try:
            subprocess.call([command] + args)
        except FileNotFoundError:
            # BUG FIX: the separator was missing between the command name
            # and the message ("...cmdNo such file or directory").
            print("intek-sh: " + command + ": No such file or directory")
        except PermissionError:
            print("intek-sh: " + command + ": Permission denied")
    elif 'PATH' in environ.keys():
        # Search each PATH entry for the executable; for/else runs the
        # not-found branch only when no entry matched.
        for sub_path in environ["PATH"].split(":"):
            command_path = sub_path + "/" + command
            if path.exists(command_path):
                subprocess.call([command_path] + args)
                break
        else:
            print("intek-sh: " + command + ": command not found")
    else:
        print("intek-sh: " + command + ": command not found")
def SetOracleEnv(Sid, Oratab='/etc/oratab'):
    # Configure ORACLE_SID / ORACLE_HOME (and LD_LIBRARY_PATH) for the given
    # SID, defaulting to the first oratab entry when the SID is unknown.
    # NOTE(review): Python 2 code (print statement, list-indexing .keys());
    # the Oratab parameter is immediately shadowed by LoadOratab()'s result.
    OracleSid = ''
    OracleHome = ''
    Oratab = LoadOratab()
    SidCount = len(Oratab.keys())
    if (SidCount > 0):
        if (Sid in Oratab.keys()):
            OracleSid = Sid
        else:
            OracleSid = Oratab.keys()[
                0]  # Just grab the first ORACLE_SID if none provided.
    if (OracleSid == ''):
        print 'Cannot configure Oracle environment. Try setting your Oracle environment manually.'
        exit(1)
    else:
        OracleHome = Oratab[OracleSid]
        environ['ORACLE_SID'] = OracleSid
        environ['ORACLE_HOME'] = OracleHome
        if ('LD_LIBRARY_PATH' in environ.keys()):
            if (environ['LD_LIBRARY_PATH'] != ''):
                environ[
                    'LD_LIBRARY_PATH'] = OracleHome + '/lib' + ':' + environ[
                        'LD_LIBRARY_PATH']  # prepend to LD_LIBRARY_PATH
            else:
                environ['LD_LIBRARY_PATH'] = OracleHome + '/lib'
        else:
            environ['LD_LIBRARY_PATH'] = OracleHome + '/lib'
    return (OracleSid, OracleHome)
def teardown_environment():
    """Restore things that were remembered by the setup_environment function
    """
    orig_env = GIVEN_ENV['env']
    # BUG FIX: deleting from ``env`` while iterating env.keys() raises
    # RuntimeError on Python 3 — iterate over a snapshot of the keys.
    for key in list(env.keys()):
        if key not in orig_env:
            del env[key]
    env.update(orig_env)
def init_lsf(local_id = 0):
    """
    Init lsf cluster jobs: set up tmpdir on cluster scratch,
    determine job_id and set pfiles to tmpdir on scratch

    kwargs
    ------
    local_id: int, if not on lsf, return this value as job_id

    Returns
    -------
    tuple with tmpdir on cluster scratch and lsf job id
    """
    try:
        # Probe for the LSF batch environment; raises ValueError when absent.
        # NOTE(review): .index() on keys() is a Python 2 idiom — on Python 3
        # dict_keys has no .index, so this raises AttributeError instead,
        # which the except below does NOT catch. Confirm target interpreter.
        environ.keys().index("LSB_JOBNAME")
        job_id = int(environ["LSB_JOBINDEX"])
        tmpdir = mkdir(join('/scratch/{0:s}.{1:s}/'.format(environ['USER'],environ["LSB_JOBID"])))
        logging.info('os.listdir: {0}'.format(listdir(tmpdir)))
        sleep(20.)
        tmpdir = join(tmpdir,'{0:s}.XXXXXX'.format(environ["LSB_JOBID"]))
        p = Popen(["mktemp", "-d", tmpdir], stdout=PIPE)  # make a temporary directory and get its name
        sleep(20.)
        out, err = p.communicate()
        logging.info('out: {0}'.format(out))
        logging.info('err: {0}'.format(err))
        tmpdir = join('/scratch',out.split()[0])
        sleep(20.)
    except ValueError:
        # Not running under LSF: use the caller-supplied id and a local tmp dir.
        job_id = local_id
        #tmpdir = environ["PWD"]
        tmpdir = mkdir(join(environ["PWD"],'tmp/'))
    #system('export PFILES={0:s}:$PFILES'.format(tmpdir))
    #sleep(10.)
    logging.info('tmpdir is {0:s}.'.format(tmpdir))
    if not exists(tmpdir):
        # Hard exit with a distinctive status so batch wrappers can detect it.
        logging.error('Tmpdir does not exist: {0}. Exit 14'.format(tmpdir))
        sys.exit(14)
    return tmpdir,job_id
def print_environ():
    # Emit the CGI environment as an HTML definition list.
    # NOTE(review): Python 2 code (print statements; keys() returns a
    # sortable list).
    skeys = environ.keys()
    skeys.sort()
    print '<h3> The following environment variables ' \
          'were set by the CGI script: </h3>'
    print '<dl>'
    for key in skeys:
        # escape() HTML-encodes both the variable name and its value.
        print '<dt>', escape(key), '<dd>', escape(environ[key])
    print '</dl>'
def teardown_environment():
    """Restore things that were remembered by the setup_environment function
    """
    orig_env = GIVEN_ENV['env']
    # Snapshot the keys first so the mapping is not mutated while being
    # iterated (a Python 3 RuntimeError otherwise).
    added_keys = [k for k in list(env.keys()) if k not in orig_env]
    for k in added_keys:
        del env[k]
    env.update(orig_env)
def teardown_environment():
    """Restore things that were remembered by the setup_environment function
    """
    orig_env = GIVEN_ENV['env']
    # BUG FIX: deleting from ``env`` while iterating env.keys() raises
    # RuntimeError on Python 3 — iterate over a snapshot of the keys.
    for key in list(env.keys()):
        if key not in orig_env:
            del env[key]
    env.update(orig_env)
    # Restore the monkey-patched nipy data-path helpers.
    nud.get_nipy_system_dir = GIVEN_ENV['sys_dir_func']
    nud.get_data_path = GIVEN_ENV['path_func']
def _forward_blaz_env_vars(self):
    """Render ``--env`` arguments for BLAZ_* variables (values expanded
    now) and _BLAZ_* variables (left as $NAME references)."""
    parts = []
    for name in environ.keys():
        if name.startswith('BLAZ_'):
            parts.append(''' --env={}={} '''.format(name, environ[name]))
        elif name.startswith('_BLAZ_'):
            parts.append(''' --env={0}=${0} '''.format(name))
    return ''.join(parts)
def _forward_blaz_env_vars(self):
    """Render ``--env`` arguments for BLAZ_* variables — excluding the
    BLAZ_LOCK/BLAZ_VERSION/BLAZ_CHDIR/BLAZ_SKIP control variables — and
    _BLAZ_* variables (forwarded as $NAME references)."""
    excluded = ('BLAZ_LOCK', 'BLAZ_VERSION', 'BLAZ_CHDIR', 'BLAZ_SKIP')
    parts = []
    for name in environ.keys():
        if name.startswith('BLAZ_') and name not in excluded:
            parts.append(''' --env={}={} '''.format(name, environ[name]))
        elif name.startswith('_BLAZ_'):
            parts.append(''' --env={0}=${0} '''.format(name))
    return ''.join(parts)
def get_pointing(obsID, beam):
    """
    Return the (RA, DEC) pointing of a beam from the observation parset.

    :param obsID: observation id — an int (parset named L<id>.parset) or a
        full parset basename string.
    :param beam: beam index into the parset's beam table.
    :return: tuple (RA, DEC).
    """
    from os import environ
    if 'parsetdir' in environ.keys():
        # BUG FIX: previously read os.environ although only ``environ`` was
        # imported, raising NameError whenever parsetdir was set.
        parsetdir = environ['parsetdir']
    else:
        parsetdir = '/Users/STV/Astro/Analysis/FRAT/parsets/'
    # NOTE(review): parsetdir is computed but unused below (the relative
    # 'parsets/' path is used instead) — confirm intended.
    if type(obsID) == type(1):
        p = bf.get_parameters_new('parsets/L' + str(obsID) + '.parset', True)
    else:
        p = bf.get_parameters_new('parsets/' + obsID + '.parset', True)
    RA = p['beam'][beam]['RA']
    DEC = p['beam'][beam]['DEC']
    return (RA, DEC)
def make_postactivate_text(site_url):
    """
    Generate the text of a shell script to run on virtualenv activation.

    Returns the contents as a tuple containing a string and a dictionary.
    """
    # Seed settings from PREFIX-ed environment variables.
    settings = {
        key.replace(PREFIX, '', 1).lower().replace('_', ' '): environ.get(key)
        for key in environ.keys() if key.startswith(PREFIX)
    }
    slug = site_url.replace('.', '_')
    settings.update({
        'source folder': '{root}/{url}/source'.format(root=WEBSERVER_ROOT,
                                                      url=site_url, ),
        'site url': site_url,
        'settings module': '{module}.{version}'.format(
            module=SETTINGS_MODULE,
            version=site_url.split('.')[0], ),
        'secret key': _make_random_sequence(50),
        'db password': _make_random_sequence(50),
        'db user': slug,
        'db name': slug,
        'user': slug,
        'debug toolbar internal ips': _find_my_ip_address(),
    })
    if site_url in OVERRIDES:
        settings.update(OVERRIDES[site_url])

    header = ('#!/bin/bash\n'
              '# This hook is run after the virtualenv is activated.\n\n'
              '# Environmental variables for django projects.\n\n')
    exports = ''.join(
        'export {prefix}{key}="{value}"\n'.format(
            prefix=PREFIX,
            key=key.replace(' ', '_').upper(),
            value=settings[key],
        ) for key in sorted(settings))
    footer = ('\n'
              'export PYTHONPATH="$DJANGO_SOURCE_FOLDER:$PYTHONPATH"\n'
              'export PATH="$(dirname $DJANGO_SOURCE_FOLDER)/node_modules/.bin:$PATH"\n'
              'export PYTHONWARNINGS=ignore\n'
              'cd $DJANGO_SOURCE_FOLDER\n')
    return header + exports + footer, settings
def updateNodeIPs(env, nodes):
    "Update env dict and environ with node IPs"
    # Clear stale ONOS/OC* entries before repopulating.
    for var in ('ONOS_NIC', 'ONOS_CELL', 'ONOS_INSTANCES'):
        env[var] = ''
    for var in environ.keys():
        if var.startswith('OC'):
            env[var] = ''
    # One OC<n> variable per node, 1-based.
    for index, node in enumerate(nodes, 1):
        env['OC%d' % index] = node.IP()
    env['OCI'] = env['OCN'] = env['OC1']
    env['ONOS_INSTANCES'] = '\n'.join(node.IP() for node in nodes)
    environ.update(env)
    return env
def _kontrol(self):
    # Dispatch the CLI subcommands; requires running under sudo (SUDO_UID
    # present in the environment).
    # NOTE(review): Python 2 code (print statements); runtime messages are
    # Turkish and are preserved verbatim.
    if 'SUDO_UID' in environ.keys():
        if len(argv) == 2:
            if argv[1] == 'yenile':  # "refresh"
                self._yenile()
            if argv[1] == 'kur':  # "install"
                self._kur()
            elif argv[1] == '--versiyon' or argv[1] == '-v':  # version info
                print VERSIYON
                exit()
        if len(argv) < 3:
            # Not enough arguments: print usage hint and exit.
            print '\033[1m\033[91mHATA:\033[0m Lüften programı bir komut ile çalıştırın: "sudo vhost3 <platform> <islem>"'
            exit()
    else:
        # Not running as root via sudo.
        print '\033[1m\033[91mHATA:\033[0m Yönetici girişi yapmak gerek. Lüften programı "sudo vhost3 <platform> <islem>" komutu ile çalıştırın'
        exit()
def initialize_config(config_file_name='env.yaml'):
    """Populate ``os.environ`` with DB/Redis connection settings.

    If every required key is already present in the environment (e.g. a
    deployed container), DEBUG is forced off and nothing is read from
    disk; otherwise settings are loaded from the YAML file next to this
    module and DEBUG is turned on.

    :param str config_file_name: name of the YAML config file.
    :raises Exception: when neither environment config nor the YAML file
        exists.
    """
    config_keys = ['DBSERVER', 'DBNAME', 'DBUSER', 'DBPASS', 'DBPORT',
                   'REDISHOST', 'REDISPORT', 'REDISPASS']
    if contains(config_keys, list(environ.keys())):
        environ['DEBUG'] = 'False'
        return
    config_file_path = path.join(path.dirname(path.abspath(__file__)),
                                 config_file_name)
    if not path.exists(config_file_path):
        raise Exception('env.yaml required for config initialization')
    with open(config_file_path, 'r') as config_file:
        # safe_load: plain yaml.load without a Loader can construct
        # arbitrary Python objects and is deprecated/unsafe.
        config = yaml.safe_load(config_file)
    # environ values must be strings; ports are typically ints in YAML.
    config['dbconfig']['DBPORT'] = str(config['dbconfig']['DBPORT'])
    config['redisconfig']['REDISPORT'] = str(config['redisconfig']['REDISPORT'])
    environ.update(config['dbconfig'])
    environ.update(config['redisconfig'])
    environ['DEBUG'] = 'True'
def __environ(values, remove=None):
    """
    Generator that temporarily modifies the environment for a test,
    adding/updating values in dict `values` and removing any environment
    variables mentioned in list `remove`; the original environment is
    restored after the ``yield``. (Presumably wrapped by
    ``contextlib.contextmanager`` at the definition site — confirm.)

    :param dict values: variables to add or overwrite.
    :param list remove: variable names to delete, if present.
    """
    # Mutable default ([]) would be shared across calls; use None sentinel.
    remove = [] if remove is None else remove
    # Keys *introduced* by `values` must be deleted again on exit, since
    # restoring via update() cannot remove them. (Original computed the
    # set difference the wrong way round, which would have deleted most of
    # the pre-existing environment.)
    added_keys = set(values.keys()) - set(environ.keys())
    old_environ = environ.copy()
    try:
        environ.update(values)
        for to_remove in remove:
            try:
                # Original bug: `del environ[remove]` used the whole list.
                del environ[to_remove]
            except KeyError:
                pass
        yield
    finally:
        environ.update(old_environ)
        for key in added_keys:
            # pop(): the key may already be gone if it was also in `remove`.
            environ.pop(key, None)
def stderr(exception, ctx=None):
    """Report *exception* to the user and terminate the CLI.

    Chooses a friendly message for known exception types, then emits it
    either as JSON or plain/colorized text depending on the click context
    flags, and exits with status 1 (via sys.exit or ctx.fail).
    """
    # Log the full traceback when available; format_exc itself can fail
    # outside an active exception handler, so fall back to the object.
    try:
        LOGGER.error(traceback.format_exc())
    except Exception:
        LOGGER.error(exception)
    # Map well-known exception types to user-friendly messages.
    if type(exception) == UnauthorizedException:
        message = 'Session has expired or is invalid, please login again.'
    elif type(exception) == AccessForbiddenException:
        message = 'Access to the resource is forbidden, please login ' \
            'with the required credentials and access level.'
    elif type(exception) == RequestTimeoutException:
        message = 'The server timed out waiting for the request, ' \
            'please check your connection.'
    elif hasattr(exception, 'message'):
        message = exception.message
    else:
        message = str(exception)
    if ctx is not None and ctx.find_root().params['json_output']:
        # JSON output mode: wrap the message and pretty-print it.
        message = {'error': str(message)}
        text = json.dumps(
            message,
            sort_keys=True,
            indent=4,
            separators=(',', ': '))
        if sys.version_info[0] < 3:
            # NOTE(review): Python 2 compatibility branch — str(text,
            # 'utf-8') looks like it was meant to decode to unicode;
            # verify it behaves as intended under py2.
            text = str(text, 'utf-8')
        if ctx.find_root().params['is_colorized']:
            message = highlight(text, lexers.JsonLexer(),
                                formatters.TerminalFormatter())
        else:
            message = text
        click.echo(message)
        sys.exit(1)
    else:
        # Clear the current terminal line before printing the error.
        click.echo('\x1b[2K\r', nl=False)
        if ctx is not None:
            if ctx.find_root().params['is_colorized']:
                message = Fore.RED + str(message)
            # ctx.fail() prints the message and aborts with exit code 2.
            ctx.fail(message)
        else:
            # No click context: honour the env toggle for colored output.
            if 'VCD_USE_COLORED_OUTPUT' in environ.keys() and \
                    environ['VCD_USE_COLORED_OUTPUT'] != '0':
                message = Fore.RED + str(message)
            click.echo(message)
            sys.exit(1)
def load_environment(env_path=None, silent=True):
    """Load KEY=VALUE pairs from a dotenv-style file into ``os.environ``.

    :param env_path: path to the env file; defaults to ``~/.env``, falling
        back to ``$NOTEBOOK_HOME/.env`` when ``~/.env`` does not exist.
    :param silent: accepted for API compatibility; currently unused.
    """
    # load the environment path (if it exists)
    if env_path is None:
        env_path = expanduser("~/.env")
        # Fixed: previously read `os.environ` although only `environ` is
        # imported here, which would raise NameError.
        if not exists(env_path) and 'NOTEBOOK_HOME' in environ:
            env_path = join(environ['NOTEBOOK_HOME'], '.env')
    if not exists(env_path):
        return
    with open(env_path, 'r') as f:
        lines = f.readlines()
    print('Adding the following system variables:')
    for line in lines:
        line = line.strip()
        # Skip blank or malformed lines instead of crashing.
        if not line or '=' not in line:
            continue
        # Split only on the first '=' so values may themselves contain '='.
        k, v = line.split('=', 1)
        environ[k] = v
        print('   %s = %s' % (k, v))
    print('\nThese can be accessed using the following command: ')
    print('   os.environ[key]')
    # Guarded: the example variable may legitimately be absent.
    if 'HS_USR_NAME' in environ:
        print('\n   (e.g.)\n   os.environ["HS_USR_NAME"] => %s'
              % environ['HS_USR_NAME'])
def load_environment(env_path=None, silent=True):
    """Load KEY=VALUE pairs from ``$NOTEBOOK_HOME/.env`` into ``os.environ``.

    :param env_path: explicit path to the env file; when None, derived
        from the NOTEBOOK_HOME environment variable.
    :param silent: accepted for API compatibility; currently unused.
    """
    # load the environment path (if it exists)
    if env_path is None:
        if 'NOTEBOOK_HOME' in environ:
            env_path = join(environ['NOTEBOOK_HOME'], '.env')
    # Fixed: env_path may still be None here (NOTEBOOK_HOME unset), and
    # exists(None) raises TypeError.
    if env_path is None or not exists(env_path):
        print('\nEnvironment file could not be found. Make sure that the JUPYTER_ENV variable is set properly')
        return
    with open(env_path, 'r') as f:
        lines = f.readlines()
    print('Adding the following system variables:')
    for line in lines:
        line = line.strip()
        # Skip blank or malformed lines instead of crashing.
        if not line or '=' not in line:
            continue
        # Split only on the first '=' so values may themselves contain '='.
        k, v = line.split('=', 1)
        environ[k] = v
        print('   %s = %s' % (k, v))
    print('\nThese can be accessed using the following command: ')
    print('   os.environ[key]')
    # Guarded: the example variable may legitimately be absent.
    if 'HS_USR_NAME' in environ:
        print('\n   (e.g.)\n   os.environ["HS_USR_NAME"] => %s'
              % environ['HS_USR_NAME'])
def __init__(self, layerpath, dico_layer, dico_fields, tipo, text=''): u""" Uses gdal/ogr functions to extract basic informations about geographic file (handles shapefile or MapInfo tables) and store into the dictionaries. layerpath = path to the geographic file dico_layer = dictionary for global informations dico_fields = dictionary for the fields' informations tipo = shp or tab text = dictionary of text in the selected language """ # checking the path to GDAL in the path if "GDAL_DATA" not in env.keys(): try: gdal.SetConfigOption(str('GDAL_DATA'), str(path.abspath(r'data/gdal'))) except: pass else: pass # Creating variables self.alert = 0 source = ogr.Open(layerpath, 0) # OGR driver if not source: u""" if layer doesn't have any object, return an error """ ## print 'no compatible source' self.erratum(dico_layer, layerpath, u'err_nobjet') self.alert = self.alert +1 self.layer = source.GetLayer() # get the layer if self.layer.GetFeatureCount() == 0: u""" if layer doesn't have any object, return an error """ self.erratum(dico_layer, layerpath, u'err_nobjet') self.alert = self.alert +1 return None if tipo == 'shape': try: obj = self.layer.GetFeature(0) # get the first object (shp) self.geom = obj.GetGeometryRef() # get the geometry except AttributeError, e: pass
def dlThread():
    # Background worker: drains DL_QUEUE, piping each recording through
    # curl (digest auth against the TiVo) into tivodecode to produce a
    # decrypted file on disk.
    global GL_CURL_PID
    global DownloadThread
    global DL_QUEUE
    while True:
        # Peek at the head of the queue; stop when it is empty.
        if DL_QUEUE:
            (fileName, url) = DL_QUEUE[0]
        else:
            break
        try:
            tvd = getTvd()
            curl = getCurl()
            Log.Debug("CMD: %s \"%s\" %s %s %s %s %s %s" % (curl, url, "--digest", "-s", "-u", "tivo:"+getMyMAC(), "-c", "/tmp/cookies.txt"))
            Log.Debug(" PIPED to: \"%s\" %s %s %s \"%s\" %s" % (tvd, "-m", getMyMAC(), "-o", fileName, "-"))
            Log("Downloading: %s From: %s", fileName, url)
            # tivodecode can misbehave with a stray LD_LIBRARY_PATH set.
            if "LD_LIBRARY_PATH" in environ.keys():
                del environ["LD_LIBRARY_PATH"]
            # Remove any stale cookie jar; best-effort.
            try:
                unlink("/tmp/cookies.txt")
            except:
                pass
            # curl fetches the encrypted stream; tivodecode consumes it
            # from curl's stdout and writes the decoded file.
            curlp = Popen([curl, url, "--digest", "-s", "-u", "tivo:"+getMyMAC(), "-c", "/tmp/cookies.txt"], stdout=PIPE)
            tivodecode = Popen([getTvd(), "-m", getMyMAC(), "-o", fileName, "-"], stdin=curlp.stdout)
            GL_CURL_PID = curlp.pid
            # Wait two seconds for it to get going and then issue a update for the TiVo folder
            sleep(2)
            UpdateTTGFolder()
            tivodecode.wait()
            # tivodecode is done; make sure curl is torn down too.
            kill(curlp.pid, SIGTERM)
            sleep(1)
        except Exception, e:
            Log("Error in Download Thread: %s" % e)
        # Issue a refresh to the TTG folder
        UpdateTTGFolder()
        DL_QUEUE.popleft()
        Log("Download complete: %s" % fileName)
    # NOTE(review): placement reconstructed — GL_CURL_PID is reset once the
    # queue is drained, marking that no curl process is active.
    GL_CURL_PID = 0
def check_gdal(self): """Check if OSGeo libs work and if GDAL_DATA is well refrenced. Returns: -- 1: GDAL_DATA already exists as environment variable -- 2: GDAL_DATA didn't exist as env variable then has been added -- 3: GDAL_DATA didn't exist as env variable and could'nt be added """ # GDAL install try: try: from osgeo import gdal except ImportError: import gdal logger.info('GDAL version: {}'.format(gdal.__version__)) except: logger.error("GDAL is not installed or not reachable." " DicoGIS is going to close.") return 1 # GDAL_DATA variable if "GDAL_DATA" not in env.keys(): try: gdal.SetConfigOption(str('GDAL_DATA'), str(path.abspath(r'data/gdal'))) logger.info("GDAL_DATA path not found in environment variable." " DicoGIS'll use its own: " + path.abspath(r'data/gdal')) return 2 except: logger.error("Oups! Something's wrong with GDAL_DATA path.") return 3 else: logger.info("GDAL_DATA path found in environment variable: {}." " DicoGIS'll use it.".format(env.get("GDAL_DATA"))) return 4 # end of method return
def postgresify():
    """Return a fully configured Django ``DATABASES`` setting.

    Scans the Heroku environment for PostgreSQL URLs (``DATABASE_URL``,
    the legacy ``SHARED_DATABASE_URL`` and any
    ``HEROKU_POSTGRESQL_<NAME>_URL``) and maps each one to a Django
    database alias.
    """
    databases = {}
    # The canonical DATABASE_URL becomes Django's 'default' alias.
    if environ.get(DEFAULT_URL, ''):
        databases['default'] = config()
    # Legacy shared database, kept only when distinct from the default.
    if environ.get(SHARED_URL, '') and databases.get('default', '') != config(env=SHARED_URL):
        databases['SHARED_DATABASE'] = config(env=SHARED_URL)
    # Scan for HEROKU_POSTGRESQL_<NAME>_URL style variables.
    for key in environ.keys():
        looks_like_pg = (key.startswith('HEROKU_')
                         and 'POSTGRESQL' in key
                         and key.endswith('_URL'))
        if not looks_like_pg:
            continue
        if databases.get('default', '') == config(env=key):
            continue
        # Derive a human-friendly alias by dropping the boilerplate
        # tokens from the variable name (first occurrence of each).
        name_parts = key.split('_')
        for token in ('HEROKU', 'POSTGRESQL', 'URL'):
            name_parts.remove(token)
        databases['_'.join(name_parts)] = config(env=key)
    return databases
def make_postactivate_text(site_url):
    """Compose the postactivate hook script for *site_url*'s virtualenv.

    Returns a ``(script_text, settings)`` tuple: the script body and the
    dictionary of settings it exports.
    """
    # Pull in any variables already exported with the project prefix.
    settings = {}
    for key, value in environ.items():
        if not key.startswith(PREFIX):
            continue
        settings[key.replace(PREFIX, "", 1).lower().replace("_", " ")] = value
    slug = site_url.replace(".", "_")
    settings.update(
        {
            "source folder": "%s/%s/source" % (WEBSERVER_ROOT, site_url),
            "site url": site_url,
            "settings module": "%s.%s" % (SETTINGS_PATH, site_url.split(".")[0]),
            "secret key": _make_random_sequence(50),
            "db password": _make_random_sequence(50),
            "db user": slug,
            "db name": slug,
            "user": slug,
        }
    )
    # Per-site overrides win over everything computed above.
    if site_url in OVERRIDES:
        settings.update(OVERRIDES[site_url])
    chunks = [
        "#!/bin/bash\n"
        "# This hook is run after the virtualenv is activated.\n\n"
        "# Environmental varibales for django projects.\n\n"
    ]
    for name in sorted(settings):
        chunks.append('export %s%s="%s"\n' % (
            PREFIX, name.replace(" ", "_").upper(), settings[name]))
    chunks.append(
        "\n"
        'export PYTHONPATH="$DJANGO_SOURCE_FOLDER:$PYTHONPATH:"\n'
        "cd $DJANGO_SOURCE_FOLDER\n"
    )
    return "".join(chunks), settings
# Long description for PyPI comes straight from the README.
with open(path.join(here, 'README.rst'), encoding='utf-8') as f:
    long_description = f.read()
description = 'A free opensource globalplatform library for java card and smartcard.'
# Build configuration, filled in per-platform below.
include_dirs = []
define_macros = []
library_dirs = []
if 'win32' == get_platform():
    gp_include_dir = ''
    gp_library_dir = ''
    import sys
    # Only demand GLOBALPLATFORM_ROOT when actually building/installing,
    # so metadata-only commands still work without it.
    if ('install' in sys.argv) or ('bdist' in sys.argv):
        if 'GLOBALPLATFORM_ROOT' in environ.keys():
            # Headers and libraries both live under the SDK root.
            gp_include_dir = environ['GLOBALPLATFORM_ROOT']
            gp_library_dir = environ['GLOBALPLATFORM_ROOT']
        else:
            print('Please set system environ variable GLOBALPLATFORM_ROOT first!')
            exit(-1)
    include_dirs = [gp_include_dir, ]
    define_macros = [('_WIN32', 1), ('WIN32', 1), ]
    library_dirs = [gp_library_dir, ]
else:
    # Non-Windows platforms rely on the system PC/SC headers.
    include_dirs = ['/usr/include/PCSC']
    define_macros = []
    library_dirs = []
setup(
def init_app(self, app):
    """Seed ``app.config`` from Heroku add-on environment variables.

    Uses ``setdefault`` throughout, so configuration set explicitly by
    the application always wins over values derived from the environment.

    :param app: the Flask application whose ``config`` is populated.
    """
    # app.secret_key
    app.config.setdefault('SECRET_KEY', environ.get('SECRET_KEY'))
    # SQL-Alchemy
    app.config.setdefault('SQLALCHEMY_DATABASE_URI', environ.get('DATABASE_URL'))
    # Sentry
    app.config.setdefault('SENTRY_DSN', environ.get('SENTRY_DSN'))
    # Exceptional
    app.config.setdefault('EXCEPTIONAL_API_KEY', environ.get('EXCEPTIONAL_API_KEY'))
    # Flask-GoogleFed
    app.config.setdefault('GOOGLE_DOMAIN', environ.get('GOOGLE_DOMAIN'))
    # Celery broker: RabbitMQ preferred, RedisCloud as fallback.
    if 'RABBITMQ_URL' in environ:
        app.config.setdefault('BROKER_URL', environ.get('RABBITMQ_URL'))
    elif 'REDISCLOUD_URL' in environ:
        app.config.setdefault('BROKER_URL', environ.get('REDISCLOUD_URL'))
        app.config.setdefault('BROKER_TRANSPORT', environ.get('REDISCLOUD_URL'))
    # Outgoing mail: Mailgun, then SendGrid, then Postmark.
    if 'MAILGUN_SMTP_SERVER' in environ:
        app.config.setdefault('SMTP_SERVER', environ.get('MAILGUN_SMTP_SERVER'))
        app.config.setdefault('SMTP_LOGIN', environ.get('MAILGUN_SMTP_LOGIN'))
        app.config.setdefault('SMTP_PASSWORD', environ.get('MAILGUN_SMTP_PASSWORD'))
        app.config.setdefault('MAIL_SERVER', environ.get('MAILGUN_SMTP_SERVER'))
        app.config.setdefault('MAIL_USERNAME', environ.get('MAILGUN_SMTP_LOGIN'))
        app.config.setdefault('MAIL_PASSWORD', environ.get('MAILGUN_SMTP_PASSWORD'))
        app.config.setdefault('MAIL_USE_TLS', True)
    elif 'SENDGRID_USERNAME' in environ:
        app.config.setdefault('SMTP_SERVER', 'smtp.sendgrid.net')
        app.config.setdefault('SMTP_LOGIN', environ.get('SENDGRID_USERNAME'))
        app.config.setdefault('SMTP_PASSWORD', environ.get('SENDGRID_PASSWORD'))
        app.config.setdefault('MAIL_SERVER', 'smtp.sendgrid.net')
        app.config.setdefault('MAIL_USERNAME', environ.get('SENDGRID_USERNAME'))
        app.config.setdefault('MAIL_PASSWORD', environ.get('SENDGRID_PASSWORD'))
        app.config.setdefault('MAIL_USE_TLS', True)
    elif 'POSTMARK_SMTP_SERVER' in environ:
        # FIX: previously the literal strings 'POSTMARK_SMTP_SERVER' were
        # stored for SMTP_SERVER/MAIL_SERVER instead of the variable's
        # value (compare the Mailgun branch above).
        app.config.setdefault('SMTP_SERVER', environ.get('POSTMARK_SMTP_SERVER'))
        app.config.setdefault('SMTP_LOGIN', environ.get('POSTMARK_API_KEY'))
        app.config.setdefault('SMTP_PASSWORD', environ.get('POSTMARK_API_KEY'))
        app.config.setdefault('MAIL_SERVER', environ.get('POSTMARK_SMTP_SERVER'))
        app.config.setdefault('MAIL_USERNAME', environ.get('POSTMARK_API_KEY'))
        app.config.setdefault('MAIL_PASSWORD', environ.get('POSTMARK_API_KEY'))
        app.config.setdefault('MAIL_USE_TLS', True)
    # Heroku Redis
    redis_url = environ.get('REDIS_URL')
    if redis_url:
        url = urlparse(redis_url)
        app.config.setdefault('REDIS_HOST', url.hostname)
        app.config.setdefault('REDIS_PORT', url.port)
        app.config.setdefault('REDIS_PASSWORD', url.password)
    # Redis To Go
    redis_url = environ.get('REDISTOGO_URL')
    if redis_url:
        url = urlparse(redis_url)
        app.config.setdefault('REDIS_HOST', url.hostname)
        app.config.setdefault('REDIS_PORT', url.port)
        app.config.setdefault('REDIS_PASSWORD', url.password)
    # Mongolab, MongoHQ and mLab MongoHQ: exactly one may be configured.
    mongo_addon_vars = {'MONGOLAB_URI', 'MONGOHQ_URL', 'MONGODB_URI'}
    defined_mongo_addons = set(environ.keys()) & mongo_addon_vars
    if len(defined_mongo_addons) == 1:
        mongo_uri = environ[defined_mongo_addons.pop()]
        url = urlparse(mongo_uri)
        app.config.setdefault('MONGO_URI', mongo_uri)
        app.config.setdefault('MONGODB_USER', url.username)
        app.config.setdefault('MONGODB_USERNAME', url.username)
        app.config.setdefault('MONGODB_PASSWORD', url.password)
        app.config.setdefault('MONGODB_HOST', url.hostname)
        app.config.setdefault('MONGODB_PORT', url.port)
        app.config.setdefault('MONGODB_DB', url.path[1:])
    elif len(defined_mongo_addons) > 1:
        logger.error('Multiple MongoDB addons enabled. Flask-Heroku cannot '
                     'determine which to use.')
    # Cloudant
    cloudant_uri = environ.get('CLOUDANT_URL')
    if cloudant_uri:
        app.config.setdefault('COUCHDB_SERVER', cloudant_uri)
    # Memcachier
    app.config.setdefault('CACHE_MEMCACHED_SERVERS', environ.get('MEMCACHIER_SERVERS'))
    app.config.setdefault('CACHE_MEMCACHED_USERNAME', environ.get('MEMCACHIER_USERNAME'))
    app.config.setdefault('CACHE_MEMCACHED_PASSWORD', environ.get('MEMCACHIER_PASSWORD'))
#!/usr/bin/python #coding=utf-8 import logging,os import sys reload(sys) sys.setdefaultencoding('utf-8') import traceback from os import environ import ks_http ks_http.head() for key in environ.keys(): print key print "<br>" print "-----------<br><br>" print " sys.stdout.encoding =%s"%(sys.stdout.encoding) for name ,value in environ.items(): print "%s:%s"%(name,value) print "<br>" print "-----------<br><br>" for param in os.environ.keys(): print "<b>%20s</b>: %s<br>" % (param, os.environ[param]) print sys.getdefaultencoding()
def envlist() -> list:
    """Return the process environment as a list of ``KEY=VALUE`` strings."""
    return ['{}={}'.format(name, value) for name, value in environ.items()]
from os import environ
# CGI-style dump of the environment (Python 2 print statement).
# NOTE(review): '<\br>' in the format string looks like a typo for a line
# break tag; left untouched because it is runtime output.
for param in environ.keys():
    print "<b>%20s</b>: %s<\br>" % (param,environ[param])
# --- Everything below is captured sample output pasted into the file, ---
# --- not executable code; preserved verbatim.                         ---
REDIRECT_STATUS: 200 VERSIONER_PYTHON_PREFER_32_BIT: no SERVER_SOFTWARE: lighttpd/1.4.31 SCRIPT_NAME: /weight/index.py REQUEST_METHOD: GET SERVER_PROTOCOL: HTTP/1.1 CONTENT_LENGTH: 0 HTTP_ACCEPT_CHARSET: windows-1251,utf-8;q=0.7,*;q=0.3 HTTP_USER_AGENT: Mozilla/5.0 (Macintosh; Intel Mac OS X 10_7_5) AppleWebKit/537.11 (KHTML, like Gecko) Chrome/23.0.1271.64 Safari/537.11<r> HTTP_CONNECTION: keep-alive<r> SERVER_NAME: 127.0.0.1<r> REMOTE_PORT: 52528<r> SERVER_PORT: 8080<r> SERVER_ADDR: 0.0.0.0<r> DOCUMENT_ROOT: /Users/nordmenss/webserver/<r> SCRIPT_FILENAME: /Users/nordmenss/webserver/weight/index.py<r> HTTP_HOST: 127.0.0.1:8080<r> HTTP_CACHE_CONTROL: max-age=0<r> REQUEST_URI: /weight/index.py<r> HTTP_ACCEPT: text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8<r> GATEWAY_INTERFACE: CGI/1.1<r> REMOTE_ADDR: 127.0.0.1<r> HTTP_ACCEPT_LANGUAGE: ru-RU,ru;q=0.8,en-US;q=0.6,en;q=0.4<r> __CF_USER_TEXT_ENCODING: 0x1F5:7:49<r> VERSIONER_PYTHON_VERSION: 2.7<r> HTTP_ACCEPT_ENCODING: gzip,deflate,sdch<r>
def printenv() -> None:
    """Print every environment variable as ``KEY = VALUE``."""
    for name, value in environ.items():
        print(name, '=', value)  # noqa
    return None