def __init__(self):
    """Read server settings from the environment, with safe defaults."""
    self.host = '127.0.0.1'
    # Directory static assets are served from.
    self.base_url = os.getenv('BASE_URL', 'public/')
    self.port = int(os.getenv('PORT', '5000'))
    # Basic-auth credentials; the password falls back to a random 40-char
    # hex token so an unconfigured instance is not left wide open.
    self.username = os.getenv('BASIC_AUTH_USER', 'admin')
    # BUG FIX: bytes.encode('hex') is Python 2 only and raises
    # AttributeError on Python 3; bytes.hex() is the modern equivalent.
    self.password = os.getenv('BASIC_AUTH_PASSWORD', os.urandom(20).hex())
    # Coerce to int so the attribute has a single type whether or not the
    # env var is set (os.getenv returns a str when it is).
    self.self_destruct_delay = int(os.getenv('SELF_DESTRUCT_DELAY', 600))
def forceConnection():
    # Lazily establish the module-global RL-Glue experiment connection.
    # No-op when the connection has already been made.
    global network
    if network == None:
        theSVNVersion=get_svn_codec_version()
        theCodecVersion=get_codec_version()
        # Defaults, overridable through RLGLUE_HOST / RLGLUE_PORT.
        host = Network.kLocalHost
        port = Network.kDefaultPort
        hostString = os.getenv("RLGLUE_HOST")
        portString = os.getenv("RLGLUE_PORT")
        if (hostString != None):
            host = hostString
        try:
            port = int(portString)
        except TypeError:
            # RLGLUE_PORT unset: int(None) raises TypeError -> keep default.
            port = Network.kDefaultPort
        print "RL-Glue Python Experiment Codec Version: "+theCodecVersion+" (Build "+theSVNVersion+")"
        print "\tConnecting to " + host + " on port " + str(port) + "..."
        sys.stdout.flush()
        network = Network.Network()
        network.connect(host,port)
        # Handshake: announce this client as an experiment connection
        # (payload length 0).
        network.clearSendBuffer()
        network.putInt(Network.kExperimentConnection)
        network.putInt(0)
        network.send()
def start():
    # Boot the fapws3/evwsgi server: bind address comes from the
    # FAVIEW_IP / FAVIEW_PORT environment variables.
    evwsgi.start(
        os.getenv('FAVIEW_IP', '0.0.0.0'),
        os.getenv('FAVIEW_PORT', '8080'),
    )
    evwsgi.set_base_module(base)
    # Register one static handler per media prefix, served with a long
    # max-age (~1 month) so browsers cache aggressively.
    for local_path, real_path in MEDIA_PREFIX.iteritems():
        media_dir = ServeStatic(
            settings.MEDIA_ROOT + local_path,
            real_path,
            maxage = 2629000,
        )
        evwsgi.wsgi_cb((
            settings.MEDIA_URL + local_path,
            media_dir,
        ))
    def generic(environ, start_response):
        # Catch-all: delegate every other URL to the Django handler.
        res = django_handler.handler(environ, start_response)
        return [res]
    evwsgi.wsgi_cb(('', generic))
    evwsgi.set_debug(0)
    evwsgi.run()
def test_for_unrecognized_values(self):
    """qiime_config has no extra values"""
    error_msg_fragment = (" contains unrecognized values:\n%s\nYou can "
                          "safely remove these values from your QIIME "
                          "config file as they will be ignored by QIIME.")
    # The shipped support_files/qiime_config defines the full set of
    # recognized keys; anything beyond it is flagged.
    qiime_project_dir = get_qiime_project_dir()
    orig_config = parse_qiime_config_file(open(qiime_project_dir +
                                               '/qiime/support_files/qiime_config'))
    # check the env qiime_config
    qiime_config_env_filepath = getenv('QIIME_CONFIG_FP')
    if qiime_config_env_filepath:
        qiime_config_via_env = parse_qiime_config_file(
            open(qiime_config_env_filepath))
        extra_vals = []
        for key in qiime_config_via_env:
            if key not in orig_config:
                extra_vals.append(key)
        if extra_vals:
            self.fail("The QIIME config file set via the QIIME_CONFIG_FP "
                      "environment variable" + error_msg_fragment %
                      ", ".join(extra_vals))
    # check the qiime_config in $HOME/.qiime_config
    home_dir = getenv('HOME')
    if (exists(home_dir + "/.qiime_config")):
        qiime_config_home = parse_qiime_config_file(
            open(home_dir + "/.qiime_config"))
        extra_vals = []
        for key in qiime_config_home:
            if key not in orig_config:
                extra_vals.append(key)
        if extra_vals:
            self.fail("The .qiime_config in your HOME" +
                      error_msg_fragment % ", ".join(extra_vals))
def GetLocalChromePath(path_from_command_line):
    """Return a path/command for a local Chrome, or None if not found.

    An explicit path from the command line always wins; otherwise probe
    the platform's conventional install locations.
    """
    if path_from_command_line:
        return path_from_command_line

    if sys.platform == 'darwin':  # Mac
        chrome_path = (
            '/Applications/Google Chrome.app/Contents/MacOS/Google Chrome')
        if os.path.isfile(chrome_path):
            return chrome_path
    elif sys.platform.startswith('linux'):
        # 'google-chrome --version' exiting 0 means the binary is on PATH.
        found = False
        try:
            with open(os.devnull, 'w') as devnull:
                found = subprocess.call(['google-chrome', '--version'],
                                        stdout=devnull, stderr=devnull) == 0
        except OSError:
            pass
        if found:
            return 'google-chrome'
    elif sys.platform == 'win32':
        search_paths = [os.getenv('PROGRAMFILES(X86)'),
                        os.getenv('PROGRAMFILES'),
                        os.getenv('LOCALAPPDATA')]
        chrome_path = os.path.join('Google', 'Chrome', 'Application', 'chrome.exe')
        for search_path in search_paths:
            # BUG FIX: these env vars can be unset (e.g. PROGRAMFILES(X86)
            # on 32-bit Windows); os.path.join(None, ...) raises TypeError.
            if not search_path:
                continue
            test_path = os.path.join(search_path, chrome_path)
            if os.path.isfile(test_path):
                return test_path
    return None
def __init__(self, *args, **kwargs): """GMS client constructor. The dn will be extracted from the x509 cert and available as a default for user_id in other method calls. certfile -- Path to CADC proxy certificate """ # This client does not support name/password authentication super(GroupsClient, self).__init__(usenetrc=False, *args, **kwargs) # Specific base_url for AC webservice host = os.getenv('AC_WEBSERVICE_HOST', self.host) path = os.getenv('AC_WEBSERVICE_PATH', '/ac') self.base_url = '%s://%s%s' % ('https', host, path) self.logger.info('Base URL ' + self.base_url) # This client will need the user DN self.current_user_dn = self.get_current_user_dn() # Specialized exceptions handled by this client self._HTTP_STATUS_CODE_EXCEPTIONS[404] = { "User": exceptions.UserNotFoundException(), "Group": exceptions.GroupNotFoundException() } self._HTTP_STATUS_CODE_EXCEPTIONS[409] = \ exceptions.GroupExistsException()
def __init__(self, context=None):
    """Set up the TextMate editor proxy and sync zen's newline setting."""
    # Buffer holding the editor's content.
    self._content = ''
    # AppleScript used to talk to the OS X pasteboard, shipped with the bundle.
    support_dir = os.getenv('TM_BUNDLE_SUPPORT')
    self.apple_script = os.path.join(support_dir, 'pasteboard.scpt')
    # Respect TextMate's configured line ending, defaulting to zen's own.
    newline = os.getenv('TM_LINE_ENDING', zen.get_newline())
    zen.set_newline(newline)
    self.set_context(context)
def updateTask(self, status, last_error):
    """Finalize the current task row (keyed by $G_AT_UUID): record the DUT
    versions, the final status, and the end time via emitSQL."""
    task_uid = os.getenv('G_AT_UUID', '')
    update_fmt = "UPDATE tasks SET %s WHERE TUID='%s'; "
    cols = {
        'DutVersion': "'%s'" % os.getenv('U_DUT_FW_VERSION', 'UNKNOWN'),
        'DutLibVersion': "'%s'" % os.getenv('U_DUT_SW_VERSION', 'UNKNOWN'),
        'Status': '',
        #'StartTime' : 'Now()',
        'EndTime': '',
        #'Duration' : '0',
        #'LastError' : '',
    }
    record = deepcopy(cols)
    record['Status'] = "'%s'" % str(status)
    record['EndTime'] = 'Now()'
    #record['LastError'] = "'%s'"% pformat(last_error)
    # Render each column as `col`=value; empty placeholders become ''.
    assignments = []
    for column, value in record.items():
        if not value:
            value = ("'" + str(value) + "'")
        assignments.append("`%s`=%s" % (column, value))
    sql = update_fmt % (','.join(assignments), task_uid)
    self.emitSQL(sql)
    return
def __init__(self):
    """Choose a power-management adapter based on the desktop session."""
    # Attempt to detect Desktop Session Type
    session = os.getenv('DESKTOP_SESSION', '').lower()
    # see https://askubuntu.com/questions/72549/how-to-determine-which-window-manager-is-running
    xdg_session = os.getenv('XDG_CURRENT_DESKTOP', '').lower()
    # Attempt to find an adaptor that works
    if 'gnome' in session or 'gnome' in xdg_session:
        self.adapter = GnomeAdapter()
    elif 'kde' in session or 'kde' in xdg_session:
        try:
            self.adapter = PowerManagerAdapter()
        except EnvironmentError:
            # Fall back to powerdevil
            self.adapter = KdeAdapter()
    elif 'xfce' in session or 'xfce' in xdg_session:
        self.adapter = XfceAdapter()
    # TODO implement for LXDE, X-Cinnamon, Unity; systemd-inhibit
    elif not session and not xdg_session:
        # BUG FIX: the original tested `session is ''`, which compares object
        # identity, not equality, and is unreliable for strings.
        logger.warning('Could not detect Desktop Session, will try default \
Power Manager then Gnome')
        try:
            self.adapter = PowerManagerAdapter()
        except EnvironmentError:
            # Fall back to Gnome power manager
            self.adapter = GnomeAdapter()
    else:
        raise NotImplementedError(xdg_session)
def _candidate_tempdir_list():
    """Generate a list of candidate temporary directories which
    _get_default_tempdir will try."""
    candidates = []

    # Environment variables take priority.
    for var in ('TMPDIR', 'TEMP', 'TMP'):
        value = _os.getenv(var)
        if value:
            candidates.append(value)

    # Failing that, OS-specific locations.
    if _os.name == 'mac':
        try:
            fsr = _Folder.FSFindFolder(_Folders.kOnSystemDisk,
                                       _Folders.kTemporaryFolderType, 1)
            candidates.append(fsr.as_pathname())
        except _Folder.error:
            pass
    elif _os.name == 'riscos':
        value = _os.getenv('Wimp$ScrapDir')
        if value:
            candidates.append(value)
    elif _os.name == 'nt':
        candidates.extend([r'c:\temp', r'c:\tmp', r'\temp', r'\tmp'])
    else:
        candidates.extend(['/tmp', '/var/tmp', '/usr/tmp'])

    # As a last resort, the current directory (or '.' if even getcwd fails).
    try:
        candidates.append(_os.getcwd())
    except (AttributeError, _os.error):
        candidates.append(_os.curdir)

    return candidates
def _get_connection():
    """Open an S3 connection using AWS credentials from the environment.

    Raises ValueError when AWS_ACCESS_KEY or AWS_SECRET_KEY is missing.
    """
    access_key = os.getenv('AWS_ACCESS_KEY')
    secret_key = os.getenv('AWS_SECRET_KEY')
    if not (access_key and secret_key):
        raise ValueError("AWS access keys not defined in environment.")
    return boto.connect_s3(aws_access_key_id=access_key,
                           aws_secret_access_key=secret_key)
def get_win_env(key):
    """Returns a windows environment variable decoded to unicode.

    The default encoding is tried first; when the raw value does not decode,
    fall back to the filesystem encoding.
    """
    raw_value = os.getenv(key)
    try:
        return ensure_unicode(raw_value, DEFAULT_ENCODING)
    except UnicodeDecodeError:
        return ensure_unicode(raw_value, FS_ENCODING)
def run_as_user():
    """Build subprocess kwargs that drop privileges to the sudo'ing user.

    Returns {} when not applicable (Windows, or not running under sudo);
    otherwise a dict with 'preexec_fn' and a sanitized 'env' for Popen.
    """
    if platform.system() == 'Windows' or not os.getenv('SUDO_USER'):
        LOGGER.debug('No need to change the user')
        return {}
    pw = pwd.getpwnam(os.getenv('SUDO_USER'))
    LOGGER.debug('Providing the parameters to run the command as {}'.format(
        pw.pw_name))
    env = os.environ.copy()
    # BUG FIX: iterate over a snapshot -- deleting from a dict while
    # iterating its live .keys() view raises RuntimeError on Python 3.
    for k in list(env.keys()):
        if k.startswith('SUDO_'):
            del env[k]
    env['HOME'] = pw.pw_dir
    env['LOGNAME'] = env['USER'] = env['USERNAME'] = pw.pw_name

    def demote():
        # Order matters: gid must be dropped while we still have privileges.
        os.setgid(pw.pw_gid)
        os.setuid(pw.pw_uid)

    return {
        'preexec_fn': demote,
        'env': env,
    }
def setUp(self):
    # Log the OSSIE install root so failures can be traced to the
    # environment the domain was launched from.
    print "-----------------------------------------"
    print os.getenv('OSSIEHOME')
    print "-----------------------------------------"
    # Boot a DomainManager and a DeviceManager for the test node
    # (debug=9 is the most verbose level).
    domBooter, self._domMgr = self.launchDomainManager(debug=9)
    devBooter, self._devMgr = self.launchDeviceManager("/nodes/test_BasicTestDevice_node/DeviceManager.dcd.xml", debug=9)
    # No application launched yet; individual tests create their own.
    self._app = None
def load_tutorial(self):
    """Prompt the user for a tutorial manifest.json and load it."""
    # Windows exposes the home directory as USERPROFILE, POSIX as HOME.
    home_dir = os.getenv('USERPROFILE') or os.getenv('HOME')
    # Fixed typo in the French dialog title: "Ouvir" -> "Ouvrir".
    path, _ = QtGui.QFileDialog.getOpenFileName(
        self, 'Ouvrir un tutoriel (fichier manifest.json)', home_dir,
        'manifest.json')
    self._load_tutorial_from_path(path)
def set_proxy():
    # Propagate $PROXY (with optional $PROXY_USER/$PROXY_PASSWORD
    # credentials) into the OpenShift config and the docker daemon's
    # systemd drop-in, then restart docker.
    proxy = os.getenv('PROXY', '')
    if proxy:
        interface_ip_list = get_all_interface_ip()
        predefine_no_proxy_list = ".xip.io,172.30.0.0/16,172.17.0.0/16,%s" % socket.gethostname()
        # Credentials are URL-quoted so special characters survive in the URL.
        proxy_user = quote_plus(os.getenv('PROXY_USER',''))
        if proxy_user:
            proxy_password = quote_plus(os.getenv('PROXY_PASSWORD',''))
            http_proxy_url = "http://%s:%s@%s" % (proxy_user, proxy_password, proxy)
            https_proxy_url = "https://%s:%s@%s" % (proxy_user, proxy_password, proxy)
        else:
            http_proxy_url = "http://%s" % proxy
            https_proxy_url = "https://%s" % proxy
        # openshift proxy setup
        # NOTE(review): HTTPS_PROXY is set from http_proxy_url (so
        # https_proxy_url is never used), and NO_PROXY joins the two lists
        # with "%s%s" (no comma between them) -- confirm
        # get_all_interface_ip() ends with a separator.
        if system(('sed -i -e "/^#HTTP_PROXY=*/cHTTP_PROXY=%s"'
                   ' -e "/^#HTTPS_PROXY=*/cHTTPS_PROXY=%s"'
                   ' -e "/^#NO_PROXY=*/cNO_PROXY=%s%s"'
                   ' %s') % (http_proxy_url, http_proxy_url, interface_ip_list,
                             predefine_no_proxy_list, OPENSHIFT_OPTION))[2]:
            return ("Permisison denined: %s" % OPENSHIFT_OPTION)
        # docker daemon proxy setup
        if not os.path.isdir('/etc/systemd/system/docker.service.d'):
            subprocess.call("mkdir /etc/systemd/system/docker.service.d", shell=True)
        env_file_content = ('[Service]\n'
                            'Environment="HTTP_PROXY=%s" "NO_PROXY=localhost,127.0.0.1,::1,.xip.io"\n') \
            % (http_proxy_url)
        try:
            with open('/etc/systemd/system/docker.service.d/http-proxy.conf', 'w') as fh:
                fh.write(env_file_content)
            subprocess.call('systemctl daemon-reload', shell=True)
            # Return value is docker's restart exit code on success.
            return subprocess.call('systemctl restart docker', shell=True)
        except IOError as err:
            return err
def listen():
    # Log in to WhatsApp with credentials taken from the environment.
    app.logger.info('Locked and ready to go')
    login = os.getenv('LOGIN')
    password = os.getenv('PASSWORD')
    # PASSWORD is stored base64-encoded; decode it to the raw secret.
    # NOTE(review): raises AttributeError when PASSWORD is unset -- confirm
    # the deployment always provides it.
    password = base64.b64decode(bytes(password.encode('utf-8')))
    app.logger.info("Login: %s" %login)
    app.whatsapp.login(login, password)
def db_init(self):
    # Pre-fill the connection dialogs from ini files saved by a previous
    # session, and adjust UI labels for the selected backend.
    # account.ini layout: line 0 = user, line 1 = password.
    if os.path.isfile(os.getenv("HOME") + '/.qgis2/python/plugins/Cxf_in/account.ini'):
        with open(os.getenv("HOME") + '/.qgis2/python/plugins/Cxf_in/account.ini', "r") as lines:
            line = lines.read().splitlines()
            self.ui.user.setText(line[0])
            self.ui.password.setText(line[1])
    # db_conf.ini layout: server, database, user, password, backend flag
    # ("0" = SpatiaLite, anything else = PostGIS).
    if os.path.isfile(os.getenv("HOME") + '/.qgis2/python/plugins/Cxf_in/db_conf.ini'):
        with open(os.getenv("HOME") + '/.qgis2/python/plugins/Cxf_in/db_conf.ini', "r") as lines:
            line = lines.read().splitlines()
            self.ui.server.setText(line[0])
            self.ui.database.setText (line[1])
            self.ui.user_2.setText (line[2])
            self.ui.passwd.setText(line[3])
            if line[4]=="0":
                self.ui.postgis.setChecked(False)
                self.ui.toolBox.setItemText(0, "Visualizzazione Fogli SpatiaLite")
                self.ui.toolBox.setItemText(3, "Carica CXF su DB SpatiaLite")
                self.ui.cxfspatialite.setText("Importa files CXF nel DB SpatiaLite")
                self.ui.dbreset.setText("Svuota intero DB SpatiaLite")
            else:
                self.ui.postgis.setChecked(True)
                self.ui.toolBox.setItemText(0, "Visualizzazione Fogli PostGis")
                self.ui.toolBox.setItemText(3, "Carica CXF su DB PostGis")
                self.ui.cxfspatialite.setText("Importa files CXF nel DB PostGis")
                self.ui.dbreset.setText("Svuota intero DB PostGis")
            # Redundant: the with-statement already closes the file.
            lines.close()
def load(self):
    # Fetch the list of OpenML dataset ids matching self.query (cached as
    # a pickle under $DATA_PATH/openml), then download each dataset's ARFF
    # file when not already present locally.
    ids_filename = os.path.join(os.getenv("DATA_PATH"), "openml", "ids.pkl")
    if not os.path.exists(ids_filename):
        url = "http://www.openml.org/api_query/?{0}".format(urllib.urlencode({"q": self.query}))
        result = get_result_as_dict(url)
        f = open(ids_filename, "w")
        pickle.dump(result, f)
    else:
        result = pickle.load(open(ids_filename, "r"))
    # result["data"] rows have the dataset id in column 0.
    ds_ids = [int(r[0]) for r in result["data"]]
    for ds_id in ds_ids:
        if self.verbose:
            print("retrieving {0}...".format(ds_id))
        url_desc = "http://www.openml.org/d/{0}/json".format(ds_id)
        u = urllib.urlopen(url_desc)
        desc = json.load(u)
        u.close()
        # Only ARFF-format datasets are supported; skip anything else.
        if "arff" not in desc["url"]:
            if self.verbose:
                print("skipping {0}...".format(ds_id))
            continue
        filename = os.path.join(os.getenv("DATA_PATH"), "openml", "{0}.arff".format(ds_id))
        if not os.path.exists(filename):
            urllib.urlretrieve(desc["url"], filename)
def connect(self,cookie=None):
    # Open a connection to self.server using the configured auth scheme
    # (self.id): 'cert' = grid certificate, 'sso' = CERN SSO cookie via
    # pycurl, anything else = plain unauthenticated HTTP.
    if self.id=='cert':
        # Client cert and key both come from the X509 proxy file.
        self.__http = httplib.HTTPSConnection(self.server,
                                              cert_file = os.getenv('X509_USER_PROXY'),
                                              key_file = os.getenv('X509_USER_PROXY'))
    elif self.id=='sso':
        if cookie:
            self.cookieFilename = cookie
        else:
            # Pick a cookie file matching the target instance (dev/int/prod).
            if '-dev' in self.server:
                self.cookieFilename = '%s/private/dev-cookie.txt'%(os.getenv('HOME'))
            elif '-int' in self.server:
                self.cookieFilename = '%s/private/int-cookie.txt'%(os.getenv('HOME'))
            else:
                self.cookieFilename = '%s/private/prod-cookie.txt'%(os.getenv('HOME'))
            if not os.path.isfile(self.cookieFilename):
                # Try to mint a cookie with Kerberos credentials; bail out
                # of the whole process if that fails too.
                print "The required sso cookie file is absent. Trying to make one for you"
                os.system('cern-get-sso-cookie -u https://%s -o %s --krb'%( self.server, self.cookieFilename))
                if not os.path.isfile(self.cookieFilename):
                    print "The required sso cookie file cannot be made."
                    sys.exit(1)
        self.curl = pycurl.Curl()
        print "Using sso-cookie file",self.cookieFilename
        self.curl.setopt(pycurl.COOKIEFILE,self.cookieFilename)
        # Responses accumulate in an in-memory buffer.
        self.output = cStringIO.StringIO()
        self.curl.setopt(pycurl.SSL_VERIFYPEER, 1)
        self.curl.setopt(pycurl.SSL_VERIFYHOST, 2)
        self.curl.setopt(pycurl.CAPATH, '/etc/pki/tls/certs')
        self.curl.setopt(pycurl.WRITEFUNCTION, self.output.write)
    else:
        # Plain HTTP, no authentication.
        self.__http = httplib.HTTPConnection(self.server)
def get_channel_urls(platform=None):
    """When $CIO_TEST is set, return normalized internal test-filer channel
    URLs; CIO_TEST == '2' additionally puts the test-pkgs channel first."""
    cio_test = os.getenv('CIO_TEST')
    if cio_test:
        base_urls = ['http://filer/pkgs/pro', 'http://filer/pkgs/free']
        if cio_test.strip() == '2':
            base_urls.insert(0, 'http://filer/test-pkgs')
        return normalize_urls(base_urls, platform=platform)
def test_setvar(self):
    """Test setvar function."""
    # Normal mode: variable is set, nothing is printed.
    self.mock_stdout(True)
    env.setvar('FOO', 'bar')
    txt = self.get_stdout()
    self.mock_stdout(False)
    self.assertEqual(os.getenv('FOO'), 'bar')
    self.assertEqual(os.environ['FOO'], 'bar')
    # no printing if dry run is not enabled
    self.assertEqual(txt, '')

    # Extended dry run: setvar must still set the variable AND echo the
    # export statement.
    build_options = {
        'extended_dry_run': True,
        'silent': False,
    }
    init_config(build_options=build_options)
    self.mock_stdout(True)
    env.setvar('FOO', 'foobaz')
    txt = self.get_stdout()
    self.mock_stdout(False)
    self.assertEqual(os.getenv('FOO'), 'foobaz')
    self.assertEqual(os.environ['FOO'], 'foobaz')
    self.assertEqual(txt, " export FOO=\"foobaz\"\n")

    # disabling verbose suppresses the echo even in dry-run mode
    self.mock_stdout(True)
    env.setvar('FOO', 'barfoo', verbose=False)
    txt = self.get_stdout()
    self.mock_stdout(False)
    self.assertEqual(os.getenv('FOO'), 'barfoo')
    self.assertEqual(os.environ['FOO'], 'barfoo')
    self.assertEqual(txt, '')
def db_view_change_style():
    # Read the style directory from the plugin's setting.sty ("att:" line);
    # fall back to the bundled QML_Default styles when the configured path
    # does not exist, then apply the matching .qml to each cadastral layer.
    in_file =open(os.getenv("HOME")+'/.qgis2/python/plugins/Cxf_in/setting.sty', 'r')
    for line in in_file:
        if line[:4]=="att:":
            # Value is everything after the second ':', newline stripped.
            dirstili=line.split (":",2)[2].replace("\n","")
    if(os.path.exists(dirstili) ==False):
        dirstili= os.getenv("HOME")+'/.qgis2/python/plugins/Cxf_in/QML_Default'
    in_file.close()
    layers = iface.legendInterface().layers()
    for layer in layers:
        if layer.name() == "Particelle":
            layer.loadNamedStyle(dirstili+'/part.qml')
        elif layer.name() == "Fabbricati":
            layer.loadNamedStyle(dirstili+'/fab.qml')
        elif layer.name() == "Strade":
            layer.loadNamedStyle(dirstili+'/strade.qml')
        elif layer.name() == "Confine":
            layer.loadNamedStyle(dirstili+'/conf.qml')
        elif layer.name() == "Acque":
            layer.loadNamedStyle(dirstili+'/acque.qml')
        elif layer.name() == "Linee":
            layer.loadNamedStyle(dirstili+'/linee.qml')
        elif layer.name() == "Simboli":
            layer.loadNamedStyle(dirstili+'/Simboli.qml')
        elif layer.name() == "Testi":
            layer.loadNamedStyle(dirstili+'/testo.qml')
        elif layer.name() == "Fiduciali":
            layer.loadNamedStyle(dirstili+'/fidu.qml')
    iface.mapCanvas().refresh()
def test_context_properties():
    """Test setting context properties"""
    a = default_app
    a.use()
    if a.backend_name.lower() == 'pyglet':
        return  # cannot set more than once on Pyglet
    # stereo, double buffer won't work on every sys
    contexts = [dict(samples=4), dict(stencil_size=8),
                dict(samples=4, stencil_size=8)]
    if a.backend_name.lower() != 'glfw':  # glfw *always* double-buffers
        contexts.append(dict(double_buffer=False, samples=4))
        contexts.append(dict(double_buffer=False))
    else:
        # glfw must reject a request to disable double buffering.
        assert_raises(RuntimeError, Canvas, app=a,
                      context=dict(double_buffer=False))
    if a.backend_name.lower() == 'sdl2' and os.getenv('TRAVIS') == 'true':
        raise SkipTest('Travis SDL cannot set context')
    for context in contexts:
        n_items = len(context)
        with Canvas(context=context):
            if os.getenv('TRAVIS', 'false') == 'true':
                # Travis cannot handle obtaining these values
                props = context
            else:
                props = get_gl_configuration()
            # The requested dict must not have been mutated, and every
            # requested property must be reflected by the GL configuration.
            assert_equal(len(context), n_items)
            for key, val in context.items():
                assert_equal(val, props[key], key)
    # Invalid context specs must be rejected with precise exception types.
    assert_raises(TypeError, Canvas, context='foo')
    assert_raises(KeyError, Canvas, context=dict(foo=True))
    assert_raises(TypeError, Canvas, context=dict(double_buffer='foo'))
def do_edit(given_cl, current_cl, cl_file_path):
    # Merge a change description into current_cl and persist it to
    # cl_file_path. When no CL was given on the command line, open
    # $VISUAL/$EDITOR (default vi) on a temp file and parse the result.
    if given_cl.is_unspecified():
        # Show an editor if CL not specified on the command-line
        tmp_fd, tmp_path = tempfile.mkstemp(prefix='appspot-', suffix='.txt')
        os.write(tmp_fd, editable_change(current_cl))
        os.close(tmp_fd)
        retcode = subprocess.call(
            '%s %s' % (os.getenv('VISUAL', os.getenv('EDITOR', 'vi')),
                       commands.mkarg(tmp_path)),
            shell=True)
        try:
            # Negative return codes mean the editor died on a signal.
            if retcode < 0:
                raise Exception('editor closed with signal %s' % -retcode)
            elif retcode > 0:
                raise Exception('editor exited with error value %s' % retcode)
            edited_cl = parse_change(open(tmp_path).read())
        finally:
            # Always remove the temp file, even when the editor failed.
            os.remove(tmp_path)
        if edited_cl.is_unspecified():
            print >>sys.stderr, 'cancelled edit'
            return
        edited_cl.merge_into(current_cl)
    else:
        given_cl.merge_into(current_cl)
    out = open(cl_file_path, 'w')
    out.write(editable_change(current_cl))
    out.close()
def loadEnvironment(theEnvironment):
    """Connect an environment to rl_glue over the network and run its
    event loop until the connection is closed."""
    build_version = get_svn_codec_version()
    codec_version = get_codec_version()
    client = ClientEnvironment(theEnvironment)

    # Connection target defaults, overridable via RLGLUE_HOST / RLGLUE_PORT.
    host = Network.kLocalHost
    port = Network.kDefaultPort
    host_override = os.getenv("RLGLUE_HOST")
    port_override = os.getenv("RLGLUE_PORT")
    if host_override is not None:
        host = host_override
    try:
        port = int(port_override)
    except TypeError:
        # RLGLUE_PORT unset: int(None) raises TypeError -> keep the default.
        port = Network.kDefaultPort

    print("RL-Glue Python Environment Codec Version: " + codec_version + " (Build " + build_version + ")")
    print("\tConnecting to " + host + " on port " + str(port) + "...")
    sys.stdout.flush()

    client.connect(host, port, Network.kRetryTimeout)
    print("\t Environment Codec Connected")
    client.runEnvironmentEventLoop()
    client.close()
def config_files_from_theanorc():
    """Return candidate .theanorc paths from $THEANORC (os.pathsep-separated,
    default ~/.theanorc).

    On Windows, when THEANORC is unset, ~/.theanorc.txt is also tried so the
    file can be created and opened with a normal editor.
    """
    raw = os.getenv('THEANORC', '~/.theanorc')
    paths = []
    for entry in raw.split(os.pathsep):
        paths.append(os.path.expanduser(entry))
    if sys.platform == "win32" and os.getenv('THEANORC') is None:
        paths.append(os.path.expanduser('~/.theanorc.txt'))
    return paths
def __init__(self):
    """Create app/plugin/config directories per the XDG base-dir spec and
    load the ini config, seeding defaults on first run."""
    # http://standards.freedesktop.org/basedir-spec/latest/ar01s03.html
    self.app_dir = join(getenv('XDG_DATA_HOME', expanduser('~/.local/share')), appname)
    if not isdir(self.app_dir):
        makedirs(self.app_dir)

    self.plugin_dir = join(self.app_dir, 'plugins')
    if not isdir(self.plugin_dir):
        mkdir(self.plugin_dir)

    self.home = expanduser('~')
    # Resources live next to this module.
    self.respath = dirname(__file__)

    self.filename = join(getenv('XDG_CONFIG_HOME', expanduser('~/.config')), appname, '%s.ini' % appname)
    if not isdir(dirname(self.filename)):
        makedirs(dirname(self.filename))

    self.config = RawConfigParser()
    try:
        self.config.readfp(codecs.open(self.filename, 'r', 'utf-8'))
    except Exception:
        # BUG FIX: was a bare `except:`, which would also swallow
        # KeyboardInterrupt/SystemExit. Missing or unreadable config file:
        # start from an empty 'config' section.
        self.config.add_section('config')

    # Ensure a usable output directory default.
    if not self.get('outdir') or not isdir(self.get('outdir')):
        self.set('outdir', expanduser('~'))
def test_ampliconnoise_install(self):
    """ AmpliconNoise install looks sane."""
    url = "http://qiime.org/install/install.html#ampliconnoise-install-notes"
    # Both lookup-table env vars must be set and point at existing files.
    pyro_lookup_file = getenv('PYRO_LOOKUP_FILE')
    self.assertTrue(pyro_lookup_file is not None,
                    "$PYRO_LOOKUP_FILE variable is not set. See %s for help." % url)
    self.assertTrue(exists(pyro_lookup_file),
                    "$PYRO_LOOKUP_FILE variable is not set to an existing filepath.")
    seq_lookup_file = getenv('SEQ_LOOKUP_FILE')
    self.assertTrue(seq_lookup_file is not None,
                    "$SEQ_LOOKUP_FILE variable is not set. See %s for help." % url)
    self.assertTrue(exists(seq_lookup_file),
                    "$SEQ_LOOKUP_FILE variable is not set to an existing filepath.")
    # The AmpliconNoise script and binaries must be discoverable on $PATH.
    self.assertTrue(which("SplitKeys.pl"),
                    "Couldn't find SplitKeys.pl. " +
                    "Perhaps AmpliconNoise Scripts directory isn't in $PATH?" +
                    " See %s for help." % url)
    self.assertTrue(which("FCluster"),
                    "Couldn't find FCluster. " +
                    "Perhaps the AmpliconNoise bin directory isn't in $PATH?" +
                    " See %s for help." % url)
    self.assertTrue(which("Perseus"),
                    "Couldn't find Perseus. " +
                    "Perhaps the AmpliconNoise bin directory isn't in $PATH?" +
                    " See %s for help." % url)
def add_parser_arguments(cls, add):
    """Register the DirectAdmin CLI options, each defaulting from the
    corresponding DA_* environment variable."""
    option_specs = (
        ("server", 'DA_SERVER',
         "DirectAdmin server (can include port, standard 2222, include http:// if the DA server does not support SSL)"),
        ("username", 'DA_USERNAME', "DirectAdmin username"),
        ("login-key", 'DA_LOGIN_KEY', "DirectAdmin login key"),
    )
    for option_name, env_var, help_text in option_specs:
        add(option_name, default=os.getenv(env_var), help=help_text)
""" import os from dotenv import find_dotenv, load_dotenv from django.core.management.utils import get_random_secret_key load_dotenv(find_dotenv()) # Build paths inside the project like this: os.path.join(BASE_DIR, ...) BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) # Quick-start development settings - unsuitable for production # See https://docs.djangoproject.com/en/3.0/howto/deployment/checklist/ # SECURITY WARNING: keep the secret key used in production secret! SECRET_KEY = os.getenv('SECRET_KEY') if SECRET_KEY is None: SECRET_KEY = get_random_secret_key() with open('.env', 'a+') as envfile: envfile.write(f'SECRET_KEY="{SECRET_KEY}"\n') # SECURITY WARNING: don't run with debug turned on in production! DEBUG = True # testserver for view pytest ALLOWED_HOSTS = ['0.0.0.0', 'localhost', '127.0.0.1', 'testserver'] # Application definition INSTALLED_APPS = [ # pre_installed 'django.contrib.admin',
def mock_asynchttpclient(request):
    """mock AsyncHTTPClient for recording responses"""
    AsyncHTTPClient.configure(MockAsyncHTTPClient)
    # Without a real GitHub token, replay the canned API responses.
    token = os.getenv('GITHUB_ACCESS_TOKEN')
    if not token:
        load_mock_responses('api.github.com')
import os
import json
import requests
from flask import Flask, session, render_template, request, redirect, url_for
from flask import jsonify, make_response
from flask_session import Session
from sqlalchemy import create_engine
from sqlalchemy.orm import scoped_session, sessionmaker
from werkzeug.security import generate_password_hash, check_password_hash

app = Flask(__name__)

# Check for environment variable
# Fail fast at startup when any required configuration is missing.
if not os.getenv("DATABASE_URL"):
    raise RuntimeError("DATABASE_URL is not set")
if not os.getenv("SECRET_KEY"):
    raise RuntimeError("SECRET_KEY is not set")
if not os.getenv("GOODREADS_KEY"):
    raise RuntimeError("GOODREADS_KEY is not set")

# API key used for Goodreads review lookups.
goodreads_key = os.getenv("GOODREADS_KEY")

# Configure session to use filesystem
app.config["SESSION_PERMANENT"] = False
app.config["SESSION_TYPE"] = "filesystem"
Session(app)

# Set up database
engine = create_engine(os.getenv("DATABASE_URL"))
from flask import Flask
from flask_pymongo import PyMongo
from pymongo import MongoClient
import os

app = Flask(__name__)

# NOTE(review): the fallback MONGO_URI below was credential-scrubbed
# ("*****:*****") and the redaction swallowed the rest of the URI string
# plus the @app.route("/") decorator that followed it -- this line is not
# valid Python as-is; restore it from version control before use.
app.config["MONGO_URI"] = os.getenv("MONGO_URI") or "mongodb://*****:*****@app.route("/")
def connect_mongo():
    # Report the Mongo client connection and dump the collection's documents.
    out = f"Connection: {str(mongo.cx)},"
    for obj in col.find():
        out += f" one object from collection: {obj}"
    return out

# Module-level counter used to make each inserted document unique.
i = 42

@app.route("/test")
def test():
    global i
    db.SomeCollection.insert_one({"name": f"Anton{i}"})
    i += 1
from flask import Flask,render_template,request,session, flash, escape, request, redirect, url_for
from sqlalchemy import create_engine
from sqlalchemy.orm import scoped_session, sessionmaker
from models import *
from os import urandom
import re
import os
from datetime import datetime

app = Flask(__name__)
app.config["SQLALCHEMY_DATABASE_URI"] = os.getenv('DATABASE_URL')
app.config["SQLALCHEMY_TRACK_MODIFICATIONS"]= False
db.init_app(app)
# NOTE(review): a random secret key per process invalidates all sessions on
# every restart -- confirm this is intended.
app.secret_key = urandom(24)

@app.route("/")
def index():
    # Greet a logged-in user by name; otherwise show the index page.
    users = User.query.all()
    if 'user_id' in session :
        for user in users:
            if session['user_id'] == user.user_id :
                return render_template("success.html", message="Hello", name = user.user_name)
    else :
        return render_template("index.html",users=users, message="")

@app.route("/login", methods=["POST"])
def login():
    """ Login to Form """
    # NOTE(review): this function continues beyond this chunk of the file.
    if request.method == 'POST' :
        #Get form information.
def __init__(self, client):
    """Worker thread bound to *client*, holding its own MonetDB connection
    configured from the MAPIPORT / MAPIHOST / TSTDB environment variables."""
    threading.Thread.__init__ (self)
    self.client = client
    db_port = int(os.getenv('MAPIPORT'))
    db_host = os.getenv('MAPIHOST')
    db_name = os.getenv('TSTDB')
    self.dbh = pymonetdb.connect(port=db_port, hostname=db_host, database=db_name)
FILE = Path(__file__).resolve()
ROOT = FILE.parents[3]  # YOLOv5 root directory
if str(ROOT) not in sys.path:
    sys.path.append(str(ROOT))  # add ROOT to PATH

from utils.datasets import LoadImagesAndLabels, img2label_paths
from utils.general import LOGGER, check_dataset, check_file

try:
    import wandb

    assert hasattr(wandb, '__version__')  # verify package import not local dir
except (ImportError, AssertionError):
    # wandb is optional; downstream code must handle it being None.
    wandb = None

# Distributed rank; -1 means not running under torch.distributed.
RANK = int(os.getenv('RANK', -1))
WANDB_ARTIFACT_PREFIX = 'wandb-artifact://'


def remove_prefix(from_string, prefix=WANDB_ARTIFACT_PREFIX):
    # Strip the artifact prefix from a path-like string.
    # NOTE(review): assumes the prefix is present; no membership check.
    return from_string[len(prefix):]


def check_wandb_config_file(data_config_file):
    # Prefer a '<name>_wandb.<ext>' sibling config when it exists.
    wandb_config = '_wandb.'.join(data_config_file.rsplit('.', 1))  # updated data.yaml path
    if Path(wandb_config).is_file():
        return wandb_config
    return data_config_file


# NOTE(review): this definition continues beyond this chunk of the file.
def check_wandb_dataset(data_file):
# NOTE(review): this chunk begins mid-way through parse_po(); the lines
# below belong to its per-line scanning loop (msgid/msgstr accumulation).
            msgstr = [line[7:]]
        elif line.startswith('"'):
            # Continuation lines extend whichever entry is currently open.
            if in_msgid:
                msgid.append(line)
            if in_msgstr:
                msgstr.append(line)
    # Flush a trailing entry left open at end of input.
    if in_msgstr:
        messages.append((msgid, msgstr))
    return messages

files = sys.argv[1:]

# xgettext -n --keyword=_ $FILES
XGETTEXT=os.getenv('XGETTEXT', 'xgettext')
if not XGETTEXT:
    print('Cannot extract strings: xgettext utility is not installed or not configured.',file=sys.stderr)
    print('Please install package "gettext" and re-run \'./configure\'.',file=sys.stderr)
    exit(1)
# Run xgettext over the given files and parse the PO output it emits.
child = Popen([XGETTEXT,'--output=-','-n','--keyword=_'] + files, stdout=PIPE)
(out, err) = child.communicate()
messages = parse_po(out.decode('utf-8'))
f = open(OUT_CPP, 'w')
f.write("""
#include <QtGlobal>
// Automatically generated by extract_strings.py
from iter8_analytics.api.analytics.iter8response import CheckAndIncrementResponse, EpsilonTGreedyResponse, PosteriorBayesianRoutingResponse, OptimisticBayesianRoutingResponse
from iter8_analytics.api.analytics.iter8experiment import CheckAndIncrementExperiment, EpsilonTGreedyExperiment, BayesianRoutingExperiment
import iter8_analytics.constants as constants
import flask_restplus
from flask import request, current_app
from datetime import datetime, timezone, timedelta
import dateutil.parser as parser
import json
import os
import logging
import copy

log = logging.getLogger(__name__)

# Data-capture mode is read once, at import time, from the environment.
DataCapture.data_capture_mode = os.getenv(
    constants.ITER8_DATA_CAPTURE_MODE_ENV)

# Two flask-restplus namespaces: metric analytics and experiment management.
analytics_namespace = api.namespace(
    'analytics', description='Operations to support canary releases and A/B tests')
experiment_namespace = api.namespace(
    'experiment', description='Operations to support canary releases and A/B tests')

#################
# REST API
#################


# NOTE(review): this class definition continues beyond this chunk.
@experiment_namespace.route('/algorithms')
class Algorithms(flask_restplus.Resource):
import os
from dataclasses import dataclass
from typing import List

from dataclasses_jsonschema import JsonSchemaMixin

from arcor2 import package_version

# Base URL of the calibration service; overridable per deployment.
CALIBRATION_URL = os.getenv("ARCOR2_CALIBRATION_URL", "http://localhost:5014")
SERVICE_NAME = "ARCOR2 Calibration Service"


def version() -> str:
    """Return this package's version string."""
    return package_version(__name__)


@dataclass
class Corner(JsonSchemaMixin):
    # A single detected marker corner (2-D coordinates).
    x: float
    y: float


@dataclass
class MarkerCorners(JsonSchemaMixin):
    # A detected marker: its id plus the list of detected corner points.
    marker_id: int
    corners: List[Corner]
"""This module defines functions for handling files and paths.""" import os import sys from os import sep as pathsep from glob import glob as pyglob import pickle as pypickle import zipfile import platform import os.path from os.path import isfile, isdir, join, split, splitext from os.path import getsize, isabs, exists, abspath from shutil import copy, Error as shError PLATFORM = platform.system() USERHOME = os.getenv('USERPROFILE') or os.getenv('HOME') __all__ = [ 'gunzip', 'backupFile', 'openFile', 'openDB', 'openSQLite', 'openURL', 'copyFile', 'isExecutable', 'isReadable', 'isWritable', 'makePath', 'relpath', 'sympath', 'which', 'pickle', 'unpickle', 'glob', 'addext', 'PLATFORM', 'USERHOME' ] major, minor = sys.version_info[:2] if major > 2: import gzip from gzip import GzipFile import io class TextIOWrapper(io.TextIOWrapper):
def ConfigSectionMap(section):
    # Return a dict of every option in *section* of the global Config parser.
    dict1 = {}
    options = Config.options(section)
    for option in options:
        try:
            dict1[option] = Config.get(section, option)
            # NOTE(review): Config.get returns strings, so this -1 comparison
            # can never be true -- appears to be dead debug code.
            if dict1[option] == -1:
                DebugPrint("skip: %s" % option)
        except:
            # NOTE(review): bare except silently maps any failure to None.
            print("exception on %s!" % option)
            dict1[option] = None
    return dict1

# set default vars
defconf = os.getenv("HOME") + "/.zbx.conf"
username = ""
password = ""
api = ""
noverify = ""

# Define commandline arguments
parser = argparse.ArgumentParser(formatter_class=argparse.RawDescriptionHelpFormatter,description='Tries to find a list of hosts in Zabbix matching a search string.', epilog="""
This program can use .ini style configuration files to retrieve the
needed API connection information.
To use this type of storage, create a conf file (the default is
$HOME/.zbx.conf) that contains at least the [Zabbix API] section and any of
the other parameters:

[Zabbix API]
username=johndoe
password=verysecretpassword
api=https://zabbix.mycompany.com/path/to/zabbix/frontend/
no_verify=true
# NOTE(review): this chunk begins mid-way through unit_tests(); the indented
# lines below are its final two scons invocations.
    build_options = {
        'TEST':1,
        'RELEASE':'false',
    }
    call_scons(build_options, extra_option_str)

    build_options = {
        'TEST':1,
        'SECURED':1,
        'RELEASE':'false',
    }
    call_scons(build_options, extra_option_str)

    print ("*********** Unit test Stop *************")

# Main module starts here
if os.getenv("SCONSFLAGS", "") == "":
    # Default scons flags: quiet, parallel across all CPU cores.
    os.environ["SCONSFLAGS"] = "-Q -j " + str(multiprocessing.cpu_count())

arg_num = len(sys.argv)
script_name = sys.argv[0]

# May be overridden in user's shell
VERBOSE = os.getenv("VERBOSE", "1")

# No argument: build release+debug and run the unit tests.
if arg_num == 1:
    build_all("true", "")
    build_all("false", "")
    unit_tests()
# NOTE(review): this branch continues beyond this chunk of the file.
elif arg_num == 2:
    if str(sys.argv[1]) == '-c':
def configure(env):
    """SCons configure hook for the Mono module.

    Locates the Mono runtime (via MONO32_PREFIX/MONO64_PREFIX, the Windows
    registry helper, or pkg-config), wires up include/library paths and link
    flags on ``env``, and stages the Mono shared library next to the output
    binaries when linking dynamically.

    Raises:
        RuntimeError: when Mono cannot be found, or when static linking is
            requested on an unsupported platform / with pkg-config discovery.
    """
    env.use_ptrcall = True
    env.add_module_version_string("mono")

    envvars = Variables()
    envvars.Add(BoolVariable('mono_static', 'Statically link mono', False))
    envvars.Update(env)

    bits = env['bits']
    mono_static = env['mono_static']

    # Candidate library base names, in order of preference.
    mono_lib_names = ['mono-2.0-sgen', 'monosgen-2.0']

    if env['platform'] == 'windows':
        if mono_static:
            raise RuntimeError('mono-static: Not supported on Windows')

        # Fix: initialize mono_root so the "not found" check below raises the
        # intended RuntimeError instead of a NameError when no MONO*_PREFIX is
        # set and we are cross-building (os.name != 'nt').
        mono_root = ''

        if bits == '32':
            if os.getenv('MONO32_PREFIX'):
                mono_root = os.getenv('MONO32_PREFIX')
            elif os.name == 'nt':
                mono_root = monoreg.find_mono_root_dir(bits)
        else:
            if os.getenv('MONO64_PREFIX'):
                mono_root = os.getenv('MONO64_PREFIX')
            elif os.name == 'nt':
                mono_root = monoreg.find_mono_root_dir(bits)

        if not mono_root:
            raise RuntimeError('Mono installation directory not found')

        mono_lib_path = os.path.join(mono_root, 'lib')

        env.Append(LIBPATH=mono_lib_path)
        env.Append(CPPPATH=os.path.join(mono_root, 'include', 'mono-2.0'))

        mono_lib_name = find_file_in_dir(mono_lib_path, mono_lib_names, extension='.lib')

        if not mono_lib_name:
            raise RuntimeError('Could not find mono library in: ' + mono_lib_path)

        # Under MSVC, pass the .lib directly as a link flag; otherwise let the
        # toolchain resolve it through LIBS.
        if os.getenv('VCINSTALLDIR'):
            env.Append(LINKFLAGS=mono_lib_name + Environment()['LIBSUFFIX'])
        else:
            env.Append(LIBS=mono_lib_name)

        mono_bin_path = os.path.join(mono_root, 'bin')

        mono_dll_name = find_file_in_dir(mono_bin_path, mono_lib_names, extension='.dll')

        if not mono_dll_name:
            raise RuntimeError('Could not find mono shared library in: ' + mono_bin_path)

        # Stage the runtime DLL next to the built binaries.
        copy_file_no_replace(mono_bin_path, 'bin', mono_dll_name + '.dll')
    else:
        sharedlib_ext = '.dylib' if sys.platform == 'darwin' else '.so'

        mono_root = ''

        if bits == '32':
            if os.getenv('MONO32_PREFIX'):
                mono_root = os.getenv('MONO32_PREFIX')
        else:
            if os.getenv('MONO64_PREFIX'):
                mono_root = os.getenv('MONO64_PREFIX')

        if mono_root:
            mono_lib_path = os.path.join(mono_root, 'lib')

            env.Append(LIBPATH=mono_lib_path)
            env.Append(CPPPATH=os.path.join(mono_root, 'include', 'mono-2.0'))

            mono_lib = find_file_in_dir(mono_lib_path, mono_lib_names, prefix='lib', extension='.a')

            if not mono_lib:
                raise RuntimeError('Could not find mono library in: ' + mono_lib_path)

            env.Append(CPPFLAGS=['-D_REENTRANT'])

            if mono_static:
                mono_lib_file = os.path.join(mono_lib_path, 'lib' + mono_lib + '.a')

                # Force the whole archive in so the embedding API symbols survive.
                if sys.platform == "darwin":
                    env.Append(LINKFLAGS=['-Wl,-force_load,' + mono_lib_file])
                elif sys.platform == "linux" or sys.platform == "linux2":
                    env.Append(LINKFLAGS=['-Wl,-whole-archive', mono_lib_file, '-Wl,-no-whole-archive'])
                else:
                    raise RuntimeError('mono-static: Not supported on this platform')
            else:
                env.Append(LIBS=[mono_lib])

            if sys.platform == "darwin":
                env.Append(LIBS=['iconv', 'pthread'])
            elif sys.platform == "linux" or sys.platform == "linux2":
                env.Append(LIBS=['m', 'rt', 'dl', 'pthread'])

            if not mono_static:
                mono_so_name = find_file_in_dir(mono_lib_path, mono_lib_names, prefix='lib', extension=sharedlib_ext)

                if not mono_so_name:
                    raise RuntimeError('Could not find mono shared library in: ' + mono_lib_path)

                copy_file_no_replace(mono_lib_path, 'bin', 'lib' + mono_so_name + sharedlib_ext)
        else:
            # No explicit prefix configured: fall back to pkg-config discovery.
            if mono_static:
                raise RuntimeError('mono-static: Not supported with pkg-config. Specify a mono prefix manually')

            env.ParseConfig('pkg-config monosgen-2 --cflags --libs')

            mono_lib_path = ''
            mono_so_name = ''

            # Use a scratch environment to learn where pkg-config thinks the
            # shared library lives, without polluting the real env.
            tmpenv = Environment()
            tmpenv.AppendENVPath('PKG_CONFIG_PATH', os.getenv('PKG_CONFIG_PATH'))
            tmpenv.ParseConfig('pkg-config monosgen-2 --libs-only-L')

            for hint_dir in tmpenv['LIBPATH']:
                name_found = find_file_in_dir(hint_dir, mono_lib_names, prefix='lib', extension=sharedlib_ext)
                if name_found:
                    mono_lib_path = hint_dir
                    mono_so_name = name_found
                    break

            if not mono_so_name:
                raise RuntimeError('Could not find mono shared library in: ' + str(tmpenv['LIBPATH']))

            copy_file_no_replace(mono_lib_path, 'bin', 'lib' + mono_so_name + sharedlib_ext)

        # Export symbols so the Mono runtime can resolve them at run time.
        env.Append(LINKFLAGS='-rdynamic')
import os

from setuptools import setup

# Version can be pinned through the VERSION environment variable.
version = os.getenv('VERSION', '1.10.1')

# Depend on the GPU build only when an nvidia kernel driver is present.
if os.path.exists('/proc/driver/nvidia/version'):
    _tf_package = 'tensorflow-gpu'
else:
    _tf_package = 'tensorflow'

setup(
    name='tensorflow-autodetect',
    version=version,
    url='https://github.com/commaai/tensorflow-autodetect',
    author='comma.ai',
    author_email='',
    license='MIT',
    long_description='Auto-detect tensorflow or tensorflow-gpu package based on nvidia driver being installed',
    keywords='tensorflow tensorflow-gpu',
    install_requires=[
        _tf_package + '==' + version,
    ],
    classifiers=[
        'Natural Language :: English',
        'Programming Language :: Python :: 2',
        'Programming Language :: Python :: 3',
    ],
)
from dotenv import load_dotenv load_dotenv() from telegram import InlineKeyboardButton, InlineKeyboardMarkup from telegram.ext import CallbackContext, CommandHandler, CallbackQueryHandler from telegram.update import Update import os # START VIEW CONDITIONAL_CHANNEL_URL = os.getenv('CONDITIONAL_CHANNEL_URL') CONDITIONAL_CHANNEL_ID = '-100' + str(os.getenv('CONDITIONAL_CHANNEL_ID')) PRIVATE_CHANNEL_URL = os.getenv('PRIVATE_CHANNEL_URL') START_BUTTONS = [[InlineKeyboardButton("Подписаться", url=CONDITIONAL_CHANNEL_URL)], [InlineKeyboardButton("Проверить подписку", callback_data="validate")]] START_MARKUP = InlineKeyboardMarkup(START_BUTTONS) def start(update: Update, context: CallbackContext): update.message.reply_text("Подпишись, чтобы посмотреть продолжение!", reply_markup=START_MARKUP) # VALIDATE VIEW VALIDATE_BUTTONS = [[InlineKeyboardButton("В приватный канал", url=PRIVATE_CHANNEL_URL)]] VALIDATE_MARKUP = InlineKeyboardMarkup(VALIDATE_BUTTONS) def validate(update: Update, context: CallbackContext): update = update.callback_query
def main():
    """Locate the device repository for the product named on the command
    line, add it (and its dependencies) to the local manifest, and sync it
    from GitHub when it is not present locally.

    ``sys.argv[1]`` is the product name (e.g. ``vendor_device``);
    ``sys.argv[2]``, when present and equal to ``'true'``, enables
    dependencies-only mode.
    """
    global DEBUG
    try:
        # NOTE(review): argv entries are strings, so the `1` alternative can
        # never match; only the literal 'true' enables deps-only mode —
        # confirm whether '1' was meant to be accepted too.
        depsonly = bool(sys.argv[2] in ['true', 1])
    except IndexError:
        depsonly = False

    if os.getenv('ROOMSERVICE_DEBUG'):
        DEBUG = True

    product = sys.argv[1]
    # Strip everything up to and including the first underscore; when there
    # is no underscore (find() == -1) or the slice is empty, fall back to the
    # whole product name.
    device = product[product.find("_") + 1:] or product

    if depsonly:
        repo_path = get_from_manifest(device)
        if repo_path:
            fetch_dependencies(repo_path)
        else:
            # Fixed: the adjacent literals previously concatenated to
            # "...mode on anon-existing..." (missing space).
            print("Trying dependencies-only mode on a "
                  "non-existing device tree?")
        sys.exit()

    print("Device {0} not found. Attempting to retrieve device repository from "
          "{1} Github (http://github.com/{1}).".format(device, org_display))

    githubreq = urllib.request.Request(
        "https://api.github.com/search/repositories?"
        "q={0}+user:{1}+in:name+fork:true".format(device, org_display))
    add_auth(githubreq)

    repositories = []
    try:
        result = json.loads(urllib.request.urlopen(githubreq).read().decode())
    except urllib.error.URLError:
        print("Failed to search GitHub")
        sys.exit()
    except ValueError:
        print("Failed to parse return data from GitHub")
        sys.exit()
    for res in result.get('items', []):
        repositories.append(res)

    for repository in repositories:
        repo_name = repository['name']
        # Only repositories following the device_<manufacturer>_<device>
        # naming convention are candidates.
        if not (repo_name.startswith("device_") and repo_name.endswith("_" + device)):
            continue
        print("Found repository: %s" % repository['name'])

        fallback_branch = detect_revissquid(repository)
        # Slice off the leading "device_" and the trailing "_<device>".
        manufacturer = repo_name[7:-(len(device) + 1)]
        repo_path = "device/%s/%s" % (manufacturer, device)
        adding = [{'repository': repo_name, 'target_path': repo_path}]

        add_to_manifest(adding, fallback_branch)

        print("Syncing repository to retrieve project.")
        # NOTE(review): repo_path is derived from a GitHub repository name and
        # is interpolated into a shell command — confirm upstream naming rules
        # make this safe, or switch to subprocess with an argument list.
        os.system('repo sync --force-sync --no-tags --current-branch --no-clone-bundle %s' % repo_path)
        print("Repository synced!")

        fetch_dependencies(repo_path, fallback_branch)
        print("Done")
        sys.exit()

    print("Repository for %s not found in the %s Github repository list." % (device, org_display))
    print("If this is in error, you may need to manually add it to your "
          "%s" % custom_local_manifest)
import os
import json
import pymysql
import unittest
from Bio import SeqIO
from BioSQL import BioSeqDatabase
from seqann.util import get_features
from seqann.models.annotation import Annotation
from seqann.models.reference_data import ReferenceData

# Connection settings are taken from the environment when provided and fall
# back to placeholders/defaults otherwise.  An empty environment value is
# treated the same as unset (falsy), matching the previous behavior.
neo4jpass = os.getenv("NEO4JPASS") or '******'
neo4juser = os.getenv("NEO4JUSER") or '******'
neo4jurl = os.getenv("NEO4JURL") or "http://neo4j.b12x.org:80"
biosqlpass = os.getenv("BIOSQLPASS") or "******"
biosqluser = '******'
from time import clock, time import json which = int(argv[1]) submit_id = int(argv[2]) sname = argv[0] argv = [] import ROOT as root from PandaCore.Tools.Misc import * from PandaCore.Tools.Load import * import PandaCore.Tools.job_management as cb import PandaAnalysis.Tagging.cfg_v8 as tagcfg Load('PandaAnalyzer') data_dir = getenv('CMSSW_BASE') + '/src/PandaAnalysis/data/' stopwatch = clock() def print_time(label): global stopwatch now_ = clock() PDebug( sname + '.print_time:' + str(time()), '%.3f s elapsed performing "%s"' % ((now_ - stopwatch) / 1000., label)) stopwatch = now_ def copy_local(long_name): replacements = {
import os, requests from flask import Flask, session, render_template, request, redirect, url_for from flask_session import Session from sqlalchemy import create_engine from sqlalchemy.orm import scoped_session, sessionmaker app = Flask(__name__) # Check for environment variable if not os.getenv("DATABASE_URL"): raise RuntimeError("DATABASE_URL is not set") # Configure session to use filesystem app.config["SESSION_PERMANENT"] = False app.config["SESSION_TYPE"] = "filesystem" Session(app) # Set up database engine = create_engine(os.getenv("DATABASE_URL")) db = scoped_session(sessionmaker(bind=engine)) KEY = "ryVZpwrtKAmsAjJQpwlduA" reviews = [] @app.route("/") def index(): if 'user' in session: return redirect(url_for('search'))
def _get_linux_console_width() -> int:
    """Return the terminal width in columns, or 0 when it cannot be determined.

    Shells out to ``tput cols``.  When TERM is not defined, ``tput`` would
    fail with a terminal-related error, so it is skipped entirely.  Any other
    failure — ``tput`` missing from PATH, TERM unknown to terminfo, or
    non-numeric output — also yields 0 instead of propagating an exception.
    """
    if not os.getenv('TERM'):
        return 0
    try:
        return int(subprocess.check_output(['tput', 'cols']))
    except (subprocess.CalledProcessError, OSError, ValueError):
        # tput absent, terminal type unknown, or unparsable output.
        return 0
def __init__(self, site_name=None, requestor_class=None, requestor_kwargs=None, **config_settings): """Initialize a Reddit instance. :param site_name: The name of a section in your ``praw.ini`` file from which to load settings from. This parameter, in tandem with an appropriately configured ``praw.ini``, file is useful if you wish to easily save credentials for different applications, or communicate with other servers running reddit. If ``site_name`` is ``None``, then the site name will be looked for in the environment variable praw_site. If it is not found there, the DEFAULT site will be used. :param requestor_class: A class that will be used to create a requestor. If not set, use ``prawcore.Requestor`` (default: None). :param requestor_kwargs: Dictionary with additional keyword arguments used to initialize the requestor (default: None). Additional keyword arguments will be used to initialize the :class`.Config` object. This can be used to specify configuration settings during instantiation of the :class:`.Reddit` instance. For more details please see :ref:`configuration`. Required settings are: * client_id * client_secret (for installed applications set this value to ``None``) * user_agent The ``requestor_class`` and ``requestor_kwargs`` allow for customization of the requestor :class`.Reddit` will use. This allows, e.g., easily adding behavior to the requestor or wrapping its :class`Session` in a caching layer. Example usage: .. 
code-block:: python import json, betamax, requests class JSONDebugRequestor(Requestor): def request(self, *args, **kwargs): response = super().request(*args, **kwargs) print(json.dumps(response.json(), indent=4)) return response my_session = betamax.Betamax(requests.Session()) reddit = Reddit(..., requestor_class=JSONDebugRequestor, requestor_kwargs={'session': my_session}) """ self._core = self._authorized_core = self._read_only_core = None self._objector = None self._unique_counter = 0 try: config_section = site_name or os.getenv("praw_site") or "DEFAULT" self.config = Config(config_section, **config_settings) except configparser.NoSectionError as exc: help_message = ("You provided the name of a praw.ini " "configuration which does not exist.\n\nFor help " "with creating a Reddit instance, visit\n" "https://praw.readthedocs.io/en/latest/code_overvi" "ew/reddit_instance.html\n\n" "For help on configuring PRAW, visit\n" "https://praw.readthedocs.io/en/latest/getting_sta" "rted/configuration.html") if site_name is not None: exc.message += "\n" + help_message raise required_message = ("Required configuration setting {!r} missing. \n" "This setting can be provided in a praw.ini file, " "as a keyword argument to the `Reddit` class " "constructor, or as an environment variable.") for attribute in ("client_id", "user_agent"): if getattr(self.config, attribute) in ( self.config.CONFIG_NOT_SET, None, ): raise ClientException(required_message.format(attribute)) if self.config.client_secret is self.config.CONFIG_NOT_SET: raise ClientException( required_message.format("client_secret") + "\nFor installed applications this value " "must be set to None via a keyword argument " "to the `Reddit` class constructor.") self._check_for_update() self._prepare_objector() self._prepare_prawcore(requestor_class, requestor_kwargs) self.auth = models.Auth(self, None) """An instance of :class:`.Auth`. Provides the interface for interacting with installed and web applications. 
See :ref:`auth_url` """ self.front = models.Front(self) """An instance of :class:`.Front`. Provides the interface for interacting with front page listings. For example: .. code-block:: python for submission in reddit.front.hot(): print(submission) """ self.inbox = models.Inbox(self, None) """An instance of :class:`.Inbox`. Provides the interface to a user's inbox which produces :class:`.Message`, :class:`.Comment`, and :class:`.Submission` instances. For example to iterate through comments which mention the authorized user run: .. code-block:: python for comment in reddit.inbox.mentions(): print(comment) """ self.live = models.LiveHelper(self, None) """An instance of :class:`.LiveHelper`. Provides the interface for working with :class:`.LiveThread` instances. At present only new LiveThreads can be created. .. code-block:: python reddit.live.create('title', 'description') """ self.multireddit = models.MultiredditHelper(self, None) """An instance of :class:`.MultiredditHelper`. Provides the interface to working with :class:`.Multireddit` instances. For example you can obtain a :class:`.Multireddit` instance via: .. code-block:: python reddit.multireddit('samuraisam', 'programming') """ self.redditors = models.Redditors(self, None) """An instance of :class:`.Redditors`. Provides the interface for Redditor discovery. For example to iterate over the newest Redditors, run: .. code-block:: python for redditor in reddit.redditors.new(limit=None): print(redditor) """ self.subreddit = models.SubredditHelper(self, None) """An instance of :class:`.SubredditHelper`. Provides the interface to working with :class:`.Subreddit` instances. For example to create a Subreddit run: .. code-block:: python reddit.subreddit.create('coolnewsubname') To obtain a lazy a :class:`.Subreddit` instance run: .. code-block:: python reddit.subreddit('redditdev') Note that multiple subreddits can be combined and filtered views of /r/all can also be used just like a subreddit: .. 
code-block:: python reddit.subreddit('redditdev+learnpython+botwatch') reddit.subreddit('all-redditdev-learnpython') """ self.subreddits = models.Subreddits(self, None) """An instance of :class:`.Subreddits`. Provides the interface for :class:`.Subreddit` discovery. For example to iterate over the set of default subreddits run: .. code-block:: python for subreddit in reddit.subreddits.default(limit=None): print(subreddit) """ self.user = models.User(self) """An instance of :class:`.User`.
flags.DEFINE_string("output_dir", "", "Base output directory for run.") flags.DEFINE_string("schedule", "continuous_train_and_eval", "Method of Experiment to run.") flags.DEFINE_integer("eval_steps", 100, "Number of steps in evaluation. By default, eval will " "stop after eval_steps or when it runs through the eval " "dataset once in full, whichever comes first, so this " "can be a very large number.") except: # pylint: disable=bare-except pass flags.DEFINE_string("std_server_protocol", "grpc", "Protocol for tf.train.Server.") # Google Cloud TPUs flags.DEFINE_string("cloud_tpu_name", "%s-tpu" % os.getenv("USER"), "Name of Cloud TPU instance to use or create.") # Google Cloud ML Engine flags.DEFINE_bool("cloud_mlengine", False, "Whether to launch on Cloud ML Engine.") flags.DEFINE_string("cloud_mlengine_master_type", None, "Machine type for master on Cloud ML Engine. " "If provided, overrides default selections based on " "--worker_gpu. User is responsible for ensuring " "type is valid and that --worker_gpu matches number of " "GPUs on machine type. See documentation: " "https://cloud.google.com/ml-engine/reference/rest/v1/" "projects.jobs#traininginput") # Hyperparameter tuning on Cloud ML Engine # Pass an --hparams_range to enable
def template(self, cmd_list, expected_output_file="result.html", success=True,
             script=file_fib, script_name="cmdline_test.py",
             expected_entries=None, expected_stdout=None, cleanup=True,
             check_func=None, concurrency=None, send_term=False):
    """Run a command line under test and validate its outcome.

    :param cmd_list: the command to execute, as an argv-style list.
    :param expected_output_file: file path (or list of paths) the command is
        expected to produce; checked for existence on success.
    :param success: whether the command is expected to exit with code 0.
    :param script: source of a helper script to write before running
        (``file_fib`` by default); falsy to skip writing one.
    :param script_name: file name the helper script is written to.
    :param expected_entries: when set, the JSON output file is loaded and its
        event count checked via ``assertEventNumber``.
    :param expected_stdout: regex the captured stdout must match.
    :param cleanup: remove the output file and script afterwards.
    :param concurrency: coverage concurrency mode ("multiprocessing" or None).
    :param send_term: run the command, SIGTERM it after 2s, and return None
        instead of validating output.
    :return: the ``subprocess.CompletedProcess``, or None when ``send_term``.
    """
    # When this test suite itself runs under coverage, rewrite the command so
    # the child process is measured too (replacing the entry-point invocation
    # with an equivalent `coverage run ... -m` invocation).
    if os.getenv("COVERAGE_RUN"):
        if "viztracer" in cmd_list:
            idx = cmd_list.index("viztracer")
            if not concurrency:
                cmd_list = ["coverage", "run", "--parallel-mode", "--pylib", "-m"] + cmd_list[idx:]
            elif concurrency == "multiprocessing":
                cmd_list = ["coverage", "run", "--concurrency=multiprocessing", "-m"] + cmd_list[idx:]
        elif "vizviewer" in cmd_list:
            idx = cmd_list.index("vizviewer")
            cmd_list = ["coverage", "run", "--parallel-mode", "--pylib", "-m"] + ["viztracer.viewer"] + cmd_list[idx + 1:]
        elif "python" in cmd_list:
            idx = cmd_list.index("python")
            cmd_list = ["coverage", "run", "--parallel-mode", "--pylib"] + cmd_list[idx + 1:]
    if script:
        self.build_script(script, script_name)
    if send_term:
        # Exercise the SIGTERM path only; no output validation.
        p = subprocess.Popen(cmd_list)
        time.sleep(2)
        p.terminate()
        p.wait()
        return None
    result = subprocess.run(cmd_list, stdout=subprocess.PIPE, timeout=30)
    # XOR is true exactly when the outcome matches the expectation
    # (success with rc==0, or expected failure with rc!=0).  Dump
    # diagnostics first when the assertion below is about to fail.
    if not (success ^ (result.returncode != 0)):
        print(success, result.returncode)
        print(result.stdout)
    self.assertTrue(success ^ (result.returncode != 0))
    if success:
        if expected_output_file:
            if type(expected_output_file) is list:
                for f in expected_output_file:
                    self.assertTrue(os.path.exists(f))
            elif type(expected_output_file) is str:
                self.assertTrue(os.path.exists(expected_output_file))
        if expected_entries:
            # Entry counting only makes sense for a single JSON report.
            assert (type(expected_output_file) is str and expected_output_file.split(".")[-1] == "json")
            with open(expected_output_file) as f:
                data = json.load(f)
            self.assertEventNumber(data, expected_entries)
        if expected_stdout:
            self.assertRegex(result.stdout.decode("utf-8"), expected_stdout)
        if check_func:
            # Custom validation also requires a single JSON report.
            assert (type(expected_output_file) is str and expected_output_file.split(".")[-1] == "json")
            with open(expected_output_file) as f:
                data = json.load(f)
            check_func(data)
    if cleanup:
        self.cleanup(output_file=expected_output_file, script_name=script_name)
    return result
# Emit shell variable definitions derived from the docassemble configuration.

# OS locale is exported only when explicitly configured.
if 'os locale' in daconfig and daconfig['os locale'] is not None:
    print('export LOCALE="{}"'.format(str(daconfig['os locale'])))
print('export DAPYTHONVERSION="3"')

# Web server defaults to nginx unless a string value is configured.
if 'web server' in daconfig and isinstance(daconfig['web server'], str):
    print('export DAWEBSERVER="{}"'.format(daconfig['web server']))
else:
    print('export DAWEBSERVER="nginx"')

# Additional OS locales: prefer the configured list, otherwise fall back to
# the OTHERLOCALES environment variable (a separator-delimited string).
if 'other os locales' in daconfig and type(daconfig['other os locales']) is list:
    print('declare -a OTHERLOCALES')
    print('export OTHERLOCALES')
    for indexno, locale in enumerate(daconfig['other os locales']):
        print('OTHERLOCALES[{}]={}'.format(indexno, repr(str(locale))))
else:
    other_locales_variable = os.getenv('OTHERLOCALES', None)
    if other_locales_variable is not None and other_locales_variable != 'null':
        print('declare -a OTHERLOCALES')
        print('export OTHERLOCALES')
        for indexno, locale in enumerate(x.strip() for x in separator.split(other_locales_variable)):
            print('OTHERLOCALES[{}]={}'.format(indexno, repr(str(locale))))

# Maximum upload size: 0 disables the limit; a non-integer configuration
# value falls back to the 16MB default.
max_content_length = daconfig.get('maximum content length', 16 * 1024 * 1024)
if isinstance(max_content_length, (int, type(None))):
    if max_content_length is None or max_content_length <= 0:
        print('DAMAXCONTENTLENGTH=0')
    else:
        print('DAMAXCONTENTLENGTH={}'.format(max_content_length))
else:
    print('DAMAXCONTENTLENGTH={}'.format(16 * 1024 * 1024))
def wrapper(text): # Color function content will be wrapped, and the rest of the text color will be normal. wrapped_text = getattr(self, item.upper()) + text # No need to duplicate normal color suffix. if not wrapped_text.endswith(self.NORMAL): wrapped_text += self.NORMAL return wrapped_text return wrapper return super().__getattribute__(item) _printer = None # Colors won't work on Linux if TERM is not defined. _colors = os.name == 'nt' or os.getenv('TERM') # If we're not inside IPython, use pyreadline's console. if os.name == 'nt' and sys.stdout == sys.__stdout__: try: assert __IPYTHON__ except NameError: try: from pyreadline.console.console import Console _printer = Printer(Console()) except ImportError: # If all failed, just print without colors. _colors = False
import logging
import os
from typing import Optional

from flask import Flask, render_template
from flask_socketio import SocketIO
from flask_socketio import send

from switchremoteplay.controller import SwitchController

# Follow the example at https://flask-socketio.readthedocs.io/en/latest/

logger = logging.getLogger('switchremoteplay')

app = Flask(__name__)
# NOTE(review): hard-coded fallback secret key when SECRET_KEY is unset —
# acceptable for local/dev use only; confirm production deployments always
# set SECRET_KEY.
app.config['SECRET_KEY'] = os.getenv('SECRET_KEY') or 'asdasdasdf'
# cors_allowed_origins='*' accepts socket connections from any origin.
socketio = SocketIO(app, cors_allowed_origins='*')

# The controller instance driving the Switch; presumably assigned elsewhere
# at startup — confirm against the module that launches the server.
controller: Optional[SwitchController] = None


@socketio.on('connect')
def test_connect():
    # Acknowledge the new socket client with a JSON handshake message.
    print("Connected")
    send({'connected': True}, json=True)


@socketio.on('disconnect')
def test_disconnect():
    print("Disconnected")
nosetests -v --with-spec --spec-color coverage report -m codecov --token=$CODECOV_TOKEN """ import unittest import os import json import logging from flask_api import status # HTTP Status Codes from mock import MagicMock, patch from app.model import Product, ValidationError, db import app.service as service import time DATABASE_URI = os.getenv('DATABASE_URI', None) ###################################################################### # T E S T C A S E S ###################################################################### class TestProductServer(unittest.TestCase): """ Product Server Tests """ @classmethod def setUpClass(cls): """ Run once before all tests """ service.app.debug = False service.initialize_logging(logging.INFO) # Set up the test database if DATABASE_URI: service.app.config['SQLALCHEMY_DATABASE_URI'] = DATABASE_URI
''' import os import warnings try: from vstutils.environment import prepare_environment, cmd_execution except ImportError: warnings.warn('"vstutils" was not installed', ImportWarning) prepare_environment = lambda *args, **kwargs: () cmd_execution = prepare_environment default_settings = { # ansible specific environment variables "ANSIBLE_HOST_KEY_CHECKING": 'False', "ANSIBLE_FORCE_COLOR": "true", # celery specific "C_FORCE_ROOT": "true", # django settings module "DJANGO_SETTINGS_MODULE": os.getenv( "DJANGO_SETTINGS_MODULE", 'polemarch.main.settings' ), # VSTUTILS settings "VST_PROJECT": os.getenv("VST_PROJECT", 'polemarch'), "VST_ROOT_URLCONF": os.getenv("VST_ROOT_URLCONF", 'vstutils.urls'), } __version__ = "1.1.2" prepare_environment(**default_settings)
class BaseConfig(object): # SQLITE PROJECT_ROOT = PROJECT_ROOT PROJECT_DATABASE_PATH = PROJECT_DATABASE_PATH SUBMISSIONS_DATABASE_PATH = os.path.join(PROJECT_DATABASE_PATH, 'submissions') SUBMISSIONS_MIME_TYPE_WHITELIST = [ 'application/json', 'application/ld+json', 'application/msword', 'application/octet-stream', 'application/ogg', 'application/pdf', 'application/rtf', 'application/vnd.ms-excel', 'application/vnd.oasis.opendocument.spreadsheet', 'application/vnd.oasis.opendocument.text', 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet', 'application/vnd.openxmlformats-officedocument.wordprocessingml.document', 'application/xml', 'image/bmp', 'image/gif', 'image/jpeg', 'image/png', 'image/tiff', 'image/webp', 'text/csv', 'text/javascript', 'text/plain', 'text/xml', 'video/mpeg', 'video/ogg', 'video/webm', ] ASSET_DATABASE_PATH = os.path.join(PROJECT_DATABASE_PATH, 'assets') ASSET_ALLOWED_EXTS = [ '.jpg', '.jpe', '.jpeg', '.png', '.gif', '.svg', '.bmp', '.tif', '.tiff', ] SQLALCHEMY_DATABASE_PATH = os.path.join(PROJECT_DATABASE_PATH, 'database.sqlite3') SQLALCHEMY_DATABASE_URI = 'sqlite:///%s' % (SQLALCHEMY_DATABASE_PATH) DEBUG = False ERROR_404_HELP = False PREFERRED_URL_SCHEME = 'http' REVERSE_PROXY_SETUP = os.getenv('HOSTON_REVERSE_PROXY_SETUP', False) AUTHORIZATIONS = { 'oauth2_password': { 'type': 'oauth2', 'flow': 'password', 'scopes': {}, 'tokenUrl': '/api/v1/auth/tokens', }, } # fmt: off # THIS ORDERING IS VERY SPECIFIC AND INFLUENCES WHICH MODULES CAN DEPEND ON EACH OTHER ENABLED_MODULES = ( # Users # Dependencies: [NONE] 'users', # Organizations # Dependencies: Users # # Note: Organization defines a many-to-many relationship with User # and will import app.modules.organizations.models when the # User module and object are imported. 
Disabling the # 'organizations' modules will currently break the implementation # of the User model because it creates a broken backref 'organizations', # Authentication # Dependencies: Users 'auth', # Submissions # Dependencies: Users 'submissions', # Assets # Dependencies: Submissions 'assets', # Miscellaneous 'collaborations', 'notifications', 'encounters', 'projects', 'sightings', # Front-end # Dependencies: Users, Auth, Assets 'frontend', # REST APIs = API, Passthroughs, Configuration # Dependencies: Users, Auth 'api', 'passthroughs', 'configuration', ) # fmt: on STATIC_ROOT = os.path.join(PROJECT_ROOT, 'app', 'static') SWAGGER_UI_JSONEDITOR = True SWAGGER_UI_OAUTH_CLIENT_ID = 'documentation' SWAGGER_UI_OAUTH_REALM = 'Authentication for Houston server documentation' SWAGGER_UI_OAUTH_APP_NAME = 'Houston server documentation' SQLALCHEMY_TRACK_MODIFICATIONS = True CSRF_ENABLED = True PREMAILER_CACHE_MAXSIZE = 1024 MAX_CONTENT_LENGTH = 16 * 1024 * 1024 # Maximum size of 16MB PERMANENT_SESSION_LIFETIME = datetime.timedelta(days=7) SESSION_COOKIE_SECURE = False SESSION_COOKIE_HTTPONLY = False SESSION_REFRESH_EACH_REQUEST = True REMEMBER_COOKIE_DURATION = datetime.timedelta(days=14) REMEMBER_COOKIE_SECURE = True REMEMBER_COOKIE_HTTPONLY = True REMEMBER_COOKIE_REFRESH_EACH_REQUEST = True TIMEZONE = pytz.timezone('UTC')