Example #1
    def __init__(self, name, init_config, agentConfig, instances):
        self.ca_certs = init_config.get('ca_certs', get_ca_certs_path())
        proxy_settings = get_proxy(agentConfig)
        self.proxies = {
            "http": None,
            "https": None,
        }
        if proxy_settings:
            uri = "{host}:{port}".format(
                host=proxy_settings['host'],
                port=proxy_settings['port'])
            if proxy_settings['user'] and proxy_settings['password']:
                uri = "{user}:{password}@{uri}".format(
                    user=proxy_settings['user'],
                    password=proxy_settings['password'],
                    uri=uri)
            self.proxies['http'] = "http://{uri}".format(uri=uri)
            self.proxies['https'] = "https://{uri}".format(uri=uri)
        else:
            self.proxies['http'] = environ.get('HTTP_PROXY', None)
            self.proxies['https'] = environ.get('HTTPS_PROXY', None)

        self.proxies['no'] = environ.get('no_proxy',
                                         environ.get('NO_PROXY', None)
                                         )

        NetworkCheck.__init__(self, name, init_config, agentConfig, instances)
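As a hedged aside (not part of the original check), a mapping in this shape is what an HTTP client such as requests expects for its proxies argument, assuming that library is what consumes it downstream:

# Minimal standalone sketch of the same pattern; the use of requests is an assumption.
from os import environ
import requests

proxies = {
    "http": environ.get("HTTP_PROXY"),
    "https": environ.get("HTTPS_PROXY"),
}
# A None value simply means no proxy was configured for that scheme.
response = requests.get("https://example.com", proxies=proxies, timeout=10)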
Example #2
def determine_base_flags():
    flags = {
        'libraries': [],
        'include_dirs': [],
        'extra_link_args': [],
        'extra_compile_args': []}
    if c_options['use_ios']:
        sysroot = environ.get('IOSSDKROOT', environ.get('SDKROOT'))
        if not sysroot:
            raise Exception('IOSSDKROOT is not set')
        flags['include_dirs'] += [sysroot]
        flags['extra_compile_args'] += ['-isysroot', sysroot]
        flags['extra_link_args'] += ['-isysroot', sysroot]
    elif platform == 'darwin':
        v = os.uname()
        if v[2] >= '13.0.0':
            # use xcode-select to search on the right Xcode path
            # XXX use the best SDK available instead of a specific one
            import platform as _platform
            xcode_dev = getoutput('xcode-select -p').splitlines()[0]
            sdk_mac_ver = '.'.join(_platform.mac_ver()[0].split('.')[:2])
            print('Xcode detected at {}, and using MacOSX{} sdk'.format(
                    xcode_dev, sdk_mac_ver))
            sysroot = join(xcode_dev.decode('utf-8'),
                    'Platforms/MacOSX.platform/Developer/SDKs',
                    'MacOSX{}.sdk'.format(sdk_mac_ver),
                    'System/Library/Frameworks')
        else:
            sysroot = ('/System/Library/Frameworks/'
                       'ApplicationServices.framework/Frameworks')
        flags['extra_compile_args'] += ['-F%s' % sysroot]
        flags['extra_link_args'] += ['-F%s' % sysroot]
    return flags
Example #3
File: setup.py Project: not-na/kivy
def determine_base_flags():
    flags = {"libraries": [], "include_dirs": [], "extra_link_args": [], "extra_compile_args": []}
    if c_options["use_ios"]:
        sysroot = environ.get("IOSSDKROOT", environ.get("SDKROOT"))
        if not sysroot:
            raise Exception("IOSSDKROOT is not set")
        flags["include_dirs"] += [sysroot]
        flags["extra_compile_args"] += ["-isysroot", sysroot]
        flags["extra_link_args"] += ["-isysroot", sysroot]
    elif platform.startswith("freebsd"):
        flags["include_dirs"] += [join(environ.get("LOCALBASE", "/usr/local"), "include")]
        flags["extra_link_args"] += ["-L", join(environ.get("LOCALBASE", "/usr/local"), "lib")]
    elif platform == "darwin":
        v = os.uname()
        if v[2] >= "13.0.0":
            # use xcode-select to search on the right Xcode path
            # XXX use the best SDK available instead of a specific one
            import platform as _platform

            xcode_dev = getoutput("xcode-select -p").splitlines()[0]
            sdk_mac_ver = ".".join(_platform.mac_ver()[0].split(".")[:2])
            print("Xcode detected at {}, and using MacOSX{} sdk".format(xcode_dev, sdk_mac_ver))
            sysroot = join(
                xcode_dev.decode("utf-8"),
                "Platforms/MacOSX.platform/Developer/SDKs",
                "MacOSX{}.sdk".format(sdk_mac_ver),
                "System/Library/Frameworks",
            )
        else:
            sysroot = "/System/Library/Frameworks/" "ApplicationServices.framework/Frameworks"
        flags["extra_compile_args"] += ["-F%s" % sysroot]
        flags["extra_link_args"] += ["-F%s" % sysroot]
    return flags
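A hedged sketch of how a flags dict like this is typically fed into an extension definition in a setup.py; the module name and source file below are placeholders, not taken from the kivy build:

from setuptools import Extension  # assumption: a setuptools/distutils-style build

flags = determine_base_flags()
ext = Extension(
    "example_ext",                  # hypothetical extension name
    sources=["example_ext.c"],      # hypothetical source file
    libraries=flags["libraries"],
    include_dirs=flags["include_dirs"],
    extra_compile_args=flags["extra_compile_args"],
    extra_link_args=flags["extra_link_args"],
)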
    def _send_slack_alert(self, message, service, color='green', sender='Cabot'):

        channel = '#' + env.get('SLACK_ALERT_CHANNEL')
        url = env.get('SLACK_WEBHOOK_URL')
        icon_url = env.get('SLACK_ICON_URL')

        # TODO: handle color
        resp = requests.post(url, data=json.dumps({
            'channel': channel,
            'username': sender[:15],
            'icon_url': icon_url,
            'attachments': [{
                'title': service.name,
                'text': message,
                'color': color,
                'fields': [{
                    'title': 'status',
                    'value': service.overall_status,
                    'short': 'false'
                    }, {
                    'title': 'old status',
                    'value': service.old_overall_status,
                    'short': 'false'
                    }
                ]
            }]
        }))
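One hedged hardening note on the snippet above: env.get('SLACK_ALERT_CHANNEL') returns None when the variable is unset, which makes the '#' concatenation raise a TypeError. A defensive variant (the fallback channel name is a placeholder):

from os import environ as env

channel = '#' + env.get('SLACK_ALERT_CHANNEL', 'general')  # fall back to a placeholder channel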
Example #5
def application(config):
    app = Application("Scrapyd")
    http_port = int(environ.get('PORT', config.getint('http_port', 6800)))
    config.cp.set('scrapyd', 'database_url', environ.get('DATABASE_URL'))

    poller = Psycopg2QueuePoller(config)
    eggstorage = FilesystemEggStorage(config)
    scheduler = Psycopg2SpiderScheduler(config)
    environment = Environment(config)

    app.setComponent(IPoller, poller)
    app.setComponent(IEggStorage, eggstorage)
    app.setComponent(ISpiderScheduler, scheduler)
    app.setComponent(IEnvironment, environment)

    launcher = Launcher(config, app)
    timer = TimerService(5, poller.poll)
    webservice = TCPServer(http_port, server.Site(Root(config, app)))
    log.msg("Scrapyd web console available at http://localhost:%s/ (HEROKU)"
        % http_port)

    launcher.setServiceParent(app)
    timer.setServiceParent(app)
    webservice.setServiceParent(app)

    return app
    def load_project_properties(cls):
        """
        Parse the JSON configuration file located in the settings folder and store the resulting dictionary in the
        `conf` class variable. Values from "standard" OpenStack environment variables override this configuration.
        """

        cls.logger.debug("Loading test settings...")
        with open(PROPERTIES_FILE) as config_file:
            try:
                cls.conf = json.load(config_file)
            except Exception as e:
                assert False, "Error parsing config file '{}': {}".format(PROPERTIES_FILE, e)

        # Check for environment variables and update configuration
        cred = cls.conf[PROPERTIES_CONFIG_CRED]
        env_cred = {
            PROPERTIES_CONFIG_CRED_KEYSTONE_URL: environ.get('OS_AUTH_URL', cred[PROPERTIES_CONFIG_CRED_KEYSTONE_URL]),
            PROPERTIES_CONFIG_CRED_USER: environ.get('OS_USERNAME', cred[PROPERTIES_CONFIG_CRED_USER]),
            PROPERTIES_CONFIG_CRED_PASS: environ.get('OS_PASSWORD', cred[PROPERTIES_CONFIG_CRED_PASS]),
            PROPERTIES_CONFIG_CRED_TENANT_ID: environ.get('OS_TENANT_ID', cred[PROPERTIES_CONFIG_CRED_TENANT_ID]),
            PROPERTIES_CONFIG_CRED_TENANT_NAME: environ.get('OS_TENANT_NAME', cred[PROPERTIES_CONFIG_CRED_TENANT_NAME])
        }
        cred.update(env_cred)

        # Ensure all values are given (either by the settings file or overridden by environment variables)
        for name in env_cred.keys():
            if not cred[name]:
                assert False, "A value for '{}.{}' setting must be provided".format(PROPERTIES_CONFIG_CRED, name)
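Stripped of the project constants, the override-and-validate pattern above is dict.update with environment fallbacks; a minimal hedged sketch with generic key names:

from os import environ

cred = {'user': 'settings-user', 'password': ''}
cred.update({
    'user': environ.get('OS_USERNAME', cred['user']),
    'password': environ.get('OS_PASSWORD', cred['password']),
})
missing = [name for name, value in cred.items() if not value]
assert not missing, "A value must be provided for: {}".format(", ".join(missing))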
Example #7
def get_backend_api(test_case, cluster_id):
    """
    Get an appropriate BackendAPI for the specified dataset backend.

    Note that this is a backdoor, useful for interacting with cloud APIs in
    tests. For many dataset backends this does not make sense, but it
    provides a convenient means to interact with cloud backends such as EBS
    or cinder.

    :param test_case: The test case that is being run.

    :param cluster_id: The unique cluster_id, used for backend APIs that
        require this in order to be constructed.
    """
    backend_config_filename = environ.get(
        "FLOCKER_ACCEPTANCE_TEST_VOLUME_BACKEND_CONFIG")
    if backend_config_filename is None:
        raise SkipTest(
            'This test requires the ability to construct an IBlockDeviceAPI '
            'in order to verify construction. Please set '
            'FLOCKER_ACCEPTANCE_TEST_VOLUME_BACKEND_CONFIG to a yaml filepath '
            'with the dataset configuration.')
    backend_name = environ.get("FLOCKER_ACCEPTANCE_VOLUME_BACKEND")
    if backend_name is None:
        raise SkipTest(
            "Set acceptance testing volume backend using the " +
            "FLOCKER_ACCEPTANCE_VOLUME_BACKEND environment variable.")
    backend_config_filepath = FilePath(backend_config_filename)
    full_backend_config = yaml.safe_load(
        backend_config_filepath.getContent())
    backend_config = full_backend_config.get(backend_name)
    if 'backend' in backend_config:
        backend_config.pop('backend')
    backend = get_backend(backend_name)
    return get_api(backend, pmap(backend_config), reactor, cluster_id)
def test_all():
    cwd = Path(getcwd())
    all_omts = [p.as_posix() for p in cwd.glob('**/*.omt')]
    th = TallyHolder()
    if environ.get('TRAVIS'):
        if not environ.get('OMV_ENGINE'):
            tallies = [parse_omt(t) for t in all_omts]
        else:
            engine = environ.get('OMV_ENGINE').lower()
            tallies = [parse_omt(t)
                       for t in all_omts
                       if load_yaml(t)['engine'].lower() == engine]
    else:
        tallies = [parse_omt(t) for t in all_omts]
        
    for t in tallies:
        th.add(t)

    results = [t.all_passed() for t in tallies]
    inform('')
    inform("%i test(s) run" % len(tallies),
           overline='-', underline='-', center=True)
    inform('')
    if all(results):
        inform("All tests passing!", underline='=', center=True)
    else:
        failed = [trim_path(t.omt) for t in tallies if not t.all_passed()]
        inform("Some test(s) failed: ",  failed, underline='=')
    
    if is_verbose():
        print('\n'+th.summary()+'\n')

    assert all(results)
Example #9
def test_returned_filesize():
    runner = CliRunner()

    result = runner.invoke(
        cli,
        ['search',
        environ.get('SENTINEL_USER'),
        environ.get('SENTINEL_PASSWORD'),
        'tests/map.geojson',
        '--url', 'https://scihub.copernicus.eu/dhus/',
        '-s', '20141205',
        '-e', '20141208',
        '-q', 'producttype=GRD']
        )
    expected = "1 scenes found with a total size of 0.50 GB"
    assert result.output.split("\n")[-2] == expected

    result = runner.invoke(
        cli,
        ['search',
        environ.get('SENTINEL_USER'),
        environ.get('SENTINEL_PASSWORD'),
        'tests/map.geojson',
        '--url', 'https://scihub.copernicus.eu/dhus/',
        '-s', '20140101',
        '-e', '20141231',
        '-q', 'producttype=GRD']
        )
    expected = "20 scenes found with a total size of 11.06 GB"
    assert result.output.split("\n")[-2] == expected
Example #10
def get_libpaths():
    """
    On AIX, the build-time search path is stored in the executable
    as "loader header information".
    The command /usr/bin/dump -H extracts this info.
    Prefix searched libraries with LD_LIBRARY_PATH (preferred),
    or LIBPATH if defined. These paths are appended to the paths
    to libraries the python executable is linked with.
    This mimics AIX dlopen() behavior.
    """
    libpaths = environ.get("LD_LIBRARY_PATH")
    if libpaths is None:
        libpaths = environ.get("LIBPATH")
    if libpaths is None:
        libpaths = []
    else:
        libpaths = libpaths.split(":")
    objects = get_ld_headers(executable)
    for (_, lines) in objects:
        for line in lines:
            # the second (optional) argument is PATH if it includes a /
            path = line.split()[1]
            if "/" in path:
                libpaths.extend(path.split(":"))
    return libpaths
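The environment fallback at the top of get_libpaths is worth isolating; a compact hedged sketch of just that part (note it also treats an empty variable as unset, which the original does not):

from os import environ

raw = environ.get("LD_LIBRARY_PATH") or environ.get("LIBPATH") or ""
libpaths = raw.split(":") if raw else []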
Example #11
File: search.py Project: mmedal/piz
 def __init__(self, api_key=environ.get('PIZ_GOOGLE_API_KEY'), cx=environ.get('PIZ_GOOGLE_SEARCH_CX')):
     if api_key is None or cx is None:
         raise UserMisconfigurationError('You must have both PIZ_GOOGLE_API_KEY and PIZ_GOOGLE_SEARCH_CX set as '
                                         'environment variables in your shell.')
     self.api_key = api_key
     self.cx = cx
     self.service = build('customsearch', 'v1', developerKey=api_key)
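A hedged caveat on the constructor above: environ.get in a default argument is evaluated once, at function definition time, so variables exported after the module is imported are not picked up. A sketch of the call-time variant (the class name is hypothetical; the original class name is not shown):

from os import environ

class CustomSearchClient:  # hypothetical name
    def __init__(self, api_key=None, cx=None):
        self.api_key = api_key or environ.get('PIZ_GOOGLE_API_KEY')
        self.cx = cx or environ.get('PIZ_GOOGLE_SEARCH_CX')
        if self.api_key is None or self.cx is None:
            raise ValueError('PIZ_GOOGLE_API_KEY and PIZ_GOOGLE_SEARCH_CX must be set '
                             'as environment variables in your shell.')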
Example #12
def fcgi():
    """
    Start-up handling for the external web server (FastCGI).
    """
    # WebServer(fcgi)
    conf = ''
    if env.get('KARESANSUI_CONF'): # envrion
        conf = env.get('KARESANSUI_CONF')
    else: #error
        print >>sys.stderr, '[fcgi] Please specify the configuration file by setting the "KARESANSUI_CONF" environment variable.'
        sys.exit(1)

    config = None
    if conf: # read file
        _k2v = K2V(conf)
        config = _k2v.read()

    try:
        import flup
    except ImportError, e:
        print >>sys.stderr, '[Error] There are not enough libraries.(fcgi) - %s' % ''.join(e.args)
        traceback.format_exc()
        sys.exit(1)
Example #13
    def users_added_callback(self,users):
        '''
        The server URL is resolved dynamically so the same code works on
        different servers, for both user enrollment and iOS profile generation.
        '''
        loader = Loader("/opt/toppatch/mv/media/app/")
        server_url = environ.get('SERVER_CNAME')
        ses_conn = ses.connect_to_region('us-east-1',
                    aws_access_key_id=environ.get('AWS_SES_ACCESS_KEY_ID'),
                    aws_secret_access_key=environ.get(
                        'AWS_SES_SECRET_ACCESS_KEY'))
        for user in users:

            link = str(server_url) + '/enroll/'+str(user.get('enrollment_id'))
            message = loader.load('user_enroll_mail.html').generate(
                        company_name=user.get('company_name'),
                    user_passwd=user.get('passcode'), activation_link=link)
            # message  = 'Your verification \
            #             link is : {0} and enrollment password is {1} . To ensure \
            #             your device os please open this link in your device \
            #             browser only. :)'.format(
            #                 str(server_url) + '/enroll/'+str(user['enrollment_id']), user['passcode'])
            #message  = message.replace('  ', '')

            try:
                ses_conn.send_email('*****@*****.**',
                        'MDM Enrollment verification', message,
                         [user['email']], format='html')
            except Exception,err:
                print repr(err)
Example #14
    def send_alert(self, service, users, duty_officers):

        account_sid = env.get('TWILIO_ACCOUNT_SID')
        auth_token = env.get('TWILIO_AUTH_TOKEN')
        outgoing_number = env.get('TWILIO_OUTGOING_NUMBER')
        url = 'http://%s%s' % (settings.WWW_HTTP_HOST,
                               reverse('twiml-callback', kwargs={'service_id': service.id}))

        # No need to call to say things are resolved
        if service.overall_status != service.CRITICAL_STATUS:
            return
        client = TwilioRestClient(
            account_sid, auth_token)
        # FIXME: `user` is in fact a `profile`
        mobiles = TwilioUserData.objects.filter(user__user__in=duty_officers)
        mobiles = [m.prefixed_phone_number for m in mobiles if m.phone_number]
        for mobile in mobiles:
            try:
                client.calls.create(
                    to=mobile,
                    from_=outgoing_number,
                    url=url,
                    method='GET',
                )
            except Exception, e:
                logger.exception('Error making twilio phone call: %s' % e)
Example #15
    def send_alert(self, service, users, duty_officers):

        account_sid = env.get('TWILIO_ACCOUNT_SID')
        auth_token = env.get('TWILIO_AUTH_TOKEN')
        outgoing_number = env.get('TWILIO_OUTGOING_NUMBER')

        all_users = list(users) + list(duty_officers)

        client = TwilioRestClient(
            account_sid, auth_token)
        mobiles = TwilioUserData.objects.filter(user__user__in=all_users)
        mobiles = [m.prefixed_phone_number for m in mobiles if m.phone_number]
        c = Context({
            'service': service,
            'host': settings.WWW_HTTP_HOST,
            'scheme': settings.WWW_SCHEME,
        })
        message = Template(sms_template).render(c)
        for mobile in mobiles:
            try:
                client.sms.messages.create(
                    to=mobile,
                    from_=outgoing_number,
                    body=message,
                )
            except Exception, e:
                logger.exception('Error sending twilio sms: %s' % e)
def init_rqify(app):
    """Auto-configure an appropriate Redis service for your Flask application.

    :param app app: Your Flask application.
    """
    # Look for RedisGreen.
    if any([k.startswith('REDISGREEN_') for k in environ]):
        app.config.setdefault('RQ_DEFAULT_URL', environ.get('REDISGREEN_URL'))

    # Look for MyRedis.
    elif any([k.startswith('MYREDIS_') for k in environ]):
        app.config.setdefault('RQ_DEFAULT_URL', environ.get('MYREDIS_URL'))

    # Look for Redis Cloud.
    elif any([k.startswith('REDISCLOUD_') for k in environ]):
        app.config.setdefault('RQ_DEFAULT_URL', environ.get('REDISCLOUD_URL'))

    # Look for Redis To Go.
    elif any([k.startswith('REDISTOGO_') for k in environ]):
        app.config.setdefault('RQ_DEFAULT_URL', environ.get('REDISTOGO_URL'))

    # Look for openredis.
    elif any([k.startswith('OPENREDIS_') for k in environ]):
        app.config.setdefault('RQ_DEFAULT_URL', environ.get('OPENREDIS_URL'))

    RQ(app)
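A hedged usage sketch (the Flask app is a placeholder; RQ refers to the Flask-RQ extension class imported elsewhere in the original module):

from flask import Flask

app = Flask(__name__)
init_rqify(app)  # picks the first matching *_URL variable found in the environment
print(app.config.get('RQ_DEFAULT_URL'))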
def get_runtime_paths(self, what):
    """
    Determine what components of LD_LIBRARY_PATH are necessary to run `what`
    """
    from os import environ
    from os.path import dirname
        
    self.env.stash()
    self.env.LD_LIBRARY_PATH = environ.get("LD_LIBRARY_PATH", "")
    
    try:
        # Run ldd
        out, err = self.bld.cmd_and_log([self.env.LDD, what], 
                                        output=waflib.Context.BOTH)
    finally:
        self.env.revert()
    
    # Parse ldd output to determine what paths are used by the dynamic linker
    maybe_paths = set()
    for line in out.split("\n"):
        parts = line.split()
        if not parts: continue
        if parts[1] == "=>": maybe_paths.add(dirname(parts[2]))
        
    return maybe_paths & set(environ.get("LD_LIBRARY_PATH", "").split(":"))
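The ldd-parsing step in isolation, as a minimal hedged sketch with a hard-coded sample line (not tied to waf):

from os.path import dirname

sample = "libz.so.1 => /lib/x86_64-linux-gnu/libz.so.1 (0x00007f9c2a000000)"
parts = sample.split()
if len(parts) > 2 and parts[1] == "=>":
    print(dirname(parts[2]))  # prints /lib/x86_64-linux-gnu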
Example #18
File: setup.py Project: soasme/rio
def configure_app(app):
    """Configure Flask/Celery application.

    * Rio will find environment variable `RIO_SETTINGS` first::

        $ export RIO_SETTINGS=/path/to/settings.cfg
        $ rio worker

    * If `RIO_SETTINGS` is missing, Rio will try to load configuration
      module in `rio.settings` according to another environment
      variable `RIO_ENV`. Default load `rio.settings.dev`.

        $ export RIO_ENV=prod
        $ rio worker
    """
    app.config_from_object('rio.settings.default')

    if environ.get('RIO_SETTINGS'):
        app.config_from_envvar('RIO_SETTINGS')
        return

    config_map = {
        'dev': 'rio.settings.dev',
        'stag': 'rio.settings.stag',
        'prod': 'rio.settings.prod',
        'test': 'rio.settings.test',
    }

    rio_env = environ.get('RIO_ENV', 'dev')
    config = config_map.get(rio_env, config_map['dev'])
    app.config_from_object(config)
Example #19
def cpython_versions(major):
    result = [None, None]
    ver = '' if major == 2 else '3'
    supported = environ.get("DEBPYTHON{}_SUPPORTED".format(ver))
    default = environ.get("DEBPYTHON{}_DEFAULT".format(ver))
    if not supported or not default:
        config = ConfigParser()
        config.read("/usr/share/python{}/debian_defaults".format(ver))
        if not default:
            default = config.get('DEFAULT', 'default-version', fallback='')[6:]
        if not supported:
            supported = config.get('DEFAULT', 'supported-versions', fallback='')\
                .replace('python', '')
    if default:
        try:
            result[0] = tuple(int(i) for i in default.split('.'))
        except Exception as err:
            log.warn('invalid debian_defaults file: %s', err)
    if supported:
        try:
            result[1] = tuple(tuple(int(j) for j in i.strip().split('.'))
                              for i in supported.split(','))
        except Exception as err:
            log.warn('invalid debian_defaults file: %s', err)
    return result
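The two string-to-tuple conversions, shown in isolation with sample values (a hedged sketch, not Debian-specific):

default = "3.11"
supported = "3.10, 3.11"
default_tuple = tuple(int(i) for i in default.split('.'))                 # (3, 11)
supported_tuples = tuple(tuple(int(j) for j in i.strip().split('.'))
                         for i in supported.split(','))                   # ((3, 10), (3, 11))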
Example #20
File: elb.py Project: ajdiaz/mico
def elb_connect(region=None, *args, **kwargs):
    """Helper to connect to Amazon Web Services ELB, using the identity provided
    by the environment, plus an optional region argument.
    """
    if not os_environ.get("AWS_ACCESS_KEY_ID", None):
        raise EC2LibraryError("Environment variable AWS_ACCESS_KEY_ID is not set.")
    if not os_environ.get("AWS_SECRET_ACCESS_KEY", None):
        raise EC2LibraryError("Environment variable AWS_SECRET_ACCESS_KEY is not set.")

    if not region:
        region = env.get("ec2_region")

    for reg in boto.ec2.elb.regions():
        if reg.name == region:
            region = reg

#    region = get_region(region,
#            aws_access_key_id = os_environ.get("AWS_ACCESS_KEY_ID"),
#            aws_secret_access_key = os_environ.get("AWS_ACCESS_SECRET_KEY")
#    )
#
    connection = ELBConnection(
            os_environ.get("AWS_ACCESS_KEY_ID"),
            os_environ.get("AWS_SECRET_ACCESS_KEY"),
            region=region,
            *args,
            **kwargs
    )
    return connection
Example #21
def main():
    install_path = environ.get("HDF5_DIR")
    version = environ.get("HDF5_VERSION", DEFAULT_VERSION)
    vs_version = environ.get("HDF5_VSVERSION")
    use_prefix = True if environ.get("H5PY_USE_PREFIX") is not None else False

    if install_path is not None:
        if not exists(install_path):
            makedirs(install_path)
    if vs_version is not None:
        cmake_generator = VSVERSION_TO_GENERATOR[vs_version]
        if vs_version == '9-64':
            # Needed for
            # http://help.appveyor.com/discussions/kb/38-visual-studio-2008-64-bit-builds
            run("ci\\appveyor\\vs2008_patch\\setup_x64.bat")

    if not hdf5_install_cached(install_path):
        with TemporaryFile() as f:
            download_hdf5(version, f)
            build_hdf5(version, f, install_path, cmake_generator, use_prefix)
    else:
        print("using cached hdf5", file=stderr)
    if install_path is not None:
        print("hdf5 files: ", file=stderr)
        for dirpath, dirnames, filenames in walk(install_path):
            for file in filenames:
                print(" * " + pjoin(dirpath, file))
Example #22
def main(styles_list):
    from os import environ
    ##### Temp ####
    environ['IMGDIR'] = '/mnt/images'
    ## ************* ##
    imgnum = int(environ.get('IMGNUM', 0))
    root_dir = environ.get('IMGDIR', '/mnt/Post_Complete/MozuRoot')
    ext = environ.get('IMGEXT','.png')
    ## Do for only 1 img number or load any that are found
    if imgnum > 0:
        flist = create_list_files_to_send(styles_list, imgnum=imgnum, ext=ext, root_dir=root_dir)
    else:
        flist = []
        for x in range(1,7,1):
            li1 = create_list_files_to_send(styles_list, imgnum=x, ext=ext, root_dir=root_dir)
            [ flist.append(f) for f in li1 if f is not None ]
    ####
    ## Compile Actual Styles and Filename found and Ready to Send
    loaded_filenames = [f.split('/')[-1].split('.')[0] for f in flist if f is not None]
    loaded_styles = list(set(sorted([fn[:9] for fn in loaded_filenames if fn is not None])))
    print('loaded styles', loaded_styles)
    ### Send Collected to Mozu
    import mozu_exec
    print('Starting.\nReloading {0} Images for {1} Styles from {2} to Mozu'.format(ext.lstrip('.').upper(), len(styles_list), root_dir))
    mozu_exec.main(flist)
    print('Finished.\nReloaded {0} Images for {1} Styles\n{2} Total Files from {3} to Mozu'.format(ext.lstrip('.').upper(), len(loaded_styles), len(loaded_filenames),root_dir))

    ## Set Media Version for Loaded Styles
    import media_version_ctrl
    media_version_ctrl.batch_process_by_style_list(loaded_styles)
    print('Finished.\nMedia Incr for {1} Styles from {2} to Mozu'.format(ext.lstrip('.').upper(), len(loaded_styles), root_dir))
    ## Return File Path of Loaded Styles for further ops if needed
    return loaded_filenames
def agent_main(collector):
    to_file(sys.stdout)
    startLogging(sys.stdout)
    return react(
        run_agent, [
            environ.get(
                "FLOCKER_CONFIGURATION_PATH",
                "/etc/flocker",
            ).decode("ascii"),
            environ.get(
                "CATALOG_FIREHOSE_PROTOCOL",
                DEFAULT_FIREHOSE_PROTOCOL,
            ).decode("ascii"),
            environ.get(
                "CATALOG_FIREHOSE_HOSTNAME",
                DEFAULT_FIREHOSE_HOSTNAME,
            ).decode("ascii"),
            int(
                environ.get(
                    "CATALOG_FIREHOSE_PORT",
                    unicode(DEFAULT_FIREHOSE_PORT).encode("ascii"),
                ).decode("ascii")
            ),
            # Base64 encoded
            environ["CATALOG_FIREHOSE_SECRET"].decode("ascii"),
            collector,
        ],
    )
Example #24
def load_env(app):
    if 'DATABASE_URI' in environ: app.config['DATABASE_URI'] = environ.get('DATABASE_URI')

    if 'INSTA_ID' in environ: app.config['INSTA_ID'] = environ.get('INSTA_ID')
    if 'INSTA_SECRET' in environ: app.config['INSTA_SECRET'] = environ.get('INSTA_SECRET')
    
    if 'SECRET_KEY' in environ: app.config['SECRET_KEY'] = environ.get('SECRET_KEY')
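A hedged, more compact variant of the same copy-if-present pattern (same keys as above):

from os import environ

def load_env(app):
    for key in ('DATABASE_URI', 'INSTA_ID', 'INSTA_SECRET', 'SECRET_KEY'):
        if key in environ:
            app.config[key] = environ[key]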
def run():
    """
    Run the server.
    """

    # Set up the logger.
    if not os.path.isdir(os.path.join(script_dir, 'logs')):
        os.makedirs(os.path.join(script_dir, 'logs'))
    # Format the logs.
    formatter = logging.Formatter(
            "%(asctime)s - %(name)s - %(levelname)s - %(message)s")
    # Enable the logs to split files at midnight.
    handler = TimedRotatingFileHandler(
            os.path.join(script_dir, 'logs', 'TorSpider.log'),
            when='midnight', backupCount=7, interval=1)
    handler.setLevel(app.config['LOG_LEVEL'])
    handler.setFormatter(formatter)
    log = logging.getLogger('werkzeug')
    log.setLevel(app.config['LOG_LEVEL'])
    log.addHandler(handler)
    app.logger.addHandler(handler)
    app.logger.setLevel(app.config['APP_LOG_LEVEL'])

    # Set up the app server, port, and configuration.
    port = int(environ.get('PORT', app.config['LISTEN_PORT']))
    addr = environ.get('LISTEN_ADDR', app.config['LISTEN_ADDR'])
    if app.config['USETLS']:
        context = (app.config['CERT_FILE'], app.config['CERT_KEY_FILE'])
        app.run(host=addr, port=port, threaded=True, ssl_context=context)
    else:
        app.run(host=addr, port=port, threaded=True)
Example #26
    def setUpClass(cls):
        token = environ.get('KB_AUTH_TOKEN', None)
        cls.ctx = {'token': token, 'provenance': [{'service': 'data_api2',
            'method': 'please_never_use_it_in_production', 'method_params': []}],
            'authenticated': 1}
        config_file = environ.get('KB_DEPLOYMENT_CONFIG', None)
        cls.cfg = {}
        config = ConfigParser()
        config.read(config_file)
        for nameval in config.items('data_api2'):
            cls.cfg[nameval[0]] = nameval[1]
        cls.wsURL = cls.cfg['workspace-url']
        cls.wsClient = workspaceService(cls.wsURL, token=token)
        cls.serviceImpl = data_api2(cls.cfg)
        cls.obj_name="ReferenceGenomeAnnotations/kb|g.207118"

        cls.obj_name="ReferenceGenomeAnnotations/kb|g.217864"
        cls.feature='kb|g.207118.CDS.3237'
        cls.feature='kb|g.217864.CDS.11485'
        cls.gene='kb|g.217864.locus.10619'

        cls.obj_name="ReferenceGenomeAnnotations/kb|g.140057"
        cls.feature='kb|g.140057.CDS.2901'
        cls.gene='kb|g.140057.locus.2922'
        cls.mrna='kb|g.140057.mRNA.2840'
        cls.taxon= u'1779/523209/1'
        cls.assembly='1837/56/1'
def memcacheify(timeout=500):
    """Return a fully configured Django ``CACHES`` setting. We do this by
    analyzing all environment variables on Heroku, scanning for an available
    memcache addon, and then building the settings dict properly.

    If no memcache servers can be found, we'll revert to building a local
    memory cache.

    Returns a fully configured caches dict.
    """
    caches = {}

    if all((environ.get(e, "") for e in MEMCACHE_ENV_VARS)):
        caches["default"] = CACHE_DEFAULTS
        caches["default"].update({"LOCATION": "localhost:11211", "TIMEOUT": timeout})
    elif all((environ.get(e, "") for e in MEMCACHIER_ENV_VARS)):
        servers = environ.get("MEMCACHIER_SERVERS").replace(",", ";")
        environ["MEMCACHE_SERVERS"] = servers
        environ["MEMCACHE_USERNAME"] = environ.get("MEMCACHIER_USERNAME")
        environ["MEMCACHE_PASSWORD"] = environ.get("MEMCACHIER_PASSWORD")
        caches["default"] = CACHE_DEFAULTS
        caches["default"].update({"LOCATION": servers, "TIMEOUT": timeout})
    elif all((environ.get(e, "") for e in MEMCACHEDCLOUD_ENV_VARS)):
        servers = environ.get("MEMCACHEDCLOUD_SERVERS").replace(",", ";")
        environ["MEMCACHE_SERVERS"] = servers
        environ["MEMCACHE_USERNAME"] = environ.get("MEMCACHEDCLOUD_USERNAME")
        environ["MEMCACHE_PASSWORD"] = environ.get("MEMCACHEDCLOUD_PASSWORD")
        caches["default"] = CACHE_DEFAULTS
        caches["default"].update({"LOCATION": servers, "TIMEOUT": timeout})
    elif environ.get("MEMCACHEIFY_USE_LOCAL", False):
        caches["default"] = {"BACKEND": "django_pylibmc.memcached.PyLibMCCache"}
    else:
        caches["default"] = {"BACKEND": "django.core.cache.backends.locmem.LocMemCache"}

    return caches
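A hedged usage sketch in a Django settings module; the import path assumes the django-heroku-memcacheify package, and the timeout value is arbitrary:

# settings.py
from memcacheify import memcacheify  # assumption: helper exposed under this name

CACHES = memcacheify(timeout=300)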
 def setUpClass(cls):
     cls.token = environ.get('KB_AUTH_TOKEN', None)
     # WARNING: don't call any logging methods on the context object,
     # it'll result in a NoneType error
     cls.ctx = MethodContext(None)
     cls.ctx.update({'token': cls.token,
                     'provenance': [
                         {'service': 'ReadsUtils',
                          'method': 'please_never_use_it_in_production',
                          'method_params': []
                          }],
                     'authenticated': 1})
     config_file = environ.get('KB_DEPLOYMENT_CONFIG', None)
     cls.cfg = {}
     config = ConfigParser()
     config.read(config_file)
     for nameval in config.items('ReadsUtils'):
         cls.cfg[nameval[0]] = nameval[1]
     cls.shockURL = cls.cfg['shock-url']
     cls.ws = Workspace(cls.cfg['workspace-url'], token=cls.token)
     cls.impl = ReadsUtils(cls.cfg)
     shutil.rmtree(cls.cfg['scratch'])
     os.mkdir(cls.cfg['scratch'])
     suffix = int(time.time() * 1000)
     wsName = "test_ReadsUtils_" + str(suffix)
     cls.ws_info = cls.ws.create_workspace({'workspace': wsName})
     cls.dfu = DataFileUtil(os.environ['SDK_CALLBACK_URL'], token=cls.token)
    def run(self, args):
        self.check_not_docsearch_app_id('run a config manually')

        self.exec_shell_command(["docker", "stop", "documentation-scrapper-dev"])
        self.exec_shell_command(["docker", "rm", "documentation-scrapper-dev"])

        f = open(args[0], 'r')
        config = f.read()

        run_command = [
            'docker',
            'run',
            '-e',
            'APPLICATION_ID=' + environ.get('APPLICATION_ID'),
            '-e',
            'API_KEY=' + environ.get('API_KEY'),
            '-e',
            "CONFIG=" + config,
            '-v',
            getcwd() + '/scraper/src:/root/src',
            '--name',
            'documentation-scrapper-dev',
            '-t',
            'algolia/documentation-scrapper-dev',
            '/root/run'
        ]

        return self.exec_shell_command(run_command)
Example #30
def determine_base_flags():
    flags = {
        'libraries': [],
        'include_dirs': [],
        'extra_link_args': [],
        'extra_compile_args': []}
    if c_options['use_ios']:
        sysroot = environ.get('IOSSDKROOT', environ.get('SDKROOT'))
        if not sysroot:
            raise Exception('IOSSDKROOT is not set')
        flags['include_dirs'] += [sysroot]
        flags['extra_compile_args'] += ['-isysroot', sysroot]
        flags['extra_link_args'] += ['-isysroot', sysroot]
    elif platform == 'darwin':
        v = os.uname()
        if v[2] == '13.0.0':
            sysroot = ('/Applications/Xcode5-DP.app/Contents/Developer'
                       '/Platforms/MacOSX.platform/Developer/SDKs'
                       '/MacOSX10.8.sdk/System/Library/Frameworks')
        else:
            sysroot = ('/System/Library/Frameworks/'
                       'ApplicationServices.framework/Frameworks')
        flags['extra_compile_args'] += ['-F%s' % sysroot]
        flags['extra_link_args'] += ['-F%s' % sysroot]
    return flags
def get_config_file():
    return environ.get(DEPLOY, None)
def get_site_config():
	# convert true/false str (case insensitive) to bool
	# "true" (str) = True (bool). everything else = False
	def str_to_bool(input):
		return isinstance(input, str) and input.upper() == 'TRUE'

	# Load config into system env
	# System environment variables ALWAYS TAKES PRECEDENCE
	load_dotenv()

	# ALLOWED_HOSTS must exist
	assert environ.get('ALLOWED_HOSTS'), "ALLOWED_HOSTS is not configured"
	# DB_TYPE must be 'sqlite3', 'mysql', or 'postgresql'
	SUPPORTED_DB = set(('sqlite3', 'mysql', 'postgresql'))
	assert environ.get('DB_TYPE') in SUPPORTED_DB, f'DB_TYPE must be one of {SUPPORTED_DB}'

	# build config dict
	config = {
		'DEBUG': str_to_bool(environ.get('DEBUG')),
		'ALLOWED_HOSTS': environ.get('ALLOWED_HOSTS').split(','),
		'SITE_SSL': str_to_bool(environ.get('SITE_SSL')),
		'DB_TYPE': environ.get('DB_TYPE'),
		'DB_NAME': environ.get('DB_NAME'),
		'DB_HOST': environ.get('DB_HOST'),
		'DB_PORT': environ.get('DB_PORT'),
		'DB_USERNAME': environ.get('DB_USERNAME'),
		'DB_PASSWORD': environ.get('DB_PASSWORD'),
		'EMAIL_HOST': environ.get('EMAIL_HOST'),
		'EMAIL_PORT': environ.get('EMAIL_PORT'),
		'EMAIL_HOST_USER': environ.get('EMAIL_HOST_USER'),
		'EMAIL_HOST_PASSWORD': environ.get('EMAIL_HOST_PASSWORD'),
		'EMAIL_USE_TLS': str_to_bool(environ.get('EMAIL_USE_TLS')),
		'EMAIL_USE_SSL': str_to_bool(environ.get('EMAIL_USE_SSL')),
		'ADMIN_URL': environ.get('ADMIN_URL'),
		'STATUS_PAGE_URL': environ.get('STATUS_PAGE_URL'),
	}

	return config
Example #33
from os import environ
from json import dumps
from logging import getLogger
from time import sleep
from ratelimitqueue import RateLimitQueue
import boto3
from os import environ
from base64 import b64encode
from uuid import uuid1

logger = getLogger()
factory = ClientFactory()
tdclient = factory.create_client()
max_calls = 60  # maximum is 120
kinesis = boto3.client('kinesis')
stream_name = environ.get('STREAM_NAME')


def fetch_all_instruments(assetTypes: list):
    """
  Enumerates through all symbols
  """
    symbols = []
    filter_count = 0
    for prefix in list(range(65, 91)) + list(range(48, 57)):
        prefix = '.*' + chr(prefix)

        instruments = tdclient.search_instruments(symbol=prefix,
                                                  projection='symbol-regex')

        print('Query Prefix {} found {} instruments...'.format(
import logging

from os import environ
from os.path import join
from distutils.sysconfig import get_python_lib

VEXT_DEBUG_LOG = "VEXT_DEBUG_LOG"
vext_pth = join(get_python_lib(), 'vext_importer.pth')

logger = logging.getLogger("vext")
if VEXT_DEBUG_LOG in environ:
    if environ.get(VEXT_DEBUG_LOG, "0") == "1":
        logger.setLevel(logging.DEBUG)
    else:
        logger.addHandler(logging.NullHandler())


def install_importer():
    logger.debug("install_importer has been moved to gatekeeper module")
    from vext import gatekeeper
    gatekeeper.install_importer()
with Location Tracking enabled will send data to be fetched by this endpoint.

"""
import argparse
from datetime import datetime, timedelta
import dateutil
from os import environ
import pprint
import requests
import sys

# The ECM and CP API keys should be exported to the environment before running
# this script.
HEADERS = {
    "Content-Type": "application/json",
    "X-CP-API-ID": environ.get("X_CP_API_ID"),
    "X-CP-API-KEY": environ.get("X_CP_API_KEY"),
    "X-ECM-API-ID": environ.get("X_ECM_API_ID"),
    "X-ECM-API-KEY": environ.get("X_ECM_API_KEY"),
}
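A hedged pre-flight check that could sit right after the dict above (not part of the original script), failing fast when a key was not exported:

missing = [name for name, value in HEADERS.items()
           if name.startswith("X-") and not value]
if missing:
    sys.exit("Missing API credentials in the environment: " + ", ".join(missing))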


def url2id(url):
    """Extract the ID from a URL"""
    return int(url.split("/")[-2])


def get(url, filt=None):
    """Do an HTTP GET on `url`.

    Returns the data as a python dict. Forces a program exit on HTTP error.
    def __call__(self, environ, start_response):
        # Context object, equivalent to the perl impl CallContext
        ctx = MethodContext(self.userlog)
        ctx['client_ip'] = getIPAddress(environ)
        status = '500 Internal Server Error'

        try:
            body_size = int(environ.get('CONTENT_LENGTH', 0))
        except (ValueError):
            body_size = 0
        if environ['REQUEST_METHOD'] == 'OPTIONS':
            # we basically do nothing and just return headers
            status = '200 OK'
            rpc_result = ""
        else:
            request_body = environ['wsgi.input'].read(body_size)
            try:
                req = json.loads(request_body)
            except ValueError as ve:
                err = {'error': {'code': -32700,
                                 'name': "Parse error",
                                 'message': str(ve),
                                 }
                       }
                rpc_result = self.process_error(err, ctx, {'version': '1.1'})
            else:
                ctx['module'], ctx['method'] = req['method'].split('.')
                ctx['call_id'] = req['id']
                ctx['rpc_context'] = {
                    'call_stack': [{'time': self.now_in_utc(),
                                    'method': req['method']}
                                   ]
                }
                prov_action = {'service': ctx['module'],
                               'method': ctx['method'],
                               'method_params': req['params']
                               }
                ctx['provenance'] = [prov_action]
                try:
                    token = environ.get('HTTP_AUTHORIZATION')
                    # parse out the method being requested and check if it
                    # has an authentication requirement
                    method_name = req['method']
                    auth_req = self.method_authentication.get(
                        method_name, 'none')
                    if auth_req != 'none':
                        if token is None and auth_req == 'required':
                            err = JSONServerError()
                            err.data = (
                                'Authentication required for ' +
                                'identify_promoter ' +
                                'but no authentication header was passed')
                            raise err
                        elif token is None and auth_req == 'optional':
                            pass
                        else:
                            try:
                                user = self.auth_client.get_user(token)
                                ctx['user_id'] = user
                                ctx['authenticated'] = 1
                                ctx['token'] = token
                            except Exception, e:
                                if auth_req == 'required':
                                    err = JSONServerError()
                                    err.data = \
                                        "Token validation failed: %s" % e
                                    raise err
                    if (environ.get('HTTP_X_FORWARDED_FOR')):
                        self.log(log.INFO, ctx, 'X-Forwarded-For: ' +
                                 environ.get('HTTP_X_FORWARDED_FOR'))
                    self.log(log.INFO, ctx, 'start method')
                    rpc_result = self.rpc_service.call(ctx, req)
                    self.log(log.INFO, ctx, 'end method')
                    status = '200 OK'
                except JSONRPCError as jre:
                    err = {'error': {'code': jre.code,
                                     'name': jre.message,
                                     'message': jre.data
                                     }
                           }
                    trace = jre.trace if hasattr(jre, 'trace') else None
                    rpc_result = self.process_error(err, ctx, req, trace)
                except Exception:
                    err = {'error': {'code': 0,
                                     'name': 'Unexpected Server Error',
                                     'message': 'An unexpected server error ' +
                                                'occurred',
                                     }
                           }
                    rpc_result = self.process_error(err, ctx, req,
                                                    traceback.format_exc())
def get_service_name():
    return environ.get(SERVICE, None)
Example #38
    log_format = logging.Formatter("%(asctime)s || %(levelname)s "
                                   "|| %(module)s - %(lineno)d ||"
                                   " %(funcName)s || %(message)s")

    # info to the console
    log_console_handler = logging.StreamHandler()
    log_console_handler.setLevel(logging.DEBUG)
    log_console_handler.setFormatter(log_format)

    logger.addHandler(log_console_handler)

    # environment vars
    load_dotenv("dev.env", override=True)

    # ignore warnings related to the QA self-signed cert
    if environ.get("ISOGEO_PLATFORM").lower() == "qa":
        urllib3.disable_warnings()

    # establish isogeo connection
    isogeo = Isogeo(
        client_id=environ.get("ISOGEO_API_USER_LEGACY_CLIENT_ID"),
        client_secret=environ.get("ISOGEO_API_USER_LEGACY_CLIENT_SECRET"),
        auto_refresh_url="{}/oauth/token".format(environ.get("ISOGEO_ID_URL")),
        platform=environ.get("ISOGEO_PLATFORM", "qa"),
        auth_mode="user_legacy",
    )

    # getting a token
    isogeo.connect(
        username=environ.get("ISOGEO_USER_NAME"),
        password=environ.get("ISOGEO_USER_PASSWORD"),
Example #39
    'jnius_jvm_android.pxi',
    'jnius_jvm_desktop.pxi',
    'jnius_localref.pxi',
    'jnius.pyx',
    'jnius_utils.pxi',
]

libraries = []
library_dirs = []
extra_link_args = []
include_dirs = []
install_requires = []

# detect Python for android
platform = sys.platform
ndkplatform = environ.get('NDKPLATFORM')
if ndkplatform is not None and environ.get('LIBLINK'):
    platform = 'android'

# detect cython
try:
    from Cython.Distutils import build_ext
    install_requires.append('cython')
except ImportError:
    from distutils.command.build_ext import build_ext
    if platform != 'android':
        print('\n\nYou need Cython to compile Pyjnius.\n\n')
        raise
    files = [fn[:-3] + 'c' for fn in files if fn.endswith('pyx')]

if platform == 'android':
                                                    traceback.format_exc())

        # print 'Request method was %s\n' % environ['REQUEST_METHOD']
        # print 'Environment dictionary is:\n%s\n' % pprint.pformat(environ)
        # print 'Request body was: %s' % request_body
        # print 'Result from the method call is:\n%s\n' % \
        #    pprint.pformat(rpc_result)

        if rpc_result:
            response_body = rpc_result
        else:
            response_body = ''

        response_headers = [
            ('Access-Control-Allow-Origin', '*'),
            ('Access-Control-Allow-Headers', environ.get(
                'HTTP_ACCESS_CONTROL_REQUEST_HEADERS', 'authorization')),
            ('content-type', 'application/json'),
            ('content-length', str(len(response_body)))]
        start_response(status, response_headers)
        return [response_body]

    def process_error(self, error, context, request, trace=None):
        if trace:
            self.log(log.ERR, context, trace.split('\n')[0:-1])
        if 'id' in request:
            error['id'] = request['id']
        if 'version' in request:
            error['version'] = request['version']
            e = error['error'].get('error')
            if not e:
                error['error']['error'] = trace
Example #41
Heroku = True

if Heroku:
    from os import environ
    BOT_TOKEN = environ.get("BOT_TOKEN", None)
    BOT_NAME = "Torrent Searcher"
    BOT_LINK = "https://t.me/blackfishcloud"
else:
    BOT_TOKEN = "Get it from @botfather"
    BOT_NAME = "Midoria"
    BOT_LINK = "https://t.me/blackfishcloud"
Example #42
from flask import Flask, jsonify, request, make_response
from flask_pymongo import PyMongo
from bson.objectid import ObjectId
from flask_cors import CORS
from functools import wraps
import datetime, random, string
import jwt
try:
    from keys import db
except:
    from os import environ
    db = environ.get('DB')

from products_router import ProductsRouter
from categories_router import CategoriesRouter
from customer_router import CustomerRouter
from main_router import MainRouter

app = Flask(__name__)
CORS(app)
app.config["MONGO_URI"] = db
mongo = PyMongo(app)

customer = CustomerRouter
products = ProductsRouter
categories = CategoriesRouter
main = MainRouter

#secret = rZP0y2lg5A61NtC
#token = eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9.eyJ1c2VyIjoidHN1bmEyMjIxQGxpdmUuY29tIiwiZXhwIjoxNTU3OTMyMDEyfQ.jEK3vz4YnMgTpkxvCUpN0YPr1wVnGaMr5P1YBQWouw8
Example #43
import json
import urllib.parse
from os import environ as env
import boto3
from boto3.dynamodb.conditions import Key

CHARSET = 'UTF-8'
region_name = env.get("REGION_NAME")
table_name = env.get("TABLE_NAME")
notification_email = env.get("NOTIFICATION_EMAIL")

source_email = '*****@*****.**'
subj = 'Abuse Gozeit Service'

table = boto3.resource("dynamodb", region_name=region_name).Table(table_name)
email_client = boto3.client('ses', region_name=region_name)


def lambda_handler(event, context):
    key_encode = event['Records'][0]['s3']['object']['key']
    key = urllib.parse.unquote(key_encode)
    email = key.split('/')[1]
    file = key.split('/')[2]
    item = file[:26]
    size = event['Records'][0]['s3']['object']['size'] / 1024
    filesize = "{:.2f}".format(size)
    response = table.query(KeyConditionExpression=Key("email").eq(email)
                           & Key("item").eq(item))

    if response['Items']:
        response['Items'][0]['filesize'] = event['Records'][0]['s3']['object'][
Example #44
# This file syncs translations in a GitHub workflow.
import base64
import json
import os
import tempfile
from operator import itemgetter
from os import path, environ
from string import ascii_uppercase
from typing import List

import gspread
from gspread import Worksheet

SHEET_URL = environ.get(
    'SHEET_URL',
    'https://docs.google.com/spreadsheets/d/1K81JbXNLtui93caZu_MyQdy8lv5qxyo69AvlKRA98bM'
)
CREDENTIAL_BASE64 = environ.get('CREDENTIAL_BASE64')
if not CREDENTIAL_BASE64:
    raise Exception(
        'you must base64-encode the Google service account JSON key and set it in the CREDENTIAL_BASE64 env variable'
    )

DATA_SHEET_NAME = environ.get('DATA_SHEET_NAME', 'origin')
SYNC_SHEET_NAME = environ.get('SYNC_SHEET_NAME', 'data')
META_SHEET_NAME = environ.get('META_SHEET_NAME', 'meta')
UNSYNC_FIELD = environ.get('UNSYNC_FIELD', 'unsync')
ID_FIELD = environ.get('ID_FIELD', 'Translation ID')
_lazy_gc = None

Example #45
    def from_notebook_node(self, nb, resources=None, **kw):
        config = new_config()
        nbname = name = get_in(resources, 'metadata.name')  # notebook name
        if name:
            config['metadata']['name'] = normalize_name(name)
        config['spec']['handler'] = handler_name()

        ended = 'ended'
        started = 'started'
        code_cells = 'code_cells'
        nameless_annotation = ''
        target_function_name = environ.get(env_keys.function_name)
        seen_function_name = nameless_annotation

        function_buffers = {
            nameless_annotation: {
                ended: False,
                started: False,
                code_cells: [],
            },
        }
        if target_function_name:
            function_buffers[target_function_name] = {
                ended: False,
                started: False,
                code_cells: [],
            }
        else:
            # to avoid accidental KeyError
            target_function_name = nameless_annotation

        for cell in filter(is_code_cell, nb['cells']):
            code = cell['source']
            if has_ignore(code):
                continue

            match = has_end(code)
            if match:
                current_name = match.group('name')
                if current_name in [target_function_name, nameless_annotation]:
                    if function_buffers[current_name][ended]:
                        raise MagicError('Found multiple consecutive '
                                         + '"end-code" annotations')
                    # found code that belongs to the current function
                    function_buffers[current_name][started] = True
                    function_buffers[current_name][ended] = True
                    seen_function_name = seen_function_name or current_name

            match = has_start(code)
            if match:
                current_name = match.group('name')
                if current_name in [target_function_name, nameless_annotation]:
                    if not function_buffers[current_name][started]:
                        # discard code that doesn't belong to the function
                        function_buffers[current_name][code_cells] = []
                    if function_buffers[current_name][started]\
                            and not function_buffers[current_name][ended]:
                        raise MagicError('Found multiple consecutive '
                                         + '"start-code" annotations')
                    function_buffers[current_name][started] = True
                    function_buffers[current_name][ended] = False
                    seen_function_name = seen_function_name or current_name

            for function_buffer in function_buffers.values():
                if not function_buffer[ended]:
                    function_buffer[code_cells].append(code)

        io = self.write_code_cells(
            function_buffers[seen_function_name][code_cells],
            config)
        process_env_files(env_files, config)
        py_code = io.getvalue()
        handler_path = environ.get(env_keys.handler_path)
        if handler_path:
            with open(handler_path) as fp:
                py_code = fp.read()

        efiles = []
        if archive_settings:
            if archive_settings['notebook'] and nbname:
                archive_settings['files'] += [nbname + '.ipynb']
            efiles = ','.join(archive_settings['files'])
            config['metadata']['annotations'][meta_keys.extra_files] = efiles

        if env_keys.code_target_path in environ:
            code_path = environ.get(env_keys.code_target_path)
            with open(code_path, 'w') as fp:
                fp.write(py_code)
                fp.close()
        elif efiles and env_keys.drop_nb_outputs not in environ:
            outputs = {'handler.py': py_code,
                       'function.yaml': gen_config(config)}
            for filename in efiles:
                with open(filename) as fp:
                    data = fp.read()
                    outputs[filename] = data
            resources['outputs'] = outputs
        else:
            data = b64encode(py_code.encode('utf-8')).decode('utf-8')
            update_in(config, 'spec.build.functionSourceCode', data)

        config = gen_config(config)
        resources['output_extension'] = '.yaml'

        return config, resources
Example #46
#!/usr/bin/env python
"""HdfsCLI: a command line interface for WebHDFS."""

from hdfs import __version__
from os import environ
from setuptools import find_packages, setup

# Allow configuration of the CLI alias. This can be helpful since Hadoop 2
# changed `hadoop fs` to `hdfs` (cf. https://github.com/mtth/hdfs/issues/10).
if environ.get('HDFS_ENTRY_POINT'):
    ENTRY_POINT = environ.get('HDFS_ENTRY_POINT')  # Backwards compatibility.
else:
    ENTRY_POINT = environ.get('HDFSCLI_ENTRY_POINT', 'hdfs')

setup(
    name='hdfs',
    version=__version__,
    description=__doc__,
    long_description=open('README.rst').read(),
    author='Matthieu Monsch',
    author_email='*****@*****.**',
    url='http://hdfscli.readthedocs.org',
    license='MIT',
    packages=find_packages(),
    classifiers=[
        'Development Status :: 5 - Production/Stable',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: MIT License',
        'Programming Language :: Python',
        'Programming Language :: Python :: 2.6',
        'Programming Language :: Python :: 2.7',
Example #47
def init_clients():
    mongo_uri = environ.get("MONGO_URI")
    if mongo_uri:
        global mdb
        mdb = AsyncIOMotorClient(mongo_uri)
Example #48
import os
from os import environ

import dj_database_url
from boto.mturk import qualification

import otree.settings

BASE_DIR = os.path.dirname(os.path.abspath(__file__))

# the environment variable OTREE_PRODUCTION controls whether Django runs in
# DEBUG mode. If OTREE_PRODUCTION==1, then DEBUG=False
if environ.get('OTREE_PRODUCTION') not in {None, '', '0'}:
    DEBUG = False
else:
    DEBUG = True

ADMIN_USERNAME = '******'
ADMIN_PASSWORD = '******'

# don't share this with anybody.
# Change this to something unique (e.g. mash your keyboard),
# and then delete this comment.
SECRET_KEY = 'zzzzzzzzzzzzzzzzzzzzzzzzzzz'

PAGE_FOOTER = ''

# To use a database other than sqlite,
# set the DATABASE_URL environment variable.
# Examples:
# postgres://USER:PASSWORD@HOST:PORT/NAME
Example #49
    except KeyError:
        error_msg = "Set the %s env variable" % setting
        raise ImproperlyConfigured(error_msg)


########## HOST CONFIGURATION
# See: https://docs.djangoproject.com/en/1.5/releases/1.5/#allowed-hosts-required-in-production
ALLOWED_HOSTS = []
########## END HOST CONFIGURATION

########## EMAIL CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#email-backend
EMAIL_BACKEND = 'django.core.mail.backends.smtp.EmailBackend'

# See: https://docs.djangoproject.com/en/dev/ref/settings/#email-host
EMAIL_HOST = environ.get('EMAIL_HOST', 'smtp.gmail.com')

# See: https://docs.djangoproject.com/en/dev/ref/settings/#email-host-password
EMAIL_HOST_PASSWORD = environ.get('EMAIL_HOST_PASSWORD', '')

# See: https://docs.djangoproject.com/en/dev/ref/settings/#email-host-user
EMAIL_HOST_USER = environ.get('EMAIL_HOST_USER', '*****@*****.**')

# See: https://docs.djangoproject.com/en/dev/ref/settings/#email-port
EMAIL_PORT = environ.get('EMAIL_PORT', 587)

# See: https://docs.djangoproject.com/en/dev/ref/settings/#email-subject-prefix
EMAIL_SUBJECT_PREFIX = '[%s] ' % SITE_NAME

# See: https://docs.djangoproject.com/en/dev/ref/settings/#email-use-tls
EMAIL_USE_TLS = True
Example #50
def process_env_files(env_files, config):
    # %nuclio env_file magic will populate this
    from_env = json.loads(environ.get(env_keys.env_files, '[]'))
    for fname in (env_files | set(from_env)):
        with open(fname) as fp:
            set_env(config, iter_env_lines(fp))
Example #51
Example:
To create a customer src tree at /local/jason/orca1/zig1[/sw/nic/...]
from a regular full tree at /local/jason/b1[/sw/nic/...]:
rm -rf /local/jason/orca1/zig1 # delete the old tree
cd /local/jason/b1/sw/nic/athena/apps/athena_app
./gsrc.py --src /local/jason/b1 --dst /local/jason/orca1/zig1/src/github.com/pensando/
'''

from os import system, mkdir, chdir, path, makedirs, getcwd, environ
from shutil import copytree, ignore_patterns, make_archive, copyfile, copymode, copy
import argparse
# from getDepHeaders import Parser
import sys
import os

asic = environ.get('ASIC', 'capri')


def parse_input():
    parser = argparse.ArgumentParser(description="Create mini athena src tree")
    parser.add_argument(
        '--dst',
        default='pensando',
        help='Destination folder, relative to /sw, where the src tree and the '
             'Athena app should be copied',
        dest='dst')
    parser.add_argument('--src',
                        default='.',
                        help='Src tree folder',
                        dest='src')
    parser.add_argument('--spec',
Example #52
0
from flask import Flask, g, render_template, request, redirect, url_for, flash, session, make_response,Response,jsonify
from app import app,db,appbuilder
from os import environ 
from app.plantas.models import Planta
from app.plan_usua.models import PlanUsua
from app.users.models import MyUser
from app.roles.models import MyRole, MyRoleUser
from app.tipoequipos.models import Tipoequipo
from app.plantaequipos.models import Plantaequipo
# Get the base URL from the environment, whether localhost or production. Configure it in .env.
ENTORNO = environ.get('ENTORNO')

def get_user_id():
    return g.user.id


@app.route('/tequipos/list/')
def listado_tequipos():
    tequipos = db.session.query(Tipoequipo).order_by(Tipoequipo.nombreTipoequipo.asc()).all()
    idPlanta = session["idPlanta"]
    planta_equipos = db.session.query(Plantaequipo).filter(Plantaequipo.idPlanta == idPlanta).all()
    return render_template("tequipos.html", tequipos = tequipos, base_template=appbuilder.base_template, appbuilder=appbuilder)

@app.route('/NuevoTipoEquipo', methods=['POST'])
def crearTipoequipo():
    if request.method == 'POST':
        data = request.get_json()
        data = data[0]
        nombre = str(data['nombre'])
        print(nombre)
        new_TipoEquipo = Tipoequipo(nombre)
Example #53
0
                                                    traceback.format_exc())

        # print 'Request method was %s\n' % environ['REQUEST_METHOD']
        # print 'Environment dictionary is:\n%s\n' % pprint.pformat(environ)
        # print 'Request body was: %s' % request_body
        # print 'Result from the method call is:\n%s\n' % \
        #    pprint.pformat(rpc_result)

        if rpc_result:
            response_body = rpc_result
        else:
            response_body = ''

        # echo back whatever header names the browser listed in its CORS
        # preflight request, defaulting to 'authorization'
        response_headers = [('Access-Control-Allow-Origin', '*'),
                            ('Access-Control-Allow-Headers',
                             environ.get('HTTP_ACCESS_CONTROL_REQUEST_HEADERS',
                                         'authorization')),
                            ('content-type', 'application/json'),
                            ('content-length', str(len(response_body)))]
        start_response(status, response_headers)
        return [response_body]

    def process_error(self, error, context, request, trace=None):
        if trace:
            self.log(log.ERR, context, trace.split('\n')[0:-1])
        if 'id' in request:
            error['id'] = request['id']
        if 'version' in request:
            error['version'] = request['version']
            e = error['error'].get('error')
            if not e:
                error['error']['error'] = trace
Example #54
0
from __future__ import with_statement
from os import environ
from alembic import context
from sqlalchemy import engine_from_config, pool
from logging.config import fileConfig

# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config

# Use the environment variable as the URL for SQLAlchemy.
# DATABASE_URL must be set: set_main_option() expects a string, not None.
config.set_main_option('sqlalchemy.url', environ.get('DATABASE_URL'))

# Interpret the config file for Python logging.
# This line sets up loggers basically.
fileConfig(config.config_file_name)

# add your model's MetaData object here
# for 'autogenerate' support
# from myapp import mymodel
# target_metadata = mymodel.Base.metadata
target_metadata = None

# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.


def run_migrations_offline():
    """Run migrations in 'offline' mode.
Example #55
0
import sys

from dotenv import find_dotenv, load_dotenv
from jose import JWTError, jwt
from os import environ as env
import six

import constants


JWT_ALGORITHM = "RS256"

ENV_FILE = find_dotenv()
if ENV_FILE:
    load_dotenv(ENV_FILE)

AUTH0_CALLBACK_URL = env.get(constants.AUTH0_CALLBACK_URL)
AUTH0_CLIENT_ID = env.get(constants.AUTH0_CLIENT_ID)
AUTH0_CLIENT_SECRET = env.get(constants.AUTH0_CLIENT_SECRET)
AUTH0_DOMAIN = env.get(constants.AUTH0_DOMAIN)
AUTH0_BASE_URL = 'https://' + AUTH0_DOMAIN
AUTH0_AUDIENCE = env.get(constants.AUTH0_AUDIENCE)

MYSQL_USERNAME = env.get(constants.MYSQL_USERNAME)
MYSQL_PASSWORD = env.get(constants.MYSQL_PASSWORD)
MYSQL_IP = env.get(constants.MYSQL_IP)
MYSQL_DB = env.get(constants.MYSQL_DB)

# Only 'openid profile' is requested here; the remaining scopes (groups, roles,
# permissions, read:eaonly, read:devonly) do not need to be requested explicitly,
# since every scope authorized for the user is auto-added by the rule referenced above.
SCOPE = 'openid profile '
JWT_PAYLOAD = 'jwt_payload'
TOKEN_KEY = 'auth0_token'
MGMNT_API_TOKEN = 'mgmnt_api_token'
Example #56
0
"""
base configuration file
"""
from os.path import abspath, dirname, join
from os import environ as env
import sys

# environment values are always strings, so compare against the string 'True'
USE_LOCAL_STORAGE = (env.get('TEST_MODE') == 'True'
                     or env.get('USE_LOCAL_STORAGE') == 'True')

CORE_ROOT = dirname(abspath(__file__))
PROJECT_ROOT = dirname(CORE_ROOT)

SECRET_KEY = env['FLASK_SECRET_KEY']

STATIC_MEDIA_URL = env['STATIC_MEDIA_URL']
AWS_STORAGE_BUCKET_URL = env['AWS_STORAGE_BUCKET_URL']
SCENEVR_DIST_ROOT_URL = env['SCENEVR_DIST_ROOT_URL']

try:
    # these aren't required for local as they're set with defaults.
    AWS_STORAGE_BUCKET_NAME = env['AWS_STORAGE_BUCKET_NAME']
    AWS_STORAGE_KEY_PREFIX = env['AWS_STORAGE_KEY_PREFIX']
except KeyError as e:
    if not USE_LOCAL_STORAGE:
        print("Missing environment variable {}".format(e))
        sys.exit(-1)

AWS_ACCESS_KEY_ID = env['AWS_ACCESS_KEY_ID']
AWS_SECRET_ACCESS_KEY = env['AWS_SECRET_ACCESS_KEY']
Example #57
0
#!/usr/local/bin/python3

from cgitb import enable
enable()

from cgi import FieldStorage
from os import environ
from hashlib import sha256
from time import time
from shelve import open  # note: this shadows the built-in open()
from http.cookies import SimpleCookie

result = ''
try:
    cookie = SimpleCookie()
    http_cookie_header = environ.get('HTTP_COOKIE')
    if not http_cookie_header:
        sid = sha256(repr(time()).encode()).hexdigest()
        cookie['sid'] = sid
    else:
        cookie.load(http_cookie_header)
        if 'sid' not in cookie:
            sid = sha256(repr(time()).encode()).hexdigest()
            cookie['sid'] = sid
        else:
            sid = cookie['sid'].value

    session_store = open('sess_' + sid, writeback=True)

    # Get the id of the item being added to the cart
    form_data = FieldStorage()
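The listing stops before the cookie is ever written back to the client; in a CGI script the response typically begins by emitting the cookie and content-type headers, roughly like this (a sketch, not part of the original file):

print(cookie)  # emits the 'Set-Cookie: sid=...' header
print('Content-Type: text/html')
print()        # a blank line terminates the CGI headers
print(result)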
Example #58
0
#!/usr/bin/python3
#
#HiddenEye by Open Source Community
#
import multiprocessing
import gettext
from os import system, environ
import sys
import ssl
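# Unless certificate verification is explicitly requested via PYTHONHTTPSVERIFY,
# fall back to an unverified HTTPS context (disables TLS certificate checks).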
if (not environ.get('PYTHONHTTPSVERIFY', "")
        and getattr(ssl, '_create_unverified_context', None)):
    ssl._create_default_https_context = ssl._create_unverified_context

from Defs.Checks import *
from Defs.Configurations import *
from Defs.Actions import *
from Defs.Languages import *

RED, WHITE, CYAN, GREEN, DEFAULT = '\033[91m', '\033[46m', '\033[36m', '\033[1;32m', '\033[0m'
checkPermissions()
installGetText()
languageSelector()
checkConnection()
checkLocalxpose()
checkNgrok()
ifSettingsNotExists()
readConfig()

if __name__ == "__main__":
    try:
        runMainMenu()
Example #59
0
from os import environ as _environ
from set_env import setup_env as _setup_env

_setup_env()

IS_HEROKU = _environ.get("IS_HEROKU") is not None
# discord specific
DISCORD_CLIENT_ID = _environ["DISCORD_CLIENT_ID"]

DISCORD_SECRET = _environ["DISCORD_SECRET"]

DISCORD_BOT_TOKEN = _environ["DISCORD_BOT_TOKEN"]

PARTICIPANT_ROLE_ID = _environ["DISCORD_PARTICIPANT_ROLE"]

GUILD_ID = _environ["DISCORD_GUILD_ID"]

ALLOW_REMOVALS = _environ.get("NO_REMOVE") is None
# JWT Signing key, make sure this stays same or every user will need to relogin
SIGNING_KEY = _environ.get("JWT_SIGNING_KEY")
# How long an access_token will last
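# (TOKEN_EXPIRATION_TIME is read from the environment in minutes and must be
#  set: int(None) would raise a TypeError on the next statement if it is missing)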
TOKEN_EXPIRATION_TIME_IN_SECONDS = 60 * int(
    _environ.get("TOKEN_EXPIRATION_TIME"))

EVENT_NAMES = ("gaming", "prog", "pentest", "lit", "music", "video",
               "minihalo")
ROLE_ID_DICT = dict(
    zip(
        EVENT_NAMES,
        (
            _environ["DISCORD_GAMING_ROLE"],
Example #60
0
                raise ApplicationError(u"com.myapp.error.mixed_case",
                                       name.lower(), name.upper())

            if len(name) < 3 or len(name) > 10:
                # forward keyword arguments in exceptions
                raise ApplicationError(u"com.myapp.error.invalid_length",
                                       min=3,
                                       max=10)

        await self.register(checkname, u'com.myapp.checkname')

        # defining and automapping WAMP application exceptions
        ##
        self.define(AppError1)

        def compare(a, b):
            if a < b:
                raise AppError1(b - a)

        await self.register(compare, u'com.myapp.compare')


if __name__ == '__main__':
    import six
    url = environ.get("AUTOBAHN_DEMO_ROUTER", u"ws://127.0.0.1:8080/ws")
    if six.PY2 and type(url) == six.binary_type:
        url = url.decode('utf8')
    realm = u"crossbardemo"
    runner = ApplicationRunner(url, realm)
    runner.run(Component)