Example #1
def find():
    # type: () -> str
    if sys.platform == "win32":
        return which('ffmpeg.exe') or which('bin/ffmpeg.exe')
    else:
        # I don't know if this works.
        return which('ffmpeg') or which('bin/ffmpeg')
Example #2
def run_command(config):
    '''
    Run config.cmd. If the command has a TARGET, replace it with config.target.
    Else append config.target as an argument.
    '''
    appcmd = config.cmd
    # Split the command into an array of words
    if isinstance(appcmd, six.string_types):
        appcmd = shlex.split(appcmd)
    # If the app is a Cygwin app, TARGET should be a Cygwin path too.
    target = config.target
    cygcheck, cygpath, kwargs = which('cygcheck'), which('cygpath'), {'universal_newlines': True}
    if cygcheck is not None and cygpath is not None:
        app_path = check_output([cygpath, '-au', which(appcmd[0])], **kwargs).strip()   # nosec
        is_cygwin_app = check_output([cygcheck, '-f', app_path], **kwargs).strip()      # nosec
        if is_cygwin_app:
            target = check_output([cygpath, '-au', target], **kwargs).strip()           # nosec
    # Replace TARGET with the actual target
    if 'TARGET' in appcmd:
        appcmd = [target if arg == 'TARGET' else arg for arg in appcmd]
    else:
        appcmd.append(target)
    app_log.info('Running %s', ' '.join(appcmd))
    if not safe_rmtree(config.target):
        app_log.error('Cannot delete target %s. Aborting installation', config.target)
        return
    proc = Popen(appcmd, bufsize=-1, stdout=sys.stdout, stderr=sys.stderr, **kwargs)    # nosec
    proc.communicate()
    return proc.returncode
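
A quick illustration of the TARGET handling above; the command and the paths are made up:

import shlex

appcmd = shlex.split('unzip -o TARGET -d C:/apps/demo')
target = 'C:/downloads/demo.zip'
# Replace the TARGET placeholder if present; run_command() appends it otherwise.
appcmd = [target if arg == 'TARGET' else arg for arg in appcmd]
# -> ['unzip', '-o', 'C:/downloads/demo.zip', '-d', 'C:/apps/demo']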
Example #3
    def is_available(self):
        if not which(self.latexmk):
            return False

        if self.variant == 'pdflatex':
            return bool(which(self.pdflatex))
        if self.variant == 'xelatex':
            return bool(which(self.xelatex))
Example #5
def system_information(handler):
    '''Handler for system info'''
    value, error = {}, {}
    try:
        import psutil
        process = psutil.Process(os.getpid())
        value['system', 'cpu-count'] = psutil.cpu_count()
        value['system', 'cpu-usage'] = psutil.cpu_percent()
        value['system', 'memory-usage'] = psutil.virtual_memory().percent
        value['system', 'disk-usage'] = psutil.disk_usage('/').percent
        value['gramex', 'memory-usage'] = process.memory_info()[0]
        value['gramex', 'open-files'] = len(process.open_files())
    except ImportError:
        app_log.warning('psutil required for system stats')
        error['system', 'cpu-count'] = 'psutil not installed'
        error['system', 'cpu-usage'] = 'psutil not installed'
        error['system', 'memory-usage'] = 'psutil not installed'
        error['system', 'disk-usage'] = 'psutil not installed'
        error['gramex', 'memory-usage'] = 'psutil not installed'
        error['gramex', 'open-files'] = 'psutil not installed'
    try:
        import conda
        value['conda', 'version'] = conda.__version__
    except ImportError:
        app_log.warning('conda required for conda stats')
        error['conda', 'version'] = 'conda not installed'

    from shutilwhich import which
    value['node', 'path'] = which('node')
    value['git', 'path'] = which('git')

    from gramex.cache import Subprocess
    apps = {
        ('node', 'version'): Subprocess('node --version', shell=True),
        ('npm', 'version'): Subprocess('npm --version', shell=True),
        ('yarn', 'version'): Subprocess('yarn --version', shell=True),
        ('git', 'version'): Subprocess('git --version', shell=True),
    }
    for key, proc in apps.items():
        stdout, stderr = yield proc.wait_for_exit()
        value[key] = stdout.strip()
        if not value[key]:
            error[key] = stderr.strip()

    value['python', 'version'] = '{0}.{1}.{2}'.format(*sys.version_info[:3])
    value['python', 'path'] = sys.executable
    value['gramex', 'version'] = gramex.__version__
    value['gramex', 'path'] = os.path.dirname(gramex.__file__)

    import pandas as pd
    df = pd.DataFrame({'value': value, 'error': error}).reset_index()
    df.columns = ['section', 'key'] + df.columns[2:].tolist()
    df = df[['section', 'key', 'value',
             'error']].sort_values(['section', 'key'])
    df['error'] = df['error'].fillna('')
    data = gramex.data.filter(df, handler.args)
    # TODO: handle _format, _meta, _download, etc just like FormHandler
    raise Return(gramex.data.download(data))
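
The tuple keys above are what make the final DataFrame work: pandas treats them as a two-level MultiIndex, which reset_index() then splits into the section and key columns. A minimal sketch with made-up values:

import pandas as pd

value = {('system', 'cpu-count'): 8, ('python', 'path'): '/usr/bin/python'}
error = {('conda', 'version'): 'conda not installed'}
df = pd.DataFrame({'value': value, 'error': error}).reset_index()
# The two unnamed index levels become the first two columns (level_0, level_1),
# which the handler above renames to 'section' and 'key'.
print(df.columns.tolist())   # ['level_0', 'level_1', 'value', 'error']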
Example #6
 def grab(self, keys):
     '''Grab specific keys'''
     root_window = self.__display.screen().root
     keycode = 0
     xset_cmd = which("xset")
     xmodmap_cmd = which("xmodmap")
     if not xset_cmd:
         logging.warn(
             "xset not found, you might experience some bad key-repeat problems"
         )
     for key in keys:
         keycode = get_keycode(key)
         if not keycode:
             continue
         if xset_cmd:
             # FIXME: revert on exit
             os.system("%s -r %d" % (xset_cmd, keycode))
         if xmodmap_cmd:
             cmd_status, cmd_stdout = get_cmd_output("%s -pm" % xmodmap_cmd)
             if cmd_status == 0 and cmd_stdout:
                 lines = cmd_stdout.splitlines()
             else:
                 lines = []
             lock_line = [
                 l.strip().split() for l in lines if l.startswith("lock")
             ]
             num_line = [
                 l.strip().split() for l in lines if "Num_Lock" in l
             ]
             key_line = [l.strip().split() for l in lines if key in l]
             if lock_line:
                 parts = lock_line[0]
                 if len(parts) > 1:
                     self.__caps_lock = parts[1]
             if num_line:
                 parts = num_line[0]
                 if len(parts) > 1:
                     self.__num_lock_mod = parts[0]
             if key_line:
                 parts = key_line[0]
                 if len(parts) > 1:
                     self.__key_mod = parts[0]
         if key == "Caps_Lock":
             if not self.__caps_lock:
                 logging.debug("Caps Lock already disabled!")
             else:
                 self.disable_caps_lock()
                 atexit.register(self.enable_caps_lock)
         ownev = not self.__parent.getModality()
         root_window.grab_key(keycode, X.AnyModifier, ownev,
                              X.GrabModeAsync, X.GrabModeAsync)
         return key, keycode
     logging.critical("Couldn't find quasimode key")
     self.__parent.stop()
     return None, None
Example #7
def skip_if_missing_python_interpreter(python_version_enum):
    """Skip test if missing the necessary version of the Python interpreter"""
    file_name = python_version_enum.executable_name
    if not shutilwhich.which(file_name):
        message = "Missing the necessary version of the Python interpreter: {}".format(
            file_name)
        pytest.skip(message)
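
A sketch of how this helper could be used in a test; the enum stand-in below is hypothetical:

class FakePythonVersion(object):
    # hypothetical stand-in for a python_version_enum member
    executable_name = 'python3.9'

def test_needs_python39():
    skip_if_missing_python_interpreter(FakePythonVersion)
    # the rest of the test runs only when python3.9 is on PATH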
Example #8
    def create_template(self, command, params):
        """

        :type command: list[str]
        :type params: dict[str, object]
        :rtype: drmaa.JobTemplate
        """

        jt = self._session.createJobTemplate()
        jt.remoteCommand = shutil.which(command[0])
        jt.args = command[1:]
        if params['num_slots'] > 1:
            jt.nativeSpecification = "-pe make {}".format(params['num_slots'])

        if params['stdout']:
            jt.outputPath = params['stdout']
        if params['stderr']:
            jt.errorPath = params['stderr']
        if params['join_streams']:
            jt.joinFiles = True
        if params['job_name']:
            jt.jobName = params['job_name']
        if params['work_dir']:
            jt.workingDirectory = params['work_dir']

        jt.params = params
        return jt
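
A hedged example of calling create_template; the command, parameter values, and the scheduler object are made up for illustration:

params = {
    'num_slots': 4,              # requests '-pe make 4' in the native specification
    'stdout': 'job.out',
    'stderr': 'job.err',
    'join_streams': False,
    'job_name': 'align-reads',
    'work_dir': '/scratch/run1',
}
# scheduler is a hypothetical instance of the class above
jt = scheduler.create_template(['bwa', 'mem', 'ref.fa', 'reads.fq'], params)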
Example #9
    def test_init(self):
        init([], AttrDict())
        source = os.path.join(variables['GRAMEXPATH'], 'apps', 'init',
                              'default')
        self.check_files(source)

        # Ensure templates work
        with io.open(os.path.join(self.app_dir, 'gramex.yaml'),
                     encoding='utf-8') as handle:
            line = handle.readline().strip()
            ok_('don\'t delete this line' in line)
            ok_(re.match(r'# Generated by gramex init 1\.\d+', line))

        # If Git LFS is present, ensure that it's set up to track assets/**
        if which('git-lfs'):
            path = os.path.join(self.app_dir, '.gitattributes')
            ok_(os.path.exists(path), 'Git LFS worked')
            with open(path, encoding='utf-8') as handle:
                ok_('assets/**' in handle.read(), 'Git LFS tracks assets/**')
            path = os.path.join(self.app_dir, '.gitignore')
            with open(path, encoding='utf-8') as handle:
                ok_('assets/**' not in handle.read(),
                    '.gitignore allows assets/**')
        # Else, check that .gitignore does not commit assets/**
        else:
            path = os.path.join(self.app_dir, '.gitignore')
            with open(path, encoding='utf-8') as handle:
                ok_('assets/**' in handle.read(),
                    '.gitignore allows assets/**')
Example #10
 def run(self):
     s = None
     try:
         s = open_command_impl.get_shortcut(self.target)
         print s
     except Exception:
         display_xml_message(u"<p>This shortcut can't be found</p>")
     else:
         msg = u"Target info of 'open' shortcut '%s'" % self.target
         result = "\n".join(
             (str(self.target), str(s.target), str(s.shortcut_filename)))
         RecentResult.get().push_result(result, msg)
         display_xml_message(
             u"<p>Target of <command>%s</command> is <command>%s</command>, saved in <command>%s</command></p>"
             % (str(self.target), str(s.target), s.shortcut_filename))
         try:
             # TODO: Implement platform independent
             import subprocess
             from shutilwhich import which
             editor_cmd = os.path.expandvars("$VISUAL")
             if not editor_cmd:
                 editor_cmd = which("gvim")
             if editor_cmd:
                 subprocess.Popen(
                     [editor_cmd, "--nofork", "--", s.shortcut_filename])
         except:
             pass
Example #11
def start_psi4_calculation(path, input_file):
    print(input_file)
    output_file = input_file.replace('dat', 'out')
    input_file = os.path.join(path, input_file)
    output_file = os.path.join(path, output_file)
    psi4_binary = which('psi4', mode=os.X_OK)
    cmd = psi4_binary + ' ' + input_file + ' -o ' + output_file + ' 2>&1'
    os.system(cmd)
Example #13
    def _find_full_path(self):
        """Get a full path to the compiler executable."""
        full_path = which(self.name)

        if full_path is None:
            full_path = self.name

        return full_path
Example #14
def installed(b):
    ''' Returns true if an executable named b exists in the current path.
        b may also be a list of binaries.
    '''
    blist = b if isinstance(b, list) else [
        b,
    ]
    return all([which(b) for b in blist])
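
Usage is the same whether a single name or a list is passed, for example:

installed('git')              # True if a git executable is on PATH
installed(['git', 'curl'])    # True only if every listed binary is found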
Example #15
File: git.py Project: mbr/unleash
def collect_info():
    gb = opts['git_binary']
    git_binary = which(gb)
    if not git_binary:
        issues.error('Could not find git binary: {}.'.format(gb))

    info['git_path'] = git_binary
    info['git_tag_name'] = info['ref'].tag_name
Example #16
def start_psi4_calculation(path, input_file, threads=4):
    print(input_file)
    output_file = input_file.replace('dat', 'out')
    input_file = os.path.join(path, input_file)
    output_file = os.path.join(path, output_file)
    psi4_binary = which('psi4', mode=os.X_OK)
    cmd = psi4_binary + ' ' + input_file + ' -o ' + output_file + ' -n ' + str(threads)
    process = subprocess.Popen(cmd, stderr=subprocess.PIPE, shell=True)
    output = process.communicate()
    return(cmd, process.wait(), output)
Example #19
    def is_installed(self, custom_name=None):

        name = self.app_name
        if custom_name is not None:
            name = custom_name

        if self.env.is_test and (name not in self.env.pre_installed_apps):
            return False

        return shutilwhich.which(name) is not None
Example #20
def init(args, kwargs):
    '''Create Gramex scaffolding files.'''
    if len(args) > 1:
        app_log.error(show_usage('init'))
        return
    kwargs.setdefault('target', os.getcwd())
    app_log.info('Initializing Gramex project at %s', kwargs.target)
    data = {
        'appname': os.path.basename(kwargs.target),
        'author': _check_output('git config user.name', default='Author'),
        'email': _check_output('git config user.email',
                               default='*****@*****.**'),
        'date': datetime.datetime.today().strftime('%Y-%m-%d'),
        'version': gramex.__version__,
    }
    # Ensure that appname is a valid Python module name
    appname = slug.module(data['appname'])
    if appname[0] not in string.ascii_lowercase:
        appname = 'app' + appname
    data['appname'] = appname

    # Create a git repo. But if git fails, do not stop. Continue with the rest.
    try:
        _run_console('git init')
    except OSError:
        pass
    # Install Git LFS if available. Set git_lfs=None if it fails, so .gitignore ignores assets/**
    data['git_lfs'] = which('git-lfs')
    if data['git_lfs']:
        try:
            _run_console('git lfs install')
            _run_console('git lfs track "assets/**"')
        except OSError:
            data['git_lfs'] = None

    # Copy all directories & files (as templates)
    source_dir = os.path.join(variables['GRAMEXPATH'], 'apps', 'init')
    for root, dirs, files in os.walk(source_dir):
        for name in dirs + files:
            source = os.path.join(root, name)
            relpath = os.path.relpath(root, start=source_dir)
            target = os.path.join(kwargs.target, relpath,
                                  name.replace('appname', appname))
            _copy(source, target, template_data=data)
    for empty_dir in ('img', 'data'):
        _mkdir(os.path.join(kwargs.target, 'assets', empty_dir))
    # Copy error files as-is (not as templates)
    error_dir = os.path.join(kwargs.target, 'error')
    _mkdir(error_dir)
    for source in glob(
            os.path.join(variables['GRAMEXPATH'], 'handlers', '?0?.html')):
        target = os.path.join(error_dir, os.path.basename(source))
        _copy(source, target)

    run_setup(kwargs.target)
Example #21
def check_command(name):
    """ Check command is available on the system.

        Args:
            name (str): Command name.

        Returns:
            True if command ``name`` was found on the system.

    """
    return which(name) is not None
Example #22
def flog_gatherer(reactor, temp_dir, flog_binary, request):
    out_protocol = _CollectOutputProtocol()
    gather_dir = join(temp_dir, 'flog_gather')
    process = reactor.spawnProcess(
        out_protocol,
        flog_binary,
        (
            'flogtool', 'create-gatherer',
            '--location', 'tcp:localhost:3117',
            '--port', '3117',
            gather_dir,
        )
    )
    pytest.blockon(out_protocol.done)

    twistd_protocol = _MagicTextProtocol("Gatherer waiting at")
    twistd_process = reactor.spawnProcess(
        twistd_protocol,
        which('twistd'),
        (
            'twistd', '--nodaemon', '--python',
            join(gather_dir, 'gatherer.tac'),
        ),
        path=gather_dir,
    )
    pytest.blockon(twistd_protocol.magic_seen)

    def cleanup():
        try:
            twistd_process.signalProcess('TERM')
            pytest.blockon(twistd_protocol.exited)
        except ProcessExitedAlready:
            pass

        flog_file = mktemp('.flog_dump')
        flog_protocol = _DumpOutputProtocol(open(flog_file, 'w'))
        flog_dir = join(temp_dir, 'flog_gather')
        flogs = [x for x in listdir(flog_dir) if x.endswith('.flog')]

        print("Dumping {} flogtool logfiles to '{}'".format(len(flogs), flog_file))
        reactor.spawnProcess(
            flog_protocol,
            flog_binary,
            (
                'flogtool', 'dump', join(temp_dir, 'flog_gather', flogs[0])
            ),
        )
        pytest.blockon(flog_protocol.done)

    request.addfinalizer(cleanup)

    with open(join(gather_dir, 'log_gatherer.furl'), 'r') as f:
        furl = f.read().strip()
    return furl
Example #23
def add_yarn_or_bower(args, fullpath):
    if args.yarn:
        print("Adding yarn dependencies...")
        yarn = args.yarn.split(',')
        yarn_exe = which('yarn')
        if yarn_exe:
            os.chdir(os.path.join(fullpath, 'project', 'client', 'static'))
            output, error = subprocess.Popen([yarn_exe, 'init', '-y'],
                                             stdout=subprocess.PIPE,
                                             stderr=subprocess.PIPE,
                                             cwd=fullpath).communicate()
            if error:
                print("An error occurred while initializing Yarn; please check "
                      "the package.json file.")
                print(error.decode('ascii'))
            for dependency in yarn:
                output, error = subprocess.Popen([yarn_exe, 'add', dependency],
                                                 stdout=subprocess.PIPE,
                                                 stderr=subprocess.PIPE,
                                                 cwd=fullpath).communicate()
                if error:
                    print("An error occurred with Yarn")
                    print(error.decode('ascii'))
        else:
            print("Could not find yarn. Ignoring.")
    elif args.bower:
        print("Adding bower dependencies...")
        bower = args.bower.split(',')
        bower_exe = which('bower')
        if bower_exe:
            os.chdir(os.path.join(fullpath, 'project', 'client', 'static'))
            for dependency in bower:
                output, error = subprocess.Popen(
                    [bower_exe, 'install', dependency],
                    stdout=subprocess.PIPE,
                    stderr=subprocess.PIPE).communicate()
                if error:
                    print("An error occurred with Bower")
                    print(error)
        else:
            print("Could not find bower. Ignoring.")
Example #24
def find_program(env_variable, names):
    """Find an executable named by the env_variable environment variable or by
    one of the given names, searching the configured search paths."""

    global search_paths
    for name in chain([os.environ.get(env_variable, "")], names):
        path = which(name, path=search_paths)
        if path:
            return path

    raise RuntimeError("Couldn't find " + env_variable +
                       " or any of the following executables in PATH: " +
                       " ".join(names))
Example #25
def add_virtualenv(fullpath):
    """Create a python virtual environment named flask."""
    if args.virtualenv == "2.7" or args.virtualenv == "3.3":
        virtualenv_exe = which(''.join(['virtualenv-', args.virtualenv]))
    else:
        virtualenv_exe = which(''.join(['pyvenv-', args.virtualenv]))
    if virtualenv_exe:
        output, error = Popen(
            [virtualenv_exe, os.path.join(fullpath, 'flask')],
            stdout=PIPE,
            stderr=PIPE
        ).communicate()

        if error:
            with open('virtualenv_error.log', 'w') as fd:
                fd.write(error.decode('utf-8'))
                print('An error occurred with virtualenv!')
                sys.exit(2)

        venv_bin = os.path.join(fullpath, 'flask/bin')
        print('Adding application requirements...')
        output, error = Popen(
            [
                os.path.join(venv_bin, 'pip'),
                'install',
                '-r',
                os.path.join(fullpath, 'requirements.txt')
            ],
            stdout=PIPE,
            stderr=PIPE
        ).communicate()
        if error:
            with open('pip_error.log', 'w') as fd:
                fd.write(error.decode('utf-8'))
                sys.exit(2)
    else:
        print('Could not find a valid virtualenv executable. '
              'Ignoring the request for installing a '
              'virtual environment...')
Example #26
    def js(self, code=None, path=None, **kwargs):
        if self.conn is None:
            try:
                self.conn = yield websocket_connect(
                    self.url, connect_timeout=self.timeout)
            except OSError as exc:
                import errno
                if exc.errno != errno.ECONNREFUSED:
                    raise
                # TODO: node_path
                self.proc = yield daemon(
                    [which('node'), self._path,
                     '--port=%s' % self.port],
                    first_line=re.compile('pynode: 1.\d+.\d+ port: %s' %
                                          self.port),
                    cwd=self.cwd,
                )
                self.conn = yield websocket_connect(
                    self.url, connect_timeout=self.timeout)

        # code= takes preference over path=
        if code is not None:
            kwargs['code'] = code
        elif path is not None:
            kwargs['path'] = path

        # Send the commands. If node has died, clear the connection.
        try:
            yield self.conn.write_message(json.dumps(kwargs))
        except WebSocketClosedError:
            self.conn = None
            raise
        # Receive the response.
        # Note: read_message() cannot be called again while a request is running.
        # (Yes, that's odd. Maybe Anand is missing something.)
        # So wait until the read_future is cleared.
        while self.conn.read_future is not None:
            yield sleep(self._delay)
        msg = yield self.conn.read_message()
        # If node has died, clear the connection to restart it.
        if msg is None:
            self.conn = None
            raise WebSocketClosedError()

        # Parse the result as JSON. Log errors if any
        result = json.loads(msg)
        if result['error']:
            app_log.error(result['error']['stack'])
        raise Return(result)
Example #27
def check_conversion_tools(do_il=False):
    # Check that the conversion tools are available
    if do_il:
        input_files = six.itervalues(INPUT_FILES)
    else:
        input_files = (f for f in six.itervalues(INPUT_FILES) if f['type'] != 'il')

    for input_file in input_files:
        tool = input_file['conversion_tool']
        if shutilwhich.which(tool) is None:
            error_message = "Failed to find conversion tool: {}".format(tool)
            logging.error(error_message)
            raise OasisException(error_message)

    return True
Example #28
 def __init__(self, *args, **kwargs):
     super(Smartmon, self).__init__(*args)
     if "DeviceList" in kwargs:
         self.devices = kwargs.get("DeviceList")().devices
     else:
         if which("smartctl") is None:
             sys.exit("smartctl needs to be installed")
         self.devices = []
         for device in DeviceList().devices:
             if not device.supports_smart:
                 LOG.warning("Skipping %s.  SMART is not enabled.", device.path)
             else:
                 self.devices.append(device)
         if len(self.devices) == 0:
             sys.exit("No devices detected.  Check permissions.  Hint run: 'smartctl --scan'")
Example #29
def add_bower(bower, skeleton, fullpath):
    """Install the selected bower dependencies."""
    bower_exe = which('bower')
    if bower_exe:
        os.chdir(os.path.join(fullpath, skeletons[skeleton], 'static'))

        for dependency in bower:
            output, error = Popen(
                [bower_exe, 'install', dependency],
                stdout=PIPE,
                stderr=PIPE
            ).communicate()
            if error:
                print('An error occurred with Bower!')
                print(error)
    else:
        print('Could not find bower. '
              'Ignoring the request for bower '
              'dependencies installation...')
Example #30
def scan_python_versions():
    """Scan for the python versions existing in the system.
    It limits the choice of Python virtual environments to install.

    It is assumed that the Python binaries live in /usr/bin, although some
    systems keep them in /usr/local/bin or elsewhere, so we use which() to
    locate the ones that actually exist on the system.
    """
    # start defining a list of python versions we accept
    # and then find those existing in the system
    acceptable_versions = ['python2.7', 'python3.3', 'python3.4', 'python3.5']
    py_versions_found = []
    for py_version in acceptable_versions:
        py_version = which(py_version)
        if py_version:
            # we only need the version number, so strip the 'python' prefix
            py_version = ntpath.basename(py_version)[6:]
            py_versions_found.append(py_version)
    return py_versions_found
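
The [6:] slice simply drops the literal 'python' prefix (six characters) from the basename, for example:

ntpath.basename('/usr/local/bin/python3.5')        # -> 'python3.5'
ntpath.basename('/usr/local/bin/python3.5')[6:]    # -> '3.5'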
Example #31
def run_setup(target):
    '''
    Install any setup file in the target directory. The target directory can be:

    - An absolute path
    - A relative path to current directory
    - A relative path to the Gramex apps/ folder

    Returns the absolute path of the final target directory.

    This supports:

    - ``make`` (if Makefile exists)
    - ``powershell -File setup.ps1``
    - ``bash setup.sh``
    - ``pip install -r requirements.txt``
    - ``python setup.py``
    - ``yarn install`` else ``npm install``
    - ``bower --allow-root install``
    '''
    if not os.path.exists(target):
        app_target = os.path.join(variables['GRAMEXPATH'], 'apps', target)
        if not os.path.exists(app_target):
            raise OSError('No directory %s' % target)
        target = app_target
    target = os.path.abspath(target)
    app_log.info('Setting up %s', target)
    for file, runners in setup_paths.items():
        setup_file = os.path.join(target, file)
        if not os.path.exists(setup_file):
            continue
        for exe, cmd in runners.items():
            exe_path = which(exe)
            if exe_path is not None:
                cmd = cmd.format(FILE=setup_file, EXE=exe_path)
                app_log.info('Running %s', cmd)
                _run_console(cmd, cwd=target)
                break
        else:
            app_log.warning('Skipping %s. No %s found', setup_file, exe)
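
The for/else loop above assumes setup_paths is an ordered mapping from setup file name to its candidate runners ({executable: command template}). The sketch below is only an illustration derived from the docstring; the actual values in gramex may differ:

setup_paths = {
    'Makefile':         {'make':       '"{EXE}"'},
    'setup.ps1':        {'powershell': '"{EXE}" -File "{FILE}"'},
    'setup.sh':         {'bash':       '"{EXE}" "{FILE}"'},
    'requirements.txt': {'pip':        '"{EXE}" install -r "{FILE}"'},
    'setup.py':         {'python':     '"{EXE}" "{FILE}"'},
    'package.json':     {'yarn':       '"{EXE}" install', 'npm': '"{EXE}" install'},
    'bower.json':       {'bower':      '"{EXE}" --allow-root install'},
}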
Example #32
    def get_mysql_version(cls):
        '''
        Return a tuple (major, minor). For example, for MySQL 5.7.15,
        return (5, 7). Return (None, None) if the version number is not found.

        '''
        
        # Where is mysql client program?
        mysql_path = which('mysql')
      
        # Get version string, which looks like this:
        #   'Distrib 5.7.15, for osx10.11 (x86_64) using  EditLine wrapper\n'
        version_str = subprocess.check_output([mysql_path, '--version'])
        
        # Isolate the major and minor version numbers (e.g. '5', and '7')
        pat = re.compile(r'Distrib ([0-9]*)[.]([0-9]*)[.]')
        match_obj = pat.search(version_str)
        if match_obj is None:
            return (None,None)
        (major, minor) = match_obj.groups()
        return (int(major), int(minor))
Example #33
    def test_setup(self):
        subprocess.call([sys.executable, '-m', 'pip', 'uninstall', '-y', '-r', self.req_path])
        install(['setup'], AttrDict(url=self.install_path))

        result = set()
        for root, dirs, files in os.walk(self.install_path):
            for filename in files:
                path = os.path.join(root, filename)
                result.add(os.path.relpath(path, self.install_path))

        # See http://go.microsoft.com/fwlink/?LinkID=135170
        # Requires: Set-ExecutionPolicy -ExecutionPolicy RemoteSigned
        if which('powershell'):
            result.add('powershell-setup.txt')
        if which('make'):
            result.add('makefile-setup.txt')
        if which('bash'):
            result.add('bash-setup.txt')
        if which('python'):
            result.add('python-setup.txt')
        if which('yarn'):
            result.add('yarn.lock')
            result.add('node_modules/.yarn-integrity')
            result.add('node_modules/gramex-npm-package/package.json')
            result.add('node_modules/gramex-npm-package/npm-setup.js')
        elif which('npm'):
            # package-lock.json needs node 8.x -- which is required for CaptureHandler anyway
            result.add('package-lock.json')
        if which('bower'):
            result.add('bower_components/gramex-bower-package/bower.json')
            result.add('bower_components/gramex-bower-package/bower-setup.txt')
            result.add('bower_components/gramex-bower-package/.bower.json')
        if which('pip'):
            import dicttoxml            # noqa
        self.check_files('setup', result)
        self.check_uninstall('setup')
Example #34
def check_conversion_tools(il=False):
    """
    Check that the conversion tools are available

    :param il: Flag whether to check insured loss tools
    :type il: bool

    :return: True if all required tools are present, False otherwise
    :rtype: bool
    """
    if il:
        input_files = INPUT_FILES.values()
    else:
        input_files = (f for f in INPUT_FILES.values() if f['type'] != 'il')

    for input_file in input_files:
        tool = input_file['conversion_tool']
        if shutilwhich.which(tool) is None:
            error_message = "Failed to find conversion tool: {}".format(tool)
            logging.error(error_message)
            raise OasisException(error_message)

    return True
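
check_conversion_tools assumes INPUT_FILES maps each input file to a dict with at least 'conversion_tool' and 'type' keys, roughly like the sketch below (the file keys and tool names are placeholders, not the real oasislmf values):

INPUT_FILES = {
    'example_gul_file': {'type': 'gul', 'conversion_tool': 'example_gul_tool'},
    'example_il_file':  {'type': 'il',  'conversion_tool': 'example_il_tool'},
}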
Example #35
    def test_init(self):
        if os.path.exists(self.app_dir):
            shutil.rmtree(self.app_dir, onerror=_ensure_remove)
        os.makedirs(self.app_dir)
        os.chdir(self.app_dir)
        init([], AttrDict())

        # Ensure files are present
        source = os.path.join(variables['GRAMEXPATH'], 'apps', 'init')
        for path in os.listdir(source):
            path = path.replace('appname', self.appname.replace('-', '_'))
            ok_(os.path.exists(os.path.join(self.app_dir, path)),
                path + ' in init')

        # Ensure templates work
        with io.open(os.path.join(self.app_dir, 'gramex.yaml'),
                     encoding='utf-8') as handle:
            line = handle.readline().strip()
            ok_('don\'t delete this line' in line)
            ok_(re.match(r'# Generated by gramex init 1\.\d+', line))

        # If Git LFS is present, ensure that it's set up to track assets/**
        if which('git-lfs'):
            path = os.path.join(self.app_dir, '.gitattributes')
            ok_(os.path.exists(path), 'Git LFS worked')
            with open(path, encoding='utf-8') as handle:
                ok_('assets/**' in handle.read(), 'Git LFS tracks assets/**')
            path = os.path.join(self.app_dir, '.gitignore')
            with open(path, encoding='utf-8') as handle:
                ok_('assets/**' not in handle.read(),
                    '.gitignore allows assets/**')
        # Else, check that .gitignore does not commit assets/**
        else:
            path = os.path.join(self.app_dir, '.gitignore')
            with open(path, encoding='utf-8') as handle:
                ok_('assets/**' in handle.read(),
                    '.gitignore allows assets/**')
Example #36
def add_git(fullpath):
    """Create a git repository at the project's root directory."""
    git_exe = which('git')
    if git_exe:
        output, error = Popen(
            ['git', 'init', fullpath],
            stdout=PIPE,
            stderr=PIPE
        ).communicate()

        if error:
            with open('git_error.log', 'w') as fd:
                fd.write(error.decode('utf-8'))
                print('Error with git init!')
                sys.exit(2)
        print('Adding a `.gitignore` file...')
        copyfile(
            os.path.join(script_dir, 'templates', '.gitignore'),
            os.path.join(fullpath, '.gitignore')
        )
    else:
        print('Could not find git. '
              'Ignoring the request for initializing '
              'a `.git` repository...')
Example #37
def skip_if_no_vagrant_executable():
    """Skip the test if you cannot find the vagrant executable"""
    if not shutilwhich.which('vagrant'):
        pytest.skip("No vagrant executable found")
Example #38
def flog_binary():
    return which('flogtool')
Example #39
def main(args):

    print("\nScaffolding...")

    # Variables #

    appname = args.appname
    fullpath = os.path.join(cwd, appname)
    skeleton_dir = args.skeleton

    # Tasks #

    # Copy files and folders
    print("Copying files and folders...")
    shutil.copytree(os.path.join(script_dir, skeleton_dir), fullpath)

    # Create config.py
    print("Creating the config...")
    secret_key = codecs.encode(os.urandom(32), 'hex').decode('utf-8')
    template = template_env.get_template('config.jinja2')
    template_var = {
        'secret_key': secret_key,
    }
    dir_path = os.path.join(fullpath, 'project')
    mkdir_p(dir_path)
    with open(os.path.join(dir_path, 'config.py'), 'w') as fd:
        fd.write(template.render(template_var))

    # Add bower dependencies
    # NOTE: make sure bower installed
    # npm install -g bower
    if args.bower:
        print("Adding bower dependencies...")
        bower = args.bower.split(',')
        bower_exe = which('bower')
        if bower_exe:
            dir_path = os.path.join(fullpath, 'project', 'client', 'static')
            mkdir_p(dir_path)
            os.chdir(dir_path)
            for dependency in bower:
                output, error = subprocess.Popen(
                    [bower_exe, 'install', dependency],
                    stdout=subprocess.PIPE,
                    stderr=subprocess.PIPE).communicate()
                if error:
                    print("An error occurred with Bower")
                    print(error)
        else:
            print("Could not find bower. Ignoring.")

    # Add a virtualenv
    virtualenv = args.virtualenv
    if virtualenv:
        print("Adding a virtualenv...")
        # NOTE: We should already be in an activated venv from the skeleton
        python_exe = which('python')
        if python_exe:
            output, error = subprocess.Popen(
                [python_exe, "-m", "venv",
                 os.path.join(fullpath, 'env')],
                stdout=subprocess.PIPE,
                stderr=subprocess.PIPE).communicate()
            if error:
                with open('virtualenv_error.log', 'w') as fd:
                    fd.write(error.decode('utf-8'))
                    print("An error occurred with virtualenv")
                    sys.exit(2)
            venv_bin = os.path.join(fullpath, 'env/bin')
            output, error = subprocess.Popen(
                [
                    os.path.join(venv_bin, 'pip'), 'install', '-r',
                    os.path.join(fullpath, 'requirements.txt')
                ],
                stdout=subprocess.PIPE,
                stderr=subprocess.PIPE).communicate()
            if error:
                with open('pip_error.log', 'w') as fd:
                    fd.write(error.decode('utf-8'))
                    sys.exit(2)
        else:
            print("Could not find virtualenv executable. Ignoring")

    # Git init
    if args.git:
        print("Initializing Git...")
        output, error = subprocess.Popen(['git', 'init', fullpath],
                                         stdout=subprocess.PIPE,
                                         stderr=subprocess.PIPE).communicate()
        if error:
            with open('git_error.log', 'w') as fd:
                fd.write(error.decode('utf-8'))
                print("Error with git init")
                sys.exit(2)
        shutil.copyfile(os.path.join(script_dir, 'templates', '.gitignore'),
                        os.path.join(fullpath, '.gitignore'))
Example #40
    def _initialize(self):

        # Count number of plots to create:
        num_plots = 0
        for config in self._config.itervalues():
            num_plots += len(config)

        # Set default grid of plot positions:
        if not self._rows * self._cols == num_plots:
            self._cols = int(np.ceil(np.sqrt(num_plots)))
            self._rows = int(np.ceil(num_plots / float(self._cols)))
        self.f, self.axarr = plt.subplots(self._rows,
                                          self._cols,
                                          figsize=self._figsize)

        # Remove unused subplots:
        for i in xrange(num_plots, self._rows * self._cols):
            plt.delaxes(self.axarr[np.unravel_index(i,
                                                    (self._rows, self._cols))])
        cnt = 0
        self.handles = []
        self.types = []
        keywds = ['handle', 'ydata', 'fmt', 'type', 'ids', 'shape', 'norm']
        # TODO: Irregular grid in U will make the plot better
        U, V = np.mgrid[0:np.pi / 2:complex(0, 60), 0:2 * np.pi:complex(0, 60)]
        X = np.cos(V) * np.sin(U)
        Y = np.sin(V) * np.sin(U)
        Z = np.cos(U)
        self._dome_pos_flat = (X.flatten(), Y.flatten(), Z.flatten())
        self._dome_pos = (X, Y, Z)
        self._dome_arr_shape = X.shape
        if not isinstance(self.axarr, np.ndarray):
            self.axarr = np.asarray([self.axarr])
        for LPU, configs in self._config.iteritems():
            for plt_id, config in enumerate(configs):
                ind = np.unravel_index(cnt, self.axarr.shape)
                cnt += 1

                # Some plot types require specific numbers of
                # neuron ID arrays:
                if 'type' in config:
                    if config['type'] == 'quiver':
                        assert len(config['ids']) == 2
                        config['type'] = 0
                    elif config['type'] == 'hsv':
                        assert len(config['ids']) == 2
                        config['type'] = 1
                    elif config['type'] == 'image':
                        assert len(config['ids']) == 1
                        config['type'] = 2
                    elif config['type'] == 'waveform':
                        config['type'] = 3
                    elif config['type'] == 'raster':
                        config['type'] = 4
                    elif config['type'] == 'rate':
                        config['type'] = 5
                    elif config['type'] == 'dome':
                        config['type'] = 6
                    else:
                        raise ValueError('Plot type not supported')
                else:
                    if str(LPU).startswith(
                            'input') and not self._graph[LPU].node[str(
                                config['ids'][0][0])]['spiking']:
                        config['type'] = 2
                    else:
                        config['type'] = 4

                if config['type'] < 3:
                    if not 'shape' in config:

                        # XXX This can cause problems when the number
                        # of neurons is not equal to
                        # np.prod(config['shape'])
                        num_neurons = len(config['ids'][0])
                        config['shape'] = [int(np.ceil(np.sqrt(num_neurons)))]
                        config['shape'].append(
                            int(
                                np.ceil(num_neurons /
                                        float(config['shape'][0]))))

                if config['type'] == 0:
                    config['handle'] = self.axarr[ind].quiver(\
                               np.reshape(self._data[LPU][config['ids'][0],0],config['shape']),\
                               np.reshape(self._data[LPU][config['ids'][1],0],config['shape']))
                elif config['type'] == 1:
                    X = np.reshape(self._data[LPU][config['ids'][0], 0],
                                   config['shape'])
                    Y = np.reshape(self._data[LPU][config['ids'][1], 0],
                                   config['shape'])
                    V = (X**2 + Y**2)**0.5
                    H = (np.arctan2(X, Y) + np.pi) / (2 * np.pi)
                    S = np.ones_like(V)
                    HSV = np.dstack((H, S, V))
                    RGB = hsv_to_rgb(HSV)
                    config['handle'] = self.axarr[ind].imshow(RGB)
                elif config['type'] == 2:
                    if 'trans' in config:
                        if config['trans'] is True:
                            to_transpose = True
                        else:
                            to_transpose = False
                    else:
                        to_transpose = False
                        config['trans'] = False

                    if to_transpose:
                        temp = self.axarr[ind].imshow(np.transpose(np.reshape(\
                                self._data[LPU][config['ids'][0],0], config['shape'])))
                    else:
                        temp = self.axarr[ind].imshow(np.reshape(\
                                self._data[LPU][config['ids'][0],0], config['shape']))

                    temp.set_clim(self._imlim)
                    temp.set_cmap(plt.cm.gist_gray)
                    config['handle'] = temp
                elif config['type'] == 3:
                    fmt = config['fmt'] if 'fmt' in config else ''
                    self.axarr[ind].set_xlim(self._xlim)
                    self.axarr[ind].set_ylim(self._ylim)
                    if len(config['ids'][0]) == 1:
                        config['handle'] = self.axarr[ind].plot([0], \
                                            [self._data[LPU][config['ids'][0][0],0]], fmt)[0]
                        config['ydata'] = [
                            self._data[LPU][config['ids'][0][0], 0]
                        ]
                    else:
                        config['handle'] = self.axarr[ind].plot(
                            self._data[LPU][config['ids'][0], 0])[0]

                elif config['type'] == 4:
                    config['handle'] = self.axarr[ind]
                    config['handle'].vlines(0, 0, 0.01)
                    config['handle'].set_ylim([.5, len(config['ids'][0]) + .5])
                    config['handle'].set_ylabel('Neurons',
                                                fontsize=self._fontsize - 1,
                                                weight='bold')
                    config['handle'].set_xlabel('Time (s)',
                                                fontsize=self._fontsize - 1,
                                                weight='bold')
                    min_id = min(self._id_to_data_idx[LPU].keys())
                    min_idx = self._id_to_data_idx[LPU][min_id]
                    config['handle'].set_xlim(
                        [0, len(self._data[LPU][min_idx, :]) * self._dt])
                    config['handle'].axes.set_yticks([])
                    config['handle'].axes.set_xticks([])
                elif config['type'] == 6:
                    self.axarr[ind].axes.set_yticks([])
                    self.axarr[ind].axes.set_xticks([])
                    self.axarr[ind] = self.f.add_subplot(self._rows,
                                                         self._cols,
                                                         cnt,
                                                         projection='3d')
                    config['handle'] = self.axarr[ind]
                    config['handle'].axes.set_yticks([])
                    config['handle'].axes.set_xticks([])
                    config['handle'].xaxis.set_ticks([])
                    config['handle'].yaxis.set_ticks([])
                    config['handle'].zaxis.set_ticks([])
                    if 'norm' not in config.keys():
                        config['norm'] = Normalize(vmin=-70, vmax=0, clip=True)
                    elif config['norm'] == 'auto':
                        if self._data[LPU].shape[1] > 100:
                            config['norm'] = Normalize(
                                vmin=np.min(self._data[LPU][config['ids'][0],
                                                            100:]),
                                vmax=np.max(self._data[LPU][config['ids'][0],
                                                            100:]),
                                clip=True)
                        else:
                            config['norm'] = Normalize(
                                vmin=np.min(
                                    self._data[LPU][config['ids'][0], :]),
                                vmax=np.max(
                                    self._data[LPU][config['ids'][0], :]),
                                clip=True)

                    node_dict = self._graph[LPU].node
                    if str(LPU).startswith('input'):
                        latpositions = np.asarray([ node_dict[str(nid)]['lat'] \
                                                    for nid in range(len(node_dict)) \
                                                    if node_dict[str(nid)]['extern'] ])
                        longpositions = np.asarray([ node_dict[str(nid)]['long'] \
                                                     for nid in range(len(node_dict)) \
                                                     if node_dict[str(nid)]['extern'] ])
                    else:
                        latpositions = np.asarray([
                            node_dict[str(nid)]['lat']
                            for nid in config['ids'][0]
                        ])
                        longpositions = np.asarray([
                            node_dict[str(nid)]['long']
                            for nid in config['ids'][0]
                        ])
                    xx = np.cos(longpositions) * np.sin(latpositions)
                    yy = np.sin(longpositions) * np.sin(latpositions)
                    zz = np.cos(latpositions)
                    config['positions'] = (xx, yy, zz)
                    colors = griddata(config['positions'],
                                      self._data[LPU][config['ids'][0],
                                                      0], self._dome_pos_flat,
                                      'nearest').reshape(self._dome_arr_shape)
                    colors = config['norm'](colors).data
                    colors = np.tile(
                        np.reshape(colors, [
                            self._dome_arr_shape[0], self._dome_arr_shape[1], 1
                        ]), [1, 1, 4])
                    colors[:, :, 3] = 1.0
                    config['handle'].plot_surface(self._dome_pos[0],
                                                  self._dome_pos[1],
                                                  self._dome_pos[2],
                                                  rstride=1,
                                                  cstride=1,
                                                  facecolors=colors,
                                                  antialiased=False,
                                                  shade=False)

                for key in config.iterkeys():
                    if key not in keywds:
                        try:
                            self._set_wrapper(self.axarr[ind], key,
                                              config[key])
                        except:
                            pass
                        try:
                            self._set_wrapper(config['handle'], key,
                                              config[key])
                        except:
                            pass

                if config['type'] < 3:
                    config['handle'].axes.set_xticks([])
                    config['handle'].axes.set_yticks([])

            if self.suptitle is not None:
                self.f.suptitle(self._title,
                                fontsize=self._fontsize + 1,
                                x=0.5,
                                y=0.03,
                                weight='bold')

        plt.tight_layout()

        if self.out_filename:
            if self.FFMpeg is None:
                if which(matplotlib.rcParams['animation.ffmpeg_path']):
                    self.writer = FFMpegFileWriter(fps=self.fps,
                                                   codec=self.codec)
                elif which(matplotlib.rcParams['animation.avconv_path']):
                    self.writer = AVConvFileWriter(fps=self.fps,
                                                   codec=self.codec)
                else:
                    raise RuntimeError('cannot find ffmpeg or avconv')
            elif self.FFMpeg:
                if which(matplotlib.rcParams['animation.ffmpeg_path']):
                    self.writer = FFMpegFileWriter(fps=self.fps,
                                                   codec=self.codec)
                else:
                    raise RuntimeError('cannot find ffmpeg')
            else:
                if which(matplotlib.rcParams['animation.avconv_path']):
                    self.writer = AVConvFileWriter(fps=self.fps,
                                                   codec=self.codec)
                else:
                    raise RuntimeError('cannot find avconv')

            # Use the output file to determine the name of the temporary frame
            # files so that two concurrently run visualizations don't clobber
            # each other's frames:
            self.writer.setup(
                self.f,
                self.out_filename,
                dpi=80,
                frame_prefix=os.path.splitext(self.out_filename)[0] + '_')
            self.writer.frame_format = 'png'
            self.writer.grab_frame()
        else:
            self.f.show()
Example #41
def find():
    if sys.platform == "win32":
        return which('7z.exe') or which('7za.exe') or which('bin/7za.exe')
    else:
        # I don't know if this works.
        return which('7z') or which('7za') or which('bin/7za')
Example #42
    def _initialize(self):

        # Count number of plots to create:
        num_plots = 0
        for config in self._config.itervalues():
            num_plots += len(config)

        # Set default grid of plot positions:
        if not self._rows*self._cols == num_plots:
            self._cols = int(np.ceil(np.sqrt(num_plots)))
            self._rows = int(np.ceil(num_plots/float(self._cols)))
        self.f, self.axarr = plt.subplots(self._rows, self._cols,
                                          figsize=self._figsize)

        # Remove unused subplots:
        for i in xrange(num_plots, self._rows*self._cols):
            plt.delaxes(self.axarr[np.unravel_index(i, (self._rows, self._cols))])
        cnt = 0
        self.handles = []
        self.types = []
        keywds = ['handle', 'ydata', 'fmt', 'type', 'ids', 'shape', 'norm']
        # TODO: Irregular grid in U will make the plot better
        U, V = np.mgrid[0:np.pi/2:complex(0, 60),
                        0:2*np.pi:complex(0, 60)]
        X = np.cos(V)*np.sin(U)
        Y = np.sin(V)*np.sin(U)
        Z = np.cos(U)
        self._dome_pos_flat = (X.flatten(), Y.flatten(), Z.flatten())
        self._dome_pos = (X, Y, Z)
        self._dome_arr_shape = X.shape
        if not isinstance(self.axarr, np.ndarray):
            self.axarr = np.asarray([self.axarr])
        for LPU, configs in self._config.iteritems():
            for plt_id, config in enumerate(configs):
                ind = np.unravel_index(cnt, self.axarr.shape)
                cnt+=1

                # Some plot types require specific numbers of
                # neuron ID arrays:
                if 'type' in config:
                    if config['type'] == 'quiver':
                        assert len(config['ids'])==2
                        config['type'] = 0
                    elif config['type'] == 'hsv':
                        assert len(config['ids'])==2
                        config['type'] = 1
                    elif config['type'] == 'image':
                        assert len(config['ids'])==1
                        config['type'] = 2
                    elif config['type'] == 'waveform':
                        config['type'] = 3
                    elif config['type'] == 'raster':
                        config['type'] = 4
                    elif config['type'] == 'rate':
                        config['type'] = 5
                    elif config['type'] == 'dome':
                        config['type'] = 6
                    else:
                        raise ValueError('Plot type not supported')
                else:
                    if str(LPU).startswith('input') and not self._graph[LPU].node[str(config['ids'][0][0])]['spiking']:
                        config['type'] = 2
                    else:
                        config['type'] = 4

                if config['type'] < 3:
                    if not 'shape' in config:

                        # XXX This can cause problems when the number
                        # of neurons is not equal to
                        # np.prod(config['shape'])
                        num_neurons = len(config['ids'][0])
                        config['shape'] = [int(np.ceil(np.sqrt(num_neurons)))]
                        config['shape'].append(int(np.ceil(num_neurons/float(config['shape'][0]))))

                if config['type'] == 0:
                    config['handle'] = self.axarr[ind].quiver(\
                               np.reshape(self._data[LPU][config['ids'][0],0],config['shape']),\
                               np.reshape(self._data[LPU][config['ids'][1],0],config['shape']))
                elif config['type'] == 1:
                    X = np.reshape(self._data[LPU][config['ids'][0],0],config['shape'])
                    Y = np.reshape(self._data[LPU][config['ids'][1],0],config['shape'])
                    V = (X**2 + Y**2)**0.5
                    H = (np.arctan2(X,Y)+np.pi)/(2*np.pi)
                    S = np.ones_like(V)
                    HSV = np.dstack((H,S,V))
                    RGB = hsv_to_rgb(HSV)
                    config['handle'] = self.axarr[ind].imshow(RGB)
                elif config['type'] == 2:
                    if 'trans' in config:
                        if config['trans'] is True:
                            to_transpose = True
                        else:
                            to_transpose = False
                    else:
                        to_transpose = False
                        config['trans'] = False

                    if to_transpose:
                        temp = self.axarr[ind].imshow(np.transpose(np.reshape(\
                                self._data[LPU][config['ids'][0],0], config['shape'])))
                    else:
                        temp = self.axarr[ind].imshow(np.reshape(\
                                self._data[LPU][config['ids'][0],0], config['shape']))

                    temp.set_clim(self._imlim)
                    temp.set_cmap(plt.cm.gist_gray)
                    config['handle'] = temp
                elif config['type'] == 3:
                    fmt = config.get('fmt', '')
                    self.axarr[ind].set_xlim(self._xlim)
                    self.axarr[ind].set_ylim(self._ylim)
                    if len(config['ids'][0])==1:
                        config['handle'] = self.axarr[ind].plot([0], \
                                            [self._data[LPU][config['ids'][0][0],0]], fmt)[0]
                        config['ydata'] = [self._data[LPU][config['ids'][0][0],0]]
                    else:
                        config['handle'] = self.axarr[ind].plot(self._data[LPU][config['ids'][0],0])[0]

                elif config['type'] == 4:
                    config['handle'] = self.axarr[ind]
                    config['handle'].vlines(0, 0, 0.01)
                    config['handle'].set_ylim([.5, len(config['ids'][0]) + .5])
                    config['handle'].set_ylabel('Neurons',
                                                fontsize=self._fontsize-1, weight='bold')
                    config['handle'].set_xlabel('Time (s)',fontsize=self._fontsize-1, weight='bold')
                    min_id = min(self._id_to_data_idx[LPU].keys())
                    min_idx = self._id_to_data_idx[LPU][min_id]
                    config['handle'].set_xlim([0,len(self._data[LPU][min_idx,:])*self._dt])
                    config['handle'].axes.set_yticks([])
                    config['handle'].axes.set_xticks([])
                elif config['type'] == 6:
                    self.axarr[ind].axes.set_yticks([])
                    self.axarr[ind].axes.set_xticks([])
                    self.axarr[ind] = self.f.add_subplot(self._rows,
                                                         self._cols,
                                                         cnt,
                                                         projection='3d')
                    config['handle'] = self.axarr[ind]
                    config['handle'].axes.set_yticks([])
                    config['handle'].axes.set_xticks([])
                    config['handle'].xaxis.set_ticks([])
                    config['handle'].yaxis.set_ticks([])
                    config['handle'].zaxis.set_ticks([])
                    if 'norm' not in config:
                        config['norm'] = Normalize(vmin=-70, vmax=0, clip=True)
                    elif config['norm'] == 'auto':
                        if self._data[LPU].shape[1] > 100:
                            config['norm'] = Normalize(vmin = np.min(self._data[LPU][config['ids'][0],100:]),
                                                       vmax = np.max(self._data[LPU][config['ids'][0],100:]),
                                                       clip = True)
                        else:
                            config['norm'] = Normalize(vmin = np.min(self._data[LPU][config['ids'][0],:]),
                                                       vmax = np.max(self._data[LPU][config['ids'][0],:]),
                                                       clip = True)
                            
                    node_dict = self._graph[LPU].node
                    if str(LPU).startswith('input'):
                        latpositions = np.asarray([ node_dict[str(nid)]['lat'] \
                                                    for nid in range(len(node_dict)) \
                                                    if node_dict[str(nid)]['extern'] ])
                        longpositions = np.asarray([ node_dict[str(nid)]['long'] \
                                                     for nid in range(len(node_dict)) \
                                                     if node_dict[str(nid)]['extern'] ])
                    else:
                        latpositions = np.asarray([ node_dict[str(nid)]['lat']
                                                    for nid in config['ids'][0] ])
                        longpositions = np.asarray([ node_dict[str(nid)]['long']
                                                     for nid in config['ids'][0] ])
                    xx = np.cos(longpositions) * np.sin(latpositions)
                    yy = np.sin(longpositions) * np.sin(latpositions)
                    zz = np.cos(latpositions)
                    config['positions'] = (xx, yy, zz)
                    colors = griddata(config['positions'], self._data[LPU][config['ids'][0],0],
                                      self._dome_pos_flat, 'nearest').reshape(self._dome_arr_shape)
                    colors = config['norm'](colors).data
                    colors = np.tile(np.reshape(colors,
                                                [self._dome_arr_shape[0],self._dome_arr_shape[1],1])
                                     ,[1,1,4])
                    colors[:,:,3] = 1.0
                    config['handle'].plot_surface(self._dome_pos[0], self._dome_pos[1],
                                                  self._dome_pos[2], rstride=1, cstride=1,
                                                  facecolors=colors, antialiased=False,
                                                  shade=False)
                    
                # Apply any remaining config entries as property setters on
                # the axes and/or the plot handle, skipping those that
                # neither accepts.
                for key in config:
                    if key not in keywds:
                        try:
                            self._set_wrapper(self.axarr[ind], key, config[key])
                        except Exception:
                            pass
                        try:
                            self._set_wrapper(config['handle'], key, config[key])
                        except Exception:
                            pass
                
                if config['type']<3:
                    config['handle'].axes.set_xticks([])
                    config['handle'].axes.set_yticks([])

            if self.suptitle is not None:
                self.f.suptitle(self._title, fontsize=self._fontsize+1, x=0.5,y=0.03, weight='bold')

        plt.tight_layout()

        if self.out_filename:
            if self.FFMpeg is None:
                if which(matplotlib.rcParams['animation.ffmpeg_path']):
                    self.writer = FFMpegFileWriter(fps=self.fps, codec=self.codec)
                elif which(matplotlib.rcParams['animation.avconv_path']):
                    self.writer = AVConvFileWriter(fps=self.fps, codec=self.codec)
                else:
                    raise RuntimeError('cannot find ffmpeg or avconv')
            elif self.FFMpeg:
                if which(matplotlib.rcParams['animation.ffmpeg_path']):
                    self.writer = FFMpegFileWriter(fps=self.fps, codec=self.codec)
                else:
                    raise RuntimeError('cannot find ffmpeg')
            else:
                if which(matplotlib.rcParams['animation.avconv_path']):
                    self.writer = AVConvFileWriter(fps=self.fps, codec=self.codec)
                else:
                    raise RuntimeError('cannot find avconv')

            # Use the output file to determine the name of the temporary frame
            # files so that two concurrently run visualizations don't clobber
            # each other's frames:
            self.writer.setup(self.f, self.out_filename, dpi=80,
                              frame_prefix=os.path.splitext(self.out_filename)[0]+'_')
            self.writer.frame_format = 'png'
            self.writer.grab_frame()
        else:
            self.f.show()
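The writer-selection branch above resolves the encoder binaries through matplotlib's rcParams before building a file writer. Below is a minimal sketch of that logic pulled out of the class for illustration; the function name pick_writer and the default fps/codec values are assumptions, and AVConvFileWriter with the animation.avconv_path rcParam exist only in the older matplotlib releases this example targets.
import matplotlib
from matplotlib.animation import AVConvFileWriter, FFMpegFileWriter
from shutilwhich import which

def pick_writer(prefer_ffmpeg=None, fps=5, codec='mpeg4'):
    # Hypothetical helper mirroring the selection above: None means "use
    # whichever encoder is available", True means ffmpeg only, False means
    # avconv only.
    ffmpeg_ok = which(matplotlib.rcParams['animation.ffmpeg_path'])
    avconv_ok = which(matplotlib.rcParams['animation.avconv_path'])
    if prefer_ffmpeg in (None, True) and ffmpeg_ok:
        return FFMpegFileWriter(fps=fps, codec=codec)
    if prefer_ffmpeg in (None, False) and avconv_ok:
        return AVConvFileWriter(fps=fps, codec=codec)
    raise RuntimeError('cannot find ffmpeg or avconv')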
Example #43
0
import time
import logging
from pdfminer.layout import LAParams
from pdfminer.pdfpage import PDFPage
from pdfminer.pdfparser import PDFParser
from pdfminer.pdfdocument import PDFDocument
from pdfminer.converter import PDFPageAggregator
from pdfminer.pdfinterp import PDFResourceManager
from pdfminer.pdfinterp import PDFPageInterpreter
from pdfminer.high_level import extract_text_to_fp
from shutilwhich import which
from tornado.web import create_signed_value
import gramex
from gramex.handlers import Capture
from . import TestGramex, server

_captures = {}
paths = {'phantomjs': which('phantomjs'), 'node': which('node')}


def get_capture(name, **kwargs):
    '''Return a cached Capture() object constructed with kwargs'''
    if name in _captures:
        return _captures[name]
    capture = _captures[name] = Capture(**kwargs)
    end_time, delay = time.time() + 10, 0.05
    logging.info('Waiting for capture.js...')
    while not capture.started:
        if time.time() > end_time:
            raise RuntimeError('capture.js took too long to start')
        time.sleep(delay)
    return capture
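A hedged usage sketch of the caching helper above; the name 'default' and the Capture keyword arguments are illustrative and not taken from the test suite.
# Hypothetical usage: the first call constructs Capture() and blocks until
# capture.js reports it has started; later calls return the cached object.
capture = get_capture('default', port=9900, engine='phantomjs')
assert get_capture('default') is capture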
Example #44
0
def setup_workdir(workdir=".", overwrite_dir=False, backup_config=True):
    """
    Setup execution environment and dump default configuration file

    Parameters
    ----------
    workdir: str
        The directory to store the created dirs and files.

    overwrite_dir: bool
        when set False, raise RuntimeError if the workdir is not empty;
        when set True, proceed and overwrite as needed

    backup_config: bool
        when overwriting, this controls whether existing files get backed
        up before being overwritten.
    """
    logger = logging.getLogger(name="setup")

    logdir = 'logs'  # stores logs for the apus pipelines
    externdir = 'extern'  # stores external dependencies
    tmpdir = 'tmp'  # stores temporary files
    configfile = DEFAULT_CONFIG_FILE  # main configuration file

    # validate the workdir
    workdir_content = os.listdir(workdir)
    if workdir_content:
        if not overwrite_dir:
            # skip setup when all files are present
            if all(d in workdir_content
                   for d in [logdir, externdir, tmpdir, configfile]):
                raise RuntimeError(
                    "it seems that the workdir has been setup already. "
                    "Re-run with -f to proceed anyway")
            else:
                raise RuntimeError(
                    "the workdir is not empty, re-run with -f to proceed "
                    "anyway")
        else:
            logger.warning(
                "the workdir is not empty but a forced setup is requested")
    logger.info("setup workdir {}".format(workdir))

    # external dependencies
    externdir = os.path.join(workdir, externdir)
    which_path = os.path.abspath(externdir) + ':' + os.environ['PATH']
    if os.path.isdir(externdir):
        logger.warning("use existing extern dir {}".format(externdir))
    else:
        os.makedirs(externdir)
        logger.info("create extern dir {}".format(externdir))
    logger.info("check external dependencies")
    astromatic_prefix = []
    for name, cmds, datadir in [("SExtractor", ("sex",
                                                'ldactoasc'), "sextractor"),
                                ("SCAMP", ("scamp", ), "scamp"),
                                ("SWarp", ("swarp", ), "swarp")]:
        cmds = [which(cmd, path=which_path) for cmd in cmds]
        if any(c is None for c in cmds):
            raise RuntimeError("not able to locate {}".format(name))
        prefix = os.path.normpath(os.path.join(os.path.dirname(cmds[0]),
                                               '../'))
        datadir = os.path.join(prefix, "share", datadir)
        if not os.path.exists(datadir):
            raise RuntimeError(
                "not able to locate data files for {0}. It is likely that {0} "
                "is compiled within the source directory but without proper "
                "installation. To resolve the issue, either run `make install`"
                " in the source directory, or manually link the data "
                "directory to {1}.".format(name, datadir))
        logger.info("{0:10s} ... OK".format(name))
        astromatic_prefix.append(prefix)
    if len(set(astromatic_prefix)) > 1:
        raise RuntimeError(
            "it seems that the SExtractor, SCAMP and SWarp are not installed "
            "into the same prefix. {app_name} for now does not deal with this "
            "situation. Try to re-configure SExtractor, SCAMP and SWarp with "
            "--prefix=<prefixpath>".format(app_name=APP_NAME))
    astromatic_prefix = os.path.normpath(astromatic_prefix[0])
    logger.info("use shared astromatic prefix {}".format(astromatic_prefix))
    stilts_cmd = which("stilts", path=which_path)
    if stilts_cmd is None:
        logger.warning("not able to find stilts. Get from internet")
        # retrieve stilts
        from astropy.utils.data import download_file
        stilts_jar_tmp = download_file(
            "http://www.star.bris.ac.uk/%7Embt/stilts/stilts.jar", cache=True)
        stilts_jar = os.path.join(externdir, 'stilts.jar')
        shutil.copyfile(stilts_jar_tmp, stilts_jar)
        stilts_cmd = os.path.join(externdir, 'stilts')
        with open(stilts_cmd, 'w') as fo:
            fo.write("""#!/bin/sh
java -Xmx4000M -classpath "{0}:$CLASSPATH" uk.ac.starlink.ttools.Stilts "$@"
""".format(os.path.abspath(stilts_jar)))
        os.chmod(stilts_cmd, os.stat(stilts_cmd).st_mode | stat.S_IEXEC)
    logger.info("{0:10s} ... OK".format("stilts"))
    logger.info("use stilts {}".format(stilts_cmd))
    funpack_cmd = which("funpack", path=which_path)
    if funpack_cmd is None:
        logger.warning("not able to find funpack. Get from internet")
        # retrieve cfitsio, which provides fpack/funpack
        from astropy.utils.data import download_file
        funpack_tmp = download_file(
            "http://heasarc.gsfc.nasa.gov/FTP/software/fitsio/c/"
            "cfitsio_latest.tar.gz",
            cache=True)
        funpack_src = os.path.join(externdir, 'cfitsio')
        import tarfile
        with tarfile.open(funpack_tmp) as tar:
            tar.extractall(path=externdir)
        logger.warning("try compiling funpack")
        import subprocess
        try:
            for command in [
                    './configure', 'make', 'make fpack', 'make funpack'
            ]:
                subprocess.check_call(command.split(), cwd=funpack_src)
        except subprocess.CalledProcessError:
            raise RuntimeError("unable to compile funpack")
        funpack_cmd = os.path.join(externdir, 'funpack')
        shutil.copy(os.path.join(externdir, 'cfitsio', 'funpack'), funpack_cmd)
    logger.info("{0:10s} ... OK".format("funpack"))
    logger.info("use funpack {}".format(funpack_cmd))

    # create logging directory
    logdir = os.path.join(workdir, logdir)
    if os.path.isdir(logdir):
        logger.warning("use existing log dir {}".format(logdir))
    else:
        os.makedirs(logdir)
        logger.info("create log dir {}".format(logdir))

    # setup scratch space
    tmpdir = os.path.join(workdir, tmpdir)
    # def freespace_GiB(path):
    #     stat = os.statvfs(path)
    #     return stat.f_bfree * stat.f_frsize / 1024 ** 3

    # logger.info("{:.2f} GiB free in {}".format(
    #     freespace_GiB(scratch_dir), scratch_dir))
    if os.path.isdir(tmpdir):
        logger.warning("use existing tmp dir {}".format(tmpdir))
    else:
        os.makedirs(tmpdir)
        logger.info("create tmp dir {}".format(tmpdir))

    # dump default config
    time_fmt = "%b-%d-%Y_%H-%M-%S"
    base_fmt = ("{obsid}_{object}_{instru}_{band}")
    config = """## config file of {app_name:s}
## {time:s}

# calib setting
phot_model_flags: 'color,chip,expo'

# qa inputs
qa_headers:
    odi: [
        'OBSID', 'CRVAL1', 'CRVAL2', 'OBJECT', 'EXPMEAS', 'AIRMASS',
        'SEEING', 'SKY_MEDI', 'SKY_STD',
        'FILTER', 'INSTRUME', 'MJD-OBS'
        ]
    decam: [
        'OBSID', 'CRVAL1', 'CRVAL2', 'OBJECT', 'EXPTIME', 'AIRMASS',
        'FWHM',  'AVSKY', 'SKYSIGMA',
        'FILTER', 'INSTRUME', 'MJD-OBS'
    ]

# naming
reg_arch: '{reg_arch:s}'  # regex to parse the filenames from data archive
fmt_orig: '{{imflag}}{base_fmt}.{{ext}}'
reg_orig: '{reg_orig:s}'  # regex to parse the filenames in job-in dir

reg_inputs: '{reg_inputs:s}'  # regex to parse the filenames in jobdir
fmt_inputs: 'orig_{base_fmt}.{{ext}}'  # format string for image in jobdir
sel_inputs: ['orig_*_?.fits', 'orig_*_?.fits.fz']

fmt_masked: 'masked_{base_fmt:s}.fits'  # format string of masked images
sel_masked: 'masked_*_?.fits'   # selection string of masked images

fmt_selected: '{{ppflag}}{{imflag}}_{base_fmt:s}.fits'
sel_fcomb: 'fcomb[0-9]*_masked_*.fits'
fmt_objcat: '{{ppflag}}objcat_{base_fmt:s}.cat'  # format of object catalog
fmt_objmask: '{{ppflag}}objmask_{base_fmt:s}.fits'  # format of object masks
sel_objmask: 'fcomb[0-9]*_objmask_*.fits'   # selection string of object masks
fmt_sky: '{{ppflag}}sky_{base_fmt:s}.fits'  # format string of sky images
fmt_fcomb: '{{ppflag}}combined.fits'
reg_fcomb: '{reg_fcomb:s}'  # regex to parse grouped master
fmt_fsmooth: '{{ppflag}}smoothed.fits'

sel_fsub: 'fsub[0-9]*_masked_*.fits'
fmt_fsub_fsmooth: 'fcomb{{grpid}}_smoothed.fits'  # fmt for subtract fcomb
fmt_fsub: 'fsub_{base_fmt:s}.fits'

sel_fsubed: 'fsub_*.fits'
sel_phot: 'phot[0-9]*_*_*.fits'
fmt_photcat: '{{ppflag}}{{imflag}}_{base_fmt:s}.cat'
fmt_photcat_cleaned: '{{ppflag}}{{imflag}}_{base_fmt:s}.cln.cat'
fmt_photcat_matched: '{{ppflag}}{{imflag}}_{base_fmt:s}.zp.cat'
fmt_phot: '{{ppflag}}{{instru}}_{{band}}.cat'
reg_phot: '{reg_phot:s}'  # regex to parse grouped phot master
phot_hdr_suffix: 'hdr_phot'
phot_hdr_glob: 'phot[0-9]*_*{{obsid}}*{{instru}}_{{band}}.hdr_phot'

sel_mosaic: 'mosaic[0-9]*_*_*.fits'
fmt_mosaic_orig: 'swarp{{grpid}}_{{imflag}}_{{instru}}_{{band}}.fits'
fmt_mosaic_hdr: 'coadd{{grpid}}.mosaic'
fmt_mosaic: 'coadd{{grpid}}_{{imflag}}_{{instru}}_{{band}}.fits'
fmt_mosaic_wht: 'coadd{{grpid}}_{{imflag}}_{{instru}}_{{band}}.wht.fits'
reg_mosaic: {reg_mosaic:s}
reg_mosaic_fits: {reg_mosaic_fits:s}
fmt_msccat_matched: 'coadd{{grpid}}{{imflag}}_{{instru}}_{{band}}.zp.cat'

reg_grp: '{reg_grp:s}'  # regex to parse grouped images
fmt_grp: '{{ppflag}}{{grpid}}_{{imflag}}_{base_fmt:s}.fits'


# environ
workdir: {workdir}
tmpdir: {tmpdir}
logdir: {logdir}
astromatic_prefix: {astromatic_prefix}
stilts_cmd: {stilts_cmd}
funpack_cmd: {funpack_cmd}
""".format(
        app_name=APP_NAME,
        time=datetime.now().strftime(time_fmt),
        version="0.0",
        reg_arch=r'(?P<imflag>[^_/]+_)?'
        r'(?P<obsid>[^_/]*20\d{6}[Tt]\d{6}(?:\.\d)?)'
        r'_(?P<object>.+?)'
        r'_(?P<instru>odi|decam)_(?P<band>[ugrizY])(?:_.+)?\.'
        r'(?P<ext>fits|fits\.fz)$',
        reg_orig=r'(?P<ppflag>[^_/]+_)?(?P<imflag>[^_/]+)_'
        r'(?P<obsid>[^_/]*20\d{6}[Tt]\d{6}(?:\.\d)?)'
        r'_(?P<object>.+?)'
        r'_(?P<instru>odi|decam)_(?P<band>[ugrizY])'
        r'\.(?P<ext>fits|fits\.fz)$',
        reg_inputs=r'(?P<ppflag>[^_/]+_)?(?P<imflag>[^_/]+)_'
        r'(?P<obsid>[^_/]*20\d{6}[Tt]\d{6}(?:\.\d)?)'
        r'_(?P<object>.+?)'
        r'_(?P<instru>odi|decam)_(?P<band>[ugrizY])'
        r'\.(?P<ext>[^/]+)$',
        reg_fcomb=r'(?P<ppflag>[^_/]+_)(?P<imflag>[^/]+)\.fits',
        reg_phot=r'(?P<ppflag>[^_/]+)'
        r'_(?P<instru>odi|decam)_(?P<band>[ugrizY])'
        r'\.(?P<ext>[^/]+)',
        reg_mosaic=r'(?P<ppflag>[a-z]+)(?P<grpid>\d+)_(?P<imflag>[^_/]+)'
        r'_(?P<instru>odi|decam)_(?P<band>[ugrizY])'
        r'\.(?P<ext>[^/]+)$',
        reg_mosaic_fits=r'(?P<ppflag>[a-z]+)(?P<grpid>\d+)'
        r'_(?P<imflag>[^_/]+)'
        r'_(?P<instru>odi|decam)_(?P<band>[ugrizY])'
        r'\.fits$',
        reg_grp=r'(?P<ppflag>[a-z]+)(?P<grpid>\d+)_(?P<imflag>[^_/]+)_'
        r'(?P<obsid>[^_/]*20\d{6}[Tt]\d{6}(?:\.\d)?)'
        r'_(?P<object>.+?)'
        r'_(?P<instru>odi|decam)_(?P<band>[ugrizY])'
        r'\.(?P<ext>[^/]+)$',
        # reg_inputs=r'(?P<ppflag>[^_/]+_)?(?P<imflag>[^_/]+)_'
        #            r'(?P<obsid>20\d{6}T\d{6}\.\d)_(?P<object>.+?)'
        #            r'_odi_(?P<band>[ugriz])'
        #            r'_(?P<featgrp>\d+)_(?P<photgrp>\d+)_(?P<mscgrp>\d+)'
        #            r'\.(?P<ext>fits|fits\.fz)$',
        **locals())
    configfile = os.path.join(workdir, configfile)
    if os.path.exists(configfile):
        if backup_config:
            timestamp = datetime.fromtimestamp(
                os.path.getmtime(configfile)).strftime(time_fmt)
            bakfile = os.path.join(
                workdir, "{1}_{0}{2}".format(
                    timestamp,
                    *os.path.splitext(os.path.basename(configfile))))
            logger.warning("backup existing config file to {}".format(bakfile))
            os.rename(configfile, bakfile)
        else:
            logger.warning(
                "overwrite existing config file {}".format(configfile))
    with open(configfile, 'w') as fo:
        fo.write(config)
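A minimal usage sketch for setup_workdir; the directory name is illustrative.
# Hypothetical invocation: force re-setup of a non-empty working directory
# while keeping a timestamped backup of any existing config file.
setup_workdir(workdir='./apus_run', overwrite_dir=True, backup_config=True)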
Example #45
0
    def is_available(self):
        return bool(which(self.pdflatex))
Example #46
0
    def is_installed(self):
        return shutilwhich.which('subl') is not None
Example #47
0
def main(args):

    print("\nScaffolding...")

    # Variables #

    appname = args.appname
    fullpath = os.path.join(cwd, appname)
    skeleton_dir = args.skeleton

    # Tasks #

    # Copy files and folders
    print("Copying files and folders...")
    shutil.copytree(os.path.join(script_dir, skeleton_dir), fullpath)

    # Create config.py
    print("Creating the config...")
    secret_key = codecs.encode(os.urandom(32), 'hex').decode('utf-8')
    template = template_env.get_template('config.jinja2')
    template_var = {
        'secret_key': secret_key,
    }
    with open(os.path.join(fullpath, 'project', 'config.py'), 'w') as fd:
        fd.write(template.render(template_var))

    # Add bower dependencies
    if args.bower:
        print("Adding bower dependencies...")
        bower = args.bower.split(',')
        bower_exe = which('bower')
        if bower_exe:
            os.chdir(os.path.join(fullpath, 'project', 'client', 'static'))
            for dependency in bower:
                output, error = subprocess.Popen(
                    [bower_exe, 'install', dependency],
                    stdout=subprocess.PIPE,
                    stderr=subprocess.PIPE
                ).communicate()
                if error:
                    print("An error occurred with Bower")
                    print(error)
        else:
            print("Could not find bower. Ignoring.")

    # Add a virtualenv
    virtualenv = args.virtualenv
    if virtualenv:
        print("Adding a virtualenv...")
        virtualenv_exe = which('pyvenv')
        if virtualenv_exe:
            output, error = subprocess.Popen(
                [virtualenv_exe, os.path.join(fullpath, 'env')],
                stdout=subprocess.PIPE,
                stderr=subprocess.PIPE
            ).communicate()
            if error:
                with open('virtualenv_error.log', 'w') as fd:
                    fd.write(error.decode('utf-8'))
                    print("An error occurred with virtualenv")
                    sys.exit(2)
            venv_bin = os.path.join(fullpath, 'env/bin')
            output, error = subprocess.Popen(
                [
                    os.path.join(venv_bin, 'pip'),
                    'install',
                    '-r',
                    os.path.join(fullpath, 'requirements.txt')
                ],
                stdout=subprocess.PIPE,
                stderr=subprocess.PIPE
            ).communicate()
            if error:
                with open('pip_error.log', 'w') as fd:
                    fd.write(error.decode('utf-8'))
                    sys.exit(2)
        else:
            print("Could not find virtualenv executable. Ignoring")

    # Git init
    if args.git:
        print("Initializing Git...")
        output, error = subprocess.Popen(
            ['git', 'init', fullpath],
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE
        ).communicate()
        if error:
            with open('git_error.log', 'w') as fd:
                fd.write(error.decode('utf-8'))
                print("Error with git init")
                sys.exit(2)
        shutil.copyfile(
            os.path.join(script_dir, 'templates', '.gitignore'),
            os.path.join(fullpath, '.gitignore')
        )
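A hedged sketch of driving the scaffold above from code rather than the command line; the Namespace fields mirror the attributes main() reads, and the values are illustrative.
import argparse

# Hypothetical arguments; 'skeleton' must name a folder next to the script.
args = argparse.Namespace(appname='myapp', skeleton='skeleton',
                          bower='jquery,bootstrap', virtualenv=True, git=True)
main(args)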
Example #48
0
def main(argv):

    # Arguments #

    parser = argparse.ArgumentParser(description='Scaffold a Flask Skeleton.')
    parser.add_argument('appname', help='The application name')
    parser.add_argument('-s', '--skeleton', help='The skeleton folder to use.')
    parser.add_argument('-b', '--bower', help='Install dependencies via bower')
    parser.add_argument('-v', '--virtualenv', action='store_true')
    parser.add_argument('-g', '--git', action='store_true')
    args = parser.parse_args()

    # Variables #

    appname = args.appname
    fullpath = os.path.join(cwd, appname)
    skeleton_dir = args.skeleton

    # Tasks #

    # Copy files and folders
    shutil.copytree(os.path.join(script_dir, skeleton_dir), fullpath)

    # Create config.py
    secret_key = codecs.encode(os.urandom(32), 'hex').decode('utf-8')
    template = template_env.get_template('config.jinja2')
    template_var = {
        'secret_key': secret_key,
    }
    with open(os.path.join(fullpath, 'project', 'config.py'), 'w') as fd:
        fd.write(template.render(template_var))

    # Add bower dependencies
    if args.bower:
        bower = args.bower.split(',')
        bower_exe = shutil.which('bower')
        if bower_exe:
            os.chdir(os.path.join(fullpath, 'project', 'static'))
            for dependency in bower:
                output, error = subprocess.Popen(
                    [bower_exe, 'install', dependency],
                    stdout=subprocess.PIPE,
                    stderr=subprocess.PIPE
                ).communicate()
                # print(output)
                if error:
                    print("An error occurred with Bower")
                    print(error)
        else:
            print("Could not find bower. Ignoring.")

    # Add a virtualenv
    virtualenv = args.virtualenv
    if virtualenv:
        virtualenv_exe = which('virtualenv')
        if virtualenv_exe:
            output, error = subprocess.Popen(
                [virtualenv_exe, os.path.join(fullpath, 'env')],
                stdout=subprocess.PIPE,
                stderr=subprocess.PIPE
            ).communicate()
            if error:
                with open('virtualenv_error.log', 'w') as fd:
                    fd.write(error.decode('utf-8'))
                    print("An error occurred with virtualenv")
                    sys.exit(2)
            venv_bin = os.path.join(fullpath, 'env/bin')
            output, error = subprocess.Popen(
                [
                    os.path.join(venv_bin, 'pip'),
                    'install',
                    '-r',
                    os.path.join(fullpath, 'requirements.txt')
                ],
                stdout=subprocess.PIPE,
                stderr=subprocess.PIPE
            ).communicate()
            if error:
                with open('pip_error.log', 'w') as fd:
                    fd.write(error.decode('utf-8'))
                    sys.exit(2)
        else:
            print("Could not find virtualenv executable. Ignoring")

    # Git init
    if args.git:
        output, error = subprocess.Popen(
            ['git', 'init', fullpath],
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE
        ).communicate()
        if error:
            with open('git_error.log', 'w') as fd:
                fd.write(error.decode('utf-8'))
                print("Error with git init")
                sys.exit(2)
        shutil.copyfile(
            os.path.join(script_dir, 'templates', '.gitignore'),
            os.path.join(fullpath, '.gitignore')
        )
Example #49
0
    def is_installed(self):
        return shutilwhich.which(self.app_name) is not None
Example #50
0
def is_installed(b):
    ''' Returns true if an executable named b exists in the current path.
        b may also be a list of binaries.
    '''
    blist = b if isinstance(b, list) else [b]
    return all(which(binary) for binary in blist)
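A brief usage sketch for is_installed; the binary names are illustrative.
# Hypothetical checks: a single executable name, or a list whose members
# must all resolve on the current PATH.
if is_installed('git') and is_installed(['ffmpeg', 'node']):
    print('all external tools found')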