Example #1
 def __init__(self, queue, condition, dbpath):
     threading.Thread.__init__(self, name="FiledataThread")
     self.queue = queue
     self.condition = condition
     self.dbpath = dbpath
     self.running = True
     self.soxi = which('soxi')
     self.sox = which('sox')
     self.soundstretch = which('soundstretch')
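Every example in this listing relies on some project-specific which() helper that maps an executable name to a full path (a few variants return a list of matches instead of a single path). A minimal sketch of the common single-path form, using only the standard library, might look like this; on Python 3.3+ shutil.which already provides it:

import os
import shutil


def which(program):
    """Return the full path to *program* if it is found on PATH, else None."""
    if hasattr(shutil, "which"):  # Python 3.3+
        return shutil.which(program)
    # Fallback: scan PATH by hand, keeping only executable regular files.
    for directory in os.environ.get("PATH", "").split(os.pathsep):
        candidate = os.path.join(directory, program)
        if os.path.isfile(candidate) and os.access(candidate, os.X_OK):
            return candidate
    return None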
Example #2
    def run_mongo_restore(self, restore, destination_uri, dump_dir, source_database_name,
                          log_file_name, dump_log_file_name,
                          exclude_system_users=None,
                          exclude_admin_system_users=None,
                          exclude_system_roles=None,
                          options=None):

        if source_database_name:
            source_dir = os.path.join(dump_dir, source_database_name)
        else:
            source_dir = dump_dir

        workspace = self.get_task_workspace_dir(restore)
        # IMPORTANT: delete the dump log file so the restore command does not break
        dump_log_path = os.path.join(workspace, dump_dir, dump_log_file_name)
        if os.path.exists(dump_log_path):
            os.remove(dump_log_path)

        if exclude_system_users:
            self._delete_system_users_from_dump(restore, source_dir)

        if exclude_admin_system_users:
            self._delete_admin_system_users_from_dump(restore, source_dir)

        if exclude_system_roles:
            self._delete_roles_from_dump(restore, source_dir)

        working_dir = workspace
        log_path = os.path.join(workspace, log_file_name)

        restore_cmd = [
            which("mongoctl"),
            "restore",
            destination_uri,
            source_dir
        ]

        if options:
            restore_cmd.extend(options)

        restore_cmd_display = restore_cmd[:]

        restore_cmd_display[restore_cmd_display.index("restore") + 1] = mask_mongo_uri(destination_uri)

        logger.info("Running mongoctl restore command: %s" %
                    " ".join(restore_cmd_display))

        returncode = execute_command_wrapper(restore_cmd,
                                             output_path=log_path,
                                             cwd=working_dir)

        # read the last restore log line
        last_line_tail_cmd = [which('tail'), '-1', log_path]
        last_log_line = execute_command(last_line_tail_cmd)

        if returncode:
            raise RestoreError(returncode, last_log_line)
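The tail call above shells out just to read the final line of the restore log. A pure-Python equivalent is easy to sketch if portability matters; read_last_line below is a hypothetical helper (fine for logs of modest size, since it scans the whole file):

def read_last_line(path):
    """Return the last non-empty line of a text file, or '' if there is none."""
    last = ""
    with open(path) as log_file:
        for line in log_file:
            if line.strip():
                last = line.rstrip("\n")
    return last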
Example #3
def archlinux_env():
    if utils.which('yaourt'):
        pacman = 'yaourt'
    elif utils.which('pacman'):
        pacman = 'sudo pacman'
    else:
        return False, None

    enabled_by_default = utils.which('pkgfile')

    return enabled_by_default, pacman
Example #4
def serveur():
    serveur_menu = menu.Menu()
    serveur_menu_selection = serveur_menu.show(
        {
            utils.SERVEUR_APACHE: u'Apache',
            utils.SERVEUR_LIGHTTPD: u'Lighttpd'
        }, u'Menu installation serveur')

    if serveur_menu_selection == utils.SERVEUR_APACHE:
        if utils.which('apache2') is None:
            print(u"_____ Installation d'apache _____")
            os.system('apt-get -y install apache2')
        else:
            print(u'Apache est déjà installé')
    elif serveur_menu_selection == utils.SERVEUR_LIGHTTPD:
        if utils.which('lighttpd') is None:
            print(u'_____ Installation de lighttpd _____')
            os.system('apt-get -y install lighttpd')

            f = open('/etc/php5/cgi/php.ini', 'r')
            file_data = f.read()
            f.close()

            new_data = file_data.replace(';cgi.fix_pathinfo=1', 'cgi.fix_pathinfo=1')

            f = open('/etc/php5/cgi/php.ini', 'w')
            f.write(new_data)
            f.close()

            os.system('lighttpd-enable-mod fastcgi')
            os.system('lighttpd-enable-mod fastcgi-php')
            os.system('/etc/init.d/lighttpd force-reload')
            os.system('/bin/echo \'## directory listing configuration\n## we disable the directory listing by default\n##\n$HTTP["url"] =~ "^/" {  dir-listing.activate = "disable"}\' | /usr/bin/tee /etc/lighttpd/conf-available/20-disable-listing.conf')
            os.system('/usr/sbin/lighty-enable-mod disable-listing')
            os.system('/etc/init.d/lighttpd force-reload')

            f = open('/etc/lighttpd/lighttpd.conf', 'r')
            file_data = f.read()
            f.close()

            new_data = file_data.replace('#       "mod_rewrite"', '        "mod_rewrite"')
            new_data = new_data + '\r\n server.error-handler-404 = "/index.php?error=404"'

            f = open('/etc/lighttpd/lighttpd.conf', 'w')
            f.write(new_data)
            f.close()

            os.system('/etc/init.d/lighttpd force-reload')
        else:
            print(u'Lighttpd est déjà installé')

    return serveur_menu_selection
Example #5
def mux_audio_video(file_name, output_dir):
	"""
		Rebuild the AVI container for source file with hardsubbed video track
		:param file_name: Name of the file that had to be re-encoded
		:type file_name: str
		:param output_dir: Directory where to place the raw hardsubbed video
		:type output_dir: str
	"""
	list_of_files = [f for f in os.listdir(output_dir)  if re.match(r'.*\.(xvid|audio)', f)]
	input_param = []
	map_param = []
	count = 0
	for f in reversed(list_of_files):
		if f[-5:] == '.xvid':
			video_file = '-i "' + output_dir + os.sep + f + '"'
		else:
			count = count + 1
			input_param.append('-i "' + output_dir + os.sep + f + '"')
			map_param.append('-map ' + str(count) + ':0')

	# Gather data for the progress bar.
	# If you have ffmpeg you have ffprobe, so it is not checked in REQUIRED_EXECUTABLES
	command = '{ffprobe} -show_streams "{video_input}"'.format(
		ffprobe=which('ffprobe')[0],
		video_input=output_dir + os.sep + f,
	)
	thread = pexpect.spawn(command)
	pl = thread.compile_pattern_list([
		pexpect.EOF,
		"nb_frames=(\d+)"
		])
	while True:
		i = thread.expect_list(pl, timeout=None)
		if i == 0:  # EOF, Process exited
			break
		if i == 1:  # Status
			tot_frames = int(thread.match.group(1))
	thread.close()	 

	command = '{ffmpeg} -y {video_input} {input_params} -c copy -map 0:0 {map_params} "{dest_file}"'.format(
		ffmpeg=which('ffmpeg')[0],
		video_input=video_file,
		input_params=' '.join(input_param),
		map_params=' '.join(map_param),
		dest_file=output_dir + os.sep + os.path.basename(file_name)
	)
	launch_process_with_progress_bar(command, REQUIRED_EXECUTABLES['ffmpeg'], tot_frames, 'Rebuilding file: ', verbose, debug)
	# Cleaning some mess
	for f in reversed(list_of_files):
		os.remove(output_dir + os.sep + f) 
Example #6
def __zcat():
    """
    get the path to the zcat/gzcat executable
    or raise an exception
    """
    global __zcat_path
    if __zcat_path is not None:
        return __zcat_path

    __zcat_path = which("gzcat")
    if not __zcat_path:
        __zcat_path = which("zcat")
    if not __zcat_path:
        raise ValueError("Unable to find a zcat|gzcat executable in PATH!")
    return __zcat_path
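A typical use of the resolved path is to stream a .gz file through the external tool; read_gzip_lines below is a hypothetical helper along those lines, using only the standard subprocess module:

import subprocess


def read_gzip_lines(path):
    """Yield the decompressed lines of *path* by piping it through the cached zcat/gzcat."""
    proc = subprocess.Popen([__zcat(), path], stdout=subprocess.PIPE)
    try:
        for line in proc.stdout:
            yield line
    finally:
        proc.stdout.close()
        proc.wait()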
Example #7
def generate_new_keypair(self, settings):
    """
    Calls :func:`openssh_generate_new_keypair` or
    :func:`dropbear_generate_new_keypair` depending on what's available on the
    system.
    """
    self.ssh_log.debug('generate_new_keypair()')
    users_ssh_dir = get_ssh_dir(self)
    name = 'id_ecdsa'
    keytype = None
    bits = None
    passphrase = ''
    comment = ''
    if 'name' in settings:
        name = settings['name']
    if 'keytype' in settings:
        keytype = settings['keytype']
    if 'bits' in settings:
        bits = settings['bits']
    if 'passphrase' in settings:
        passphrase = settings['passphrase']
    if 'comment' in settings:
        comment = settings['comment']
    log_metadata = {
        'name': name,
        'keytype': keytype,
        'bits': bits,
        'comment': comment
    }
    self.ssh_log.info("Generating new SSH keypair", metadata=log_metadata)
    if which('ssh-keygen'): # Prefer OpenSSH
        openssh_generate_new_keypair(
            self,
            name, # Name to use when generating the keypair
            users_ssh_dir, # Path to save it
            keytype=keytype,
            passphrase=passphrase,
            bits=bits,
            comment=comment
        )
    elif which('dropbearkey'):
        dropbear_generate_new_keypair(self,
            name, # Name to use when generating the keypair
            users_ssh_dir, # Path to save it
            keytype=keytype,
            passphrase=passphrase,
            bits=bits,
            comment=comment)
Example #8
    def _multi_part_put(self, file_path, destination_path, file_size):
        """
            Uploads file in chunks using Swift Tool (st) command
            http://bazaar.launchpad.net/~hudson-openstack/swift/1.2/view/head:/bin/st

        """
        logger.info("RackspaceCloudFilesTarget: Starting multi-part put "
                    "for %s " % file_path)

        # calculate chunk size
        # split into 10 chunks if possible
        chunk_size = int(file_size / 10)
        if chunk_size > MAX_SPLIT_SIZE:
            chunk_size = MAX_SPLIT_SIZE

        st_exe = which("st")
        st_command = [
            st_exe,
            "-A", "https://auth.api.rackspacecloud.com/v1.0",
            "-U", self.username,
            "-K", self.api_key,
            "upload",
            "--segment-size", str(chunk_size),
            self.container_name, destination_path

        ]
        logger.info("RackspaceCloudFilesTarget: Executing command: %s" %
                    " ".join(st_command))
        working_dir = os.path.dirname(file_path)
        execute_command(st_command, cwd=working_dir)
        logger.info("RackspaceCloudFilesTarget: Multi-part put for %s "
                    "completed successfully!" % file_path)
Example #9
def get_systemheader():
    options = settings.get_settings()
    fn = utils.which(
        "header.ps", list(options.user_data_dirs) + [SYSTEM_DATA_DIR])
    if fn:
        return open(fn).read()
    return "%%\%%  System header %s not found!\n%%" % fn
Example #10
def getHash(fileName, pluginName):
    """Given a valid fileName it returns a string containing a md5sum
    of the file content. If we are running on a system which prelink
    binaries (aka RedHat based) the command prelink must be on the PATH"""
    global _isPrelink
    if _isPrelink == None:
        #first execution let's check for prelink
        _isPrelink = utils.which("prelink")
        if _isPrelink == None:
            _isPrelink = ""
        else:
            print "Using: ", _isPrelink
    if pluginName == 'ELF' and len(_isPrelink) > 0:
        #let's use prelink for the md5sum
        #TODO what if isPrelink fails
        (temp, returncode) = utils.getOutputAsList([_isPrelink, '-y', '--md5', fileName])
        if returncode == 0:
            return temp[0].split()[0]
        else:
            #undoing prelinking failed for some reasons
            pass
    try:
        #ok let's do standard md5sum
        fd=open(fileName)
        md=md5()
        md.update(fd.read())
        fd.close()
        return md.hexdigest()
    except IOError:
        #file not found
        return None
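getHash reads the entire file into memory before hashing. For large files a chunked read keeps memory flat; md5sum below is a minimal hypothetical sketch of that variant (the prelink special case above is left out):

from hashlib import md5


def md5sum(file_name, chunk_size=1 << 20):
    """Return the hex md5 digest of a file, reading it in 1 MiB chunks."""
    digest = md5()
    with open(file_name, "rb") as fd:
        for chunk in iter(lambda: fd.read(chunk_size), b""):
            digest.update(chunk)
    return digest.hexdigest()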
Example #11
def create_config_file(config_file, random_music_home):
    """
    Create a configuration file.

    :param config_file: path to config file we are creating
    :type config_file: str
    :param random_music_home: home of the random_music application (i.e. where
    index files are stored)
    :type random_music_home: str
    """
    sys.stdout.write("You do not appear to have a config file, lets create one!\n")
    sys.stdout.write("Creating config file at %s\n" % config_file)
    config = RawConfigParser()
    config.add_section('config')
    config.set('config', 'loop_songs', 'true')
    config.set('config', 'randomise', 'true')
    config.set('config', 'index_dir', os.path.join(random_music_home, 
                                                   "indicies"))
    music_client = DEFAULT_MUSIC_CLIENT
    while not which(music_client):
        music_client = raw_input("The music player '%s' could not be found "
                                   "on your path. Please input a different "
                                   "music player:" % music_client)   
    
    config.set('config', 'music_client', music_client) 

    user_music_dirs = ""
    while not all([os.path.isdir(d) for d in user_music_dirs.split(",")]):
        user_music_dirs = raw_input("Input a csv list of full paths to "
                                   "your music dirs:")
    config.set('config', 'music_dirs', user_music_dirs)
            
    with open(config_file, 'wb') as fh:
        config.write(fh)
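A config file produced this way can be read back with the same RawConfigParser API; a brief hypothetical sketch, where config_file is the path used above and RawConfigParser comes from ConfigParser on Python 2 (configparser on Python 3):

from ConfigParser import RawConfigParser  # configparser.RawConfigParser on Python 3

config = RawConfigParser()
config.read(config_file)

music_client = config.get('config', 'music_client')
index_dir = config.get('config', 'index_dir')
loop_songs = config.getboolean('config', 'loop_songs')
music_dirs = [d for d in config.get('config', 'music_dirs').split(',') if d]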
Example #12
    def dump_backup(self, backup, uri, destination, log_file_name, options=None):
        mongoctl_exe = which("mongoctl")
        if not mongoctl_exe:
            raise MBSError("mongoctl exe not found in PATH")

        dump_cmd = [mongoctl_exe, "--noninteractive", "dump", uri, "-o", destination]

        if options:
            dump_cmd.extend(options)

        dump_cmd_display = dump_cmd[:]
        # mask mongo uri
        dump_cmd_display[3] = mask_mongo_uri(uri)

        logger.info("Running dump command: %s" % " ".join(dump_cmd_display))

        workspace = self.get_task_workspace_dir(backup)
        log_path = os.path.join(workspace, destination, log_file_name)
        last_error_line = {"line": ""}

        def capture_last_error(line):
            if is_mongo_error_log_line(line):
                last_error_line["line"] = line
        # execute dump command
        return_code = execute_command_wrapper(dump_cmd, cwd=workspace, output_path=log_path,
                                             on_output=capture_last_error)

        # raise an error if return code is not 0
        if return_code:
            errors.raise_dump_error(return_code, last_error_line["line"])
Example #13
def test_run_compress():

    if which('nccopy') is None:
        print("Could not find nccopy in path")
        assert(False)
    # retdict = nccompress.run_nccopy('simple_xy.nc','simple_xy.run_nccopy.nc',level=3,verbose=False,shuffle=True)
    # pdb.set_trace()
    retdict = nccompress.run_compress('simple_xy.nc','simple_xy.run_nccopy.nc',level=3,verbose=True,shuffle=True,nccopy=True,timing=False)
    print(retdict)
    assert (retdict['orig_size']/retdict['comp_size'] >= 5.)
    assert (retdict['dlevel'] == 3)
    assert retdict['shuffle']
    assert nccompress.are_equal('simple_xy.nc','simple_xy.run_nccopy.nc',verbose=True)

    # This requires nc2nc to be in the path. If nccompress/nc2nc.py has changed, the change will not be
    # reflected until installation. This is a test that nccompress correctly calls nc2nc
    retdict = nccompress.run_compress('simple_xy.nc','simple_xy.run_nc2nc.nc',level=3,verbose=True,shuffle=True,nccopy=False,timing=False)
    print(retdict)
    assert (retdict['orig_size']/retdict['comp_size'] >= 5.)
    assert (retdict['dlevel'] == 3)
    assert retdict['shuffle']

    assert nccompress.are_equal('simple_xy.nc','simple_xy.run_nc2nc.nc',verbose=True)

    assert nccompress.are_equal('simple_xy.run_nc2nc.nc','simple_xy.run_nccopy.nc',verbose=True)
Example #14
 def on_openvpn_connected(self):
     if onWindows:
         # For unknown reason, it can take quite some time before
         # "explorer \\10.18.0.1" works properly after connecting.
         # We poll using "net view \\SMB_HOSTNAME" until we're connected.
         # TODO is there a way to speed this up?
         while True:
             l.info("calling 'net view'")
             # WTF subprocess.Popen behaves differently than
             # subprocess.call.  This difference only occurs with
             # the custom startupinfo.
             pipe = subprocess.Popen(['net', 'view', '\\\\' + SMB_HOSTNAME],
                                     stdout=subprocess.PIPE,
                                     stderr=subprocess.PIPE,
                                     startupinfo=subprocess_sui)
             out, err = pipe.communicate()
             if pipe.returncode in (0, 2):
                 break
             l.debug("returncode: %s", pipe.returncode)
             l.debug("Message: %s",  repr(err))
             time.sleep(0.5)
             if self.quiting:
                 return
     self.set_state(STATE_CONNECTED)
     if not onWindows and which('gvfs-mount') is not None:
         subprocess.call(['gvfs-mount', 'smb://'+ SMB_HOSTNAME])
     if self.open_files_on_connection:
         self._show_files()
         self.open_files_on_connection = False
Example #15
def find_java():
    required_major = 1
    required_minor = 7
    exec_command = None
    has_server_jvm = True
    java_path = utils.which('java')

    if not java_path:
        print 'NOTE: No Java executable found in $PATH.'
        sys.exit(1)

    is_ok = False
    java_version_out, _ = popen([java_path, '-version']).communicate()
    # pylint: disable=E1103
    match = re.search(java_build_regex, java_version_out)
    if match:
        major = int(match.group(1))
        minor = int(match.group(2))
        is_ok = major >= required_major and minor >= required_minor
    if is_ok:
        exec_command = [java_path, '-Xms1024m', '-server', '-XX:+TieredCompilation']
        check_server_proc = popen(exec_command + ['-version'])
        check_server_proc.communicate()
        if check_server_proc.returncode != 0:
            # Not all Java installs have server JVMs.
            # list.remove() mutates in place and returns None, so don't reassign
            exec_command.remove('-server')
            has_server_jvm = False

    if not is_ok:
        print 'NOTE: Java executable version %d.%d or above not found in $PATH.' % (required_major, required_minor)
        sys.exit(1)
    print 'Java executable: %s%s' % (java_path, '' if has_server_jvm else ' (no server JVM)')
    return exec_command
Example #16
    def run(self, action):
        icv = InteractiveConfigValidation()
        if action == 'pinging':
            icv.run(require_domain=True)
        else:
            icv.run()

        config = icv.get()

        if action == "pinging":
            print "In a second you will be presented with your default text editor. Copy text below and paste it at the bottom of that file, save and exit:"
            print ""
            print "@reboot %s pinging start" % (which('pd'),)
            print ""
            print "Copy the line and press enter to continue."
            raw_input()
            subprocess.call("crontab -e", shell=True)
            print "Good job! If all went well pinging will now start automatically."
        elif action == 'autohosts':
            ensure_sudo()
            cron_file = "/etc/cron.d/pd_etchosts"
            cron_contents = "HOME=/home/%s\n*/1 * * * * root %s etchosts" % (getlogin(), which('pd'),)

            def writeback():
                with open(cron_file, "w+") as f:
                    f.write(cron_contents)
                    f.write('\n')
            print "The following lines are going to be added to %s:\n\n%s\n" % (cron_file, cron_contents)
            _, agreed = icv.ask_action("Do you wish to continue?", writeback)
            if not agreed:
                print 'Aborting.'
            else:
                print 'Done.'
Example #17
def mux_audio_video(file_name, output_dir):
	"""
		Rebuild the MP4 container for source file with hardsubbed video track
		:param file_name: Name of the file that had to be re-encoded
		:type file_name: str
		:param output_dir: Directory where to place the raw hardsubbed video
		:type output_dir: str
	"""
	base_file_name = get_base_file_name(file_name)
	list_of_files = [f for f in os.listdir(output_dir) if re.match(base_file_name + r'.*\.(264|aac|audio)', f)]
	file_param = []
	for f in reversed(list_of_files):
		file_param.append('-add "' + output_dir + os.sep + f + '"')

	# If the destination file already exists, MP4Box adds the new tracks to it
	output_file = output_dir + os.sep + os.path.basename(file_name)
	index_out = 0
	while os.path.isfile(output_file):
		print(colorama.Style.BRIGHT + 'File ' + output_file + ' already exists' + colorama.Style.NORMAL)
		index_out = index_out + 1
		output_file = output_dir + os.sep + os.path.basename(file_name) + '_' + str(index_out)

	command = '{MP4Box} {add_audio_opts} "{dest_file}"'.format(
		MP4Box = which('MP4Box')[0],
		add_audio_opts = ' '.join(file_param),
		dest_file = output_file
	)
	launch_process_with_progress_bar(command, REQUIRED_EXECUTABLES['MP4Box'], 100, 'Rebuilding file: ', verbose, debug)
	# Cleaning some mess
	for f in reversed(list_of_files):
		os.remove(output_dir + os.sep + f) 
Example #18
def open_sub_channel(term, tws):
    """
    Opens a sub-channel of communication by executing a new shell on the SSH
    server using OpenSSH's `Master mode <http://en.wikibooks.org/wiki/OpenSSH/Cookbook/Multiplexing>`_
    capability (it spawns a new slave) and returns the resulting
    :class:`termio.Multiplex` instance.  If a slave has already been opened for
    this purpose it will re-use the existing channel.
    """
    logging.debug("open_sub_channel() term: %s" % term)
    global OPEN_SUBCHANNELS
    if term in OPEN_SUBCHANNELS and OPEN_SUBCHANNELS[term].isalive():
        # Use existing sub-channel (much faster this way)
        return OPEN_SUBCHANNELS[term]
    # NOTE: When connecting a slave via ssh you can't tell it to execute a
    # command like you normally can (e.g. 'ssh user@host <some command>').  This
    # is why we're using the termio.Multiplex.expect() functionality below...
    session = tws.session
    session_dir = tws.settings['session_dir']
    session_path = os.path.join(session_dir, session)
    if not session_path:
        raise SSHMultiplexingException(_(
            "SSH Plugin: Unable to open slave sub-channel."))
    socket_path = None
    # Find the SSH socket path...
    for f in os.listdir(session_path):
        if f.startswith('ssh:%s:' % term):
            # Grab the SSH socket path from the file
            for line in open(os.path.join(session_path, f)):
                if line.startswith('SSH_SOCKET'):
                    # NOTE: This will include quotes (which is fine):
                    socket_path = line.split('=')[1].strip()
    # Interesting: When using an existing socket you don't need to give it all
    # the same options as you used to open it but you still need to give it
    # *something* in place of the hostname or it will report a syntax error and
    # print out the help.  So that's why I've put 'go_ssh_remote_cmd' below.
    # ...but I could have just used 'foo' :)
    if not socket_path:
        raise SSHMultiplexingException(_(
            "SSH Plugin: Unable to open slave sub-channel."))
    users_ssh_dir = get_ssh_dir(tws)
    ssh_config_path = os.path.join(users_ssh_dir, 'config')
    if not os.path.exists(ssh_config_path):
        # Create it (an empty one so ssh doesn't error out)
        with open(ssh_config_path, 'w') as f:
            f.write('\n')
    # Hopefully 'go_ssh_remote_cmd' will be a clear enough indication of
    # what is going on by anyone that has to review the logs...
    ssh = which('ssh')
    ssh_command = '%s -x -S%s -F%s go_ssh_remote_cmd' % (
        ssh, socket_path, ssh_config_path)
    logging.debug("ssh_command: %s" % ssh_command)
    OPEN_SUBCHANNELS[term] = m = tws.new_multiplex(
        ssh_command, "%s (sub)" % term)
    # Using huge numbers here so we don't miss much (if anything) if the user
    # executes something like "ps -ef".
    fd = m.spawn(rows=100, cols=200) # Hopefully 100/200 lines/cols is enough
    # ...if it isn't, well, that's not really what this is for :)
    # Set the term title so it gets a proper name in the logs
    m.writeline(u'echo -e "\\033]0;Term %s sub-channel\\007"' % term)
    return m
Example #19
def resolve_node_paths():
    if has_valid_global_node():
        if sys.platform == "win32":
            return (utils.which('node'), utils.which('npm.cmd'))
        return (utils.which('node'), utils.which('npm'))
    has_installed_local_node = path.isfile(local_node_binary_path)
    if has_installed_local_node:
        return (local_node_binary_path, local_npm_binary_path)
    if path.isdir(local_node_runtimes_path):
        shutil.rmtree(local_node_runtimes_path)
    if sys.platform == 'linux2' or sys.platform == 'darwin':
        install_node()
        return (local_node_binary_path, local_npm_binary_path)
    print('ERROR: Please install the latest node.js LTS version using the Windows installer:')
    print('https://nodejs.org/en/download/')
    raise
Example #20
def extract_audio(file_name, output_dir):
	"""
		Extract all audio tracks from an AVI container
		:param file_name: Name of the file that contains the audio tracks
		:type file_name: str
		:param output_dir: Directory where to place the raw audio tracks
		:type output_dir: str
	"""
	# detect how many audio track
	command = '{mplayer} -noconfig all -cache-min 0 -vo null -ao null -frames 0 -identify "{input_file}" 2>/dev/null | grep ID_AUDIO_ID'.format(mplayer=which('mplayer')[0], input_file=file_name)
	if verbose:
		print command
	thread = pexpect.spawn(command)
	pl = thread.compile_pattern_list([
		pexpect.EOF,
		"ID_AUDIO_ID=(\d+).*"
		])
	audio_tracks = []
	while True:
		i = thread.expect_list(pl, timeout=None)
		if i == 0:  # EOF, Process exited
			break
		if i == 1:  # Status
			audio_tracks.append(int(thread.match.group(1)))	
	thread.close()	 
	# Now extract each audio track
	for track in audio_tracks:
		t_command = '{mplayer} -aid {track} -dumpaudio -dumpfile {dest_file} "{input_file}"'.format(
			mplayer=which("mplayer")[0],
			input_file=file_name,
			track=track,
			dest_file=output_dir + os.sep + "{}".format(track) + ".audio"
		)
		launch_process_with_progress_bar(t_command, REQUIRED_EXECUTABLES['mplayer'], 100, 'Extract audio track {}: '.format(track), verbose, debug)
Example #21
	def __init__(self):

		self.settings_file = ""
		self.folder = ""
		self.setDefaultValues()

		#self.config = configparser.RawConfigParser()
		self.folder = ComicTaggerSettings.getSettingsFolder()

		if not os.path.exists( self.folder ):
			os.makedirs( self.folder )

		self.settings_file = os.path.join( self.folder, "settings.ini")
		self.CFG = ConfigObj(self.settings_file, encoding='utf-8')

		# if config file doesn't exist, write one out
		if not os.path.exists( self.settings_file ):
			self.save()
		else:
			self.load()

		# take a crack at finding rar exes, if not set already
		if self.rar_exe_path == "":
			if platform.system() == "Windows":
				# look in some likely places for windows machine
				if os.path.exists( "C:\Program Files\WinRAR\Rar.exe" ):
					self.rar_exe_path = "C:\Program Files\WinRAR\Rar.exe"
				elif os.path.exists( "C:\Program Files (x86)\WinRAR\Rar.exe" ):
					self.rar_exe_path = "C:\Program Files (x86)\WinRAR\Rar.exe"
			else:
				# see if it's in the path of unix user
				if utils.which("rar") is not None:
					self.rar_exe_path = utils.which("rar")
			if self.rar_exe_path != "":
				self.save()

		if self.unrar_exe_path == "":
			if platform.system() != "Windows":
				# see if it's in the path of unix user
				if utils.which("unrar") is not None:
					self.unrar_exe_path = utils.which("unrar")
			if self.unrar_exe_path != "":
				self.save()

		# make sure unrar/rar program is now in the path for the UnRAR class to use
		utils.addtopath(os.path.dirname(self.unrar_exe_path))
		utils.addtopath(os.path.dirname(self.rar_exe_path))
Example #22
def openssh_generate_new_keypair(name, path, keytype=None, passphrase="", bits=None, comment="", tws=None):
    """
    Generates a new private and public key pair--stored in the user's directory
    using the given *name* and other optional parameters (using OpenSSH).

    If *keytype* is given, it must be one of "ecdsa", "rsa" or "dsa" (case
    insensitive).  If *keytype* is "rsa" or "ecdsa", *bits* may be specified to
    specify the size of the key.

    .. note:: Defaults to generating a 521-bit ecdsa key if OpenSSH is version 5.7+. Otherwise a 2048-bit rsa key will be used.
    """
    logging.debug("openssh_generate_new_keypair()")
    openssh_version = shell_command("ssh -V")[1]
    ssh_major_version = int(openssh_version.split()[0].split("_")[1].split(".")[0])
    key_path = os.path.join(path, name)
    ssh_minor_version = int(openssh_version.split()[0].split("_")[1].split(".")[1][0])
    ssh_version = "%s.%s" % (ssh_major_version, ssh_minor_version)
    ssh_version = float(ssh_version)
    if not keytype:
        if ssh_version >= 5.7:
            keytype = "ecdsa"
        else:
            keytype = "rsa"
    else:
        keytype = keytype.lower()
    if not bits and keytype == "ecdsa":
        bits = 521  # Not a typo: five-hundred-twenty-one bits
    elif not bits and keytype == "rsa":
        bits = 2048
    if not passphrase:  # This just makes sure False and None end up as ''
        passphrase = ""
    hostname = os.uname()[1]
    if not comment:
        now = datetime.now().isoformat()
        comment = "Generated by Gate One on %s %s" % (hostname, now)
    ssh_keygen_path = which("ssh-keygen")
    command = (
        "%s "  # Path to ssh-keygen
        "-b %s "  # bits
        "-t %s "  # keytype
        "-C '%s' "  # comment
        "-f %s" % (ssh_keygen_path, bits, keytype, comment, key_path)  # Key path
    )
    m = tws.new_multiplex(command, "gen_ssh_keypair")
    call_errorback = partial(errorback, tws)
    m.expect("^Overwrite.*", overwrite, optional=True, timeout=10)
    passphrase_handler = partial(enter_passphrase, passphrase)
    m.expect("^Enter passphrase", passphrase_handler, errorback=call_errorback, timeout=10)
    m.expect("^Enter same passphrase again", passphrase_handler, errorback=call_errorback, timeout=10)
    finalize = partial(finished, tws)
    # The regex below captures the md5 fingerprint which tells us the
    # operation was successful.
    m.expect(
        "(([0-9a-f][0-9a-f]\:){15}[0-9a-f][0-9a-f])",
        finalize,
        errorback=call_errorback,
        timeout=15,  # Key generation can take a little while
    )
    m.spawn()
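For comparison, the same key can be generated non-interactively with a single ssh-keygen invocation by passing the passphrase via -N instead of answering prompts; a rough sketch using the variables from the example above (ssh-keygen will still prompt if key_path already exists):

import subprocess

# -q quiet, -t key type, -b key size, -N new passphrase, -C comment, -f output file
subprocess.check_call([
    which("ssh-keygen"),
    "-q",
    "-t", keytype,
    "-b", str(bits),
    "-N", passphrase,
    "-C", comment,
    "-f", key_path,
])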
Example #23
def test_compress_files():

    if which('nccopy') is None:
        print("Could not find nccopy in path")
        assert(False)
    nccompress.main_parse_args(['-v','-p','simple_xy.nc'])

    assert nccompress.are_equal('simple_xy.nc','tmp.nc_compress/simple_xy.nc',verbose=True)
Example #24
 def validate_settings(self):
     # Check the values of the supplied input and output files
     self.listFile = validateInputFile( self.listFile, ['.list'])
     self.ccsFile = validateInputFile( self.ccsFile, ['.fastq'])
     self.sequenceFile = validateInputFile( self.sequenceFile, 
                                             ['.bas.h5', '.fofn'])
     self.outputFile = validateOutputFile( self.outputFile )
     try: # Check the value of the numProc argument
         assert self.numProc >= 1
     except AssertionError:
         raise ValueError('numProc must be a positive integer!')
     # Find all of external tools we will need to use
     self.filterPlsH5 = which('filterPlsH5.py')
     self.compareSequences = which('compareSequences.py')
     self.cmph5tools = which('cmph5tools.py')
     self.loadPulses = which('loadPulses')
     self.variantCaller = which('variantCaller.py')
Example #25
def _node(build, *args, **kw):
	node = "node"
	if not utils.which(node):
		raise WebError("couldn't find {tool} on your PATH or in likely "
				"locations - is it installed?".format(tool=node))

	kw['check_for_interrupt'] = True
	run_shell(node, *args, **kw)
Example #26
 def validate_settings(self):
     # Check the values of the supplied input and output files
     self.listFile = validateInputFile(self.listFile, ['.list'])
     self.ccsFile = validateInputFile(self.ccsFile, ['.fastq'])
     self.sequenceFile = validateInputFile(self.sequenceFile,
                                           ['.bas.h5', '.fofn'])
     self.outputFile = validateOutputFile(self.outputFile)
     try:  # Check the value of the numProc argument
         assert self.numProc >= 1
     except AssertionError:
         raise ValueError('numProc must be a positive integer!')
     # Find all of external tools we will need to use
     self.filterPlsH5 = which('filterPlsH5.py')
     self.compareSequences = which('compareSequences.py')
     self.cmph5tools = which('cmph5tools.py')
     self.loadPulses = which('loadPulses')
     self.variantCaller = which('variantCaller.py')
Example #27
 def setUp(self):
     self.tleap = utils.which('tleap')
     self.cwd = os.getcwd()
     try:
         os.mkdir(get_fn('writes'))
     except OSError:
         pass
     os.chdir(get_fn('writes'))
Example #28
def resolve_node_paths():
    if has_valid_global_node():
        if sys.platform == "win32":
            return (utils.which('node'), utils.which('npm.cmd'))
        return (utils.which('node'), utils.which('npm'))
    has_installed_local_node = path.isfile(local_node_binary_path)
    if has_installed_local_node:
        return (local_node_binary_path, local_npm_binary_path)
    if path.isdir(local_node_runtimes_path):
        shutil.rmtree(local_node_runtimes_path)
    if sys.platform == 'linux2' or sys.platform == 'darwin':
        install_node()
        return (local_node_binary_path, local_npm_binary_path)
    print(
        'ERROR: Please install the latest node.js LTS version using the Windows installer:'
    )
    print('https://nodejs.org/en/download/')
    raise
Example #29
    def _get_ldconf_paths(self):
        """return a the list of path used by the dynamic loader

        this list is gathered from what you have in /etc/ld.so.conf"""
        return_paths = []
	ldconf_cmd = "ldconfig"
	if not utils.which(ldconf_cmd) :
            ldconf_cmd = "/sbin/ldconfig"
	if not utils.which(ldconf_cmd) :
	    logger.error("Unable to find ldconfig. You need ldconfig in you PATH to properly run fingerprint.")
        (output, retcode) = utils.getOutputAsList([ldconf_cmd, "-v"])
        #default_paths = ["/lib", "/usr/lib"]

        # if run as a user it will fail so we don't care for retcode
        for line in output:
            if line and line[0] == '/':
                return_paths.append(line.split(":")[0])
        return return_paths
Example #30
def test_run_compress():

    if which('nccopy') is None:
        print("Could not find nccopy in path")
        assert (False)
    # retdict = nccompress.run_nccopy('simple_xy.nc','simple_xy.run_nccopy.nc',level=3,verbose=False,shuffle=True)
    retdict = nccompress.run_compress('simple_xy.nc',
                                      'simple_xy.run_nccopy.nc',
                                      level=3,
                                      verbose=True,
                                      shuffle=True,
                                      nccopy=True,
                                      timing=False)
    print(retdict)
    assert (retdict['orig_size'] / retdict['comp_size'] >= 5.)
    assert (retdict['dlevel'] == 3)
    assert retdict['shuffle']
    assert nccompress.are_equal('simple_xy.nc',
                                'simple_xy.run_nccopy.nc',
                                verbose=True)

    retdict = nccompress.run_compress('simple_xy_noclassic.nc',
                                      'simple_xy_noclassic.run_nccopy.nc',
                                      level=3,
                                      verbose=True,
                                      shuffle=True,
                                      nccopy=True,
                                      timing=False)
    print(retdict)
    assert (retdict['orig_size'] / retdict['comp_size'] >= 5.)
    assert (retdict['dlevel'] == 3)
    assert retdict['shuffle']
    assert nccompress.are_equal('simple_xy_noclassic.nc',
                                'simple_xy_noclassic.run_nccopy.nc',
                                verbose=True)

    # This requires nc2nc to be in the path. If nccompress/nc2nc.py has changed, the change will not be
    # reflected until installation. This is a test that nccompress correctly calls nc2nc
    retdict = nccompress.run_compress('simple_xy.nc',
                                      'simple_xy.run_nc2nc.nc',
                                      level=3,
                                      verbose=True,
                                      shuffle=True,
                                      nccopy=False,
                                      timing=False)
    print(retdict)
    assert (retdict['orig_size'] / retdict['comp_size'] >= 5.)
    assert (retdict['dlevel'] == 3)
    assert retdict['shuffle']

    assert nccompress.are_equal('simple_xy.nc',
                                'simple_xy.run_nc2nc.nc',
                                verbose=True)

    assert nccompress.are_equal('simple_xy.run_nc2nc.nc',
                                'simple_xy.run_nccopy.nc',
                                verbose=True)
Example #31
def get(flag='host'):

    if flag == 'host':
        compiler_val = overrides.get('CHPL_HOST_COMPILER', '')

    elif flag == 'target':
        compiler_val = overrides.get('CHPL_TARGET_COMPILER', '')

    else:
        error("Invalid flag: '{0}'".format(flag), ValueError)

    default_llvm = False
    if not compiler_val:
        default_llvm = default_to_llvm(flag)

        # If allowable, look at CC/CXX
        if should_consider_cc_cxx(flag):
            compiler_val = get_compiler_from_cc_cxx()

    if compiler_val:
        validate_compiler(compiler_val, flag)
        return compiler_val

    prgenv_compiler = get_prgenv_compiler()

    if default_llvm:
        compiler_val = 'llvm'

    elif prgenv_compiler != 'none':
        # The cray platforms are a special case in that we want to
        # "cross-compile" by default. (the compiler is different between host
        # and target, but the platform is the same).
        if flag == 'host':
            compiler_val = 'gnu'
        else:
            compiler_val = prgenv_compiler

    else:
        platform_val = chpl_platform.get(flag)
        # Normal compilation (not "cross-compiling")
        # inherit the host compiler if the target compiler is not set and
        # the host and target platforms are the same
        if flag == 'target':
            if chpl_platform.get('host') == platform_val:
                compiler_val = get('host')
        elif platform_val.startswith('pwr'):
            compiler_val = 'ibm'
        elif platform_val == 'darwin' or platform_val == 'freebsd':
            if which('clang'):
                compiler_val = 'clang'
            else:
                compiler_val = 'gnu'
        else:
            compiler_val = 'gnu'

    validate_compiler(compiler_val, flag)
    return compiler_val
Example #32
def get_userheader():
    options = settings.get_settings()
    templates = []
    for fn in settings.get_settings().user_headers:
        filename = utils.which(fn, options.user_data_dirs)
        if filename:
            templates.append(USERHEADER_INFO % fn)
            templates.append(open(filename).read())
    return string.join(templates, '\n')
Example #33
def condor_stats(timeout=TIMEOUT):
    """
    Return the Condor Schedd stats as a ClassAd instance.
    """
    ad = classad.ClassAd(MyType='Scheduler',
                 Name='Error communicating with the Condor Schedd.')
    args = [utils.which('condor_status'), '-long', '-schedd']
    res = _run_condor_cmd(args, [ad, ], timeout=timeout)
    return(res[0])
Example #34
 def __init__(self, queue, condition, dbpath):
     threading.Thread.__init__(self, name="AcoustidMetadataThread")
     # musicbrainz.set_useragent('Cycle', '0.1')
     self.daemon = True
     self.queue = queue
     self.condition = condition
     self.dbpath = dbpath
     self.running = True
     self.fpcalc = which('/usr/bin/fpcalc')
Example #35
 def __init__(self, queue, condition, dbpath):
     threading.Thread.__init__(self, name="AcoustidMetadataThread")
     # musicbrainz.set_useragent('Cycle', '0.1')
     self.daemon = True
     self.queue = queue
     self.condition = condition
     self.dbpath = dbpath
     self.running = True
     self.fpcalc = which('/usr/bin/fpcalc')
Example #36
def get_userheader():
    options = settings.get_settings()
    templates = []
    for fn in settings.get_settings().user_headers:
        filename = utils.which(fn, options.user_data_dirs)
        if filename:
            templates.append(USERHEADER_INFO % fn)
            templates.append(open(filename).read())
    return string.join(templates, '\n')
Example #37
    def __init__(self, command=None):
        """
        """
        full_path = which(command)

        if full_path is None:
            raise RuntimeError("Command not found.")

        self.command = full_path
Example #38
def open_sub_channel(term, tws):
    """
    Opens a sub-channel of communication by executing a new shell on the SSH
    server using OpenSSH's `Master mode <http://en.wikibooks.org/wiki/OpenSSH/Cookbook/Multiplexing>`_
    capability (it spawns a new slave) and returns the resulting
    :class:`termio.Multiplex` instance.  If a slave has already been opened for
    this purpose it will re-use the existing channel.
    """
    logging.debug("open_sub_channel() term: %s" % term)
    global OPEN_SUBCHANNELS
    if term in OPEN_SUBCHANNELS and OPEN_SUBCHANNELS[term].isalive():
        # Use existing sub-channel (much faster this way)
        return OPEN_SUBCHANNELS[term]
    # NOTE: When connecting a slave via ssh you can't tell it to execute a
    # command like you normally can (e.g. 'ssh user@host <some command>').  This
    # is why we're using the termio.Multiplex.expect() functionality below...
    session = tws.session
    session_dir = tws.settings['session_dir']
    session_path = os.path.join(session_dir, session)
    socket_path = None
    # Find the SSH socket path...
    for f in os.listdir(session_path):
        if f.startswith('ssh:%s:' % term):
            # Grab the SSH socket path from the file
            for line in open(os.path.join(session_path, f)):
                if line.startswith('SSH_SOCKET'):
                    # NOTE: This will include quotes (which is fine):
                    socket_path = line.split('=')[1].strip()
    # Interesting: When using an existing socket you don't need to give it all
    # the same options as you used to open it but you still need to give it
    # *something* in place of the hostname or it will report a syntax error and
    # print out the help.  So that's why I've put 'go_ssh_remote_cmd' below.
    # ...but I could have just used 'foo' :)
    if not socket_path:
        raise SSHMultiplexingException(
            _("SSH Plugin: Unable to open slave sub-channel."))
    users_ssh_dir = get_ssh_dir(tws)
    ssh_config_path = os.path.join(users_ssh_dir, 'config')
    if not os.path.exists(ssh_config_path):
        # Create it (an empty one so ssh doesn't error out)
        with open(ssh_config_path, 'w') as f:
            f.write('\n')
    # Hopefully 'go_ssh_remote_cmd' will be a clear enough indication of
    # what is going on by anyone that has to review the logs...
    ssh = which('ssh')
    ssh_command = '%s -x -S%s -F%s go_ssh_remote_cmd' % (ssh, socket_path,
                                                         ssh_config_path)
    OPEN_SUBCHANNELS[term] = m = tws.new_multiplex(ssh_command,
                                                   "%s (sub)" % term)
    # Using huge numbers here so we don't miss much (if anything) if the user
    # executes something like "ps -ef".
    fd = m.spawn(rows=100, cols=200)  # Hopefully 100/200 lines/cols is enough
    # ...if it isn't, well, that's not really what this is for :)
    # Set the term title so it gets a proper name in the logs
    m.writeline(u'\x1b]0;Term %s sub-channel\007' % term)
    return m
Example #39
def has_valid_global_node():
    node_path = utils.which('node')
    if not node_path:
        return False
    node_process = popen([node_path, '--version'])
    (node_process_out, _) = node_process.communicate()
    if node_process.returncode != 0:
        return False
    # e.g. "v8.11.3" -> "8"; splitting avoids breaking on two-digit major versions
    major_version = node_process_out.strip().lstrip('v').split('.')[0]
    return int(major_version) >= MIN_NODE_VERSION
Example #40
def generate_new_keypair(self, settings):
    """
    Calls :func:`openssh_generate_new_keypair` or
    :func:`dropbear_generate_new_keypair` depending on what's available on the
    system.
    """
    logging.debug('generate_new_keypair()')
    users_ssh_dir = get_ssh_dir(self)
    name = 'id_ecdsa'
    keytype = None
    bits = None
    passphrase = ''
    comment = ''
    if 'name' in settings:
        name = settings['name']
    if 'keytype' in settings:
        keytype = settings['keytype']
    if 'bits' in settings:
        bits = settings['bits']
    if 'passphrase' in settings:
        passphrase = settings['passphrase']
    if 'comment' in settings:
        comment = settings['comment']
    #if which('dropbearkey'):
        #DROPBEAR_VERSION = shell_command('dropbear -V')[1].splitlines()[1]
    if which('ssh-keygen'): # Prefer OpenSSH
        openssh_generate_new_keypair(
            self,
            name, # Name to use when generating the keypair
            users_ssh_dir, # Path to save it
            keytype=keytype,
            passphrase=passphrase,
            bits=bits,
            comment=comment
        )
    elif which('dropbearkey'):
        dropbear_generate_new_keypair(self,
            name, # Name to use when generating the keypair
            users_ssh_dir, # Path to save it
            keytype=keytype,
            passphrase=passphrase,
            bits=bits,
            comment=comment)
Example #41
def test_compress_files():

    if which('nccopy') is None:
        print("Could not find nccopy in path")
        assert (False)
    nccompress.main_parse_args(['-v', '-p', 'simple_xy.nc'])

    assert nccompress.are_equal('simple_xy.nc',
                                'tmp.nc_compress/simple_xy.nc',
                                verbose=True)
Example #42
def openssh_generate_public_key(self, path, passphrase=None, settings=None):
    """
    Generates a public key from the given private key at *path*.  If a
    *passphrase* is provided, it will be used to generate the public key (if
    necessary).
    """
    self.ssh_log.debug('openssh_generate_public_key()')
    ssh_keygen_path = which('ssh-keygen')
    pubkey_path = "%s.pub" % path
    command = (
        "%s "  # Path to ssh-keygen
        "-f '%s' "  # Key path
        "-y "  # Output public key to stdout
        "2>&1 "  # Redirect stderr to stdout so we can catch failures
        "> '%s'"  # Redirect stdout to the public key path
        % (ssh_keygen_path, path, pubkey_path))
    import termio
    m = termio.Multiplex(command)

    def request_passphrase(*args, **kwargs):
        "Called if this key requires a passphrase.  Ask the client to provide"
        message = {'terminal:sshjs_ask_passphrase': settings}
        self.write_message(message)

    def bad_passphrase(m_instance, match):
        "Called if the user entered a bad passphrase"
        settings['bad'] = True
        request_passphrase()

    if passphrase:
        m.expect('^Enter passphrase',
                 "%s\n" % passphrase,
                 optional=True,
                 preprocess=False,
                 timeout=5)
        m.expect('^load failed',
                 bad_passphrase,
                 optional=True,
                 preprocess=False,
                 timeout=5)
    elif settings:
        m.expect('^Enter passphrase',
                 request_passphrase,
                 optional=True,
                 preprocess=False,
                 timeout=5)

    def atexit(child, exitstatus):
        "Raises an SSHKeygenException if the *exitstatus* isn't 0"
        if exitstatus != 0:
            print(m.dump)
            raise SSHKeygenException(
                _("Error generating public key from private key at %s" % path))

    m.spawn(exitfunc=atexit)
Example #43
    def _get_ldconf_paths(self):
        """return a the list of path used by the dynamic loader

        this list is gathered from what you have in /etc/ld.so.conf"""
        return_paths = []
        ldconf_cmd = "ldconfig"
        if not utils.which(ldconf_cmd):
            ldconf_cmd = "/sbin/ldconfig"
        if not utils.which(ldconf_cmd):
            logger.error(
                "Unable to find ldconfig. You need ldconfig in you PATH to properly run fingerprint."
            )
        (output, retcode) = utils.getOutputAsList([ldconf_cmd, "-v"])
        #default_paths = ["/lib", "/usr/lib"]

        # if run as a user it will fail so we don't care for retcode
        for line in output:
            if line and line[0] == '/':
                return_paths.append(line.split(":")[0])
        return return_paths
Example #44
def node_js():
    if utils.which('node') is None:
        print(u'_____ Installation de Nodejs _____')
        cmd = 'git clone https://github.com/creationix/nvm.git /var/www/nvm'
        os.system(cmd)
        os.system('cd /var/www/nvm/')
        os.system('git checkout `git describe --abbrev=0 --tags`')
        execfile('/var/www/nvm/nvm.sh')
        os.system('nvm install latest')
    else:
        print(u'Nodejs est déjà installé')
Example #45
def mysql(serveur):
    if utils.which('mysql') is None:
        print(u'_____ Installation de mysql _____')
        os.system('apt-get -y install mysql-server php5-mysql')

        if serveur == utils.SERVEUR_APACHE:
            os.system('apt-get -y install libapache2-mod-auth-mysql')

        os.system('mysql_install_db')
        os.system('/usr/bin/mysql_secure_installation')
    else:
        print(u'Mysql déjà installé')
Example #46
def open_sub_channel(self, term):
    """
    Opens a sub-channel of communication by executing a new shell on the SSH
    server using OpenSSH's `Master mode <http://en.wikibooks.org/wiki/OpenSSH/Cookbook/Multiplexing>`_
    capability (it spawns a new slave) and returns the resulting
    :class:`termio.Multiplex` instance.  If a slave has already been opened for
    this purpose it will re-use the existing channel.
    """
    term = int(term)
    global OPEN_SUBCHANNELS
    if term in OPEN_SUBCHANNELS and OPEN_SUBCHANNELS[term].isalive():
        # Use existing sub-channel (much faster this way)
        return OPEN_SUBCHANNELS[term]
    self.ssh_log.info("Opening SSH sub-channel", metadata={'term': term})
    # NOTE: When connecting a slave via ssh you can't tell it to execute a
    # command like you normally can (e.g. 'ssh user@host <some command>').  This
    # is why we're using the termio.Multiplex.expect() functionality below...
    session = self.ws.session
    session_dir = self.ws.settings['session_dir']
    session_path = os.path.join(session_dir, session)
    if not session_path:
        raise SSHMultiplexingException(
            _("SSH Plugin: Unable to open slave sub-channel."))
    socket_path = self.loc_terms[term]['ssh_socket']
    # Interesting: When using an existing socket you don't need to give it all
    # the same options as you used to open it but you still need to give it
    # *something* in place of the hostname or it will report a syntax error and
    # print out the help.  So that's why I've put 'go_ssh_remote_cmd' below.
    # ...but I could have just used 'foo' :)
    if not socket_path:
        raise SSHMultiplexingException(
            _("SSH Plugin: Unable to open slave sub-channel."))
    users_ssh_dir = get_ssh_dir(self)
    ssh_config_path = os.path.join(users_ssh_dir, 'config')
    if not os.path.exists(ssh_config_path):
        # Create it (an empty one so ssh doesn't error out)
        with open(ssh_config_path, 'w') as f:
            f.write('\n')
    # Hopefully 'go_ssh_remote_cmd' will be a clear enough indication of
    # what is going on by anyone that has to review the logs...
    ssh = which('ssh')
    ssh_command = "%s -x -S'%s' -F'%s' go_ssh_remote_cmd" % (ssh, socket_path,
                                                             ssh_config_path)
    OPEN_SUBCHANNELS[term] = m = self.new_multiplex(ssh_command,
                                                    "%s (sub)" % term)
    # Using huge numbers here so we don't miss much (if anything) if the user
    # executes something like "ps -ef".
    m.spawn(rows=100, cols=200)  # Hopefully 100/200 lines/cols is enough
    # ...if it isn't, well, that's not really what this is for :)
    # Set the term title so it gets a proper name in the logs
    m.writeline(u'echo -e "\\033]0;Term %s sub-channel\\007"' % term)
    return m
Example #47
    def tar_backup(self, backup, dump_dir, file_name):
        tar_exe = which("tar")

        tar_cmd = [tar_exe, "-cvzf", file_name, dump_dir]
        cmd_display = " ".join(tar_cmd)
        workspace = self.get_task_workspace_dir(backup)
        try:
            logger.info("Running tar command: %s" % cmd_display)
            execute_command(tar_cmd, cwd=workspace)
            self._delete_dump_dir(backup, dump_dir)
        except CalledProcessError, e:
            last_log_line = e.output.split("\n")[-1]
            errors.raise_archive_error(e.returncode, last_log_line)
Example #48
    def extract_restore_source_backup(self, restore):
        working_dir = self.get_task_workspace_dir(restore)
        file_reference = restore.source_backup.target_reference
        logger.info("Extracting tar file '%s'" % file_reference.file_name)

        tarx_cmd = [which("tar"), "-xf", file_reference.file_name]

        logger.info("Running tar extract command: %s" % tarx_cmd)
        try:
            execute_command(tarx_cmd, cwd=working_dir)
        except CalledProcessError, cpe:
            logger.error("Failed to execute extract command: %s" % tarx_cmd)
            raise ExtractError(cause=cpe)
Example #49
def _npm(build, *args, **kw):
	if sys.platform.startswith("win"):
		npm = "npm.cmd"
	else:
		npm = "npm"
	if not utils.which(npm):
		raise WebError("""Couldn't find {tool} on your PATH or in likely locations - is it installed?

You can use the 'node_path' setting in your local configuration to set a custom install directory"""
.format(tool=npm))

	kw['check_for_interrupt'] = True
	run_shell(npm, *args, **kw)
Example #50
 def is_available(self):
     """
     Determines if the prerequisites for UNAFold have been met.
     :return: True or False
     """
     if all([
             which(x) for x in
         ['UNAFold.pl', 'hybrid-ss-min', 'hybrid-min', 'ct-energy']
     ]):
         #self.available = True
         return True
     else:
         return False
Example #51
def get_host_fingerprint(self, settings):
    """
    Returns the hash of the given host's public key by making a remote
    connection to the server (not just by looking at known_hosts).
    """
    out_dict = {}
    if 'port' not in settings:
        port = 22
    else:
        port = settings['port']
    if 'host' not in settings:
        out_dict['result'] = _("Error:  You must supply a 'host'.")
        message = {'terminal:sshjs_display_fingerprint': out_dict}
        self.write_message(message)
    else:
        host = settings['host']
    self.ssh_log.debug("get_host_fingerprint(%s:%s)" % (host, port),
                       metadata={
                           'host': host,
                           'port': port
                       })
    out_dict.update({'result': 'Success', 'host': host, 'fingerprint': None})
    ssh = which('ssh')
    command = "%s -p %s -oUserKnownHostsFile=none -F. %s" % (ssh, port, host)
    m = self.new_multiplex(
        command, 'get_host_key',
        logging=False)  # Logging is false so we don't make tons of silly logs

    def grab_fingerprint(m_instance, match):
        out_dict['fingerprint'] = match.splitlines()[-1][:-1]
        m_instance.terminate()
        message = {'terminal:sshjs_display_fingerprint': out_dict}
        self.write_message(message)
        del m_instance

    def errorback(m_instance):
        leftovers = [a.rstrip() for a in m_instance.dump() if a.strip()]
        out_dict['result'] = _(
            "Error: Could not determine the fingerprint of %s:%s... '%s'" %
            (host, port, "\n".join(leftovers)))
        m_instance.terminate()  # Don't leave stuff hanging around!
        message = {'terminal:sshjs_display_fingerprint': out_dict}
        self.write_message(message)
        del m_instance

    # "The authenticity of host 'localhost (127.0.0.1)' can't be established.\r\nECDSA key fingerprint is 83:f5:b1:f1:d3:8c:b8:fe:d3:be:e5:dd:95:a5:ba:73.\r\nAre you sure you want to continue connecting (yes/no)? "
    m.expect('\n.+fingerprint .+\n',
             grab_fingerprint,
             errorback=errorback,
             preprocess=False)
    m.spawn()
Example #52
def _run_condor_cmd(argv, error_result, timeout=TIMEOUT):
    """
    Internal: execute the command specified in `argv` and either return its
    parsed STDOUT (as list of ClassAd instances) or `error_result` in case of
    error.
    """
    ads = error_result
    stdout = _run_and_get_stdout([utils.which(argv.pop(0)), ] + argv, timeout)
    if(stdout is not None):
        f = open(stdout, 'r')
        ads = _parse_classads(f)
        f.close()
        os.remove(stdout)
    return(ads)
Example #53
def get():
    make_val = overrides.get('CHPL_MAKE')
    if not make_val:
        platform_val = chpl_platform.get()
        if platform_val.startswith('cygwin') or platform_val == 'darwin':
            make_val = 'make'
        elif platform_val.startswith('linux'):
            if which('gmake'):
                make_val = 'gmake'
            else:
                make_val = 'make'
        else:
            make_val = 'gmake'
    return make_val
Example #54
    def __init__(self):
        """Initializer.
        """

        self.png2icns = ''
        self.icns2png = ''
        self.converttool = ''
        self.notices = []

        # check and/or find the correct file locations
        if not (os.path.isfile(Converter.PNG2ICNS)
            or os.access(Converter.PNG2ICNS, os.X_OK)):
            self.png2icns = which(os.path.basename(Converter.PNG2ICNS))
            if not self.png2icns:
                raise Exception("Unable to locate png2icns binary: %s" %
                    Converter.PNG2ICNS)
        else:
            self.png2icns = Converter.PNG2ICNS

        if not (os.path.isfile(Converter.ICNS2PNG)
            or os.access(Converter.ICNS2PNG, os.X_OK)):
            self.icns2png = which(os.path.basename(Converter.ICNS2PNG))
            if not self.icns2png:
                raise Exception("Unable to locate icns2png binary: %s" %
                    Converter.ICNS2PNG)
        else:
            self.icns2png = Converter.ICNS2PNG

        if not (os.path.isfile(Converter.CONVERTTOOL)
            or os.access(Converter.CONVERTTOOL, os.X_OK)):
            self.converttool = which(os.path.basename(Converter.CONVERTTOOL))
            if not self.converttool:
                raise Exception("Unable to locate image conversion tool: %s" %
                    Converter.CONVERTTOOL)
        else:
            self.converttool = Converter.CONVERTTOOL
Example #55
    def __init__(self):
        """
        Initialises the object

        @type   self: Options
        @param  self: Options object
        """
        self.os = sys.platform
        self.cwd = os.getcwd()
        if (self.os == "win32"):
            self.exec_ext = ".exe"
            self.path_sep = "\\"
            #self.shell = False

        self.gnuplot_path = utils.which("pgnuplot" + self.exec_ext)
Example #56
def openssh_generate_public_key(path):
    """
    Generates a public key from the given private key at *path*.
    """
    ssh_keygen_path = which('ssh-keygen')
    pubkey_path = "%s.pub" % path
    command = (
        "%s "  # Path to ssh-keygen
        "-f %s "  # Key path
        "-y "  # Output public key to stdout
        "> %s"  # Redirect stdout to the public key path
        % (ssh_keygen_path, path, pubkey_path))
    exitstatus, output = shell_command(command)
    if exitstatus != 0:
        raise SSHKeygenException(
            _("Error generating public key from private key at %s" % path))
Example #57
def download(mlink):
    exe_path = which('webtorrent')
    if exe_path is None:
        print(
            red("\nError:: ") +
            yellow("`webtorrent` is not installed, can't download torrent"))
        print(
            red("Error:: ") + yellow(
                "please run `npm install webtorrent` to install `webtorrent`"))
    else:
        try:
            # use the path resolved by which() rather than a hard-coded location
            os.system(exe_path + " --out ~/Downloads/ download " + mlink)
        except OSError as e:
            print("Error: %s - %s." % (e.filename, e.strerror))
Example #58
def condor_status(machine_name=None, timeout=TIMEOUT):
    """
    Wrapper around condor_status: retrieve the list of machines in the pool and
    their current status. If `machine_name` == None, then return information on
    all machines that belong to the pool. Otherwise just return info on that one
    machine.

    Return
        [classad, ...]
    """
    m = classad.ClassAd(MyType='Machine',
                Name='Error communicating with condor please try again later.')
    machines = [m, ]

    args = [utils.which('condor_status'), '-long']
    if(machine_name):
        args.append(machine_name)
    return(_run_condor_cmd(args, machines, timeout=timeout))
Example #59
def get_host_fingerprint(settings, tws):
    """
    Returns the hash of the given host's public key by making a remote
    connection to the server (not just by looking at known_hosts).
    """
    out_dict = {}  # initialized up front so the missing-'host' branch can populate it
    if 'port' not in settings:
        port = 22
    else:
        port = settings['port']
    if 'host' not in settings:
        out_dict['result'] = _("Error:  You must supply a 'host'.")
        message = {'sshjs_display_fingerprint': out_dict}
        tws.write_message(message)
    else:
        host = settings['host']
    logging.debug("get_host_fingerprint(%s:%s)" % (host, port))
    out_dict = {
        'result': 'Success',
        'host': host,
        'fingerprint': None
    }
    ssh = which('ssh')
    m = tws.new_multiplex(
        '%s -p %s -oUserKnownHostsFile=none -F. %s' % (ssh, port, host),
        'get_host_key',
        logging=False) # Logging is false so we don't make tons of silly logs
    def grab_fingerprint(m_instance, match):
        out_dict['fingerprint'] = match.split()[-1][:-1]
        m_instance.terminate()
        message = {'sshjs_display_fingerprint': out_dict}
        tws.write_message(message)
        del m_instance
    def errorback(m_instance):
        leftovers = [a.rstrip() for a in m_instance.dump() if a.strip()]
        out_dict['result'] = _(
            "Error: Could not determine the fingerprint of %s:%s... '%s'"
            % (host, port, "\n".join(leftovers)))
        m_instance.terminate() # Don't leave stuff hanging around!
        message = {'sshjs_display_fingerprint': out_dict}
        tws.write_message(message)
        del m_instance
    m.expect('.+fingerprint .+$', grab_fingerprint, errorback=errorback)
    m.spawn()