Example #1
0
    def _initialize(self):
        """Set up the S3 connection, resolve the bucket, and start helpers."""
        if boto is None:
            raise Exception(NO_BOTO_ERROR_MESSAGE)

        # Connection settings bundled for the multipart-upload machinery.
        self.s3server = dict(
            access_key=self.access_key,
            secret_key=self.secret_key,
            is_secure=self.is_secure,
            max_chunk_size=self.max_chunk_size,
            host=self.host,
            port=self.port,
            use_rr=self.use_rr,
            conn_path=self.conn_path,
        )

        self._configure_connection()
        self.bucket = self._get_bucket(self.bucket)
        # A cache size of -1 (not set in galaxy.ini) disables cache cleaning.
        if self.cache_size != -1:
            self.cache_size *= 1073741824  # configured in GB; tracked in bytes
            self.sleeper = Sleeper()  # interruptable sleep for the monitor loop
            self.cache_monitor_thread = threading.Thread(target=self.__cache_monitor)
            self.cache_monitor_thread.start()
            log.info("Cache cleaner manager started")
        # Prefer 'axel' for parallel downloads into the cache when on PATH.
        self.use_axel = which('axel') is not None
Example #2
0
    def _initialize(self):
        """Initialise connection state, bucket handle and the cache cleaner."""
        if boto is None:
            raise Exception(NO_BOTO_ERROR_MESSAGE)

        # Settings consumed by the multipart-upload helper.
        self.s3server = {
            'access_key': self.access_key,
            'secret_key': self.secret_key,
            'is_secure': self.is_secure,
            'max_chunk_size': self.max_chunk_size,
            'host': self.host,
            'port': self.port,
            'use_rr': self.use_rr,
            'conn_path': self.conn_path,
        }

        self._configure_connection()
        self.bucket = self._get_bucket(self.bucket)
        # -1 means no cache size configured in galaxy.ini: skip the cleaner.
        if self.cache_size != -1:
            # The configured value is in GB; internal bookkeeping is in bytes.
            self.cache_size = self.cache_size * 2 ** 30
            self.sleeper = Sleeper()  # allows the monitor loop to be interrupted
            monitor = threading.Thread(target=self.__cache_monitor)
            self.cache_monitor_thread = monitor
            monitor.start()
            log.info("Cache cleaner manager started")
        # 'axel' enables parallel downloads when pulling keys into the cache.
        self.use_axel = bool(which('axel'))
Example #3
0
def download_command(url, quote_url=False):
    """Build an argv list that downloads *url* to standard output.

    Prefers ``wget`` when it is on PATH, falling back to ``curl -L``.
    When *quote_url* is true the URL is wrapped in single quotes so the
    resulting list can be embedded in a shell command string.
    """
    if quote_url:
        url = "'%s'" % url
    if which("wget"):
        # BUGFIX: the original used implicit string concatenation ("-O" "-"),
        # producing the single token "-O-". wget accepts that spelling, but it
        # reads as an accident; pass the option and its value separately,
        # which wget treats identically (write to stdout).
        download_cmd = ["wget", "-q", "--recursive", "-O", "-", url]
    else:
        download_cmd = ["curl", "-L", url]
    return download_cmd
def download_command(url, quote_url=False):
    """Return a command list downloading *url* to standard output.

    Uses ``wget`` when available, otherwise ``curl -L``. With *quote_url*
    the URL is single-quoted for later embedding in a shell string.
    """
    if quote_url:
        url = "'%s'" % url
    if which("wget"):
        # BUGFIX: "-O" "-" in the original was implicit string concatenation
        # yielding "-O-"; split into two argv entries ("-O", "-"), which wget
        # interprets the same way (output to stdout).
        download_cmd = ["wget", "-q", "--recursive", "-O", "-", url]
    else:
        download_cmd = ["curl", "-L", url]
    return download_cmd
Example #5
0
def database_app(request):
    """Resolve a parametrised database fixture, skipping when prerequisites are missing."""
    param = request.param
    # postgres needs initdb on PATH to bootstrap a cluster.
    if param == 'postgres_app' and not which('initdb'):
        pytest.skip("initdb must be on PATH for postgresql fixture")
    # rabbitmq tests require an explicitly configured AMQP connection.
    if param == 'sqlite_rabbitmq_app' and not os.environ.get('GALAXY_TEST_AMQP_INTERNAL_CONNECTION'):
        pytest.skip(
            "rabbitmq tests will be skipped if GALAXY_TEST_AMQP_INTERNAL_CONNECTION env var is unset"
        )
    return request.getfixturevalue(param)
    def set_meta(self, dataset, **kwd):
        """Populate audio metadata on *dataset* when ffprobe is available."""
        if not which('ffprobe'):
            return
        metadata, streams = ffprobe(dataset.file_name)

        dataset.metadata.duration = metadata['duration']
        # Collect the audio streams once instead of re-filtering per field.
        audio = [s for s in streams if s['codec_type'] == 'audio']
        dataset.metadata.audio_codecs = [s['codec_name'] for s in audio]
        dataset.metadata.sample_rates = [s['sample_rate'] for s in audio]
        dataset.metadata.audio_streams = len(audio)
Example #7
0
def download_command(url, to=STDOUT_INDICATOR):
    """Build a command line to download a URL.

    By default the URL is streamed to standard output; pass *to* to save
    the download to a specific file instead.
    """
    to_stdout = to == STDOUT_INDICATOR
    if which("wget"):
        if to_stdout:
            tail = ["-O", STDOUT_INDICATOR, url]
        else:
            tail = ["--recursive", "-O", to, url]
        download_cmd = ["wget", "-q"] + tail
    else:
        download_cmd = ["curl", "-L", url]
        if not to_stdout:
            download_cmd += ["-o", to]
    return download_cmd
Example #8
0
def download_command(url, to=STDOUT_INDICATOR, quote_url=False):
    """Build a command line to download a URL.

    The download goes to standard output unless *to* names a file. With
    *quote_url*, the URL (and any target path) are single-quoted so the
    argv list can be joined into a shell command string.
    """
    if quote_url:
        url = "'%s'" % url
        if to != STDOUT_INDICATOR:
            to = "'%s'" % to
    if which("wget"):
        download_cmd = ["wget", "-q"]
        if to == STDOUT_INDICATOR:
            download_cmd += ["-O", STDOUT_INDICATOR, url]
        else:
            download_cmd += ["--recursive", "-O", to, url]
    else:
        download_cmd = ["curl", "-L", url]
        if to != STDOUT_INDICATOR:
            download_cmd += ["-o", to]
    return download_cmd
Example #9
0
def download_command(url, to=STDOUT_INDICATOR, quote_url=False):
    """Assemble an argv list that fetches *url*.

    Streams to standard output by default; a concrete destination can be
    given via *to*. *quote_url* single-quotes the URL and target path for
    embedding in a shell string.
    """
    if quote_url:
        url = "'%s'" % url
        if to != STDOUT_INDICATOR:
            to = "'%s'" % to
    if which("wget"):
        download_cmd = ["wget", "-q"]
        if to == STDOUT_INDICATOR:
            download_cmd.extend(["-O", STDOUT_INDICATOR, url])
        else:
            download_cmd.extend(["--recursive", "-O", to, url])
    else:
        download_cmd = ["curl", "-L", url]
        if to != STDOUT_INDICATOR:
            download_cmd.extend(["-o", to])
    return download_cmd
Example #10
0
    def _initialize(self):
        """Configure the S3 connection, resolve the bucket and start the cache monitor."""
        if boto is None:
            raise Exception(NO_BOTO_ERROR_MESSAGE)

        # Parameters handed to the multipart-upload machinery.
        self.s3server = dict(
            access_key=self.access_key,
            secret_key=self.secret_key,
            is_secure=self.is_secure,
            max_chunk_size=self.max_chunk_size,
            host=self.host,
            port=self.port,
            use_rr=self.use_rr,
            conn_path=self.conn_path,
        )

        self._configure_connection()
        self._bucket = self._get_bucket(self.bucket)
        self.start_cache_monitor()
        # Use 'axel' for parallel downloads into the cache when it is on PATH.
        self.use_axel = which('axel') is not None
    def set_meta(self, dataset, **kwd):
        """Populate audio/video metadata on *dataset* when ffprobe is available."""
        if not which('ffprobe'):
            return
        metadata, streams = ffprobe(dataset.file_name)
        (w, h, fps) = self._get_resolution(streams)
        dataset.metadata.resolution_w = w
        dataset.metadata.resolution_h = h
        dataset.metadata.fps = fps

        # Partition the streams once instead of re-filtering per field.
        audio = [s for s in streams if s['codec_type'] == 'audio']
        video = [s for s in streams if s['codec_type'] == 'video']
        dataset.metadata.audio_codecs = [s['codec_name'] for s in audio]
        dataset.metadata.video_codecs = [s['codec_name'] for s in video]

        dataset.metadata.audio_streams = len(audio)
        dataset.metadata.video_streams = len(video)
Example #12
0
 def __init__(self, config, config_xml):
     """Parse config, connect to S3 and start the optional cache cleaner."""
     if boto is None:
         raise Exception(NO_BOTO_ERROR_MESSAGE)
     super(S3ObjectStore, self).__init__(config)
     self.staging_path = self.config.file_path
     self.transfer_progress = 0
     self._parse_config_xml(config_xml)
     self._configure_connection()
     self.bucket = self._get_bucket(self.bucket)
     # cache_size stays -1 unless set in galaxy.ini; -1 disables cleaning.
     if self.cache_size != -1:
         self.cache_size *= 1073741824  # configured in GB; compare in bytes
         self.sleeper = Sleeper()  # interruptable sleep for the monitor loop
         self.cache_monitor_thread = threading.Thread(target=self.__cache_monitor)
         self.cache_monitor_thread.start()
         log.info("Cache cleaner manager started")
     # 'axel' allows parallel download when pulling a key into the cache.
     self.use_axel = which('axel') is not None
Example #13
0
 def __init__(self, config, config_xml):
     """Initialise the S3 object store and its background cache cleaner."""
     if boto is None:
         raise Exception(NO_BOTO_ERROR_MESSAGE)
     super(S3ObjectStore, self).__init__(config)
     self.staging_path = self.config.file_path
     self.transfer_progress = 0
     self._parse_config_xml(config_xml)
     self._configure_connection()
     self.bucket = self._get_bucket(self.bucket)
     # Only start the cleaner when a cache size was configured in galaxy.ini.
     if self.cache_size != -1:
         # Configured in GB, tracked internally in bytes.
         self.cache_size = self.cache_size * 2 ** 30
         self.sleeper = Sleeper()  # lets the monitor thread sleep interruptably
         monitor = threading.Thread(target=self.__cache_monitor)
         self.cache_monitor_thread = monitor
         monitor.start()
         log.info("Cache cleaner manager started")
     # Prefer 'axel' for parallel key downloads when present on PATH.
     self.use_axel = bool(which('axel'))
    try:
        # get_list_from_file should split the file into one entry per line.
        list_file = os.path.join(test_dir, 'list_file.txt')
        with open(list_file, 'w') as f:
            f.write(
                'bbmap:36.84--0\nbiobambam:2.0.42--0\nconnor:0.5.1--py35_0\ndiamond:0.8.26--0\nedd:1.1.18--py27_0'
            )
        assert get_list_from_file(list_file) == [
            'bbmap:36.84--0', 'biobambam:2.0.42--0', 'connor:0.5.1--py35_0',
            'diamond:0.8.26--0', 'edd:1.1.18--py27_0'
        ]
    finally:
        # Always remove the scratch directory, even when the assertion fails.
        shutil.rmtree(test_dir)


@external_dependency_management
@pytest.mark.skipif(not which('singularity'),
                    reason="requires singularity but singularity not on PATH")
def test_docker_to_singularity(tmp_path):
    """docker_to_singularity should materialise the image file under tmp_path."""
    target_dir = str(tmp_path)
    docker_to_singularity(
        'abundancebin:1.0.1--0', 'singularity', target_dir, no_sudo=True)
    assert tmp_path.joinpath('abundancebin:1.0.1--0').exists()


@external_dependency_management
@pytest.mark.skipif(not which('singularity'),
                    reason="requires singularity but singularity not on PATH")
def test_singularity_container_test(tmp_path):
    test_dir = tempfile.mkdtemp()
Example #15
0
 def check_pykube(self):
     """Report whether Kubernetes job support looks usable.

     Returns True when the Kubernetes runner is configured; otherwise
     returns the result of looking up ``kubectl`` on PATH.
     """
     k8s_runner = "galaxy.jobs.runners.kubernetes:KubernetesJobRunner"
     if k8s_runner in self.job_runners:
         return True
     return which('kubectl')
Example #16
0
 def __init__(self, *args, **kwargs):
     """Record CLI availability, then delegate to the base initializer."""
     # True when the configured executable can be found on PATH.
     cli_path = which(self.cli)
     self._cli_available = bool(cli_path)
     super().__init__(*args, **kwargs)
Example #17
0
def skip_unless_executable(executable):
    """Return a pass-through decorator when *executable* is on PATH.

    When the executable is missing, return a test-skipping decorator
    explaining why.
    """
    if not which(executable):
        return skip("PATH doesn't contain executable %s" % executable)
    return lambda func: func
 def sniff(self, filename):
     # Detect MP3 content by asking ffprobe for the container format; when
     # ffprobe is not on PATH this falls through and implicitly returns None.
     if which('ffprobe'):
         metadata, streams = ffprobe(filename)
         # 'format_name' may be a comma-separated list of format aliases.
         return 'mp3' in metadata['format_name'].split(',')