def _read_commander_init_status_file(self):
        commander_file_path = self._get_commander_file_path()

        local_file_name = _tempfile.mktemp(prefix='dml_file_')
        try:
            if _file_util.is_hdfs_path(commander_file_path):
                _file_util.download_from_hdfs(
                    commander_file_path,
                    local_file_name,
                    hadoop_conf_dir = self.environment.hadoop_conf_dir)
            elif _file_util.is_s3_path(commander_file_path):
                _file_util.download_from_s3(
                    commander_file_path,
                    local_file_name,
                    aws_credentials = self.environment.get_credentials(),
                    silent = True)

            with open(local_file_name,'r') as f:
                status_json = _json.load(f)
                port = status_json['port']
                host_name = status_json['host_name']

            if port > 0:
                return 'http://%s:%s' % (host_name, port)
            else:
                return None
        except Exception:
            # Ignore the exception; we will fail after a few retries.
            pass
        finally:
            if _os.path.exists(local_file_name):
                _os.remove(local_file_name)
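For context, the commander init status file read above is a small JSON document with a host_name and a port field. A minimal, standalone sketch of the parsing step (the field values below are made up):

# Hypothetical status file content; only 'host_name' and 'port' matter here.
import json

status_json = json.loads('{"host_name": "ec2-10-0-0-1.compute.internal", "port": 9005}')
# Mirrors the URL construction above: a positive port yields the commander URL.
url = 'http://%s:%s' % (status_json['host_name'], status_json['port']) if status_json['port'] > 0 else None
print(url)   # http://ec2-10-0-0-1.compute.internal:9005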
Example 2
    def _load_remote(cls, path, schema_version, aws_credentials={}):
        temp_dir = _gl.util._make_temp_filename(prefix='predictive_policy_')

        if _file_util.is_s3_path(path):
            _file_util.download_from_s3(path, temp_dir, is_dir=True,
                              aws_credentials=aws_credentials, silent=True)
        elif _file_util.is_hdfs_path(path):
            _file_util.download_from_hdfs(path, temp_dir, is_dir=True)
        else:
            # A plain string assert always passes; raise so unsupported paths fail loudly.
            raise RuntimeError('Only S3 and HDFS paths are supported as the Predictive Object saving location!')

        return cls._load_local(temp_dir)
    def _load_remote(cls, path, schema_version, aws_credentials={}):
        temp_dir = _tempfile.mkdtemp(prefix='predictive_object_')

        if fu.is_s3_path(path):
            fu.download_from_s3(path, temp_dir, is_dir=(schema_version > 2),
                              aws_credentials=aws_credentials)
        elif fu.is_hdfs_path(path):
            fu.download_from_hdfs(path, temp_dir, is_dir=(schema_version > 2))
        else:
            # A plain string assert always passes; raise so unsupported paths fail loudly.
            raise RuntimeError('Only S3 and HDFS paths are supported as the Predictive Object saving location!')

        return cls._load_local(temp_dir)
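Both _load_remote variants above dispatch on the path scheme before downloading. The helpers below are a minimal, standalone sketch of that dispatch; they only mirror, and do not call, _file_util.is_s3_path and _file_util.is_hdfs_path:

# Standalone sketch of the scheme dispatch used by _load_remote; for illustration only.
def _is_s3_path(path):
    return path.startswith('s3://')

def _is_hdfs_path(path):
    return path.startswith('hdfs://')

for path in ('s3://bucket/predictive_object', 'hdfs://namenode/predictive_object', '/local/predictive_object'):
    if _is_s3_path(path):
        print(path, '-> download_from_s3')
    elif _is_hdfs_path(path):
        print(path, '-> download_from_hdfs')
    else:
        print(path, '-> rejected: only S3 and HDFS paths are supported')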
Example 4
    def _load_file_and_parse(self, file_name, parser_func, silent=False, test_url=True):
        '''
        Read a remote file into a local temporary file, parse its content
        with parser_func, and return the parsed result.

        This function is used for parsing state and progress files from
        local, S3, or HDFS paths.

        If any exception happens, returns None.
        '''
        file_is_local = _file_util.is_local_path(file_name)
        local_file_name = file_name if file_is_local else _tempfile.mktemp(prefix='job-status-')

        try:
            try:
                if test_url and not self._test_url(file_name):
                    if not silent:
                        __LOGGER__.info("File %s is not available yet." % file_name)
                    return None

                if _file_util.is_hdfs_path(file_name):

                    _file_util.download_from_hdfs(
                        hdfs_path = file_name,
                        local_path = local_file_name,
                        hadoop_conf_dir=self.environment.hadoop_conf_dir)

                elif _file_util.is_s3_path(file_name):

                    _file_util.download_from_s3(
                        s3_path = file_name,
                        local_path = local_file_name,
                        is_dir = False,
                        aws_credentials = self.environment.ec2_config.get_credentials(),
                        silent = silent)

            except Exception as e:
                # It is OK if the status file is not ready yet, since the job is still being prepared
                if not silent:
                    __LOGGER__.warning("Exception encountered when trying to download file from %s, error: %s" % (file_name, e))
                return None

            try:
                # parse the local file
                return parser_func(local_file_name)
            except Exception as e:
                __LOGGER__.info("Exception when parsing file %s. Error: %s" % (file_name, e))
                return None
        finally:
            if (not file_is_local) and _os.path.exists(local_file_name):
                _os.remove(local_file_name)
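_load_file_and_parse leaves the actual parsing to the parser_func argument, which receives a local file path and returns the parsed value (or raises). A hedged sketch of two such callables; the call sites in the comments assume a job object exposing the method above:

import json

def _parse_status_json(local_path):
    # Parse a JSON status file into a dict.
    with open(local_path, 'r') as f:
        return json.load(f)

def _parse_progress_lines(local_path):
    # Return the progress file as a list of non-empty, stripped lines.
    with open(local_path, 'r') as f:
        return [line.strip() for line in f if line.strip()]

# Hypothetical call sites (names are assumptions, not taken from the snippet):
#   state = job._load_file_and_parse(state_file_path, _parse_status_json)
#   progress = job._load_file_and_parse(progress_file_path, _parse_progress_lines, silent=True)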
def _copy_predictive_object_files(source_path, target_path, is_dir, src_credentials, tgt_credentials):
    '''
    Copy either file or folder from source location to target location
    '''
    # Clean up the existing target path if it exists
    if _file_util.is_local_path(target_path) and _os.path.exists(target_path):
        if _os.path.isdir(target_path):
            _shutil.rmtree(target_path)
        else:
            _os.remove(target_path)

    if _file_util.is_s3_path(source_path) and _file_util.is_s3_path(target_path):

        # compare credentials
        _check_aws_credentials(src_credentials, tgt_credentials, source_path)

        # intra s3 copy model
        _file_util.intra_s3_copy_model(source_path, target_path, is_dir, tgt_credentials)
    elif _file_util.is_local_path(source_path):

        _file_util.copy_from_local(source_path, target_path, is_dir = is_dir)

    else:
        tmp_dir = _tempfile.mkdtemp(prefix = 'copy_predictive_object')
        try:
            # download to local first
            local_path = _os.path.join(tmp_dir, 'temp_po_file')
            if _file_util.is_s3_path(source_path):
                _file_util.download_from_s3(
                    source_path,
                    local_path,
                    is_dir=is_dir,
                    aws_credentials=src_credentials,
                    silent=False)
            elif _file_util.is_hdfs_path(source_path):
                _file_util.download_from_hdfs(source_path, local_path, is_dir = is_dir)
            else:
                raise RuntimeError('Unsupported file system type: %s' % source_path)

            # upload from local to remote
            if _file_util.is_s3_path(target_path):
                _file_util.upload_to_s3(local_path, target_path, is_dir=is_dir,
                    aws_credentials=tgt_credentials, silent=False)
            elif _file_util.is_hdfs_path(target_path):
                _file_util.hdfs_mkdir(target_path)
                _file_util.upload_to_hdfs(local_path, target_path, force=True, silent=False)
            else:
                _file_util.upload_to_local(local_path, target_path, is_dir=is_dir, silent=False)

        finally:
            _shutil.rmtree(tmp_dir)
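A hedged usage sketch of _copy_predictive_object_files; the bucket, HDFS namenode, and credential values are placeholders, not values taken from the snippet:

# Hypothetical S3 -> HDFS copy of a directory-based Predictive Object: the
# function downloads to a local temp dir, then uploads to the HDFS target.
src_credentials = {'aws_access_key_id': '<key id>', 'aws_secret_access_key': '<secret>'}
tgt_credentials = {}   # not needed for an HDFS target

_copy_predictive_object_files(
    source_path='s3://example-bucket/predictive_objects/my_model',
    target_path='hdfs://namenode:8020/user/deploy/my_model',
    is_dir=True,
    src_credentials=src_credentials,
    tgt_credentials=tgt_credentials)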
Example 7
    def _download_remote_folder_to_local(self, remote_path, silent=False):
        '''
        Download all files from the remote path to a local temporary folder.
        The caller is responsible for cleaning up the folder after use.

        Returns the local temporary folder.
        '''
        local_path = _tempfile.mkdtemp(prefix='job-results')

        try:
            if _file_util.is_hdfs_path(remote_path):

                _file_util.download_from_hdfs(
                    hdfs_path = remote_path,
                    local_path = local_path,
                    is_dir = True,
                    hadoop_conf_dir=self.environment.hadoop_conf_dir)

            elif _file_util.is_s3_path(remote_path):

                _file_util.download_from_s3(
                    s3_path = remote_path,
                    local_path = local_path,
                    is_dir = True,
                    aws_credentials = self.environment.ec2_config.get_credentials(),
                    silent = silent)
            else:
                raise RuntimeError("'%s' is not a supported remote path. Only S3 and HDFS"
                                    " remote paths are supported." % remote_path)
        except:
            # Make sure we clean up local files if the download fails
            if _os.path.isdir(local_path):
                _shutil.rmtree(local_path)

            raise

        return local_path
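As the docstring notes, the caller owns the returned temp folder. A hedged usage sketch; 'job' stands in for whatever object exposes the method above, and the remote path is a placeholder:

local_results = job._download_remote_folder_to_local(
    'hdfs://namenode:8020/jobs/1234/results', silent=True)   # hypothetical path
try:
    # Work with the downloaded files, then clean up as required by the docstring.
    for name in _os.listdir(local_results):
        print(name)
finally:
    _shutil.rmtree(local_results)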