def _remove_cache_file(self, data_item): if not self.parsed_args.keep_in_cache and os.path.isfile( data_item.cache.relative): self._remove_dvc_path(data_item.cache, 'cache') else: if not self.parsed_args.keep_in_cache: Logger.warn( u'[Cmd-Remove] Unable to find cache file for data item %s' % data_item.data.relative) pass
def _get_data_item(self, target): try: data_item = self.settings.path_factory.existing_data_item(target) except DataItemError: Logger.warn( u'[TraverseFileTree] Data file {} is not valid symbolic link'. format(target)) data_item = self.settings.path_factory.data_item(target) return data_item
def _collect_target(self, target):
    """Collect the data items behind *target*.

    A symlink yields a single data item, a directory is collected
    recursively; anything else is reported and yields nothing.
    """
    if System.islink(target):
        return [self._settings.path_factory.data_item(target)]
    if os.path.isdir(target):
        return self._collect_dir(target)
    Logger.warn('Target "{}" does not exist'.format(target))
    return []
def _remove_cache_file(self, data_item): if not self.parsed_args.keep_in_cache and os.path.isfile( data_item.cache.relative): self._remove_dvc_path(data_item.cache, 'cache') else: if not self.parsed_args.keep_in_cache: msg = u'[Cmd-Remove] Unable to find cache file {} for data item {}' Logger.warn( msg.format(data_item.cache.relative, data_item.data.relative)) pass
def _cloud(self):
    """Build the cloud/remote backend from the core config section.

    Prefers the modern 'remote' setting; falls back to the legacy
    'cloud' setting (with a deprecation warning) and otherwise returns
    None.
    """
    remote_name = self._core.get(Config.SECTION_CORE_REMOTE, '')
    if remote_name != '':
        return self._init_remote(remote_name)

    legacy = self._core.get(Config.SECTION_CORE_CLOUD, None)
    if not legacy:
        return None

    # Legacy 'cloud' key: still honored for backward compatibility.
    Logger.warn('Using obsoleted config format. Consider updating.')
    return self._init_compat()
def _credential_paths(self, default_cred_location): results = [] if self._conf_credpath is not None and len(self._conf_credpath) > 0: credpath = os.path.expanduser(self._conf_credpath) if os.path.isfile(credpath): results.append((credpath, self._conf_credsect)) else: msg = 'AWS CredentialPath {} not found; falling back to default file {} and section {}' Logger.warn(msg.format(credpath, default_cred_location[0], default_cred_location[1])) results.append(default_cred_location) else: results.append(default_cred_location) return results
def _init_cloud(self, cloud_config, cloud_type):
    """Instantiate and sanity-check a cloud backend of *cloud_type*."""
    core_section = self._config[Config.SECTION_CORE]
    storage_path = core_section.get(Config.SECTION_CORE_STORAGEPATH, None)
    if storage_path:
        # Legacy global StoragePath setting is still honored.
        Logger.warn('Using obsoleted config format. Consider updating.')

    settings = CloudSettings(cache=self._cache,
                             global_storage_path=storage_path,
                             cloud_config=cloud_config)
    backend = cloud_type(settings)
    backend.sanity_check()
    return backend
def _collect_target(self, target): """ Collect target as a file or directory. """ if self._settings.path_factory.is_data_item(target): item = self._settings.path_factory.existing_data_item(target) return [item] elif os.path.isdir(target): return self._settings.path_factory.all_existing_data_items(target) Logger.warn('Target "{}" does not exist'.format(target)) return []
def download(self, from_infos, to_infos, no_progress_bar=False, names=None):
    """Download files over SSH.

    Every source must use the 'ssh' scheme. Destinations may be 'ssh'
    (remote-to-remote copy on the same host) or 'local'. Per-file
    failures are logged and skipped rather than raised.

    :param from_infos: source info dicts (scheme/host/user/port/path)
    :param to_infos: destination info dicts, same length as from_infos
    :param no_progress_bar: accepted for interface compatibility
    :param names: optional display names for the progress callback
    """
    names = self._verify_path_args(from_infos, to_infos, names)

    ssh = self.ssh(host=from_infos[0]['host'],
                   user=from_infos[0]['user'],
                   port=from_infos[0]['port'])
    # Open a single SFTP channel lazily and reuse it for every local
    # download; the original opened a fresh, never-closed channel per
    # file via ssh.open_sftp() inside the loop.
    sftp = None

    for to_info, from_info, name in zip(to_infos, from_infos, names):
        if from_info['scheme'] != 'ssh':
            raise NotImplementedError

        if to_info['scheme'] == 'ssh':
            # Remote-to-remote copy is only supported within one host.
            assert from_info['host'] == to_info['host']
            assert from_info['port'] == to_info['port']
            assert from_info['user'] == to_info['user']
            self.cp(from_info, to_info, ssh=ssh)
            continue

        if to_info['scheme'] != 'local':
            raise NotImplementedError

        msg = "Downloading '{}/{}' to '{}'".format(from_info['host'],
                                                   from_info['path'],
                                                   to_info['path'])
        Logger.debug(msg)

        if not name:
            name = os.path.basename(to_info['path'])

        self._makedirs(to_info['path'])
        # Download into a temporary file so a failed transfer never
        # leaves a truncated file at the destination path.
        tmp_file = self.tmp_file(to_info['path'])
        try:
            if sftp is None:
                sftp = ssh.open_sftp()
            sftp.get(from_info['path'], tmp_file, callback=create_cb(name))
        except Exception as exc:
            msg = "Failed to download '{}/{}' to '{}'"
            Logger.warn(msg.format(from_info['host'], from_info['path'],
                                   to_info['path']), exc)
            continue

        os.rename(tmp_file, to_info['path'])
        progress.finish_target(name)

    if sftp is not None:
        sftp.close()
    ssh.close()
def run(self):
    """Show the workflow graph for the configured target.

    Resolves the target from CLI args first, then from the config's
    target file (reading its contents), defaulting to an empty target
    when that file does not exist.

    :return: 0 on success.
    """
    target = self.settings.parsed_args.target
    if not target:
        target = self.settings.config.target_file
        if not os.path.exists(target):
            Logger.warn(u'Target is not defined: use empty target')
            target = ''
        else:
            # Fix: the original `open(target).read()` leaked the file
            # handle; close it deterministically.
            with open(target) as fobj:
                target = fobj.read()

    Logger.debug(u'Set show workflow target as {}'.format(target))

    wf = self.git.get_all_commits(target, self.settings)
    wf.build_graph()
    return 0
def credential_paths(self, default):
    """Return candidate paths to AWS credential files.

    Prefers the configured 'CredentialPath' when it points at an
    existing file; otherwise warns (if one was configured at all) and
    falls back to *default*.
    """
    credpath = self._cloud_settings.cloud_config.get('CredentialPath', None)
    # Truthiness covers both None and the empty string, replacing the
    # verbose `is not None and len(...) > 0` check; the fallback append
    # that was duplicated across two branches is now a single return.
    if credpath:
        credpath = os.path.expanduser(credpath)
        if os.path.isfile(credpath):
            return [credpath]
        Logger.warn(
            'AWS CredentialPath "%s" not found; falling back to default "%s"'
            % (credpath, default))
    return [default]
def get_aws_credentials(self):
    """Look up AWS credentials.

    Search order:
      1. any override in dvc.conf [AWS] CredentialPath;
      2. ~/.aws/credentials

    :return: (access_key_id, secret_access_key) if found, None otherwise.
    """
    default = os.path.expanduser('~/.aws/credentials')

    paths = []
    credpath = self._config['AWS'].get('CredentialPath', None)
    if credpath:  # truthiness covers both None and ''
        credpath = os.path.expanduser(credpath)
        if os.path.isfile(credpath):
            paths.append(credpath)
        else:
            Logger.warn('AWS CredentialPath "%s" not found; '
                        'falling back to default "%s"' % (credpath, default))
            paths.append(default)
    else:
        paths.append(default)

    for path in paths:
        # Fix: SafeConfigParser was removed in Python 3.12 and readfp is
        # deprecated; ConfigParser.read_file is the supported equivalent.
        # `with` also closes the handle the original leaked.
        cc = configparser.ConfigParser()
        try:
            with open(path, 'r') as fobj:
                cc.read_file(fobj)
        except Exception:
            # Unreadable/missing file: fall through to the next candidate.
            continue

        if 'default' in cc.keys():
            access_key = cc['default'].get('aws_access_key_id', None)
            secret = cc['default'].get('aws_secret_access_key', None)
            if access_key is not None and secret is not None:
                return (access_key, secret)

    return None
def remove_from_cloud(self, aws_file_name):
    """Delete *aws_file_name* from the configured S3 bucket, best-effort."""
    Logger.debug(u'[Cmd-Remove] Remove from cloud {}.'.format(aws_file_name))

    access_key = self.config.aws_access_key_id
    secret_key = self.config.aws_secret_access_key
    if not access_key or not secret_key:
        # Without credentials we cannot even check the remote copy.
        Logger.debug('[Cmd-Remove] Unable to check cache file in the cloud')
        return

    conn = S3Connection(access_key, secret_key)
    bucket = conn.lookup(self.config.storage_bucket)
    if not bucket:
        return

    key = bucket.get_key(aws_file_name)
    if not key:
        Logger.warn('[Cmd-Remove] S3 remove warning: file "{}" does not exist in S3'.format(aws_file_name))
        return

    key.delete()
    Logger.info('[Cmd-Remove] File "{}" was removed from S3'.format(aws_file_name))
def __init__(self, cache=None, config=None):
    """Initialize, resolving the cloud backend from the core config.

    Prefers the 'remote' setting; falls back to the legacy 'cloud'
    setting (with a warning) and otherwise leaves the cloud unset.
    """
    self._cache = cache
    self._config = config

    core = self._config[Config.SECTION_CORE]
    remote = core.get(Config.SECTION_CORE_REMOTE, '')
    if remote != '':
        self._cloud = self._init_remote(remote)
        return

    if core.get(Config.SECTION_CORE_CLOUD, None):
        # backward compatibility with the legacy 'cloud' key
        Logger.warn('Using obsoleted config format. Consider updating.')
        # NOTE(review): unusual name — inside the class this mangles to
        # _<Class>__init__compat; confirm it matches the method actually
        # defined there (a sibling version calls _init_compat instead).
        self._cloud = self.__init__compat()
    else:
        self._cloud = None
def storage_path(self):
    """Return the storage path.

    Precedence: global storage path, then (when no url is set) the
    cloud config's legacy StoragePath (with a warning), otherwise this
    cloud's own path.
    """
    global_path = self._cloud_settings.global_storage_path
    if global_path:
        return global_path

    if self.url:
        return self.path

    path = self._cloud_settings.cloud_config.get(
        Config.SECTION_CORE_STORAGEPATH, None)
    if path:
        # Legacy per-cloud StoragePath is still honored.
        Logger.warn('Using obsoleted config format. Consider updating.')
    return path
def credential_paths(self, default):
    """Return candidate paths to AWS credential files.

    Prefers the configured CredentialPath when it points at an existing
    file; otherwise warns (if one was configured at all) and falls back
    to *default*.
    """
    credpath = self._cloud_settings.cloud_config.get(
        Config.SECTION_AWS_CREDENTIALPATH, None)
    # Truthiness covers both None and the empty string.
    if credpath:
        credpath = os.path.expanduser(credpath)
        if os.path.isfile(credpath):
            return [credpath]
        # Fix: the original implicit string concatenation dropped the
        # space between "not found;" and "falling back", producing
        # 'not found;falling back' in the log.
        Logger.warn('AWS CredentialPath "%s" not found; '
                    'falling back to default "%s"' % (credpath, default))
    return [default]