def execCommand(args, following_args):
    ''' Link a globally-linked target into the current module's
        yotta_targets directory (when args.link_target is given), or link
        the current directory's target definition into the global install
        location otherwise. Returns 1 on validation failure.
    '''
    if args.link_target:
        # second half of the link: global target -> this module
        c = validate.currentDirectoryModule()
        if not c:
            return 1
        err = validate.targetNameValidationError(args.link_target)
        if err:
            logging.error(err)
            return 1
        fsutils.mkDirP(os.path.join(os.getcwd(), 'yotta_targets'))
        src = os.path.join(folders.globalTargetInstallDirectory(), args.link_target)
        dst = os.path.join(os.getcwd(), 'yotta_targets', args.link_target)
        # if the target is already installed, rm it
        fsutils.rmRf(dst)
    else:
        # first half of the link: current target dir -> global install dir
        t = validate.currentDirectoryTarget()
        if not t:
            return 1
        fsutils.mkDirP(folders.globalTargetInstallDirectory())
        src = os.getcwd()
        dst = os.path.join(folders.globalTargetInstallDirectory(), t.getName())
    if args.link_target:
        realsrc = fsutils.realpath(src)
        if src == realsrc:
            # src is not itself a symlink, so the global half of the link
            # does not exist yet: the link created below will dangle
            logging.warning(
                ('%s -> %s -> ' % (dst, src)) + colorama.Fore.RED + 'BROKEN' + colorama.Fore.RESET #pylint: disable=no-member
            )
        else:
            logging.info('%s -> %s -> %s' % (dst, src, realsrc))
    else:
        logging.info('%s -> %s' % (dst, src))
    # NOTE(review): unlike sibling variants there is no try/except around
    # this call, so a failed symlink raises -- confirm that is intended
    fsutils.symlink(src, dst)
def unpackFromCache(cache_key, to_directory):
    ''' If the specified cache key exists, unpack the tarball into the
        specified directory, otherwise raise NotInCache (a KeyError
        subclass).
    '''
    if cache_key is None:
        raise NotInCache('"None" is never in cache')
    cache_dir = folders.cacheDirectory()
    fsutils.mkDirP(cache_dir)
    path = os.path.join(cache_dir, cache_key)
    logger.debug('attempt to unpack from cache %s -> %s', path, to_directory)
    try:
        unpackFrom(path, to_directory)
        try:
            # also copy the origin-info sidecar into the unpacked module
            shutil.copy(path + '.json', os.path.join(to_directory, '.yotta_origin.json'))
        except IOError as e:
            # a missing origin file is fine: not every cached tarball has one
            if e.errno == errno.ENOENT:
                pass
            else:
                raise
        cache_logger.debug('unpacked %s from cache into %s', cache_key, to_directory)
        return
    except IOError as e:
        # a missing tarball is a cache miss
        if e.errno == errno.ENOENT:
            cache_logger.debug('%s not in cache', cache_key)
            raise NotInCache('not in cache')
        # NOTE(review): IOErrors other than ENOENT fall through and are
        # silently swallowed here -- possibly unintended; confirm
def pruneCache():
    ''' Prune the cache: remove the least-recently-modified cached tarballs
        so that at most getMaxCachedModules() remain. '.json' origin
        sidecars and '.locked' in-progress downloads are never pruned
        directly.
    '''
    cache_dir = folders.cacheDirectory()
    def fullpath(f):
        return os.path.join(cache_dir, f)
    def getMTimeSafe(f):
        # it's possible that another process removed the file before we stat
        # it, handle this gracefully
        try:
            return os.stat(f).st_mtime
        except FileNotFoundError:
            import time
            # fix: time.clock() was removed in Python 3.8 (and measured CPU
            # time, never comparable to an st_mtime). Report "now" so the
            # vanished file sorts newest-first and is skipped by the
            # [max_cached_modules:] slice below.
            return time.time()
    # ensure cache exists
    fsutils.mkDirP(cache_dir)
    max_cached_modules = getMaxCachedModules()
    # sort newest-first, then remove everything beyond the allowed count
    for f in sorted(
            [f for f in os.listdir(cache_dir)
                if os.path.isfile(fullpath(f)) and
                   not f.endswith('.json') and
                   not f.endswith('.locked')],
            key=lambda f: getMTimeSafe(fullpath(f)),
            reverse=True
        )[max_cached_modules:]:
        cache_logger.debug('cleaning up cache file %s', f)
        removeFromCache(f)
    cache_logger.debug('cache pruned to %s items', max_cached_modules)
def pruneCache():
    ''' Prune the cache: remove the least-recently-modified cached tarballs
        so that at most getMaxCachedModules() remain. '.json' origin
        sidecars are never pruned directly.
    '''
    cache_dir = folders.cacheDirectory()
    def fullpath(f):
        return os.path.join(cache_dir, f)
    def getMTimeSafe(f):
        # it's possible that another process removed the file before we stat
        # it, handle this gracefully
        try:
            return os.stat(f).st_mtime
        except FileNotFoundError:
            import time
            # fix: time.clock() was removed in Python 3.8 (and measured CPU
            # time, never comparable to an st_mtime). Report "now" so the
            # vanished file sorts newest-first and is skipped by the
            # [max_cached_modules:] slice below.
            return time.time()
    # ensure cache exists
    fsutils.mkDirP(cache_dir)
    max_cached_modules = getMaxCachedModules()
    # sort newest-first, then remove everything beyond the allowed count
    for f in sorted(
            [f for f in os.listdir(cache_dir)
                if os.path.isfile(fullpath(f)) and not f.endswith('.json')],
            key = lambda f: getMTimeSafe(fullpath(f)),
            reverse = True
        )[max_cached_modules:]:
        cache_logger.debug('cleaning up cache file %s', f)
        removeFromCache(f)
    cache_logger.debug('cache pruned to %s items', max_cached_modules)
def unpackFromCache(cache_key, to_directory):
    ''' If the specified cache key exists, unpack the tarball into the
        specified directory, otherwise raise KeyError. '''
    if cache_key is None:
        raise KeyError('"None" is never in cache')
    cache_dir = folders.cacheDirectory()
    fsutils.mkDirP(cache_dir)
    path = os.path.join(cache_dir, cache_key)
    logger.debug('attempt to unpack from cache %s -> %s', path, to_directory)
    try:
        unpackFrom(path, to_directory)
        try:
            # also copy the origin-info sidecar into the unpacked module
            shutil.copy(path + '.json', os.path.join(to_directory, '.yotta_origin.json'))
        except IOError as e:
            # a missing origin file is fine: not every cached tarball has one
            if e.errno == errno.ENOENT:
                pass
            else:
                raise
        cache_logger.debug('unpacked %s from cache into %s', cache_key, to_directory)
        return
    except IOError as e:
        # a missing tarball is a cache miss
        if e.errno == errno.ENOENT:
            cache_logger.debug('%s not in cache', cache_key)
            raise KeyError('not in cache')
        # NOTE(review): IOErrors other than ENOENT fall through and are
        # silently swallowed here -- possibly unintended; confirm
def setUp(self):
    ''' Create a fresh temporary directory populated with the Test_Files
        fixture (relative path -> file contents). '''
    self.test_dir = tempfile.mkdtemp()
    for relpath, body in Test_Files.items():
        subdir, leaf = os.path.split(relpath)
        target_dir = os.path.join(self.test_dir, subdir)
        mkDirP(target_dir)
        with open(os.path.join(target_dir, leaf), 'w') as out:
            out.write(body)
def writeTestFiles(files):
    ''' Write a mapping of relative-path -> contents into a new temporary
        directory and return that directory's path. '''
    sandbox = tempfile.mkdtemp()
    for relpath, body in files.items():
        subdir, leaf = os.path.split(relpath)
        dest_dir = os.path.join(sandbox, subdir)
        mkDirP(dest_dir)
        with open(os.path.join(dest_dir, leaf), 'w') as out:
            out.write(body)
    return sandbox
def writeTestFiles(self, files, add_space_in_path=False):
    ''' Write a mapping of relative-path -> contents into a new temporary
        directory (optionally one whose name contains spaces, to exercise
        path-quoting) and return that directory's path. '''
    sandbox = tempfile.mkdtemp()
    if add_space_in_path:
        sandbox += ' spaces in path'
    for relpath, body in files.items():
        subdir, leaf = os.path.split(relpath)
        dest_dir = os.path.join(sandbox, subdir)
        mkDirP(dest_dir)
        with open(os.path.join(dest_dir, leaf), 'w') as out:
            out.write(body)
    return sandbox
def _downloadToCache(stream, hashinfo=None, origin_info=None):
    ''' Download the specified stream to a temporary cache directory, and
        returns a cache key that can be used to access/remove the file.
        You should use either removeFromCache(cache_key) or _moveCachedFile
        to move the downloaded file to a known key after downloading.
    '''
    # fix: the defaults were the mutable objects {} / dict(), which Python
    # shares across every call to the function; use None sentinels instead
    if hashinfo is None:
        hashinfo = {}
    if origin_info is None:
        origin_info = {}
    hash_name = None
    hash_value = None
    m = None
    if hashinfo:
        # check for hashes in preferred order. Currently this is just sha256
        # (which the registry uses). Initial investigations suggest that github
        # doesn't return a header with the hash of the file being downloaded.
        for h in ('sha256', ):
            if h in hashinfo:
                hash_name = h
                hash_value = hashinfo[h]
                m = getattr(hashlib, h)()
                break
        if not hash_name:
            logger.warning('could not find supported hash type in %s', hashinfo)
    cache_dir = folders.cacheDirectory()
    fsutils.mkDirP(cache_dir)
    file_size = 0
    # the '.locked' suffix marks an in-progress download so it is not pruned
    (download_file, download_fname) = tempfile.mkstemp(dir=cache_dir, suffix='.locked')
    with os.fdopen(download_file, 'wb') as f:
        f.seek(0)
        for chunk in stream.iter_content(4096):
            f.write(chunk)
            if hash_name:
                m.update(chunk)
        if hash_name:
            calculated_hash = m.hexdigest()
            logger.debug('calculated %s hash: %s check against: %s' % (hash_name, calculated_hash, hash_value))
            if hash_value and (hash_value != calculated_hash):
                raise Exception('Hash verification failed.')
        file_size = f.tell()
        logger.debug('wrote tarfile of size: %s to %s', file_size, download_fname)
        f.truncate()
    # record origin + hash + size in a sidecar json next to the download
    extended_origin_info = {'hash': hashinfo, 'size': file_size}
    extended_origin_info.update(origin_info)
    ordered_json.dump(download_fname + '.json', extended_origin_info)
    return os.path.basename(download_fname)
def writeTestFiles(files, add_space_in_path=False):
    ''' write a dictionary of filename:contents into a new temporary directory '''
    sandbox = tempfile.mkdtemp()
    if add_space_in_path:
        # exercise path-quoting by putting spaces in the directory name
        sandbox = sandbox + ' spaces in path'
    for relpath, body in files.items():
        subdir, leaf = os.path.split(relpath)
        dest_dir = os.path.join(sandbox, subdir)
        fsutils.mkDirP(dest_dir)
        with open(os.path.join(dest_dir, leaf), 'w') as out:
            out.write(body)
    return sandbox
def unpackFrom(tar_file_path, to_directory):
    ''' Extract a tarball into to_directory, stripping the single top-level
        directory the archive is expected to contain. Extraction happens
        into a temporary sibling directory first and is then moved into
        place, so a partially-extracted archive never appears at
        to_directory. Raises ValueError for unsafe or multi-module
        archives.
    '''
    # first unpack into a sibling directory of the specified directory, and
    # then move it into place.
    # we expect our tarballs to contain a single top-level directory. We strip
    # off this name as we extract to minimise the path length
    into_parent_dir = os.path.dirname(to_directory)
    fsutils.mkDirP(into_parent_dir)
    temp_directory = tempfile.mkdtemp(dir=into_parent_dir)
    try:
        with tarfile.open(tar_file_path) as tf:
            strip_dirname = ''
            # get the extraction directory name from the first part of the
            # extraction paths: it should be the same for all members of
            # the archive
            for m in tf.getmembers():
                split_path = fsutils.fullySplitPath(m.name)
                logger.debug('process member: %s %s', m.name, split_path)
                # reject path-traversal: absolute paths or '..' components
                if os.path.isabs(m.name) or '..' in split_path:
                    raise ValueError('archive uses invalid paths')
                if not strip_dirname:
                    # the first member must be the single top-level directory
                    if len(split_path) != 1 or not len(split_path[0]):
                        raise ValueError(
                            'archive does not appear to contain a single module'
                        )
                    strip_dirname = split_path[0]
                    continue
                else:
                    if split_path[0] != strip_dirname:
                        raise ValueError(
                            'archive does not appear to contain a single module'
                        )
                    # strip the top-level directory from the member's path
                    m.name = os.path.join(*split_path[1:])
                    tf.extract(m, path=temp_directory)
        # make sure the destination directory doesn't exist:
        fsutils.rmRf(to_directory)
        shutil.move(temp_directory, to_directory)
        # ownership transferred: stop the finally-block from deleting it
        temp_directory = None
        logger.debug('extraction complete %s', to_directory)
    except IOError as e:
        if e.errno != errno.ENOENT:
            # the tarball exists but could not be read: it is presumably
            # corrupt, so remove it before re-raising
            logger.error('failed to extract tarfile %s', e)
            fsutils.rmF(tar_file_path)
        raise
    finally:
        if temp_directory is not None:
            # if anything has failed, cleanup
            fsutils.rmRf(temp_directory)
def write(self, filename=None):
    ''' Persist settings: the named config file, or (when filename is None)
        the first config returned by self._firstConfig(). Raises ValueError
        for a filename not in self.configs. Filesystem failures are logged
        rather than raised.
    '''
    # fsutils, , misc filesystem utils, internal
    from yotta.lib import fsutils
    if filename is None:
        filename, data = self._firstConfig()
    elif filename in self.configs:
        data = self.configs[filename]
    else:
        raise ValueError('No such file.')
    dirname = os.path.normpath(os.path.dirname(filename))
    logging.debug('write settings to "%s" (will ensure directory "%s" exists)', filename, dirname)
    try:
        fsutils.mkDirP(dirname)
        ordered_json.dump(filename, data)
    except OSError as e:
        # NOTE(review): 'e' is unused and the failure is swallowed after
        # logging -- presumably settings persistence is best-effort; confirm
        logging.error('Failed to save user settings to %s/%s, please check that the path exists and is writable.', dirname, filename)
def unpackFrom(tar_file_path, to_directory):
    ''' Extract a tarball into to_directory, stripping the single top-level
        directory the archive is expected to contain. Extraction happens
        into a temporary sibling directory first and is then moved into
        place, so a partially-extracted archive never appears at
        to_directory. Raises ValueError for unsafe or multi-module
        archives.
    '''
    # first unpack into a sibling directory of the specified directory, and
    # then move it into place.
    # we expect our tarballs to contain a single top-level directory. We strip
    # off this name as we extract to minimise the path length
    into_parent_dir = os.path.dirname(to_directory)
    fsutils.mkDirP(into_parent_dir)
    temp_directory = tempfile.mkdtemp(dir=into_parent_dir)
    try:
        with tarfile.open(tar_file_path) as tf:
            strip_dirname = ''
            # get the extraction directory name from the first part of the
            # extraction paths: it should be the same for all members of
            # the archive
            for m in tf.getmembers():
                split_path = fsutils.fullySplitPath(m.name)
                logger.debug('process member: %s %s', m.name, split_path)
                # reject path-traversal: absolute paths or '..' components
                if os.path.isabs(m.name) or '..' in split_path:
                    raise ValueError('archive uses invalid paths')
                if not strip_dirname:
                    # the first member must be the single top-level directory
                    if len(split_path) != 1 or not len(split_path[0]):
                        raise ValueError('archive does not appear to contain a single module')
                    strip_dirname = split_path[0]
                    continue
                else:
                    if split_path[0] != strip_dirname:
                        raise ValueError('archive does not appear to contain a single module')
                    # strip the top-level directory from the member's path
                    m.name = os.path.join(*split_path[1:])
                    tf.extract(m, path=temp_directory)
        # make sure the destination directory doesn't exist:
        fsutils.rmRf(to_directory)
        shutil.move(temp_directory, to_directory)
        # ownership transferred: stop the finally-block from deleting it
        temp_directory = None
        logger.debug('extraction complete %s', to_directory)
    except IOError as e:
        if e.errno != errno.ENOENT:
            # the tarball exists but could not be read: it is presumably
            # corrupt, so remove it before re-raising
            logger.error('failed to extract tarfile %s', e)
            fsutils.rmF(tar_file_path)
        raise
    finally:
        if temp_directory is not None:
            # if anything has failed, cleanup
            fsutils.rmRf(temp_directory)
def pruneCache():
    ''' Prune the cache: remove the least-recently-modified cached tarballs
        so that at most getMaxCachedModules() remain. '.json' origin
        sidecars are never pruned directly.
    '''
    cache_dir = folders.cacheDirectory()
    def fullpath(f):
        return os.path.join(cache_dir, f)
    def getMTimeSafe(f):
        # fix: another process may remove a cache file between listdir()
        # and stat(), which previously crashed pruning with
        # FileNotFoundError (the other pruneCache variants in this codebase
        # already guard against this). Report "now" so the vanished file
        # sorts newest-first and is skipped by the pruning slice.
        try:
            return os.stat(f).st_mtime
        except FileNotFoundError:
            import time
            return time.time()
    # ensure cache exists
    fsutils.mkDirP(cache_dir)
    max_cached_modules = getMaxCachedModules()
    # sort newest-first, then remove everything beyond the allowed count
    for f in sorted(
            [f for f in os.listdir(cache_dir)
                if os.path.isfile(fullpath(f)) and not f.endswith('.json')],
            key = lambda f: getMTimeSafe(fullpath(f)),
            reverse = True
        )[max_cached_modules:]:
        cache_logger.debug('cleaning up cache file %s', f)
        removeFromCache(f)
    cache_logger.debug('cache pruned to %s items', max_cached_modules)
def write(self, filename=None):
    ''' Persist settings: the named config file, or (when filename is None)
        the first config returned by self._firstConfig(). Raises ValueError
        for a filename not in self.configs. Filesystem failures are logged
        rather than raised.
    '''
    # fsutils, , misc filesystem utils, internal
    from yotta.lib import fsutils
    if filename is None:
        filename, data = self._firstConfig()
    elif filename in self.configs:
        data = self.configs[filename]
    else:
        raise ValueError('No such file.')
    dirname = os.path.normpath(os.path.dirname(filename))
    logging.debug(
        'write settings to "%s" (will ensure directory "%s" exists)',
        filename, dirname)
    try:
        fsutils.mkDirP(dirname)
        ordered_json.dump(filename, data)
    except OSError as e:
        # NOTE(review): 'e' is unused and the failure is swallowed after
        # logging -- presumably settings persistence is best-effort; confirm
        logging.error(
            'Failed to save user settings to %s/%s, please check that the path exists and is writable.',
            dirname, filename)
def test_component_init(self):
    ''' A Component must be constructible from an on-disk module.json,
        exposing its name, version, and ordered dependencies -- all without
        any network access. '''
    # start from a clean fixture directory
    try:
        shutil.rmtree(testdir)
    except OSError:
        pass
    mkDirP(testdir)
    with open(os.path.join(testdir, 'module.json'), 'w') as module_file:
        module_file.write(self.test_json)
    parsed = component.Component(testdir)
    self.assertTrue(parsed)
    self.assertEqual(parsed.getName(), 'yottos')
    self.assertEqual(str(parsed.getVersion()), '0.0.7')
    dependencies = parsed.getDependencies()
    self.assertEqual(list(dependencies.keys()), self.deps_in_order)
def unpackFromCache(cache_key, to_directory):
    ''' If the specified cache key exists, unpack the tarball into the
        specified directory, otherwise raise NotInCache (a KeyError
        subclass).
    '''
    if cache_key is None:
        raise NotInCache('"None" is never in cache')
    # cache keys are encoded so they are safe to use as file names
    cache_key = _encodeCacheKey(cache_key)
    cache_dir = folders.cacheDirectory()
    fsutils.mkDirP(cache_dir)
    path = os.path.join(cache_dir, cache_key)
    logger.debug('attempt to unpack from cache %s -> %s', path, to_directory)
    try:
        unpackFrom(path, to_directory)
        try:
            # also copy the origin-info sidecar into the unpacked module
            shutil.copy(path + '.json', os.path.join(to_directory, '.yotta_origin.json'))
        except IOError as e:
            # a missing origin file is fine: not every cached tarball has one
            if e.errno == errno.ENOENT:
                pass
            else:
                raise
        cache_logger.debug('unpacked %s from cache into %s', cache_key, to_directory)
        return
    except IOError as e:
        # a missing tarball is a cache miss
        if e.errno == errno.ENOENT:
            cache_logger.debug('%s not in cache', cache_key)
            raise NotInCache('not in cache')
        # fix: other IOErrors were previously swallowed silently; re-raise
        # so real I/O failures are not hidden
        raise
    except OSError as e:
        if e.errno == errno.ENOTEMPTY:
            # fix: the original call had a %s placeholder but passed no
            # argument, so the log message printed a literal '%s'
            logger.error(
                'directory %s was not empty: probably simultaneous invocation of yotta! It is likely that downloaded sources are corrupted.',
                to_directory
            )
        else:
            raise
def execCommand(args, following_args):
    ''' Link a globally-linked module (by name, or by path to a module
        directory) into the current module's yotta_modules, or link the
        current module into the global install location when no argument is
        given. Returns 1 on validation failure, otherwise tryLink's result.
    '''
    # standard library modules, , ,
    import logging
    import os
    # colorama, BSD 3-Clause license, color terminal output, pip install colorama
    import colorama
    # validate, , validate things, internal
    from yotta.lib import validate
    # folders, , get places to install things, internal
    from yotta.lib import folders
    # fsutils, , misc filesystem utils, internal
    from yotta.lib import fsutils
    c = validate.currentDirectoryModule()
    if not c:
        return 1
    link_module_name = None
    if args.module_or_path:
        link_module_name = args.module_or_path
        err = validate.componentNameValidationError(args.module_or_path)
        if err:
            # check if the module name is really a path to a module
            if os.path.isdir(args.module_or_path):
                # make sure the first half of the link exists,
                src = os.path.abspath(args.module_or_path)
                # if it isn't a valid module, that's an error:
                dep = validate.directoryModule(src)
                if not dep:
                    logging.error("%s is not a valid module: %s", args.module_or_path, dep.getError())
                    return 1
                link_module_name = dep.getName()
                dst = os.path.join(folders.globalInstallDirectory(), link_module_name)
                errcode = tryLink(src, dst)
                if errcode:
                    return errcode
            else:
                logging.error("%s is neither a valid module name, nor a path to an existing module.", args.module_or_path)
                logging.error(err)
                return 1
        fsutils.mkDirP(os.path.join(os.getcwd(), 'yotta_modules'))
        src = os.path.join(folders.globalInstallDirectory(), link_module_name)
        dst = os.path.join(os.getcwd(), 'yotta_modules', link_module_name)
        # if the component is already installed, rm it
        fsutils.rmRf(dst)
    else:
        # no argument: link this module into the global install directory
        fsutils.mkDirP(folders.globalInstallDirectory())
        src = os.getcwd()
        dst = os.path.join(folders.globalInstallDirectory(), c.getName())
    if link_module_name:
        realsrc = fsutils.realpath(src)
        if src == realsrc:
            # the global half of the link is missing: the link will dangle
            logging.warning(
                ('%s -> %s -> ' % (dst, src)) + colorama.Fore.RED + 'BROKEN' + colorama.Fore.RESET #pylint: disable=no-member
            )
        else:
            logging.info('%s -> %s -> %s' % (dst, src, realsrc))
        # check if the thing we linked is actually a dependency, if it isn't
        # warn about that. To do this we may have to get the current target
        # description. This might fail, in which case we warn that we couldn't
        # complete the check:
        target = c.getTarget(args.target, args.config)
        if target:
            if not c.hasDependencyRecursively(link_module_name, target=target, test_dependencies=True):
                logging.warning(
                    '"%s" is not installed as a dependency, so will not '+
                    ' be built. Perhaps you meant to "yotta install %s" '+
                    'first?',
                    link_module_name, link_module_name
                )
        else:
            logging.warning(
                'Could not check if linked module "%s" is installed as a '+
                'dependency, because target "%s" is not available. Run '
                '"yotta ls" to check.', link_module_name, args.target
            )
    else:
        logging.info('%s -> %s' % (dst, src))
    return tryLink(src, dst)
def execCommand(args, following_args):
    ''' Link a globally-linked target into the current module's
        yotta_targets (when args.link_target is given), or link the current
        directory's target definition into the global install location
        otherwise. Returns 1 on validation failure. '''
    c = None
    t = None
    if args.link_target:
        # second half of the link: global target -> this module
        c = validate.currentDirectoryModule()
        if not c:
            return 1
        err = validate.targetNameValidationError(args.link_target)
        if err:
            logging.error(err)
            return 1
        fsutils.mkDirP(os.path.join(os.getcwd(), 'yotta_targets'))
        src = os.path.join(folders.globalTargetInstallDirectory(), args.link_target)
        dst = os.path.join(os.getcwd(), 'yotta_targets', args.link_target)
        # if the target is already installed, rm it
        fsutils.rmRf(dst)
    else:
        # first half of the link: current target dir -> global install dir
        t = validate.currentDirectoryTarget()
        if not t:
            return 1
        fsutils.mkDirP(folders.globalTargetInstallDirectory())
        src = os.getcwd()
        dst = os.path.join(folders.globalTargetInstallDirectory(), t.getName())
    broken_link = False
    if args.link_target:
        realsrc = fsutils.realpath(src)
        if src == realsrc:
            # the global half of the link doesn't exist yet: the new link
            # will dangle
            broken_link = True
            logging.warning(('%s -> %s -> ' % (dst, src)) + colorama.Fore.RED + 'BROKEN' + colorama.Fore.RESET #pylint: disable=no-member
            )
        else:
            logging.info('%s -> %s -> %s' % (dst, src, realsrc))
        # check that the linked target is actually set as the target (or is
        # inherited from by something set as the target), if it isn't, warn the
        # user:
        if c and args.link_target != nameFromTargetSpec(args.target):
            target = c.getTarget(args.target, args.config)
            if target:
                if not target.inheritsFrom(args.link_target):
                    logging.warning(
                        'target "%s" is not used by the current target (%s), so '
                        'this link will have no effect. Perhaps you meant to '
                        'use "yotta target <targetname>" to set the build '
                        'target first.',
                        args.link_target, nameFromTargetSpec(args.target))
            else:
                logging.warning(
                    'Could not check if linked target "%s" is used by the ' +
                    'current target "%s": run "yotta target" to check.',
                    args.link_target, nameFromTargetSpec(args.target))
    else:
        logging.info('%s -> %s' % (dst, src))
    try:
        fsutils.symlink(src, dst)
    except Exception as e:
        if broken_link:
            # expected failure mode: the link target doesn't exist yet
            logging.error(
                'failed to create link (create the first half of the link first)'
            )
        else:
            logging.error('failed to create link: %s', e)
def execCommand(args, following_args):
    ''' Link a globally-linked target into the current module's
        yotta_targets (when args.link_target is given), or link the current
        directory's target definition into the global install location
        otherwise. Returns 1 on validation failure. '''
    c = None
    t = None
    if args.link_target:
        # second half of the link: global target -> this module
        c = validate.currentDirectoryModule()
        if not c:
            return 1
        err = validate.targetNameValidationError(args.link_target)
        if err:
            logging.error(err)
            return 1
        fsutils.mkDirP(os.path.join(os.getcwd(), 'yotta_targets'))
        src = os.path.join(folders.globalTargetInstallDirectory(), args.link_target)
        dst = os.path.join(os.getcwd(), 'yotta_targets', args.link_target)
        # if the target is already installed, rm it
        fsutils.rmRf(dst)
    else:
        # first half of the link: current target dir -> global install dir
        t = validate.currentDirectoryTarget()
        if not t:
            return 1
        fsutils.mkDirP(folders.globalTargetInstallDirectory())
        src = os.getcwd()
        dst = os.path.join(folders.globalTargetInstallDirectory(), t.getName())
    broken_link = False
    if args.link_target:
        realsrc = fsutils.realpath(src)
        if src == realsrc:
            # the global half of the link doesn't exist yet: the new link
            # will dangle
            broken_link = True
            logging.warning(
                ('%s -> %s -> ' % (dst, src)) + colorama.Fore.RED + 'BROKEN' + colorama.Fore.RESET #pylint: disable=no-member
            )
        else:
            logging.info('%s -> %s -> %s' % (dst, src, realsrc))
        # check that the linked target is actually set as the target (or is
        # inherited from by something set as the target), if it isn't, warn the
        # user:
        if c and args.link_target != nameFromTargetSpec(args.target):
            target = c.getTarget(args.target, args.config)
            if target:
                if not target.inheritsFrom(args.link_target):
                    logging.warning(
                        'target "%s" is not used by the current target (%s), so '
                        'this link will have no effect. Perhaps you meant to '
                        'use "yotta target <targetname>" to set the build '
                        'target first.',
                        args.link_target, nameFromTargetSpec(args.target)
                    )
            else:
                logging.warning(
                    'Could not check if linked target "%s" is used by the '+
                    'current target "%s": run "yotta target" to check.',
                    args.link_target, nameFromTargetSpec(args.target)
                )
    else:
        logging.info('%s -> %s' % (dst, src))
    try:
        fsutils.symlink(src, dst)
    except Exception as e:
        if broken_link:
            # expected failure mode: the link target doesn't exist yet
            logging.error('failed to create link (create the first half of the link first)')
        else:
            logging.error('failed to create link: %s', e)
def downloadToCache(stream, hashinfo=None, cache_key=None, origin_info=None):
    ''' Download the specified stream to a temporary cache directory, and
        returns a cache key that can be used to access/remove the file.
        If cache_key is None, then a cache key will be generated and
        returned. You will probably want to use removeFromCache(cache_key)
        to remove it.
    '''
    # fix: the defaults were the mutable objects {} / dict(), shared across
    # every call to the function; use None sentinels instead
    if hashinfo is None:
        hashinfo = {}
    if origin_info is None:
        origin_info = {}
    hash_name = None
    hash_value = None
    m = None
    if hashinfo:
        # check for hashes in preferred order. Currently this is just sha256
        # (which the registry uses). Initial investigations suggest that github
        # doesn't return a header with the hash of the file being downloaded.
        for h in ('sha256', ):
            if h in hashinfo:
                hash_name = h
                hash_value = hashinfo[h]
                m = getattr(hashlib, h)()
                break
        if not hash_name:
            logger.warning('could not find supported hash type in %s', hashinfo)
    if cache_key is None:
        cache_key = '%032x' % random.getrandbits(256)
    cache_dir = folders.cacheDirectory()
    fsutils.mkDirP(cache_dir)
    cache_as = os.path.join(cache_dir, cache_key)
    file_size = 0
    (download_file, download_fname) = tempfile.mkstemp(dir=cache_dir)
    with os.fdopen(download_file, 'wb') as f:
        f.seek(0)
        for chunk in stream.iter_content(4096):
            f.write(chunk)
            if hash_name:
                m.update(chunk)
        if hash_name:
            calculated_hash = m.hexdigest()
            logger.debug('calculated %s hash: %s check against: %s' % (hash_name, calculated_hash, hash_value))
            if hash_value and (hash_value != calculated_hash):
                raise Exception('Hash verification failed.')
        file_size = f.tell()
        logger.debug('wrote tarfile of size: %s to %s', file_size, download_fname)
        f.truncate()
    try:
        os.rename(download_fname, cache_as)
        extended_origin_info = {'hash': hashinfo, 'size': file_size}
        extended_origin_info.update(origin_info)
        ordered_json.dump(cache_as + '.json', extended_origin_info)
    except OSError as e:
        # fix: the original detected the Windows "file already exists"
        # error (183) via getattr(__builtins__, "WindowsError", ...), which
        # never matches because __builtins__ is a dict in imported modules.
        # On Python 3 the Windows error surfaces as an OSError with
        # winerror == 183 (errno EEXIST), so check those directly.
        if e.errno in (errno.ENOENT, errno.EEXIST) or getattr(e, 'winerror', None) == 183:
            # if we failed, it's because the file already exists (probably
            # because another process got there first), so just rm our
            # temporary file and continue
            cache_logger.debug('another process downloaded %s first', cache_key)
            fsutils.rmF(download_fname)
        else:
            raise
    return cache_key
def _writeFile(self, path, contents):
    ''' Ensure path's parent directory exists, then write contents there
        (delegating to writeIfDifferent so unchanged files are not
        rewritten). '''
    fsutils.mkDirP(os.path.dirname(path))
    self.writeIfDifferent(path, contents)
def execCommand(args, following_args):
    ''' Link a globally-linked target (by name, or by path to a target
        directory) into the current module's yotta_targets, or link the
        current directory's target definition into the global install
        location when no argument is given. Returns 1 on validation
        failure. '''
    # standard library modules, , ,
    import logging
    import os
    # colorama, BSD 3-Clause license, color terminal output, pip install colorama
    import colorama
    # fsutils, , misc filesystem utils, internal
    from yotta.lib import fsutils
    # validate, , validate things, internal
    from yotta.lib import validate
    # folders, , get places to install things, internal
    from yotta.lib import folders
    c = None
    t = None
    link_target_name = None
    if args.target_or_path:
        link_target_name = args.target_or_path
        c = validate.currentDirectoryModule()
        if not c:
            return 1
        err = validate.targetNameValidationError(args.target_or_path)
        if err:
            # check if the target name is really a path to an existing target
            if os.path.isdir(args.target_or_path):
                # make sure the first half of the link exists,
                src = os.path.abspath(args.target_or_path)
                # if it isn't a valid target, that's an error:
                tgt = validate.directoryTarget(src)
                if not tgt:
                    logging.error("%s is not a valid target: %s", args.target_or_path, tgt.getError())
                    return 1
                link_target_name = tgt.getName()
                # fix: the first half of the link was created in
                # folders.globalInstallDirectory() (the *modules* install
                # dir) while the second half below reads from
                # globalTargetInstallDirectory(), so path-based target
                # links were always broken; create it in the target dir
                dst = os.path.join(folders.globalTargetInstallDirectory(), link_target_name)
                errcode = tryLink(src, dst)
                if errcode:
                    return errcode
            else:
                logging.error(err)
                return 1
        fsutils.mkDirP(os.path.join(os.getcwd(), 'yotta_targets'))
        src = os.path.join(folders.globalTargetInstallDirectory(), link_target_name)
        dst = os.path.join(os.getcwd(), 'yotta_targets', link_target_name)
        # if the target is already installed, rm it
        fsutils.rmRf(dst)
    else:
        # no argument: link this target into the global install directory
        t = validate.currentDirectoryTarget()
        if not t:
            return 1
        fsutils.mkDirP(folders.globalTargetInstallDirectory())
        src = os.getcwd()
        dst = os.path.join(folders.globalTargetInstallDirectory(), t.getName())
    broken_link = False
    if link_target_name:
        realsrc = fsutils.realpath(src)
        if src == realsrc:
            # the global half of the link is missing: the link will dangle
            broken_link = True
            logging.warning(
                ('%s -> %s -> ' % (dst, src)) + colorama.Fore.RED + 'BROKEN' + colorama.Fore.RESET #pylint: disable=no-member
            )
        else:
            logging.info('%s -> %s -> %s' % (dst, src, realsrc))
        # check that the linked target is actually set as the target (or is
        # inherited from by something set as the target), if it isn't, warn the
        # user:
        if c and link_target_name != nameFromTargetSpec(args.target):
            target = c.getTarget(args.target, args.config)
            if target:
                if not target.inheritsFrom(link_target_name):
                    logging.warning(
                        'target "%s" is not used by the current target (%s), so '
                        'this link will have no effect. Perhaps you meant to '
                        'use "yotta target <targetname>" to set the build '
                        'target first.',
                        link_target_name, nameFromTargetSpec(args.target)
                    )
            else:
                logging.warning(
                    'Could not check if linked target "%s" is used by the '+
                    'current target "%s": run "yotta target" to check.',
                    link_target_name, nameFromTargetSpec(args.target)
                )
    else:
        logging.info('%s -> %s' % (dst, src))
    try:
        fsutils.symlink(src, dst)
    except Exception as e:
        if broken_link:
            # expected failure mode: the link target doesn't exist yet
            logging.error('failed to create link (create the first half of the link first)')
        else:
            logging.error('failed to create link: %s', e)
def execCommand(args, following_args):
    ''' Link a globally-linked module into the current module's
        yotta_modules (when args.component is given), or link the current
        module into the global install location otherwise. Returns 1 on
        validation failure. '''
    # standard library modules, , ,
    import logging
    import os
    # colorama, BSD 3-Clause license, color terminal output, pip install colorama
    import colorama
    # validate, , validate things, internal
    from yotta.lib import validate
    # folders, , get places to install things, internal
    from yotta.lib import folders
    # fsutils, , misc filesystem utils, internal
    from yotta.lib import fsutils
    c = validate.currentDirectoryModule()
    if not c:
        return 1
    if args.component:
        # second half of the link: global module -> this module
        err = validate.componentNameValidationError(args.component)
        if err:
            logging.error(err)
            return 1
        fsutils.mkDirP(os.path.join(os.getcwd(), 'yotta_modules'))
        src = os.path.join(folders.globalInstallDirectory(), args.component)
        dst = os.path.join(os.getcwd(), 'yotta_modules', args.component)
        # if the component is already installed, rm it
        fsutils.rmRf(dst)
    else:
        # first half of the link: current module -> global install dir
        fsutils.mkDirP(folders.globalInstallDirectory())
        src = os.getcwd()
        dst = os.path.join(folders.globalInstallDirectory(), c.getName())
    broken_link = False
    if args.component:
        realsrc = fsutils.realpath(src)
        if src == realsrc:
            # the global half of the link is missing: the link will dangle
            broken_link = True
            logging.warning(
                ('%s -> %s -> ' % (dst, src)) + colorama.Fore.RED + 'BROKEN' + colorama.Fore.RESET #pylint: disable=no-member
            )
        else:
            logging.info('%s -> %s -> %s' % (dst, src, realsrc))
        # check if the thing we linked is actually a dependency, if it isn't
        # warn about that. To do this we may have to get the current target
        # description. This might fail, in which case we warn that we couldn't
        # complete the check:
        target = c.getTarget(args.target, args.config)
        if target:
            if not c.hasDependencyRecursively(args.component, target=target, test_dependencies=True):
                logging.warning(
                    '"%s" is not installed as a dependency, so will not '+
                    ' be built. Perhaps you meant to "yotta install %s" '+
                    'first?', args.component, args.component
                )
        else:
            logging.warning(
                'Could not check if linked module "%s" is installed as a '+
                'dependency, because target "%s" is not available. Run '
                '"yotta ls" to check.', args.component, args.target
            )
    else:
        logging.info('%s -> %s' % (dst, src))
    try:
        fsutils.symlink(src, dst)
    except Exception as e:
        if broken_link:
            # expected failure mode: the link target doesn't exist yet
            logging.error('failed to create link (create the first half of the link first)')
        else:
            logging.error('failed to create link: %s', e)
def downloadToCache(stream, hashinfo=None, cache_key=None, origin_info=None):
    ''' Download the specified stream to a temporary cache directory, and
        returns a cache key that can be used to access/remove the file.
        If cache_key is None, then a cache key will be generated and
        returned. You will probably want to use removeFromCache(cache_key)
        to remove it.
    '''
    # fix: the defaults were the mutable objects {} / dict(), shared across
    # every call to the function; use None sentinels instead
    if hashinfo is None:
        hashinfo = {}
    if origin_info is None:
        origin_info = {}
    hash_name = None
    hash_value = None
    m = None
    if hashinfo:
        # check for hashes in preferred order. Currently this is just sha256
        # (which the registry uses). Initial investigations suggest that github
        # doesn't return a header with the hash of the file being downloaded.
        for h in ('sha256',):
            if h in hashinfo:
                hash_name = h
                hash_value = hashinfo[h]
                m = getattr(hashlib, h)()
                break
        if not hash_name:
            logger.warning('could not find supported hash type in %s', hashinfo)
    if cache_key is None:
        cache_key = '%032x' % random.getrandbits(256)
    cache_dir = folders.cacheDirectory()
    fsutils.mkDirP(cache_dir)
    cache_as = os.path.join(cache_dir, cache_key)
    file_size = 0
    (download_file, download_fname) = tempfile.mkstemp(dir=cache_dir)
    with os.fdopen(download_file, 'wb') as f:
        f.seek(0)
        for chunk in stream.iter_content(4096):
            f.write(chunk)
            if hash_name:
                m.update(chunk)
        if hash_name:
            calculated_hash = m.hexdigest()
            logger.debug(
                'calculated %s hash: %s check against: %s' % (
                    hash_name, calculated_hash, hash_value
                )
            )
            if hash_value and (hash_value != calculated_hash):
                raise Exception('Hash verification failed.')
        file_size = f.tell()
        logger.debug('wrote tarfile of size: %s to %s', file_size, download_fname)
        f.truncate()
    try:
        os.rename(download_fname, cache_as)
        extended_origin_info = {
            'hash': hashinfo,
            'size': file_size
        }
        extended_origin_info.update(origin_info)
        ordered_json.dump(cache_as + '.json', extended_origin_info)
    except OSError as e:
        # NOTE(review): ENOENT is an odd errno for "file already exists" --
        # on Windows a rename onto an existing file raises EEXIST/183;
        # confirm which platform this branch was written for
        if e.errno == errno.ENOENT:
            # if we failed, it's because the file already exists (probably
            # because another process got there first), so just rm our
            # temporary file and continue
            cache_logger.debug('another process downloaded %s first', cache_key)
            fsutils.rmF(download_fname)
        else:
            raise
    return cache_key
def execCommand(args, following_args):
    ''' Link a globally-linked module (by name, or by path to a module
        directory) into the current module's yotta_modules, or link the
        current module into the global install location when no argument is
        given. Returns 1 on validation failure, otherwise tryLink's result.
    '''
    # standard library modules, , ,
    import logging
    import os
    # colorama, BSD 3-Clause license, color terminal output, pip install colorama
    import colorama
    # validate, , validate things, internal
    from yotta.lib import validate
    # folders, , get places to install things, internal
    from yotta.lib import folders
    # fsutils, , misc filesystem utils, internal
    from yotta.lib import fsutils
    c = validate.currentDirectoryModule()
    if not c:
        return 1
    link_module_name = None
    if args.module_or_path:
        link_module_name = args.module_or_path
        err = validate.componentNameValidationError(args.module_or_path)
        if err:
            # check if the module name is really a path to a module
            if os.path.isdir(args.module_or_path):
                # make sure the first half of the link exists,
                src = os.path.abspath(args.module_or_path)
                # if it isn't a valid module, that's an error:
                dep = validate.directoryModule(src)
                if not dep:
                    logging.error("%s is not a valid module: %s", args.module_or_path, dep.getError())
                    return 1
                link_module_name = dep.getName()
                dst = os.path.join(folders.globalInstallDirectory(), link_module_name)
                errcode = tryLink(src, dst)
                if errcode:
                    return errcode
            else:
                logging.error(
                    "%s is neither a valid module name, nor a path to an existing module.",
                    args.module_or_path)
                logging.error(err)
                return 1
        fsutils.mkDirP(os.path.join(os.getcwd(), 'yotta_modules'))
        src = os.path.join(folders.globalInstallDirectory(), link_module_name)
        dst = os.path.join(os.getcwd(), 'yotta_modules', link_module_name)
        # if the component is already installed, rm it
        fsutils.rmRf(dst)
    else:
        # no argument: link this module into the global install directory
        fsutils.mkDirP(folders.globalInstallDirectory())
        src = os.getcwd()
        dst = os.path.join(folders.globalInstallDirectory(), c.getName())
    if link_module_name:
        realsrc = fsutils.realpath(src)
        if src == realsrc:
            # the global half of the link is missing: the link will dangle
            logging.warning(('%s -> %s -> ' % (dst, src)) + colorama.Fore.RED + 'BROKEN' + colorama.Fore.RESET #pylint: disable=no-member
            )
        else:
            logging.info('%s -> %s -> %s' % (dst, src, realsrc))
        # check if the thing we linked is actually a dependency, if it isn't
        # warn about that. To do this we may have to get the current target
        # description. This might fail, in which case we warn that we couldn't
        # complete the check:
        target = c.getTarget(args.target, args.config)
        if target:
            if not c.hasDependencyRecursively(
                    link_module_name, target=target, test_dependencies=True):
                logging.warning(
                    '"%s" is not installed as a dependency, so will not ' +
                    ' be built. Perhaps you meant to "yotta install %s" ' +
                    'first?', link_module_name, link_module_name)
        else:
            logging.warning(
                'Could not check if linked module "%s" is installed as a ' +
                'dependency, because target "%s" is not available. Run '
                '"yotta ls" to check.', link_module_name, args.target)
    else:
        logging.info('%s -> %s' % (dst, src))
    return tryLink(src, dst)