def execute(self, parser, args):
    directory = args.directory or args.name
    if path.maybedir(directory):
        directory = path.join(directory, args.name)

    if not path.exists(directory):
        logger.debug('creating directory "{}"'.format(directory))
        path.makedirs(directory)
    elif not path.isdir(directory):
        logger.error('"{}" is not a directory'.format(directory))
        return 1

    if args.nested:
        directory = path.join(directory, 'craftr')
        path.makedirs(directory)

    mfile = path.join(directory, 'manifest.' + args.format)
    sfile = path.join(directory, 'Craftrfile')
    for fn in [mfile, sfile]:
        if path.isfile(fn):
            logger.error('"{}" already exists'.format(fn))
            return 1

    logger.debug('creating file "{}"'.format(mfile))
    with open(mfile, 'w') as fp:
        # args.format is expected to be either 'cson' or 'json'.
        if args.format == 'cson':
            lines = textwrap.dedent('''
                name: "%s"
                version: "%s"
                project_dir: ".."
                author: ""
                url: ""
                dependencies: {}
                options: {}
            ''' % (args.name, args.version)).lstrip().split('\n')
            if not args.nested:
                del lines[2]  # drop the project_dir line
        elif args.format == 'json':
            lines = textwrap.dedent('''
                {
                  "name": "%s",
                  "version": "%s",
                  "project_dir": "..",
                  "author": "",
                  "url": "",
                  "dependencies": {},
                  "options": {}
                }''' % (args.name, args.version)).lstrip().split('\n')
            if not args.nested:
                del lines[3]  # drop the "project_dir" line
        fp.write('\n'.join(lines))

    logger.debug('creating file "{}"'.format(sfile))
    with open(sfile, 'w') as fp:
        print('# {}'.format(args.name), file=fp)
def execute(self, parser, args):
    directory = args.directory or args.name
    if path.maybedir(directory):
        directory = path.join(directory, args.name)

    if not path.exists(directory):
        logger.debug('creating directory "{}"'.format(directory))
        path.makedirs(directory)
    elif not path.isdir(directory):
        logger.error('"{}" is not a directory'.format(directory))
        return 1

    if args.nested:
        directory = path.join(directory, "craftr")
        path.makedirs(directory)

    mfile = path.join(directory, MANIFEST_FILENAME)
    sfile = path.join(directory, "Craftrfile")
    for fn in [mfile, sfile]:
        if path.isfile(fn):
            logger.error('"{}" already exists'.format(fn))
            return 1

    logger.debug('creating file "{}"'.format(mfile))
    with open(mfile, "w") as fp:
        lines = textwrap.dedent("""
            {
              "name": "%s",
              "version": "%s",
              "project_dir": "..",
              "author": "",
              "url": "",
              "dependencies": {},
              "options": {}
            }\n""" % (args.name, args.version)).lstrip().split("\n")
        if not args.nested:
            del lines[3]  # drop the "project_dir" line
        fp.write("\n".join(lines))

    logger.debug('creating file "{}"'.format(sfile))
    with open(sfile, "w") as fp:
        print("# {}".format(args.name), file=fp)
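# Illustrative only, not part of the command: with args.name == "myproject",
# args.version == "1.0.0" and args.nested == False, the JSON template above
# (with the "project_dir" entry removed by `del lines[3]`) writes a manifest
# roughly like this:
#
#   {
#     "name": "myproject",
#     "version": "1.0.0",
#     "author": "",
#     "url": "",
#     "dependencies": {},
#     "options": {}
#   }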
def load(self, context, cache):
    if cache is not None and path.isdir(cache.get("directory", "")):
        # Check if the requested version changed.
        url_template = context.expand_variables(cache.get("url_template", ""))
        if url_template == cache.get("url"):
            self.directory = cache["directory"]
            logger.info("Reusing cached directory: {}".format(
                path.rel(self.directory, nopar=True)))
            return cache
        else:
            logger.info("Cached URL is outdated:", cache.get("url"))

    directory = None
    archive = None
    delete_after_extract = True
    for url_template in self.urls:
        url = context.expand_variables(url_template)
        if not url:
            continue
        if url.startswith("file://"):
            name = url[7:]
            if path.isdir(name):
                logger.info("Using directory", url)
                directory = name
                break
            elif path.isfile(name):
                logger.info("Using archive", url)
                archive = name
                delete_after_extract = False
                break
            error = None  # local path did not match, checked below
        else:
            error = None
            try:
                progress = lambda d: self._download_progress(url, context, d)
                archive, reused = httputils.download_file(
                    url, directory=context.get_temporary_directory(),
                    on_exists="skip", progress=progress)
            except (httputils.URLError, httputils.HTTPError) as exc:
                error = exc
            except self.DownloadAlreadyExists as exc:
                directory = exc.directory
                logger.info("Reusing existing directory", directory)
            else:
                if reused:
                    logger.info("Reusing cached download", path.basename(archive))
                break
        if error:
            logger.info("Error reading", url, ":", error)

    if directory or archive:
        logger.debug("URL applies: {}".format(url))

    if not directory and archive:
        suffix, directory = self._get_archive_unpack_info(context, archive)
        logger.info('Unpacking "{}" to "{}" ...'.format(
            path.rel(archive, nopar=True), path.rel(directory, nopar=True)))
        nr.misc.archive.extract(
            archive, directory, suffix=suffix, unpack_single_dir=True,
            check_extract_file=self._check_extract_file,
            progress_callback=self._extract_progress)
    elif not directory:
        raise LoaderError(self, "no URL matched")

    self.directory = directory
    with open(path.join(self.directory, ".craftr_downloadurl"), "w") as fp:
        fp.write(url)
    return {"directory": directory, "url_template": url_template, "url": url}
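# Minimal sketch of the cache round-trip, based only on the dict returned above
# (`loader` and `context` are hypothetical stand-ins for however the caller
# constructs them):
#
#   cache = loader.load(context, None)
#   # returns {"directory": ..., "url_template": ..., "url": ...}
#
#   # On a later run the same dict is passed back in. If re-expanding
#   # cache["url_template"] still yields cache["url"] and the cached
#   # directory still exists, load() reuses it and returns the cache
#   # unchanged; otherwise the URLs are resolved and fetched again.
#   cache = loader.load(context, cache)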