def get_data_files(self):
    """Yield (source,destination) tuples for data files.

    This method generates the names of all data file to be included in
    the frozen app.  They should be placed directly into the freeze
    directory as raw files.
    """
    fdir = self.freeze_dir  # NOTE(review): assigned but never used below
    if sys.platform == "win32" and self.bundle_msvcrt:
        # Ship the private MSVCRT assembly alongside the frozen app.
        for (src,dst) in self.get_msvcrt_private_assembly_files():
            yield (src,dst)
    if self.distribution.data_files:
        for datafile in self.distribution.data_files:
            # Plain strings get placed in the root dist directory.
            if isinstance(datafile,basestring):  # Python 2: basestring
                datafile = ("",[datafile])
            (dst,sources) = datafile
            if os.path.isabs(dst):
                err = "cant freeze absolute data_file paths (%s)"
                err = err % (dst,)
                raise ValueError(err)
            # convert_path() turns the posix-style path from setup.py
            # into the local platform's path syntax.
            dst = convert_path(dst)
            for src in sources:
                src = convert_path(src)
                # Each source keeps its basename under dst.
                yield (src,os.path.join(dst,os.path.basename(src)))
def run(self):
    """
    This is where the meat is.  Basically the data_files list must
    now be a list of tuples of 3 entries.  The first entry is one
    of 'base', 'platbase', etc, which indicates which base to install
    from.  The second entry is the path to install too.  The third
    entry is a list of files to install.
    """
    for lof in self.data_files:
        # Pick the installation base: 'install_<name>' when a name is
        # given, otherwise the plain install_base.
        if lof[0]:
            base = getattr(self, 'install_' + lof[0])
        else:
            base = getattr(self, 'install_base')
        dir = convert_path(lof[1])
        if not os.path.isabs(dir):
            dir = os.path.join(base, dir)
        elif self.root:
            # Absolute target: re-root it under --root when given.
            dir = change_root(self.root, dir)
        self.mkpath(dir)
        files = lof[2]
        if len(files) == 0:
            # If there are no files listed, the user must be
            # trying to create an empty directory, so add the
            # directory to the list of output files.
            self.outfiles.append(dir)
        else:
            # Copy files, adding them to the list of output files.
            for f in files:
                f = convert_path(f)
                (out, _) = self.copy_file(f, dir)
                self.outfiles.append(out)
    return self.outfiles
def run(self):
    """Install scripts, supporting both plain names and
    (target_dir, [scripts]) tuples; makes them executable on posix."""
    # Build the scripts first unless the caller asked to skip it.
    if not self.skip_build:
        self.run_command("build_scripts")
    for script in self.distribution.scripts:
        if isinstance(script, str):
            # Plain string: install the built copy into install_dir.
            fn = os.path.join(self.build_dir,
                              os.path.basename(convert_path(script)))
            out, _ = self.copy_file(fn, self.install_dir)
            self.outfiles.append(out)
        else:
            # Tuple form: (target_dir, [script, ...]).
            dn = convert_path(script[0])
            if not os.path.isabs(dn):
                dn = os.path.join(self.install_dir, dn)
            elif self.root:
                dn = change_root(self.root, dn)
            self.mkpath(dn)
            if not script[1]:
                # Empty list: just record the (empty) directory.
                self.outfiles.append(dn)
            else:
                for s in script[1]:
                    fn = os.path.join(self.build_dir,
                                      os.path.basename(convert_path(s)))
                    out, _ = self.copy_file(fn, dn)
                    self.outfiles.append(out)
    if os.name == "posix":
        # Make every installed script readable/executable.
        # (0555 is a Python 2 octal literal.)
        for fn in self.get_outputs():
            mode = S_IMODE(os.stat(fn).st_mode) | 0555
            log.info("changing mode of %s to %o", fn, mode)
            if not self.dry_run:
                os.chmod(fn, mode)
def build_intl(build_cmd):
    '''Merge translation files into desktop and mime files'''
    # Bail out unless a sufficiently recent intltool is available.
    version = intltool_version()
    if version is None or version < (0, 25, 0):
        return
    data_files = build_cmd.distribution.data_files
    build_base = build_cmd.build_base
    merge_files = (('data/gramps.desktop', 'share/applications', '-d'),
                   ('data/gramps.keys', 'share/mime-info', '-k'),
                   ('data/gramps.xml', 'share/mime/packages', '-x'),
                   ('data/gramps.appdata.xml', 'share/metainfo', '-x'))
    for src, target, option in merge_files:
        local_src = convert_path(src)
        merged = os.path.join(build_base, local_src)
        merged_dir = os.path.dirname(merged)
        if not (os.path.isdir(merged_dir) or os.path.islink(merged_dir)):
            os.makedirs(merged_dir)
        merge(local_src + '.in', merged, option)
        # Register the merged result for installation.
        data_files.append((target, [build_base + '/' + src]))
    # In-place merges for the remaining intltool-managed files.
    for name in INTLTOOL_FILES:
        name = convert_path(name)
        merge(name + '.in', name, '-x', po_dir=os.sep + 'tmp', cache=False)
def build_intl(build_cmd):
    """
    Merge translation files into desktop and mime files
    """
    # Strip translator-only keys from the .in templates first.
    for raw in _FILES:
        local = convert_path(raw)
        strip_files(local + ".in", local, ["_tip", "_name"])
    version = intltool_version()
    if version is None or version < (0, 25, 0):
        log.info("No intltool or version < 0.25.0, build_intl is aborting")
        return
    data_files = build_cmd.distribution.data_files
    build_base = build_cmd.build_base
    merge_files = (
        ("data/gramps.desktop", "share/applications", "-d"),
        ("data/gramps.keys", "share/mime-info", "-k"),
        ("data/gramps.xml", "share/mime/packages", "-x"),
        ("data/gramps.appdata.xml", "share/metainfo", "-x"),
    )
    for src, target, option in merge_files:
        local_src = convert_path(src)
        merged = os.path.join(build_base, local_src)
        merged_dir = os.path.dirname(merged)
        if not (os.path.isdir(merged_dir) or os.path.islink(merged_dir)):
            os.makedirs(merged_dir)
        merge(local_src + ".in", merged, option)
        # Register the merged output for installation.
        data_files.append((target, [build_base + "/" + src]))
def build_intl(build_cmd):
    '''
    Merge translation files into desktop and mime files
    '''
    # First strip translator-only keys from each template.
    for template in _FILES:
        template = convert_path(template)
        strip_files(template + '.in', template, ['_tip', '_name'])
    ver = intltool_version()
    if ver is None or ver < (0, 25, 0):
        log.info('No intltool or version < 0.25.0, build_intl is aborting')
        return
    data_files = build_cmd.distribution.data_files
    base = build_cmd.build_base
    merge_files = (('data/gramps.desktop', 'share/applications', '-d'),
                   ('data/gramps.keys', 'share/mime-info', '-k'),
                   ('data/gramps.xml', 'share/mime/packages', '-x'),
                   ('data/gramps.appdata.xml', 'share/metainfo', '-x'))
    for src, target, option in merge_files:
        src_local = convert_path(src)
        out_file = os.path.join(base, src_local)
        out_dir = os.path.dirname(out_file)
        # Create the build-tree directory on demand.
        if not (os.path.isdir(out_dir) or os.path.islink(out_dir)):
            os.makedirs(out_dir)
        merge(src_local + '.in', out_file, option)
        data_files.append((target, [base + '/' + src]))
def run(self):
    # install_data is a classic class so super() won't work. Call it
    # directly to copy the files first.
    _install_data.run(self)
    # Now gzip them
    for f in self.data_files:
        if type(f) is StringType:  # Python 2: plain-string entry
            # it's a simple file
            f = convert_path(f)
            # NOTE(review): shell command built by string interpolation;
            # paths with spaces/metacharacters will break (or worse).
            # Prefer subprocess.run([...], shell=False).
            cmd = '/bin/gzip %s/%s' % (self.install_dir, f)
            log.info("gzipping %s/%s" % (self.install_dir, f))
            os.system(cmd)
        else:
            # it's a tuple with path and a list of files
            dir = convert_path(f[0])
            if not os.path.isabs(dir):
                dir = os.path.join(self.install_dir, dir)
            elif self.root:
                # Re-root absolute targets under --root.
                dir = change_root(self.root, dir)
            if f[1] == []:
                # If there are no files listed the user must be
                # trying to create an empty directory. So nothing
                # to do here.
                pass
            else:
                # gzip the files
                for data in f[1]:
                    data = convert_path(data)
                    cmd = '/bin/gzip %s/%s' % (dir, data)
                    log.info("gzipping %s/%s" % (dir, data))
                    os.system(cmd)
def run(self):
    """Install data files with per-entry mode/owner metadata (Python 2)."""
    self.mkpath(self.install_dir)
    for f in self.data_files:
        # it's a tuple with dict to install to and a list of files
        tdict = f[0]
        dir = convert_path(tdict['path'])
        if not os.path.isabs(dir):
            dir = os.path.join(self.install_dir, dir)
        elif self.root:
            dir = change_root(self.root, dir)
        self.mkpath(dir)
        os.chmod(dir, tdict['mode'])
        # Only root can chown; resolve the requested owner once.
        if(os.geteuid() == 0):
            try:
                uinfo = pwd.getpwnam(tdict['owner'])
            except KeyError:
                # NOTE(review): message misspells "Unknown".
                print "Error: Unkown user %s" % tdict['owner']
                sys.exit(1)
            uid, gid = uinfo[2], uinfo[3]
            os.chown(dir, uid, gid)
        if f[1] == []:
            # If there are no files listed, the user must be
            # trying to create an empty directory, so add the
            # directory to the list of output files.
            self.outfiles.append(dir)
        else:
            # Copy files, adding them to the list of output files.
            for data, mode in f[1]:
                data = convert_path(data)
                (out, _) = self.copy_file(data, dir)
                self.outfiles.append(out)
                os.chmod(out, mode)
                # uid/gid were bound above under the same euid check.
                if(os.geteuid() == 0):
                    os.chown(out, uid, gid)
    self.run_command('install_locales')
def run (self):
    """Install data files out of self.src_dir (Python 2)."""
    self.mkpath(self.install_dir)
    for f in self.data_files:
        if type(f) is StringType:  # Python 2 type check
            # it's a simple file, so copy it
            f = convert_path(f)
            if self.warn_dir:
                self.warn("setup script did not provide a directory for "
                          "'%s' -- installing right in '%s'" %
                          (f, self.install_dir))
            (out, _) = self.copy_file(os.path.join(self.src_dir, f),
                                      self.install_dir)
            self.outfiles.append(out)
        else:
            # it's a tuple with path to install to and a list of files
            dir = convert_path(f[0])
            if not os.path.isabs(dir):
                dir = os.path.join(self.install_dir, dir)
            elif self.root:
                # Re-root absolute targets under --root.
                dir = change_root(self.root, dir)
            self.mkpath(dir)
            if f[1] == []:
                # If there are no files listed, the user must be
                # trying to create an empty directory, so add the
                # directory to the list of output files.
                self.outfiles.append(dir)
            else:
                # Copy files, adding them to the list of output files.
                for data in f[1]:
                    data = convert_path(data)
                    (out, _) = self.copy_file(
                        os.path.join(self.src_dir, data), dir)
                    self.outfiles.append(out)
def process_filelist(self, filelist):
    """Expand include/exclude patterns on *filelist* in place; return it."""
    assert isinstance(filelist, FileList)
    filelist.set_allfiles(self.distribution.get_allfiles())

    def run_pattern(method, raw):
        # Recursive lists treat the pattern as a directory prefix.
        pat = util.convert_path(raw)
        if filelist.recursive:
            return method(None, prefix=pat)
        return method(pat, anchor=True)

    # Iterate a snapshot so sources can be pruned while looping.
    for source in tuple(filelist.sources):
        if not run_pattern(filelist.include_pattern, source):
            self.warn("no files found matching '%s'" % source)
            filelist.sources.remove(source)
    for exclude in filelist.excludes:
        if not run_pattern(filelist.exclude_pattern, exclude):
            self.warn("no previously included files found"
                      " matching '%s'" % exclude)
    filelist.sort()
    filelist.remove_duplicates()
    return filelist
def scm_finder(*none): """Find files for distribution tarball This is only used when ``setuptools`` is imported, simply to create a valid list of files to distribute. Standard setuptools only works with CVS. Without this it *appears* to work, but only distributes a very small subset of the package. .. seealso:: :class:`MySdist.get_file_list` :type none: any :param none: Just for compatibility """ # setuptools documentation says this shouldn't be a hard fail, but we won't # do that as it makes builds entirely unpredictable if __pkg_data__.SCM == "hg": output = call_scm("locate") elif __pkg_data__.SCM == "git": output = call_scm("ls-tree -r --full-name --name-only HEAD") distributed_files = output.splitlines() distributed_files.append(".%s_version" % __pkg_data__.SCM) distributed_files.append("ChangeLog") distributed_files.extend(glob("*.html")) distributed_files.extend(glob("doc/*.html")) for path, directory, filenames in os.walk("html"): for filename in filenames: distributed_files.append(os.path.join(path, filename)) return distributed_files if __pkg_data__.SCM == "hg": finders.append((convert_path('.hg/dirstate'), scm_finder)) elif __pkg_data__.SCM == "git": finders.append((convert_path('.git/index'), scm_finder))
def fixfiles(files, ext = 'txt', fill = None):
    """Copy *files*, optionally adding an extension on win32 and filling
    @@KEY@@ placeholders; returns the list of new file names."""
    newfiles = []
    if store.get('no-replace', False):
        fill = None  # substitution disabled by configuration
    for name in files:
        # On win32, extensionless files get ".<ext>" appended.
        if win32 and ext and '.' not in posixpath.basename(name):
            newname = "%s.%s" % (name, ext)
        else:
            newname = name
        newfiles.append(newname)
        newpath = util.convert_path(newname)
        path = util.convert_path(name)
        # Python 2: file() is the old alias for open().
        content = [line.rstrip() for line in file(path)]
        if fill is not None:
            # Replace every @@KEY@@ marker with its configured value.
            newcont = []
            for line in content:
                for key, value in fill.items():
                    line = line.replace("@@%s@@" % key, value)
                newcont.append(line)
            content = newcont
        file_util.write_file(newpath, content)
    return newfiles
def _parse_template_line (self, line):
    """Parse one MANIFEST.in template line into
    (action, patterns, dir, dir_pattern); unused slots stay None."""
    words = string.split(line)  # Python 2 string module
    action = words[0]
    patterns = dir = dir_pattern = None
    if action in ('include', 'exclude', 'global-include', 'global-exclude'):
        if len(words) < 2:
            # Python 2 raise syntax throughout this function.
            raise DistutilsTemplateError, \
                  "'%s' expects <pattern1> <pattern2> ..." % action
        patterns = map(convert_path, words[1:])
    elif action in ('recursive-include', 'recursive-exclude'):
        if len(words) < 3:
            raise DistutilsTemplateError, \
                  "'%s' expects <dir> <pattern1> <pattern2> ..." % action
        dir = convert_path(words[1])
        patterns = map(convert_path, words[2:])
    elif action in ('graft', 'prune'):
        if len(words) != 2:
            raise DistutilsTemplateError, \
                  "'%s' expects a single <dir_pattern>" % action
        dir_pattern = convert_path(words[1])
    else:
        raise DistutilsTemplateError, "unknown action '%s'" % action
    return (action, patterns, dir, dir_pattern)
def get_outputs(self):
    """Return the install-time path of every file this command produces."""
    # Each entry lands at <install_dir>/<outdir>/<basename of source>.
    return [
        os.path.join(self.install_dir,
                     util.convert_path(entry.outdir),
                     os.path.basename(util.convert_path(entry.source)))
        for entry in self.files
    ]
def run(self):
    """Copy each registered file into its destination directory."""
    for entry in self.files:
        src = util.convert_path(entry.source)
        dest = os.path.join(self.install_dir, util.convert_path(entry.outdir))
        self.mkpath(dest)  # create the target tree on demand
        self.copy_file(src, dest)
def run(self):
    """Copy the distribution's data files into the build_py build tree."""
    build_py = self.get_finalized_command("build_py")
    for target, sources in self.distribution.data_files:
        # Destination is relative to build_lib.
        dest = os.path.join(build_py.build_lib, convert_path(target))
        self.mkpath(dest)
        for src in sources:
            self.copy_file(convert_path(src), dest)
def run(self):
    """Install sysconf files, saving a pre-existing destination as *.orig.

    Each (dest, src) pair in self.sysconf_files is copied under
    install_dir; when the destination exists and the source is newer,
    the old file is first preserved with a '.orig' suffix.
    """
    self.mkpath(self.install_dir)
    for dest, src in self.sysconf_files:
        dest = os.path.join(self.install_dir, util.convert_path(dest))
        src = util.convert_path(src)
        if os.path.exists(dest) and newer(src, dest):
            # Save off the existing file before overwriting it.
            self.warn('saving %r as %r' % (dest, dest + '.orig'))
            self.copy_file(dest, dest + '.orig')
        # Bug fix: the original re-converted the already-converted `src`
        # (harmless but redundant) -- use it directly.
        self.copy_file(src, dest)
def finalize_options(self): build_scripts.finalize_options(self) # build up a dictionary of commands for sub scripting self.blds={} print self.scripts for file in self.scripts: d,f = file.split('/',1) if not self.blds.has_key(d): self.blds[d]=self.mkbld('/'+d) print convert_path(f) self.blds[d].scripts += [file]
def run(self):
    """Install configuration files.

    Tuple entries are copied as '<name>.new' and, when listed in
    distribution.fix_conf, have @PREFIX*@-style variables substituted.
    Plain-string entries are copied straight into install_dir.
    """
    self.mkpath(self.install_dir)
    inst = self.distribution.command_options.get("install")
    # Values substituted into templated conf files.
    vars_2_subst = {
        "PREFIX": inst["prefix"][1] if "prefix" in inst else "",
        "PREFIXCONF": os.path.join(get_install_conf_dir(inst), "macsyfinder"),
        "PREFIXDATA": os.path.join(get_install_data_dir(inst), "macsyfinder"),
        "PREFIXDOC": os.path.join(get_install_doc_dir(inst), "macsyfinder"),
    }
    for f in self.conf_files:
        if isinstance(f, str):
            # it's a simple file, so copy it
            f = convert_path(f)
            if self.warn_dir:
                self.warn(
                    "setup script did not provide a directory for "
                    "'{0}' -- installing right in '{1}'".format(f, self.install_dir)
                )
            # Bug fix: the original computed a '.new' destination here
            # and never used it (dead assignment, removed).
            (out, _) = self.copy_file(f, self.install_dir)
            self.outfiles.append(out)
        else:
            # it's a tuple with path to install to and a list of files
            _dir = convert_path(f[0])
            if not os.path.isabs(_dir):
                _dir = os.path.join(self.install_dir, _dir)
            elif self.root:
                _dir = change_root(self.root, _dir)
            self.mkpath(_dir)
            if f[1] == []:
                # If there are no files listed, the user must be
                # trying to create an empty directory, so add the
                # directory to the list of output files.
                self.outfiles.append(_dir)
            else:
                # Copy files, adding them to the list of output files.
                for conf in f[1]:
                    conf = convert_path(conf)
                    dest = os.path.join(_dir, os.path.basename(conf) + ".new")
                    (out, _) = self.copy_file(conf, dest)
                    if conf in self.distribution.fix_conf:
                        # Substitute variables in place via a temp file.
                        input_file = out
                        output_file = input_file + ".tmp"
                        subst_vars(input_file, output_file, vars_2_subst)
                        if os.path.exists(input_file):
                            os.unlink(input_file)
                        self.move_file(output_file, input_file)
                    self.outfiles.append(input_file)
def get_source_files(self):
    """List the source paths of all documents this command consumes."""
    sources = []
    for doc in self.distribution.doc_files:
        if isinstance(doc, Structures.File):
            sources.append(util.convert_path(doc.source))
        elif isinstance(doc, Structures.Document):
            source = util.convert_path(doc.source)
            sources.append(source)
            # XML documents drag in their includes too; record them
            # relative to the current directory.
            prefix = len(os.getcwd()) + len(os.sep)
            for path in self.find_xml_includes(Uri.OsPathToUri(source)):
                sources.append(path[prefix:])
    if self.inplace:
        # In-place builds also count their outputs as sources.
        sources.extend(self.get_outputs())
    return sources
def handle_extra_path (self):
    """Derive `path_file` and `extra_dirs` from the `extra_path` option
    (Python 2 variant)."""
    if self.extra_path is None:
        self.extra_path = self.distribution.extra_path

    if self.extra_path is not None:
        if type(self.extra_path) is StringType:  # Python 2 type check
            self.extra_path = string.split(self.extra_path, ',')

        if len(self.extra_path) == 1:
            # One element: it names both the .pth file and the subdir.
            path_file = extra_dirs = self.extra_path[0]
        elif len(self.extra_path) == 2:
            (path_file, extra_dirs) = self.extra_path
        else:
            # Python 2 raise syntax
            raise DistutilsOptionError, \
                  ("'extra_path' option must be a list, tuple, or "
                   "comma-separated string with 1 or 2 elements")

        # convert to local form in case Unix notation used (as it
        # should be in setup scripts)
        extra_dirs = convert_path(extra_dirs)
    else:
        path_file = None
        extra_dirs = ''

    # XXX should we warn if path_file and not extra_dirs? (in which
    # case the path file would be harmless but pointless)
    self.path_file = path_file
    self.extra_dirs = extra_dirs
def handle_extra_path(self):
    """Set `path_file` and `extra_dirs` using `extra_path`."""
    if self.extra_path is None:
        self.extra_path = self.distribution.extra_path

    if self.extra_path is None:
        # Nothing requested: no .pth file, install directly.
        path_file = None
        extra_dirs = ""
    else:
        if isinstance(self.extra_path, str):
            self.extra_path = self.extra_path.split(",")
        if len(self.extra_path) == 1:
            # A single element names both the .pth file and the subdir.
            path_file = extra_dirs = self.extra_path[0]
        elif len(self.extra_path) == 2:
            path_file, extra_dirs = self.extra_path
        else:
            raise DistutilsOptionError(
                "'extra_path' option must be a list, tuple, or "
                "comma-separated string with 1 or 2 elements"
            )
        # convert to local form in case Unix notation used (as it
        # should be in setup scripts)
        extra_dirs = convert_path(extra_dirs)

    # XXX should we warn if path_file and not extra_dirs? (in which
    # case the path file would be harmless but pointless)
    self.path_file = path_file
    self.extra_dirs = extra_dirs
def _add_defaults_data_files(self):
    """Add the distribution's data_files entries to the file list."""
    # getting distribution.data_files
    if not self.distribution.has_data_files():
        return
    for item in self.distribution.data_files:
        if isinstance(item, str):
            # Plain file name relative to the source tree.
            path = convert_path(item)
            if os.path.isfile(path):
                self.filelist.append(path)
        else:
            # A (dirname, filenames) tuple -- only the files matter here.
            _dirname, filenames = item
            for name in filenames:
                name = convert_path(name)
                if os.path.isfile(name):
                    self.filelist.append(name)
def append(self, item):
    """Record *item* (a posix-style path) if it passes the safety check."""
    # Older sdists built on Windows may carry a trailing CR -- drop it.
    item = item[:-1] if item.endswith('\r') else item
    path = convert_path(item)
    if not self._safe_path(path):
        return
    self.files.append(path)
def find_packages(where='.', exclude=()):
    """Return a list all Python packages found within directory 'where'

    'where' should be supplied as a "cross-platform" (i.e. URL-style)
    path; it will be converted to the appropriate local path syntax.
    'exclude' is a sequence of package names to exclude; '*' can be used
    as a wildcard in the names, such that 'foo.*' will exclude all
    subpackages of 'foo' (but not 'foo' itself).
    """
    from fnmatch import fnmatchcase
    found = []
    pending = [(convert_path(where), '')]
    # Breadth-first walk; a "package" here is any dot-free directory.
    while pending:
        root, prefix = pending.pop(0)
        for name in os.listdir(root):
            path = os.path.join(root, name)
            if '.' not in name and os.path.isdir(path):
                found.append(prefix + name)
                pending.append((path, prefix + name + '.'))
    # 'ez_setup' is always excluded in addition to the caller's patterns.
    for pattern in list(exclude) + ['ez_setup']:
        found = [pkg for pkg in found if not fnmatchcase(pkg, pattern)]
    return found
def find_package_data(where='.', package='',
                      exclude=standard_exclude,
                      exclude_directories=standard_exclude_directories):
    """Map package names to the data file paths found beneath them."""
    result = {}
    pending = [(convert_path(where), '', package)]
    while pending:
        root, prefix, pkg = pending.pop(0)
        for name in os.listdir(root):
            path = os.path.join(root, name)
            if os.path.isdir(path):
                # Skip excluded directories (pattern or exact match).
                if any(fnmatchcase(name, pat) or path.lower() == pat.lower()
                       for pat in exclude_directories):
                    continue
                if os.path.isfile(os.path.join(path, '__init__.py')):
                    # A nested package restarts the relative prefix.
                    sub_pkg = name if not pkg else pkg + '.' + name
                    pending.append((path, '', sub_pkg))
                else:
                    # A plain data directory extends the prefix.
                    pending.append((path, prefix + name + '/', pkg))
            else:
                # Skip excluded files, record the rest under the package.
                if any(fnmatchcase(name, pat) or path.lower() == pat.lower()
                       for pat in exclude):
                    continue
                result.setdefault(pkg, []).append(prefix + name)
    return result
def find_packages_and_data(where='.', exclude_packages=(),
                           exclude_package_data=()):
    """Walk *where* and return (packages, package_data).

    packages: dotted names of real packages (dirs with __init__.py).
    package_data: {package: [relative paths]} of non-package files found
    beneath each package directory.  (Python 2: uses iteritems().)
    """
    packages = []
    package_data = {}
    stack=[(convert_path(where), '', '', True)]
    while stack:
        where,parent,parent_where,parent_is_package = stack.pop(0)
        for name in os.listdir(where):
            fn = os.path.join(where,name)
            # A package: dot-free dir with __init__.py whose parent is
            # itself a package (or the walk root).
            looks_like_package = (
                '.' not in name
                and os.path.isdir(fn)
                and os.path.isfile(os.path.join(fn, '__init__.py'))
                and parent_is_package
            )
            if looks_like_package:
                package_name = ((parent+'.') if parent else '')+name
                packages.append(package_name)
                stack.append((fn, package_name, fn, True))
            elif os.path.isdir(fn):
                # Plain directory: descend, children can't be packages.
                stack.append((fn, parent, parent_where, False))
            elif parent:
                # A data file inside a package, unless excluded.
                keep_it = True
                for pat in list(exclude_package_data)+DEFAULT_EXCLUDE_PACKAGE_DATA_FILES:
                    if fnmatch.fnmatch(fn, pat):
                        keep_it = False
                        break
                if keep_it:
                    # Path stored relative to the owning package dir.
                    package_data.setdefault(parent, []).append(
                        fn[len(parent_where)+1:])
    for pat in list(exclude_packages)+['ez_setup']:
        packages = [item for item in packages
                    if not fnmatch.fnmatchcase(item,pat)]
    # Drop data entries whose package was excluded above.
    return packages, dict((k,v) for k, v in package_data.iteritems()
                          if k in packages)
def finalize_options(self):
    """Fill in bdist_inno defaults and validate the target version."""
    if self.bdist_dir is None:
        bdist_base = self.get_finalized_command('bdist').bdist_base
        self.bdist_dir = os.path.join(bdist_base, 'inno')
    self.set_undefined_options('bdist',
                               ('keep_temp', 'keep_temp'),
                               ('dist_dir', 'dist_dir'),
                               ('skip_build', 'skip_build'))
    if not self.target_version:
        self.target_version = get_python_version()
    # With compiled pieces present, only the running interpreter's
    # version can be targeted unless the build is skipped.
    if not self.skip_build and (self.distribution.has_ext_modules() or
                                self.distribution.has_scripts()):
        short_version = get_python_version()
        if self.target_version != short_version:
            raise DistutilsOptionError(
                "target version can only be %s, or the '--skip_build'"
                " option must be specified" % short_version)
        self.target_version = short_version
    self.license_file = self.distribution.license_file
    if self.license_file:
        self.license_file = util.convert_path(self.license_file)
    # NOTE(review): '%s.win32' is a format template that is never
    # applied -- almost certainly missing `% <dist name>`; confirm the
    # intended value before relying on output_basename.
    self.output_basename = '%s.win32'
    return
def copy_scripts(self):
    """
    Override the default distutils copy_scripts to call replace_tags
    if it's marked as a Python script.
    """
    self.mkpath(self.build_dir)
    outfiles = []
    for script in self.scripts:
        adjust = 0  # NOTE(review): set but never used in this override
        script = convert_path(script)
        outfile = os.path.join(self.build_dir, os.path.basename(script))
        outfiles.append(outfile)
        if not self.force and not newer(script, outfile):
            # Skip scripts whose build copy is already current.
            log.debug("not copying %s (up-to-date)", script)
            continue
        self.copy_file(script, outfile, self.dry_run)
    if os.name == 'posix':
        # Make the copied scripts world-readable/executable.
        # (07777/0555 are Python 2 octal literals.)
        for file in outfiles:
            if self.dry_run:
                log.info("changing mode of %s", file)
            else:
                oldmode = os.stat(file)[ST_MODE] & 07777
                newmode = (oldmode | 0555) & 07777
                if newmode != oldmode:
                    log.info("changing mode of %s from %o to %o",
                             file, oldmode, newmode)
                    os.chmod(file, newmode)
def _find_depends(source, incdirs, depends):
    """Recursively collect the header dependencies of *source*.

    incdirs  -- system include search path.
    depends  -- already-known dependencies; a fresh set is returned,
                the caller's collection is not mutated.

    User-style includes are searched relative to the including file
    first, then along incdirs; system-style includes only along incdirs.
    """
    source = convert_path(source)
    system_dirs = incdirs
    user_dirs = [os.path.dirname(source)] + system_dirs
    depends = set(depends)
    # Do two passes to prevent having too many files open at once.
    todo = []
    # Bug fix: use a context manager so the file is closed even when an
    # exception escapes the scan (the original leaked on error).
    with open(source) as lines:
        for line in lines:
            match = _find_include(line)
            if match:
                user_include, system_include = map(convert_path,
                                                   match.groups())
                if user_include:
                    todo.append((user_include, user_dirs))
                else:
                    todo.append((system_include, system_dirs))
    # Now look for each included file on its search path.
    # (Removed the original's unused local `includes` set.)
    for include, search_path in todo:
        for path in search_path:
            filename = os.path.normpath(os.path.join(path, include))
            if os.path.isfile(filename) and filename not in depends:
                depends.add(filename)
                depends |= _find_depends(filename, incdirs, depends)
                break
    return depends
def copy_scripts(self):
    """Copy each script listed in 'self.scripts'; if it's marked as a
    Python script in the Unix way (first line matches 'first_line_re',
    ie. starts with "\#!" and contains "python"), then adjust the first
    line to refer to the current Python interpreter as we copy.
    """
    self.mkpath(self.build_dir)
    outfiles = []
    updated_files = []
    for script in self.scripts:
        adjust = False
        script = convert_path(script)
        outfile = os.path.join(self.build_dir, os.path.basename(script))
        outfiles.append(outfile)

        if not self.force and not newer(script, outfile):
            log.debug("not copying %s (up-to-date)", script)
            continue

        # Always open the file, but ignore failures in dry-run mode --
        # that way, we'll get accurate feedback if we can read the
        # script.
        try:
            f = open(script, "rb")
        except OSError:
            if not self.dry_run:
                raise
            f = None
        else:
            # Detect the script's source encoding, then rewind so the
            # shebang check sees the real first line.
            encoding, lines = tokenize.detect_encoding(f.readline)
            f.seek(0)
            first_line = f.readline()
            if not first_line:
                self.warn("%s is an empty file (skipping)" % script)
                continue

            # A '#!...python...' first line marks the script for shebang
            # rewriting; group(1) is anything after the interpreter.
            match = first_line_re.match(first_line)
            if match:
                adjust = True
                post_interp = match.group(1) or b''

        if adjust:
            log.info("copying and adjusting %s -> %s", script,
                     self.build_dir)
            updated_files.append(outfile)
            if not self.dry_run:
                if not sysconfig.python_build:
                    executable = self.executable
                else:
                    # Building Python itself: use the in-tree binary.
                    executable = os.path.join(
                        sysconfig.get_config_var("BINDIR"),
                        "python%s%s" % (sysconfig.get_config_var("VERSION"),
                                        sysconfig.get_config_var("EXE")))
                executable = os.fsencode(executable)
                shebang = b"#!" + executable + post_interp + b"\n"
                # Python parser starts to read a script using UTF-8 until
                # it gets a #coding:xxx cookie. The shebang has to be the
                # first line of a file, the #coding:xxx cookie cannot be
                # written before. So the shebang has to be decodable from
                # UTF-8.
                try:
                    shebang.decode('utf-8')
                except UnicodeDecodeError:
                    raise ValueError("The shebang ({!r}) is not decodable "
                                     "from utf-8".format(shebang))
                # If the script is encoded to a custom encoding (use a
                # #coding:xxx cookie), the shebang has to be decodable from
                # the script encoding too.
                try:
                    shebang.decode(encoding)
                except UnicodeDecodeError:
                    raise ValueError(
                        "The shebang ({!r}) is not decodable "
                        "from the script encoding ({})".format(
                            shebang, encoding))
                with open(outfile, "wb") as outf:
                    outf.write(shebang)
                    outf.writelines(f.readlines())
            if f:
                f.close()
        else:
            if f:
                f.close()
            updated_files.append(outfile)
            self.copy_file(script, outfile)

    if os.name == 'posix':
        # Ensure the build copies are readable/executable.
        for file in outfiles:
            if self.dry_run:
                log.info("changing mode of %s", file)
            else:
                oldmode = os.stat(file)[ST_MODE] & 0o7777
                newmode = (oldmode | 0o555) & 0o7777
                if newmode != oldmode:
                    log.info("changing mode of %s from %o to %o",
                             file, oldmode, newmode)
                    os.chmod(file, newmode)
    # XXX should we modify self.outfiles?
    return outfiles, updated_files
def __init__(self, path, pattern, postproc=None):
    """Remember a local-syntax path, a match pattern and an optional hook."""
    # Normalise the posix-style path to the host platform up front.
    self.path = convert_path(path)
    self.pattern = pattern
    self.postproc = postproc
# XXX support this in distutils as well from lib2to3.main import main main( 'lib2to3.fixes', ['-wd', os.path.join(tmp_src, 'tests', 'api_tests.txt')]) util.run_2to3(outfiles_2to3) # arrange setup to use the copy sys.path.insert(0, os.path.abspath(tmp_src)) src_root = tmp_src from distutils.util import convert_path d = {} init_path = convert_path('setuptools/command/__init__.py') init_file = open(init_path) exec(init_file.read(), d) init_file.close() SETUP_COMMANDS = d['__all__'] VERSION = "0.6.49" from setuptools import setup, find_packages from setuptools.command.build_py import build_py as _build_py from setuptools.command.test import test as _test scripts = [] console_scripts = ["easy_install = setuptools.command.easy_install:main"] if os.environ.get("DISTRIBUTE_DISABLE_VERSIONED_EASY_INSTALL_SCRIPT") is None:
def load_pkg_init():
    """Execute tropostack/__init__.py and return its namespace.

    Lets setup code read e.g. __version__ without importing the package.
    """
    namespace = {}
    init_path = convert_path('tropostack/__init__.py')
    # NOTE: exec of a local source file -- acceptable for a build script.
    with open(init_path) as handle:
        exec(handle.read(), namespace)
    return namespace
""" url = "https://pypi.python.org/pypi/%s/json" % (package_name, ) try: response = urllib.request.urlopen(urllib.request.Request(url), timeout=1) data = json.load(response) versions = list(data["releases"].keys()) versions.sort(key=LooseVersion) return ">={}".format(versions[-1]) except: pass return "" pkg_version = {} ver_path = convert_path("vidgear/version.py") with open(ver_path) as ver_file: exec(ver_file.read(), pkg_version) with open("README.md", "r", encoding="utf-8") as fh: long_description = fh.read() long_description = long_description.replace( # patch for images "docs/overrides/assets", "https://abhitronix.github.io/vidgear/latest/assets") # patch for unicodes long_description = long_description.replace("➶", ">>") long_description = long_description.replace("©", "(c)") setup( name="vidgear", packages=["vidgear", "vidgear.gears", "vidgear.gears.asyncio"],
def convert_paths(self, *names):
    """Call `convert_path` over `names`."""
    # Each name maps to an attribute called 'install_<name>'.
    for name in names:
        attr = "install_" + name
        value = getattr(self, attr)
        setattr(self, attr, convert_path(value))
def find_package_data(
    where=".", package="",
    exclude=standard_exclude,
    exclude_directories=standard_exclude_directories,
    only_in_packages=True,
    show_ignored=False):
    """
    Return a dictionary suitable for use in ``package_data`` in a
    distutils ``setup.py`` file.

    The dictionary looks like::

        {"package": [files]}

    Where ``files`` is a list of all the files in that package that
    don"t match anything in ``exclude``.

    If ``only_in_packages`` is true, then top-level directories that
    are not packages won"t be included (but directories under packages
    will).

    Directories matching any pattern in ``exclude_directories`` will
    be ignored; by default directories with leading ``.``, ``CVS``,
    and ``_darcs`` will be ignored.

    If ``show_ignored`` is true, then all the files that aren"t
    included in package data are shown on stderr (for debugging
    purposes).

    Note patterns use wildcards, or can be exact paths (including
    leading ``./``), and all searching is case-insensitive.
    """
    out = {}
    stack = [(convert_path(where), "", package, only_in_packages)]
    while stack:
        where, prefix, package, only_in_packages = stack.pop(0)
        for name in os.listdir(where):
            fn = os.path.join(where, name)
            if os.path.isdir(fn):
                # Skip excluded directories entirely.
                bad_name = False
                for pattern in exclude_directories:
                    if (fnmatchcase(name, pattern)
                        or fn.lower() == pattern.lower()):
                        bad_name = True
                        if show_ignored:
                            # Python 2 print-to-stream syntax.
                            print >> sys.stderr, (
                                "Directory %s ignored by pattern %s"
                                % (fn, pattern))
                        break
                if bad_name:
                    continue
                if (os.path.isfile(os.path.join(fn, "__init__.py"))
                    and not prefix):
                    # A nested package: restart the prefix beneath it.
                    if not package:
                        new_package = name
                    else:
                        new_package = package + "." + name
                    stack.append((fn, "", new_package, False))
                else:
                    # A plain data directory: extend the prefix.
                    stack.append(
                        (fn, prefix + name + "/", package,
                         only_in_packages))
            elif package or not only_in_packages:
                # is a file
                bad_name = False
                for pattern in exclude:
                    if (fnmatchcase(name, pattern)
                        or fn.lower() == pattern.lower()):
                        bad_name = True
                        if show_ignored:
                            print >> sys.stderr, (
                                "File %s ignored by pattern %s"
                                % (fn, pattern))
                        break
                if bad_name:
                    continue
                out.setdefault(package, []).append(prefix + name)
    return out
def copy_scripts(self):
    """Copy each script listed in 'self.scripts'; if it's marked as a
    Python script in the Unix way (first line matches 'first_line_re',
    ie. starts with "\#!" and contains "python"), then adjust the first
    line to refer to the current Python interpreter as we copy.
    """
    _sysconfig = __import__('sysconfig')
    self.mkpath(self.build_dir)
    outfiles = []
    for script in self.scripts:
        adjust = 0
        script = convert_path(script)
        outfile = os.path.join(self.build_dir, os.path.basename(script))
        outfiles.append(outfile)
        if not self.force and not newer(script, outfile):
            log.debug("not copying %s (up-to-date)", script)
            continue
        # Always open the file, but ignore failures in dry-run mode --
        # that way, we'll get accurate feedback if we can read the
        # script.
        try:
            f = open(script, "r")
        except IOError:
            if not self.dry_run:
                raise
            f = None
        else:
            first_line = f.readline()
            if not first_line:
                self.warn("%s is an empty file (skipping)" % script)
                continue
            match = first_line_re.match(first_line)
            if match:
                adjust = 1
                post_interp = match.group(1) or ''
        if adjust:
            log.info("copying and adjusting %s -> %s", script,
                     self.build_dir)
            # NOTE(review): debug prints left in.  Because `executable`
            # is assigned below, it is a local here -- reading it first
            # will raise UnboundLocalError unless a `global executable`
            # exists elsewhere in the original module; confirm.
            print "###############"
            print executable
            executable = fix_jython_executable(executable, post_interp)
            print executable
            if not self.dry_run:
                outf = open(outfile, "w")
                if not _sysconfig.is_python_build():
                    # NOTE(review): writes self.executable, not the
                    # fix_jython_executable() result computed above.
                    outf.write("#!%s%s\n" % (self.executable, post_interp))
                else:
                    outf.write("#!%s%s\n" % (os.path.join(
                        _sysconfig.get_config_var("BINDIR"),
                        "python%s%s" % (_sysconfig.get_config_var("VERSION"),
                                        _sysconfig.get_config_var("EXE"))),
                        post_interp))
                outf.writelines(f.readlines())
                outf.close()
            if f:
                f.close()
        else:
            if f:
                f.close()
            self.copy_file(script, outfile)
    if os.name == 'posix':
        # Python 2 octal literals (07777/0555).
        for file in outfiles:
            if self.dry_run:
                log.info("changing mode of %s", file)
            else:
                oldmode = os.stat(file)[ST_MODE] & 07777
                newmode = (oldmode | 0555) & 07777
                if newmode != oldmode:
                    log.info("changing mode of %s from %o to %o",
                             file, oldmode, newmode)
                    os.chmod(file, newmode)
from setuptools import setup, find_packages from distutils.util import convert_path with open("README.md", 'r') as f: long_description = f.read() main_ns = {} ver_path = convert_path('hyperstream/version.py') with open(ver_path) as ver_file: exec(ver_file.read(), main_ns) with open('requirements.txt') as f: required = f.read().splitlines() description = ('Hyperstream is a large-scale, ' 'flexible and robust software ' 'package for processing streaming data') authors = [ 'Tom Diethe', 'Meelis Kull', 'Niall Twomey', 'Kacper Sokol', 'Hao Song', 'Emma Tonkin', 'Peter Flach' ] packages = find_packages() setup(name='hyperstream', version=main_ns['__version__'], description=description, license="MIT", long_description=long_description, author='; '.join(authors),
# cf. https://pythonhosted.org/setuptools/setuptools.html # commands: # python setup.py sdist upload -r testpypi # python setup.py sdist upload -r pypi from distutils.util import convert_path from setuptools import setup, find_packages ################################################## module = 'jupyter_drawing_pad' ################################################## # get version from __meta__ meta_ns = {} path = convert_path(module + '/__meta__.py') with open(path) as meta_file: exec(meta_file.read(), meta_ns) # read requirements.txt with open('requirements.txt', 'r') as f: content = f.read() li_req = content.split('\n') install_requires = [e.strip() for e in li_req if len(e)] name = module name_url = name.replace('_', '-') packages = [module] version = meta_ns['__version__'] description = 'This is a jupyter widget (or ipywidget) consisting in a drawing pad.'
import sys from distutils.util import convert_path from setuptools import find_packages, setup main_ns = {} ver_path = convert_path("experiment_impact_tracker/version.py") with open(ver_path) as ver_file: exec(ver_file.read(), main_ns) if sys.version_info.major != 3: print("This Python is only compatible with Python 3, but you are running " "Python {}. The installation will likely fail.".format( sys.version_info.major)) setup( name="experiment_impact_tracker", packages=find_packages(), include_package_data=True, scripts=[ "scripts/create-compute-appendix", "scripts/get-region-emissions-info", "scripts/lookup-cloud-region-info", "scripts/generate-carbon-impact-statement", "scripts/get-rough-emissions-estimate", ], install_requires=[ "requests", "bs4", "shapely", "scipy",
Operating System :: MacOS """ # create descriptions LONG_DESCRIPTION = [ "This package provides a pythonic way to organize dumping and pulling python objects and other type of files to a folder or a directory that is called repository.", "A Repository can be created in any directory or folder, it suffices to initialize a Repository instance in a directory to start dumping and pulling object into it. .", "Any directory or a folder that contains a .pyrepinfo binary file in it, is theoretically a pyrep Repository." "By default dump and pull methods use pickle to serialize storing python objects.", "Practically any other methods can be used simply by providing the means and the required libraries in a simple form of string.", ] DESCRIPTION = [LONG_DESCRIPTION[0]] # get package info PACKAGE_INFO = {} ver_path = convert_path('__init__.py') with open(ver_path) as ver_file: exec(ver_file.read(), PACKAGE_INFO) # create meta data metadata = dict( name=PACKAGE_NAME, packages=[PACKAGE_NAME], package_dir={PACKAGE_NAME: '.'}, version=PACKAGE_INFO['__version__'], author="Bachir AOUN", author_email="*****@*****.**", description="\n".join(DESCRIPTION), long_description="\n".join(LONG_DESCRIPTION), url="http://bachiraoun.github.io/pyrep/", download_url="https://github.com/bachiraoun/pyrep",
import os import platform import subprocess import sys import traceback from distutils.command.install import INSTALL_SCHEMES from distutils.sysconfig import get_python_inc from distutils.util import convert_path from setuptools import find_packages from setuptools import setup # Get all template files templates = [] for dirpath, dirnames, filenames in os.walk( convert_path('pwnlib/shellcraft/templates'), followlinks=True): for f in filenames: templates.append(os.path.relpath(os.path.join(dirpath, f), 'pwnlib')) # This makes pwntools-LICENSE.txt appear with the package folders for scheme in INSTALL_SCHEMES.values(): scheme['data'] = scheme['purelib'] console_scripts = ['pwn=pwnlib.commandline.main:main'] # Find all of the ancillary console scripts # We have a magic flag --include-all-scripts flag = '--only-use-pwn-command' if flag in sys.argv: sys.argv.remove(flag) else:
from .age_verification import SandboxAgeVerificationBuilder
from .client import SandboxClientBuilder
from .token import YotiTokenRequestBuilder

# Default Yoti API connection settings.
DEFAULTS = {
    "YOTI_API_URL": "https://api.yoti.com",
    "YOTI_API_PORT": 443,
    "YOTI_API_VERSION": "v1",
    "YOTI_API_VERIFY_SSL": "true",
}

DEFAULT_SANDBOX_URL = DEFAULTS["YOTI_API_URL"] + "/sandbox/v1"

# Load __version__ from version.py (next to this file) by exec'ing it into
# a scratch namespace, so the version module is not imported directly.
main_ns = {}
directory_name = os.path.dirname(__file__)
version_path = os.path.join(directory_name, "version.py")
ver_path = convert_path(version_path)
with open(ver_path) as ver_file:
    exec(ver_file.read(), main_ns)
__version__ = main_ns["__version__"]

__all__ = [
    # BUGFIX: this previously listed the *value* of __version__ (e.g.
    # "1.2.3") instead of the name, so `from ... import *` advertised a
    # bogus symbol and never exported __version__ itself.
    "__version__",
    "SandboxClientBuilder",
    "SandboxAgeVerificationBuilder",
    "YotiTokenRequestBuilder",
]
#!/usr/bin/env python2 import glob import os import platform import sys from distutils.command.install import INSTALL_SCHEMES from distutils.sysconfig import get_python_inc from distutils.util import convert_path from setuptools import find_packages from setuptools import setup # Get all template files templates = [] for dirpath, dirnames, filenames in os.walk(convert_path('pwnlib/shellcraft/templates')): for f in filenames: templates.append(os.path.relpath(os.path.join(dirpath, f), 'pwnlib')) # This makes pwntools-LICENSE.txt appear with the package folders for scheme in INSTALL_SCHEMES.values(): scheme['data'] = scheme['purelib'] console_scripts = ['pwn=pwnlib.commandline.main:main'] # Find all of the ancillary console scripts # We have a magic flag --include-all-scripts flag = '--only-use-pwn-command' if flag in sys.argv: sys.argv.remove(flag) else: flag = False
packages = [
    'terminalone',
    'terminalone.models',
    'terminalone.utils',
    'terminalone.vendor',
]

requirements = [
    'requests>=2.3.0',
    'requests-oauthlib>=0.5.0',
]

# Load package metadata (__name__, __version__, ...) by exec'ing
# metadata.py into a scratch dict, avoiding an import of the package.
metadata = {}
ver_path = convert_path('terminalone/metadata.py')
with open(ver_path) as ver_file:
    exec(ver_file.read(), metadata)


def check_pip():
    """Exit if the version in metadata.py is already published on PyPI.

    NOTE(review): relies on `pip search`, which pypi.org has disabled;
    on modern pip this call may fail — confirm before relying on it.
    """
    st = subprocess.check_output(['pip', 'search', metadata['__name__']])
    # BUGFIX: on Python 3, check_output returns bytes; str.index('(')
    # below would raise TypeError.  Decode so parsing works on 2 and 3.
    if isinstance(st, bytes):
        st = st.decode('utf-8', 'replace')
    # The published version appears in parentheses in the search output.
    pip_version = st[st.index('(') + 1:st.index(')')]
    print(pip_version)
    if pip_version == metadata['__version__']:
        print('version {} already published. '
              'Modify metadata.py to update version and commit.'.format(
                  pip_version))
        sys.exit()
from distutils.util import convert_path try: from setuptools import setup, find_packages except ImportError: from distutils.core import setup def find_packages(): return [ 'uarm', 'uarm.comm', 'uarm.utils', 'uarm.tools', 'uarm.wrapper', 'uarm.swift', 'uarm.metal' ] main_ns = {} ver_path = convert_path('uarm/version.py') with open(os.path.join(os.getcwd(), ver_path)) as ver_file: exec(ver_file.read(), main_ns) version = main_ns['__version__'] # long_description = open('README.rst').read() long_description = 'long description for uarm' with open(os.path.join(os.getcwd(), 'requirements.txt')) as f: requirements = f.read().splitlines() setup( name='uArm-Python-Wrapper', version=version, author='andySigler',
def convert_paths(self, *names):
    """Normalize each 'install_<name>' attribute to the local path syntax.

    For every name given, reads ``self.install_<name>``, runs it through
    ``distutils.util.convert_path`` and stores the result back.
    """
    for suffix in names:
        attr_name = "install_" + suffix
        raw_value = getattr(self, attr_name)
        setattr(self, attr_name, convert_path(raw_value))
def get_version(path):
    """Return ``__version__`` defined in the Python file at *path*.

    The file is executed in an isolated namespace instead of being
    imported, so no package import side effects are triggered.
    """
    namespace = {}
    with open(convert_path(path)) as fh:
        exec(fh.read(), namespace)
    return namespace['__version__']
#!/usr/bin/env python import os from setuptools import setup, find_packages from distutils.util import convert_path # Load version information main_ns = {} ver_path = convert_path('src/xmlrunner/version.py') with open(ver_path, 'rb') as ver_file: exec(ver_file.read(), main_ns) install_requires = ['six>=1.4.0'] import sys if sys.version_info < (2, 7): install_requires += ['unittest2'] setup( name = 'unittest-xml-reporting', version = main_ns['__version__'], author = 'Daniel Fernandes Martins', author_email = '*****@*****.**', description = 'unittest-based test runner with Ant/JUnit like XML reporting.', license = 'BSD', platforms = ['Any'], keywords = ['pyunit', 'unittest', 'junit xml', 'report', 'testrunner'], url = 'http://github.com/xmlrunner/unittest-xml-reporting/tree/master/', classifiers = [ 'Development Status :: 5 - Production/Stable', 'Intended Audience :: Developers', 'License :: OSI Approved :: BSD License',
from setuptools import find_packages, setup from distutils.util import convert_path # Imports __version__, reference: https://stackoverflow.com/a/24517154/2220152 ns = {} ver_path = convert_path('nbfetch/version.py') with open(ver_path) as ver_file: exec(ver_file.read(), ns) __version__ = ns['__version__'] setup(name='nbfetch', version=__version__, url='', license='3-clause BSD', author='Peter Veerman, YuviPanda', author_email='*****@*****.**', description= 'Notebook Extension to do one-way synchronization of git repositories', packages=find_packages(), include_package_data=True, platforms='any', install_requires=['notebook>=5.5.0', 'tornado'], data_files=[('etc/jupyter/jupyter_notebook_config.d', ['nbfetch/etc/nbfetch.json'])], zip_safe=False, entry_points={ 'console_scripts': [ 'nbfetch = nbfetch.pull:main', ], }, classifiers=[
#!/usr/bin/env python
import os

from setuptools import setup, find_packages
from distutils.util import convert_path

pkg_name = "piBlueAudio"
# BUGFIX: "Raspbery" -> "Raspberry" in the published description.
pkg_desc = "Bluetooth pairing and audio for Raspberry Pi"
pkg_url = "https://github.com/kwodzicki/piBlueAudio"

# Load __version__ from <pkg_name>/version.py by exec'ing it into a
# scratch namespace, avoiding an import of the package at build time.
main_ns = {}
ver_path = convert_path(
    os.path.join(pkg_name, "version.py")
)
with open(ver_path) as ver_file:
    exec(ver_file.read(), main_ns)

setup(
    name=pkg_name,
    description=pkg_desc,
    url=pkg_url,
    author="Kyle R. Wodzicki",
    author_email="*****@*****.**",
    version=main_ns['__version__'],
    packages=find_packages(),
    install_requires=[],
    scripts=['bin/piBlueAudio'],
    # BUGFIX: was misspelled `include_package_date`; setuptools silently
    # ignores unknown keywords, so package data was never included.
    include_package_data=True,
    zip_safe=False,
)
install_reqs = parse_requirements(filename=os.path.join( '.', 'requirements.txt'), session='update') reqs = [str(ir.req) for ir in install_reqs] # ======================================== # Readme # ======================================== with open(os.path.join('.', 'README.rst')) as readme: README = readme.read() # ======================================== # Version parsing # ======================================== main_ns = {} ver_path = convert_path('kryten_worksheet/version.py') with open(ver_path) as ver_file: exec(ver_file.read(), main_ns) setup( name='kryten-worksheet', version=main_ns['__version__'], packages=find_packages(), include_package_data=True, author='lordoftheflies', author_email='*****@*****.**', license='Apache 2.0 License', # example license description='Crawler application for scraping single page apps.', long_description=README, url='https://github.com/lordoftheflies/kryten-worksheet/', classifiers=[
import setuptools from distutils.util import convert_path with open("README.md", "r") as fh: long_description = fh.read() main_ns = {} ver_path = convert_path('btplotting/version.py') with open(ver_path) as ver_file: exec(ver_file.read(), main_ns) setuptools.setup( name='btplotting', version=main_ns['__version__'], description='Plotting package for Backtrader (Bokeh)', python_requires='>=3.6', author='happydasch', author_email='*****@*****.**', long_description=long_description, long_description_content_type="text/markdown", license='GPLv3+', url="https://github.com/happydasch/btplotting", project_urls={ "Bug Tracker": "https://github.com/happydasch/btplotting/issues", "Documentation": "https://github.com/happydasch/btplotting/wiki", "Source Code": "https://github.com/happydasch/btplotting", "Demos": "https://github.com/happydasch/btplotting/tree/gh-pages", }, # What does your project relate to? keywords=['trading', 'development', 'plotting', 'backtrader'],
#!/usr/bin/env python from setuptools import setup, find_packages from distutils.util import convert_path import codecs # Load version information main_ns = {} ver_path = convert_path('xmlrunner/version.py') with codecs.open(ver_path, 'rb', 'utf8') as ver_file: exec(ver_file.read(), main_ns) # Load README.md readme_path = convert_path('README.md') with codecs.open(readme_path, 'rb', 'utf8') as readme_file: long_description = readme_file.read() install_requires = ['six>=1.4.0'] # this is for sdist to work. import sys if sys.version_info < (2, 7): # python 2.6 no longer supported, use last 1.x release instead. raise RuntimeError('This version requires Python 2.7+') # pragma: no cover setup( name='unittest-xml-reporting', version=main_ns['__version__'], author='Daniel Fernandes Martins, Damien Nozay', description='unittest-based test runner with Ant/JUnit like XML reporting.', long_description=long_description,
import re
from setuptools import setup
from distutils.util import convert_path
import ast
import toml

# Static metadata lives in pyproject.toml; this script only fills in the
# fields that must be computed at build time.
project = toml.load("pyproject.toml")["project"]

# The long description is the README, rendered as markdown.
with open("README.md") as readme:
    project["long_description"] = readme.read()
project["long_description_content_type"] = "text/markdown"

# The version's single source of truth is the package __init__; pull the
# first `__version__ = "..."` assignment out of it.
with open(convert_path('seq2science/__init__.py')) as init_file:
    version_match = next(
        re.finditer('__version__ = "(.*)"', init_file.read(), re.MULTILINE))
project["version"] = version_match.group(1)

# package_data is stored as a string in pyproject.toml; evaluate it into
# the mapping setuptools expects.
project["package_data"] = ast.literal_eval(project["package_data"])

setup(**project)
__package__ = 'forge' __author__ = 'Andres Weber' __url__ = 'https://github.com/andresmweber/forge.git' __email__ = '*****@*****.**' __keywords__ = [ 'maya', 'autorigger', 'rigging', 'templates', 'maya.cmds', 'autodesk' ] __requirements__ = ['simplejson', 'nomenclate', 'six', 'Qt.py'] __requirements_test__ = ['nose', 'coveralls'] __requirements_dev__ = ['twine', 'sphinx', 'docutils', 'docopt'] # from: # http://stackoverflow.com/questions/2058802/how-can-i-get-the-version-defined-in-setup-py-setuptools-in-my-package main_ns = {} with open(convert_path('%s/version.py' % __package__)) as ver_file: exec(ver_file.read(), main_ns) __version__ = main_ns['__version__'] with codecs.open(join(abspath(dirname(__file__)), 'README.rst'), encoding='utf-8') as readme_file: __long_description__ = readme_file.read() setup( name=__package__, version=__version__, description=( "A toolset for building and maintaining rigs in Maya (and more later)." "maya.cmds command (or build stubs) for its signature in Python."), long_description=__long_description__, url=__url__,
#!/usr/bin/env python2 from setuptools import setup, find_packages from distutils.util import convert_path from distutils.command.install import INSTALL_SCHEMES import os, glob, platform # Get all template files templates = [] for dirpath, dirnames, filenames in os.walk( convert_path('pwnlib/shellcraft/templates')): for f in filenames: templates.append(os.path.relpath(os.path.join(dirpath, f), 'pwnlib')) # Get the version ns = {} with open(convert_path('pwnlib/version.py')) as fd: exec fd.read() in ns version = ns['__version__'] # This makes pwntools-LICENSE.txt appear with the package folders for scheme in INSTALL_SCHEMES.values(): scheme['data'] = scheme['purelib'] # Find all of the console scripts console_scripts = [] for filename in glob.glob('pwnlib/commandline/*'): filename = os.path.basename(filename) filename, ext = os.path.splitext(filename) if ext != '.py' or '__init__' in filename: continue
def add_defaults(self):
    """Add all the default files to self.filelist:
      - README or README.txt
      - setup.py
      - test/test*.py
      - all pure Python modules mentioned in setup script
      - all files pointed by package_data (build_py)
      - all files defined in data_files.
      - all files defined as scripts.
      - all C sources listed as part of extensions or C libraries
        in the setup script (doesn't catch C headers!)
    Warns if (README or README.txt) or setup.py are missing; everything
    else is optional.
    """
    # A tuple entry means "any one of these alternatives is enough".
    standards = [('README', 'README.txt'), self.distribution.script_name]
    for fn in standards:
        if isinstance(fn, tuple):
            alts = fn
            got_it = 0
            # Take the first alternative that exists on disk.
            for fn in alts:
                if os.path.exists(fn):
                    got_it = 1
                    self.filelist.append(fn)
                    break
            if not got_it:
                # NOTE: string.join / py2 idiom — this is Python 2 code.
                self.warn("standard file not found: should have one of " +
                          string.join(alts, ', '))
        else:
            if os.path.exists(fn):
                self.filelist.append(fn)
            else:
                self.warn("standard file '%s' not found" % fn)

    # Optional patterns produce no warning when nothing matches.
    optional = ['test/test*.py', 'setup.cfg']
    for pattern in optional:
        # py2 filter() returns a list, so truth-testing it is valid here.
        files = filter(os.path.isfile, glob(pattern))
        if files:
            self.filelist.extend(files)

    # build_py is used to get:
    #  - python modules
    #  - files defined in package_data
    build_py = self.get_finalized_command('build_py')

    # getting python files
    if self.distribution.has_pure_modules():
        self.filelist.extend(build_py.get_source_files())

    # getting package_data files
    # (computed in build_py.data_files by build_py.finalize_options)
    for pkg, src_dir, build_dir, filenames in build_py.data_files:
        for filename in filenames:
            self.filelist.append(os.path.join(src_dir, filename))

    # getting distribution.data_files
    if self.distribution.has_data_files():
        for item in self.distribution.data_files:
            if isinstance(item, str):  # plain file
                item = convert_path(item)
                if os.path.isfile(item):
                    self.filelist.append(item)
            else:  # a (dirname, filenames) tuple
                dirname, filenames = item
                for f in filenames:
                    f = convert_path(f)
                    if os.path.isfile(f):
                        self.filelist.append(f)

    # C sources for extensions / C libraries, and script sources, are
    # collected from their respective finalized build commands.
    if self.distribution.has_ext_modules():
        build_ext = self.get_finalized_command('build_ext')
        self.filelist.extend(build_ext.get_source_files())

    if self.distribution.has_c_libraries():
        build_clib = self.get_finalized_command('build_clib')
        self.filelist.extend(build_clib.get_source_files())

    if self.distribution.has_scripts():
        build_scripts = self.get_finalized_command('build_scripts')
        self.filelist.extend(build_scripts.get_source_files())
#! /usr/bin/env python3
from setuptools import setup, find_packages
from distutils.util import convert_path

# Pull __version__ out of autofocus/version.py by exec'ing it into a
# scratch namespace, so the package itself is never imported at build time.
version_ns = {}
with open(convert_path("autofocus/version.py")) as version_file:
    exec(version_file.read(), version_ns)

setup(
    name="autofocus-client-library",
    packages=find_packages(exclude=["tests"]),
    version=version_ns["__version__"],
    description="AutoFocus Client Lib",
    author="GSRT Tech",
    author_email="*****@*****.**",
    url="https://github.com/PaloAltoNetworks-BD/autofocus-client-library/",
    classifiers=["Development Status :: 4 - Beta"],
    python_requires=">=3.6",
    install_requires=['requests', 'aiohttp'],
)