def get_file(url, local, opts, config):
    # Download `url` to the `local` path. Any user-supplied base URLs
    # (opts.urls()) are tried first, then the URL itself; the first
    # downloader whose scheme prefix matches handles each candidate.
    # Raises error.general when arguments are invalid or every path fails.
    if local is None:
        raise error.general("source/patch path invalid")
    if not path.isdir(path.dirname(local)) and not opts.download_disabled():
        log.notice("Creating source directory: %s" % (os.path.relpath(path.host(path.dirname(local)))))
        log.output("making dir: %s" % (path.host(path.dirname(local))))
        # Only touch the file system when this is not a dry run.
        if not opts.dry_run():
            path.mkdir(path.dirname(local))
    if not path.exists(local) and opts.download_disabled():
        raise error.general("source not found: %s" % (path.host(local)))
    #
    # Check if a URL has been provided on the command line.
    #
    url_bases = opts.urls()
    urls = []
    if url_bases is not None:
        for base in url_bases:
            if base[-1:] != "/":
                base += "/"
            # Take only the file component of the requested URL and join
            # it onto each user-supplied base.
            url_path = urlparse.urlsplit(url)[2]
            slash = url_path.rfind("/")
            if slash < 0:
                url_file = url_path
            else:
                url_file = url_path[slash + 1:]
            urls.append(urlparse.urljoin(base, url_file))
    urls += url.split()
    log.trace("_url: %s -> %s" % (",".join(urls), local))
    for url in urls:
        for dl in downloaders:
            if url.startswith(dl):
                if downloaders[dl](url, local, config, opts):
                    return
    # Suppress the failure on a dry run; no download was attempted.
    if not opts.dry_run():
        raise error.general("downloading %s: all paths have failed, giving up" % (url))
def __init__(self, name=None, original=None, rtdir="."):
    # Macro table. With no `original`, seed a fresh 'global' map with the
    # required path macros (deriving the install prefix from this module's
    # location); otherwise deep-copy `original`'s maps. If `name` is
    # given, load that macro file on top.
    self.files = []
    self.macro_filter = re.compile(r"%{[^}]+}")
    if original is None:
        self.macros = {}
        self.read_maps = []
        self.read_map_locked = False
        self.write_map = "global"
        self.rtpath = path.abspath(path.dirname(inspect.getfile(macros)))
        # Installed layout is <prefix>/share/rtems; strip the suffix to
        # recover the prefix, else fall back to the current directory.
        if path.dirname(self.rtpath).endswith("/share/rtems"):
            self.prefix = path.dirname(self.rtpath)[: -len("/share/rtems")]
        else:
            self.prefix = "."
        self.macros["global"] = {}
        self.macros["global"]["nil"] = ("none", "none", "")
        self.macros["global"]["_cwd"] = ("dir", "required", path.abspath(os.getcwd()))
        self.macros["global"]["_prefix"] = ("dir", "required", self.prefix)
        self.macros["global"]["_rtdir"] = ("dir", "required", path.abspath(self.expand(rtdir)))
        self.macros["global"]["_rttop"] = ("dir", "required", self.prefix)
    else:
        # Copy-construct: per-entry shallow copies so later edits do not
        # alias the original's tuples.
        self.macros = {}
        for m in original.macros:
            if m not in self.macros:
                self.macros[m] = {}
            for k in original.macros[m]:
                self.macros[m][k] = copy.copy(original.macros[m][k])
        self.read_maps = sorted(copy.copy(original.read_maps))
        self.read_map_locked = copy.copy(original.read_map_locked)
        self.write_map = copy.copy(original.write_map)
    if name is not None:
        self.load(name)
def __init__(self, name = None, original = None, rtdir = '.'):
    # Macro table (single-quote twin of the variant above). With no
    # `original`, seed a fresh 'global' map; otherwise copy `original`.
    # If `name` is given, load that macro file on top.
    self.files = []
    self.macro_filter = re.compile(r'%{[^}]+}')
    if original is None:
        self.macros = {}
        self.read_maps = []
        self.read_map_locked = False
        self.write_map = 'global'
        self.rtpath = path.abspath(path.dirname(inspect.getfile(macros)))
        # Installed layout is <prefix>/share/rtems; strip the suffix to
        # recover the prefix, else fall back to the current directory.
        if path.dirname(self.rtpath).endswith('/share/rtems'):
            self.prefix = path.dirname(self.rtpath)[:-len('/share/rtems')]
        else:
            self.prefix = '.'
        self.macros['global'] = {}
        self.macros['global']['nil'] = ('none', 'none', '')
        self.macros['global']['_cwd'] = ('dir', 'required', path.abspath(os.getcwd()))
        self.macros['global']['_prefix'] = ('dir', 'required', self.prefix)
        self.macros['global']['_rtdir'] = ('dir', 'required', path.abspath(self.expand(rtdir)))
        self.macros['global']['_rttop'] = ('dir', 'required', self.prefix)
    else:
        # Copy-construct from `original` with per-entry shallow copies.
        self.macros = {}
        for m in original.macros:
            if m not in self.macros:
                self.macros[m] = {}
            for k in original.macros[m]:
                self.macros[m][k] = copy.copy(original.macros[m][k])
        self.read_maps = sorted(copy.copy(original.read_maps))
        self.read_map_locked = copy.copy(original.read_map_locked)
        self.write_map = copy.copy(original.write_map)
    if name is not None:
        self.load(name)
def get_file(url, local, opts, config):
    # Download `url` to `local`. For released packages the RTEMS release
    # sources URL is appended to the user-supplied base URLs before the
    # URL itself is tried. Raises error.general on failure.
    if local is None:
        raise error.general('source/patch path invalid')
    if not path.isdir(path.dirname(local)) and not opts.download_disabled():
        log.notice('Creating source directory: %s' % \
                       (os.path.relpath(path.host(path.dirname(local)))))
        log.output('making dir: %s' % (path.host(path.dirname(local))))
        if _do_download(opts):
            path.mkdir(path.dirname(local))
    if not path.exists(local) and opts.download_disabled():
        raise error.general('source not found: %s' % (path.host(local)))
    #
    # Check if a URL has been provided on the command line. If the package is
    # release push to the start the RTEMS URL.
    #
    url_bases = opts.urls()
    if version.released():
        rtems_release_url = config.macros.expand('%{rtems_release_url}/%{rsb_version}/sources')
        log.trace('release url: %s' % (rtems_release_url))
        #
        # If the URL being fetched is under the release path do not add the
        # sources release path because it is already there.
        #
        if not url.startswith(rtems_release_url):
            if url_bases is None:
                url_bases = [rtems_release_url]
            else:
                url_bases.append(rtems_release_url)
    urls = []
    if url_bases is not None:
        #
        # Split up the URL we are being asked to download.
        #
        url_path = urlparse.urlsplit(url)[2]
        slash = url_path.rfind('/')
        if slash < 0:
            url_file = url_path
        else:
            url_file = url_path[slash + 1:]
        log.trace('url_file: %s' %(url_file))
        for base in url_bases:
            if base[-1:] != '/':
                base += '/'
            next_url = urlparse.urljoin(base, url_file)
            log.trace('url: %s' %(next_url))
            urls.append(next_url)
    urls += url.split()
    log.trace('_url: %s -> %s' % (','.join(urls), local))
    # First downloader whose scheme prefix matches handles the candidate.
    for url in urls:
        for dl in downloaders:
            if url.startswith(dl):
                if downloaders[dl](url, local, config, opts):
                    return
    if _do_download(opts):
        raise error.general('downloading %s: all paths have failed, giving up' % (url))
def parse_url(url, pathkey, config, opts):
    # Split `url` into the parts the downloaders need (path, file, name,
    # ext), locate a local copy on the `pathkey` search path, and let the
    # first matching scheme parser annotate the result.
    # Fix: error message typo 'malforned' -> 'malformed'.
    #
    # Split the source up into the parts we need.
    #
    source = {}
    source["url"] = url
    colon = url.find(":")
    # Require a '<scheme>://' protocol prefix.
    if url[colon + 1:colon + 3] != "//":
        raise error.general("malformed URL: %s" % (url))
    source["path"] = url[:colon + 3] + path.dirname(url[colon + 3:])
    source["file"] = path.basename(url)
    source["name"], source["ext"] = path.splitext(source["file"])
    # Treat '.tar.*' as one compound extension.
    if source["name"].endswith(".tar"):
        source["name"] = source["name"][:-4]
        source["ext"] = ".tar" + source["ext"]
    #
    # Get the file. Checks the local source directory first.
    #
    source["local"] = None
    for p in config.define(pathkey).split(":"):
        local = path.join(path.abspath(p), source["file"])
        # Remember the first candidate as the default download target.
        if source["local"] is None:
            source["local_prefix"] = path.abspath(p)
            source["local"] = local
        # Prefer the first directory that already holds the file.
        if path.exists(local):
            source["local_prefix"] = path.abspath(p)
            source["local"] = local
            break
    source["script"] = ""
    for p in parsers:
        if url.startswith(p):
            source["type"] = p
            if parsers[p](source, config, opts):
                break
    return source
def __init__(self, topdir, configure):
    # Regenerate autotools files for one configure.ac: first generate the
    # BSP acinclude.m4 (bspopts), then run 'autoreconf -i --no-recursive'
    # in the directory containing `configure`.
    self.topdir = topdir
    self.configure = configure
    self.cwd = path.dirname(self.configure)
    self.bspopts()
    self.command = command(['autoreconf', '-i', '--no-recursive'], self.cwd)
    self.command.run()
def parse_url(url, pathkey, config, opts):
    # Split `url` into path/file/name/ext parts and let the first matching
    # scheme parser (which receives `pathkey`) fill in the local details.
    # Fix: error message typo 'malforned' -> 'malformed'.
    #
    # Split the source up into the parts we need.
    #
    source = {}
    source['url'] = url
    colon = url.find(':')
    # Require a '<scheme>://' protocol prefix.
    if url[colon + 1:colon + 3] != '//':
        raise error.general('malformed URL (no protocol prefix): %s' % (url))
    source['path'] = url[:colon + 3] + path.dirname(url[colon + 3:])
    source['file'] = path.basename(url)
    source['name'], source['ext'] = path.splitext(source['file'])
    # Treat '.tar.*' as one compound extension.
    if source['name'].endswith('.tar'):
        source['name'] = source['name'][:-4]
        source['ext'] = '.tar' + source['ext']
    #
    # Get the file. Checks the local source directory first.
    #
    source['local'] = None
    for p in parsers:
        if url.startswith(p):
            source['type'] = p
            if parsers[p](source, pathkey, config, opts):
                break
    source['script'] = ''
    return source
def __init__(self, argv, argopts, defaults):
    # Option/state holder for a tester-style command. Forces a fixed set
    # of extra arguments (dry run, allow download, quiet, ...) onto the
    # command line and seeds the macro defaults.
    # NOTE(review): argv[1] (not argv[0]) is used for the command path --
    # presumably the caller prepends a wrapper; confirm against callers.
    command_path = path.dirname(path.abspath(argv[1]))
    if len(command_path) == 0:
        command_path = '.'
    self.command_path = command_path
    self.command_name = path.basename(argv[0])
    extras = ['--dry-run', '--with-download', '--quiet', '--without-log',
              '--without-error-report', '--without-release-url']
    self.argv = argv
    self.args = argv[1:] + extras
    self.defaults = macros.macros(name = defaults, sbdir = command_path)
    self.load_overrides()
    self.opts = { 'params' : extras }
    self.sb_git()
    self.rtems_bsp()
    # An explicit download directory overrides both source and patch dirs.
    if argopts.download_dir is not None:
        self.defaults['_sourcedir'] = ('dir', 'optional', path.abspath(argopts.download_dir))
        self.defaults['_patchdir'] = ('dir', 'optional', path.abspath(argopts.download_dir))
def _relative_path(self, p): sbdir = None if '_sbdir' in self.macros: sbdir = path.dirname(self.expand('%{_sbdir}')) if p.startswith(sbdir): p = p[len(sbdir) + 1:] return p
def __init__(self, name = None, original = None, rtdir = '.'):
    # Macro table (older variant: no install-prefix detection; _rttop is
    # simply the parent of rtdir). With no `original`, seed a fresh
    # 'global' map; otherwise copy `original`. Load `name` if given.
    self.files = []
    self.macro_filter = re.compile(r'%{[^}]+}')
    if original is None:
        self.macros = {}
        self.read_maps = []
        self.read_map_locked = False
        self.write_map = 'global'
        self.macros['global'] = {}
        self.macros['global']['nil'] = ('none', 'none', '')
        self.macros['global']['_cwd'] = ('dir', 'required', path.abspath(os.getcwd()))
        self.macros['global']['_rtdir'] = ('dir', 'required', path.abspath(rtdir))
        self.macros['global']['_rttop'] = ('dir', 'required', path.abspath(path.dirname(rtdir)))
    else:
        # Copy-construct from `original` with per-entry shallow copies.
        self.macros = {}
        for m in original.macros:
            if m not in self.macros:
                self.macros[m] = {}
            for k in original.macros[m]:
                self.macros[m][k] = copy.copy(original.macros[m][k])
        self.read_maps = sorted(copy.copy(original.read_maps))
        self.read_map_locked = copy.copy(original.read_map_locked)
        self.write_map = copy.copy(original.write_map)
    if name is not None:
        self.load(name)
def parse_url(url, pathkey, config, opts):
    # Split `url` into path/file/name/ext parts, locate a local copy on
    # the `pathkey` search path, and let the first matching scheme parser
    # annotate the result. (Early variant: no protocol-prefix check, no
    # '.tar.*' compound-extension handling.)
    #
    # Split the source up into the parts we need.
    #
    source = {}
    source['url'] = url
    source['path'] = path.dirname(url)
    source['file'] = path.basename(url)
    source['name'], source['ext'] = path.splitext(source['file'])
    #
    # Get the file. Checks the local source directory first.
    #
    source['local'] = None
    for p in config.define(pathkey).split(':'):
        local = path.join(path.abspath(p), source['file'])
        # Remember the first candidate as the default download target.
        if source['local'] is None:
            source['local_prefix'] = path.abspath(p)
            source['local'] = local
        # Prefer the first directory that already holds the file.
        if path.exists(local):
            source['local_prefix'] = path.abspath(p)
            source['local'] = local
            break
    source['script'] = ''
    for p in parsers:
        if url.startswith(p):
            source['type'] = p
            if parsers[p](source, config, opts):
                break
    return source
def __init__(self, name=None, original=None, sbdir='.'):
    # Macro table (source-builder variant: _sbdir/_sbtop instead of
    # _rtdir/_rttop; copies from `original` are plain references, not
    # copy.copy, and read_map state is shared by assignment).
    self.files = []
    self.macro_filter = re.compile(r'%{[^}]+}')
    if original is None:
        self.macros = {}
        self.read_maps = []
        self.read_map_locked = False
        self.write_map = 'global'
        self.macros['global'] = {}
        self.macros['global']['_cwd'] = ('dir', 'required', path.abspath(os.getcwd()))
        self.macros['global']['_sbdir'] = ('dir', 'required', path.abspath(sbdir))
        self.macros['global']['_sbtop'] = ('dir', 'required', path.abspath(path.dirname(sbdir)))
    else:
        self.macros = {}
        for m in original.macros:
            if m not in self.macros:
                self.macros[m] = {}
            for k in original.macros[m]:
                self.macros[m][k] = original.macros[m][k]
        self.read_maps = sorted(original.read_maps)
        self.read_map_locked = original.read_map_locked
        self.write_map = original.write_map
    if name is not None:
        self.load(name)
def parse_url(url, pathkey, config, opts, file_override = None):
    # Split `url` into path/file/name/ext parts; `file_override` replaces
    # the file name taken from the URL (recorded in source['options']).
    # Fix: error message typo 'malforned' -> 'malformed'.
    #
    # Split the source up into the parts we need.
    #
    source = {}
    source['url'] = url
    source['options'] = []
    colon = url.find(':')
    # Require a '<scheme>://' protocol prefix.
    if url[colon + 1:colon + 3] != '//':
        raise error.general('malformed URL (no protocol prefix): %s' % (url))
    source['path'] = url[:colon + 3] + path.dirname(url[colon + 3:])
    if file_override is None:
        source['file'] = path.basename(url)
    else:
        # Reject path separators and glob characters in the override.
        bad_chars = [c for c in ['/', '\\', '?', '*'] if c in file_override]
        if len(bad_chars) > 0:
            raise error.general('bad characters in file name: %s' % (file_override))
        log.output('download: file-override: %s' % (file_override))
        source['file'] = file_override
        source['options'] += ['file-override']
    source['name'], source['ext'] = path.splitext(source['file'])
    # Treat '.tar.*' as one compound extension.
    if source['name'].endswith('.tar'):
        source['name'] = source['name'][:-4]
        source['ext'] = '.tar' + source['ext']
    #
    # Get the file. Checks the local source directory first.
    #
    source['local'] = None
    for p in parsers:
        if url.startswith(p):
            source['type'] = p
            if parsers[p](source, pathkey, config, opts):
                break
    source['script'] = ''
    return source
def get_config_files(self, config):
    #
    # Convert to shell paths and return shell paths.
    #
    # @fixme should this use a passed in set of defaults and not
    #        not the initial set of values ?
    #
    # Fix: the original used the Python 2 statement `print config`, a
    # SyntaxError under Python 3; the sibling copy of this method in the
    # tree already uses print(config).
    config = path.shell(config)
    if '*' in config or '?' in config:
        print(config)
        configdir = path.dirname(config)
        configbase = path.basename(config)
        if len(configbase) == 0:
            configbase = '*'
        if not configbase.endswith('.cfg'):
            configbase = configbase + '.cfg'
        if len(configdir) == 0:
            configdir = self.macros.expand(self.defaults['_configdir'])
        configs = []
        # Glob each directory on the ':'-separated config search path.
        for cp in configdir.split(':'):
            hostconfigdir = path.host(cp)
            for f in glob.glob(os.path.join(hostconfigdir, configbase)):
                configs += path.shell(f)
    else:
        # No wildcards: the name itself is the only candidate.
        configs = [config]
    return configs
def __init__(self, topdir, configure):
    # Run 'autoreconf -i --no-recursive' in the directory containing
    # `configure` (variant without BSP acinclude generation).
    self.topdir = topdir
    self.configure = configure
    self.cwd = path.dirname(self.configure)
    self.command = command(['autoreconf', '-i', '--no-recursive'], self.cwd)
    self.command.run()
def __init__(self, topdir, makefile):
    # Run the ampolish3 script on `makefile` to (re)generate the
    # preinstall.am next to it.
    self.topdir = topdir
    self.makefile = makefile
    self.preinstall = path.join(path.dirname(makefile), 'preinstall.am')
    self.command = command([path.join(topdir, 'ampolish3'), makefile], self.topdir)
    self.command.run()
def get_config_files(self, config):
    #
    # Convert to shell paths and return shell paths.
    #
    # @fixme should this use a passed in set of defaults and not
    #        not the initial set of values ?
    #
    config = path.shell(config)
    if '*' in config or '?' in config:
        # Wildcard name: expand it against every directory on the
        # ':'-separated config search path.
        print(config)
        configdir = path.dirname(config)
        configbase = path.basename(config)
        if len(configbase) == 0:
            configbase = '*'
        if not configbase.endswith('.cfg'):
            configbase = configbase + '.cfg'
        if len(configdir) == 0:
            configdir = self.macros.expand(self.defaults['_configdir'])
        configs = []
        for cp in configdir.split(':'):
            hostconfigdir = path.host(cp)
            for f in glob.glob(os.path.join(hostconfigdir, configbase)):
                configs += path.shell(f)
    else:
        # No wildcards: the name itself is the only candidate.
        configs = [config]
    return configs
def save_data_to_file(data, filename):
    """Write ``data`` to *filename* as tab-separated values.

    ``data`` maps a user id to a list of records; each record's first two
    fields are written as ``userId<TAB>record[0]<TAB>record[1]``, one row
    per record. Parent directories are created as needed.

    Fix: dropped the unused local ``_path = Path(filename)``.
    """
    _dir = path.dirname(filename)
    os.makedirs(_dir, exist_ok=True)
    with open(filename, 'w') as out:
        for userId in data:
            for record in data[userId]:
                out.write("{}\t{}\t{}\n".format(userId, record[0], record[1]))
def read_file(filename):
    """Read a CSV file located next to this module into a DataFrame."""
    # Build the absolute path of the file relative to this module.
    filepath = path.join(path.dirname(__file__), filename)
    # Verify the file exists before attempting to read it.
    assert_msg(path.exists(filepath), 'file not exist')
    # Read the CSV and return it: first column is the index, parsed as dates.
    return pd.read_csv(filepath, index_col=0, parse_dates=True, infer_datetime_format=True)
def _git_parser(source, config, opts):
    #
    # Symlink.
    #
    # Strip any '?arg' suffixes off the git URL; the cached clone lives
    # under <local_prefix>/git/<file> and is symlinked into place.
    us = source["url"].split("?")
    source["path"] = path.dirname(us[0])
    source["file"] = path.basename(us[0])
    source["name"], source["ext"] = path.splitext(source["file"])
    # Any '?key=value' parts become downloader arguments.
    if len(us) > 1:
        source["args"] = us[1:]
    source["local"] = path.join(source["local_prefix"], "git", source["file"])
    source["symlink"] = source["local"]
def _find(name, opts): ename = opts.defaults.expand(name) if ':' in ename: paths = path.dirname(ename).split(':') name = path.basename(name) else: paths = opts.defaults.get_value('_configdir').split(':') for p in paths: n = path.join(opts.defaults.expand(p), name) if path.exists(n): return n return None
def _git_parser(source, config, opts): # # Symlink. # us = source['url'].split('?') source['path'] = path.dirname(us[0]) source['file'] = path.basename(us[0]) source['name'], source['ext'] = path.splitext(source['file']) if len(us) > 1: source['args'] = us[1:] source['local'] = \ path.join(source['local_prefix'], 'git', source['file']) source['symlink'] = source['local']
def bspopts(self):
    # Generate acinclude.m4 with an RTEMS_CHECK_BSPDIR macro listing the
    # BSP subdirectories, but only when configure.ac actually uses
    # RTEMS_CHECK_BSPDIR.
    # Fixes: use 'with' so the file is closed even when a write fails
    # (the original leaked the handle on error), and stop shadowing the
    # builtin `dir`.
    if _grep(self.configure, 'RTEMS_CHECK_BSPDIR'):
        bsps = _collect_dirs(self.cwd, 'custom')
        acinclude = path.join(self.cwd, 'acinclude.m4')
        try:
            with open(path.host(acinclude), 'w') as b:
                b.write('# RTEMS_CHECK_BSPDIR(RTEMS_BSP_FAMILY)' + os.linesep)
                b.write('AC_DEFUN([RTEMS_CHECK_BSPDIR],' + os.linesep)
                b.write('[' + os.linesep)
                b.write(' case "$1" in' + os.linesep)
                for bs in sorted(bsps):
                    # BSP directory relative to cwd, minus the last component.
                    bsp_dir = path.dirname(bs)[len(self.cwd) + 1:]
                    bsp_dir = path.dirname(bsp_dir)
                    b.write(' %s )%s' % (bsp_dir, os.linesep))
                    b.write(' AC_CONFIG_SUBDIRS([%s]);;%s' % (bsp_dir, os.linesep))
                b.write(' *)' + os.linesep)
                b.write(' AC_MSG_ERROR([Invalid BSP]);;' + os.linesep)
                b.write(' esac' + os.linesep)
                b.write('])' + os.linesep)
        except IOError:
            raise error.general('writing: %s' % (acinclude))
def get_file(url, local, opts, config):
    # Download `url` to `local`, trying any user-supplied base URLs
    # (opts.urls()) before the URL itself. _do_download() gates all
    # file-system writes and the final failure.
    if local is None:
        raise error.general('source/patch path invalid')
    if not path.isdir(path.dirname(local)) and not opts.download_disabled():
        log.notice('Creating source directory: %s' % \
                       (os.path.relpath(path.host(path.dirname(local)))))
        log.output('making dir: %s' % (path.host(path.dirname(local))))
        if _do_download(opts):
            path.mkdir(path.dirname(local))
    if not path.exists(local) and opts.download_disabled():
        raise error.general('source not found: %s' % (path.host(local)))
    #
    # Check if a URL has been provided on the command line.
    #
    url_bases = opts.urls()
    urls = []
    if url_bases is not None:
        for base in url_bases:
            if base[-1:] != '/':
                base += '/'
            # Join just the file component of the URL onto each base.
            url_path = urlparse.urlsplit(url)[2]
            slash = url_path.rfind('/')
            if slash < 0:
                url_file = url_path
            else:
                url_file = url_path[slash + 1:]
            urls.append(urlparse.urljoin(base, url_file))
    urls += url.split()
    log.trace('_url: %s -> %s' % (','.join(urls), local))
    # First downloader whose scheme prefix matches handles each candidate.
    for url in urls:
        for dl in downloaders:
            if url.startswith(dl):
                if downloaders[dl](url, local, config, opts):
                    return
    if _do_download(opts):
        raise error.general(
            'downloading %s: all paths have failed, giving up' % (url))
def set_to_ap():
    """Switch the host to access-point mode.

    Copies the AP variants of the dhcpcd, hostapd and sysctl configuration
    files into place and reboots so they take effect.

    Fix: dropped the unused ``result = ...`` bindings (return values of
    ``run`` were never inspected).
    """
    config_path = path.join(path.dirname(__file__), "config_path")
    print("Setting up dhcpcd")
    run(["cp", f"{config_path}/dhcpcd.conf.ap", "/etc/dhcpcd.conf"])
    print("Set dhcpcd")
    # Setup hostapd call
    print("Setting up hostapd")
    run(["cp", f"{config_path}/default.hostapd.ap", "/etc/default/hostapd"])
    print("Set hostapd")
    # Setup sysctl
    print("Setting up sysctl")
    run(["cp", f"{config_path}/sysctl.conf.ap", "/etc/sysctl.conf"])
    print("Set sysctl")
    print("Rebooting")
    run("reboot")
def __init__(self, argv, argopts, defaults):
    # Option/state holder (older variant: no load_overrides()/rtems_bsp()
    # calls). Forces a fixed set of extra arguments onto the command line
    # and seeds the macro defaults.
    # NOTE(review): argv[1] (not argv[0]) is used for the command path --
    # presumably the caller prepends a wrapper; confirm against callers.
    command_path = path.dirname(path.abspath(argv[1]))
    if len(command_path) == 0:
        command_path = '.'
    self.command_path = command_path
    self.command_name = path.basename(argv[0])
    extras = ['--dry-run', '--with-download', '--quiet', '--without-log',
              '--without-error-report', '--without-release-url']
    self.argv = argv
    self.args = argv[1:] + extras
    self.defaults = macros.macros(name = defaults, sbdir = command_path)
    self.opts = { 'params' : extras }
    self.sb_git()
    # An explicit download directory overrides both source and patch dirs.
    if argopts.download_dir is not None:
        self.defaults['_sourcedir'] = ('dir', 'optional', path.abspath(argopts.download_dir))
        self.defaults['_patchdir'] = ('dir', 'optional', path.abspath(argopts.download_dir))
def parse_url(url, pathkey, config, opts, file_override=None):
    # Split `url` into path/file/name/ext parts; `file_override` replaces
    # the file name taken from the URL (recorded in source['options']).
    # Fix: error message typo 'malforned' -> 'malformed'.
    #
    # Split the source up into the parts we need.
    #
    source = {}
    source['url'] = url
    source['options'] = []
    colon = url.find(':')
    # Require a '<scheme>://' protocol prefix.
    if url[colon + 1:colon + 3] != '//':
        raise error.general('malformed URL (no protocol prefix): %s' % (url))
    source['path'] = url[:colon + 3] + path.dirname(url[colon + 3:])
    if file_override is None:
        source['file'] = path.basename(url)
    else:
        # Reject path separators and glob characters in the override.
        bad_chars = [c for c in ['/', '\\', '?', '*'] if c in file_override]
        if len(bad_chars) > 0:
            raise error.general('bad characters in file name: %s' % (file_override))
        log.output('download: file-override: %s' % (file_override))
        source['file'] = file_override
        source['options'] += ['file-override']
    source['name'], source['ext'] = path.splitext(source['file'])
    # Treat '.tar.*' as one compound extension.
    if source['name'].endswith('.tar'):
        source['name'] = source['name'][:-4]
        source['ext'] = '.tar' + source['ext']
    #
    # Get the file. Checks the local source directory first.
    #
    source['local'] = None
    for p in parsers:
        if url.startswith(p):
            source['type'] = p
            if parsers[p](source, pathkey, config, opts):
                break
    source['script'] = ''
    return source
def parse_url(url, pathkey, config, opts):
    # Split `url` into path/file/name/ext parts, locate a local copy on
    # the `pathkey` search path (hash-checking an existing file), and let
    # the first matching scheme parser annotate the result.
    # Fix: error message typo 'malforned' -> 'malformed'.
    #
    # Split the source up into the parts we need.
    #
    source = {}
    source['url'] = url
    colon = url.find(':')
    # Require a '<scheme>://' protocol prefix.
    if url[colon + 1:colon + 3] != '//':
        raise error.general('malformed URL: %s' % (url))
    source['path'] = url[:colon + 3] + path.dirname(url[colon + 3:])
    source['file'] = path.basename(url)
    source['name'], source['ext'] = path.splitext(source['file'])
    # Treat '.tar.*' as one compound extension.
    if source['name'].endswith('.tar'):
        source['name'] = source['name'][:-4]
        source['ext'] = '.tar' + source['ext']
    #
    # Get the file. Checks the local source directory first.
    #
    source['local'] = None
    for p in config.define(pathkey).split(':'):
        local = path.join(path.abspath(p), source['file'])
        # Remember the first candidate as the default download target.
        if source['local'] is None:
            source['local_prefix'] = path.abspath(p)
            source['local'] = local
        # Prefer the first directory that already holds the file and
        # verify its checksum against the configured hashes.
        if path.exists(local):
            source['local_prefix'] = path.abspath(p)
            source['local'] = local
            _hash_check(source['file'], local, config.macros)
            break
    source['script'] = ''
    for p in parsers:
        if url.startswith(p):
            source['type'] = p
            if parsers[p](source, config, opts):
                break
    return source
def get_file(url, local, opts, config):
    # Download `url` to `local`. For released packages the RTEMS release
    # sources URL is pushed onto the base-URL list unless disabled or
    # overridden via --with-release-url / --without-release-url.
    if local is None:
        raise error.general('source/patch path invalid')
    if not path.isdir(path.dirname(local)) and not opts.download_disabled():
        log.notice('Creating source directory: %s' % \
                       (os.path.relpath(path.host(path.dirname(local)))))
        log.output('making dir: %s' % (path.host(path.dirname(local))))
        if _do_download(opts):
            path.mkdir(path.dirname(local))
    if not path.exists(local) and opts.download_disabled():
        raise error.general('source not found: %s' % (path.host(local)))
    #
    # Check if a URL has been provided on the command line. If the package is
    # released push to the start the RTEMS URL unless overrided by the command
    # line option --with-release-url. The variant --without-release-url can
    # override the released check.
    #
    url_bases = opts.urls()
    try:
        rtems_release_url_value = config.macros.expand('%{rtems_release_url}/%{rsb_version}/sources')
    # NOTE(review): bare except looks deliberate -- expansion failure of the
    # release macros is treated as "no release URL available".
    except:
        rtems_release_url_value = None
        log.output('RTEMS release URL could not be expanded')
    rtems_release_url = None
    if version.released() and rtems_release_url_value:
        rtems_release_url = rtems_release_url_value
    with_rel_url = opts.with_arg('release-url')
    # A config-level 'without_release_url' define acts as the default when
    # no command line option was given.
    if with_rel_url[1] == 'not-found':
        if config.defined('without_release_url'):
            with_rel_url = ('without_release-url', 'yes')
    if with_rel_url[0] == 'with_release-url':
        if with_rel_url[1] == 'yes':
            if rtems_release_url_value is None:
                raise error.general('no valid release URL')
            rtems_release_url = rtems_release_url_value
        elif with_rel_url[1] == 'no':
            pass
        else:
            # --with-release-url=<url> supplies an explicit URL.
            rtems_release_url = with_rel_url[1]
    elif with_rel_url[0] == 'without_release-url' and with_rel_url[1] == 'yes':
        rtems_release_url = None
    if rtems_release_url is not None:
        log.trace('release url: %s' % (rtems_release_url))
        #
        # If the URL being fetched is under the release path do not add the
        # sources release path because it is already there.
        #
        if not url.startswith(rtems_release_url):
            if url_bases is None:
                url_bases = [rtems_release_url]
            else:
                url_bases.append(rtems_release_url)
    urls = []
    if url_bases is not None:
        #
        # Split up the URL we are being asked to download.
        #
        url_path = urllib_parse.urlsplit(url)[2]
        slash = url_path.rfind('/')
        if slash < 0:
            url_file = url_path
        else:
            url_file = url_path[slash + 1:]
        log.trace('url_file: %s' %(url_file))
        for base in url_bases:
            if base[-1:] != '/':
                base += '/'
            next_url = urllib_parse.urljoin(base, url_file)
            log.trace('url: %s' %(next_url))
            urls.append(next_url)
    urls += url.split()
    log.trace('_url: %s -> %s' % (','.join(urls), local))
    # First downloader whose scheme prefix matches handles each candidate.
    for url in urls:
        for dl in downloaders:
            if url.startswith(dl):
                if downloaders[dl](url, local, config, opts):
                    return
    if _do_download(opts):
        raise error.general('downloading %s: all paths have failed, giving up' % (url))
if _email is not None: if _name is not None: _email = '%s <%s>' % (_name, _email) return _email return None def head(self): hash = '' ec, output = self._run(['log', '-n', '1']) if ec == 0: l1 = output.split('\n')[0] if l1.startswith('commit '): hash = l1[len('commit '):] return hash if __name__ == '__main__': import os.path import sys defaults = path.join(path.dirname(path.dirname(path.shell(sys.argv[0]))), 'defaults.mc') opts = options.load(sys.argv, defaults = defaults) g = repo('.', opts) print('g.git_version():', g.git_version()) print('g.valid():', g.valid()) print('g.submodules():', g.submodules()) print('g.status():', g.status()) print('g.status():', g.status(True)) print('g.dirty():', g.dirty()) print('g.remotes():', g.remotes()) print('g.email():', g.email()) print('g.head():', g.head())
# will be used to describe database
from sqlalchemy import (create_engine, Column, String, Integer, Boolean,
                        Table, ForeignKey)
# FIX: sqlalchemy.orm exposes `relationship`, not `relationships` -- the
# original import raised ImportError as soon as this module was loaded.
from sqlalchemy.orm import sessionmaker, relationship
from sqlalchemy.ext.declarative import declarative_base

database_filename = 'twitter.sqlite3'
# NOTE(review): `diretory` is a typo for `directory`; the name is kept so
# any external code importing it keeps working. `path` is assumed to come
# from an `from os import path` earlier in the file -- confirm.
diretory = path.abspath(path.dirname(__file__))
database_filepath = path.join(diretory, database_filename)

# create engine using file path
engine_url = 'sqlite:///{}'.format(database_filepath)
engine = create_engine(engine_url)

# the database class objects are going to inherit from
# this class
Base = declarative_base(bind=engine)

# create a configured "Session" class
Session = sessionmaker(bind=engine, autoflush=False)
# create a session
def _top():
    # Directory containing the invoked script, defaulting to the current
    # directory when argv[0] has no directory component.
    top = path.dirname(sys.argv[0])
    return top if len(top) else '.'
import os
import path
import numpy as np
from sklearn.svm import SVC
from datetime import datetime, timedelta
import sys
# FIX: the original had a dangling `from os import` with no names, which
# is a SyntaxError; `os` is already imported above, so the line is dropped.

if __package__ is None:
    # Running as a script: make the package parents importable directly.
    sys.path.append(path.dirname(path.dirname(__file__)))
    sys.path.append(path.dirname(path.dirname(path.dirname(path.dirname(__file__)))))
    # from mlp.find_n_and_factors_for_hs300_components import find_n_and_factors_for_hs300_components
    from find_n_and_factors_for_hs300_components import predict_history
else:
    from ..find_n_and_factors_for_hs300_components.find_n_and_factors_for_hs300_components import predict_history


def test_predict_history():
    # TA-Lib style factor list: (indicator name, keyword arguments).
    ta_factors = [
        ('STOCHF', {}),
        ('AD', {}),
        ('RMI', {}),
        ('MINUS_DI', {'timeperiod': 28}),
        ('MINUS_DI', {'timeperiod': 50})
    ]
    security_id = '000009'
    start_date = '2015-06-01'
    # Yesterday, as an ISO date string.
    end_date = datetime.now() - timedelta(days=1)
    end_date = str(end_date.date())
    # NOTE(review): the visible source ends here -- predict_history is
    # imported but never called; the function body appears truncated.
def load(self, name):
    # Load (and recursively %include) a configuration file. The file name
    # is macro-expanded, given a '.cfg' extension if needed, and searched
    # for on the '_configdir' path when not directly reachable.

    def common_end(left, right):
        # Longest common suffix of the two strings.
        end = ''
        while len(left) and len(right):
            if left[-1] != right[-1]:
                return end
            end = left[-1] + end
            left = left[:-1]
            right = right[:-1]
        return end

    # Reset parse state only at the outermost load; nested %include loads
    # keep the current state and restore name/lc on exit.
    if self.load_depth == 0:
        self.in_error = False
        self.lc = 0
        self.name = name
        self.conditionals = {}
    self.load_depth += 1
    save_name = self.name
    save_lc = self.lc
    self.name = name
    self.lc = 0
    #
    # Locate the config file. Expand any macros then add the
    # extension. Check if the file exists, therefore directly
    # referenced. If not see if the file contains ':' or the path
    # separator. If it does split the path else use the standard config dir
    # path in the defaults.
    #
    exname = self.expand(name)
    #
    # Macro could add an extension.
    #
    if exname.endswith('.cfg'):
        configname = exname
    else:
        configname = '%s.cfg' % (exname)
        name = '%s.cfg' % (name)
    if ':' in configname:
        cfgname = path.basename(configname)
    else:
        cfgname = common_end(configname, name)
    if not path.exists(configname):
        if ':' in configname:
            configdirs = path.dirname(configname).split(':')
        else:
            configdirs = self.define('_configdir').split(':')
        for cp in configdirs:
            configname = path.join(path.abspath(cp), cfgname)
            if path.exists(configname):
                break
            configname = None
        if configname is None:
            raise error.general('no config file found: %s' % (cfgname))
    try:
        log.trace('config: %s: _open: %s' % (self.init_name, path.host(configname)))
        config = open(path.host(configname), 'r')
    except IOError as err:
        raise error.general('error opening config file: %s' % (path.host(configname)))
    self.configpath += [configname]
    self._includes += [configname]
    try:
        dir = None
        info = None
        data = []
        # Parse until the '%end' control marker.
        while True:
            r = self._parse(config, dir, info)
            if r[0] == 'control':
                if r[1] == '%end':
                    break
                log.warning("unexpected '%s'" % (r[1]))
            elif r[0] == 'directive':
                if r[1] == '%include':
                    self.load(r[2][0])
                    continue
                dir, info, data = self._process_directive(r, dir, info, data)
            elif r[0] == 'data':
                dir, info, data = self._process_data(r, dir, info, data)
            else:
                self._error("%d: invalid parse state: '%s" % (self.lc, r[0]))
        # Flush any directive still being accumulated at EOF.
        if dir is not None:
            self._directive_extend(dir, data)
    except:
        # Close on any failure, then re-raise unchanged.
        config.close()
        raise
    config.close()
    self.name = save_name
    self.lc = save_lc
    self.load_depth -= 1
def load(self, name):
    # Load (and recursively %include) a configuration file; near-duplicate
    # of the sibling variant above. The file name is macro-expanded, given
    # a '.cfg' extension if needed, and searched for on the '_configdir'
    # path when not directly reachable.

    def common_end(left, right):
        # Longest common suffix of the two strings.
        end = ''
        while len(left) and len(right):
            if left[-1] != right[-1]:
                return end
            end = left[-1] + end
            left = left[:-1]
            right = right[:-1]
        return end

    # Reset parse state only at the outermost load; nested %include loads
    # keep the current state and restore name/lc on exit.
    if self.load_depth == 0:
        self.in_error = False
        self.lc = 0
        self.name = name
        self.conditionals = {}
    self.load_depth += 1
    save_name = self.name
    save_lc = self.lc
    self.name = name
    self.lc = 0
    #
    # Locate the config file. Expand any macros then add the
    # extension. Check if the file exists, therefore directly
    # referenced. If not see if the file contains ':' or the path
    # separator. If it does split the path else use the standard config dir
    # path in the defaults.
    #
    exname = self.expand(name)
    #
    # Macro could add an extension.
    #
    if exname.endswith('.cfg'):
        configname = exname
    else:
        configname = '%s.cfg' % (exname)
        name = '%s.cfg' % (name)
    if ':' in configname:
        cfgname = path.basename(configname)
    else:
        cfgname = common_end(configname, name)
    if not path.exists(configname):
        if ':' in configname:
            configdirs = path.dirname(configname).split(':')
        else:
            configdirs = self.define('_configdir').split(':')
        for cp in configdirs:
            configname = path.join(path.abspath(cp), cfgname)
            if path.exists(configname):
                break
            configname = None
        if configname is None:
            raise error.general('no config file found: %s' % (cfgname))
    try:
        log.trace('config: %s: _open: %s' % (self.init_name, path.host(configname)))
        config = open(path.host(configname), 'r')
    except IOError as err:
        raise error.general('error opening config file: %s' % (path.host(configname)))
    self.configpath += [configname]
    self._includes += [configname]
    try:
        dir = None
        info = None
        data = []
        # Parse until the '%end' control marker.
        while True:
            r = self._parse(config, dir, info)
            if r[0] == 'control':
                if r[1] == '%end':
                    break
                log.warning("unexpected '%s'" % (r[1]))
            elif r[0] == 'directive':
                if r[1] == '%include':
                    self.load(r[2][0])
                    continue
                dir, info, data = self._process_directive(r, dir, info, data)
            elif r[0] == 'data':
                dir, info, data = self._process_data(r, dir, info, data)
            else:
                self._error("%d: invalid parse state: '%s" % (self.lc, r[0]))
        # Flush any directive still being accumulated at EOF.
        if dir is not None:
            self._directive_extend(dir, data)
    except:
        # Close on any failure, then re-raise unchanged.
        config.close()
        raise
    config.close()
    self.name = save_name
    self.lc = save_lc
    self.load_depth -= 1
$NetBSD$ Pick up `cvss' module from the parent directory in order to properly run the tests without having the cvss module installed. --- tests/test_cvss2.py.orig 2017-01-11 09:11:40.000000000 +0000 +++ tests/test_cvss2.py @@ -2,6 +2,8 @@ from os import path import sys import unittest +sys.path.insert(0, path.dirname(path.dirname(path.abspath(__file__)))) + from cvss import CVSS2 from cvss.exceptions import CVSS2MalformedError, CVSS2MandatoryError, CVSS2RHScoreDoesNotMatch, \ CVSS2RHMalformedError
def load(self, name):
    """Locate and open the configuration file ``name`` (visible portion).

    Macros in the name are expanded and a ``.cfg`` extension added when
    missing; the configured directories are searched when the file is
    not directly referenced. Raises ``error.general`` when the file
    cannot be found or opened.
    """

    def common_end(left, right):
        # Longest common suffix of the two strings.
        end = ''
        while len(left) and len(right):
            if left[-1] != right[-1]:
                return end
            end = left[-1] + end
            left = left[:-1]
            right = right[:-1]
        return end

    if self.load_depth == 0:
        # Top level load: reset parser state and create the default
        # package keyed by the current architecture.
        self._reset(name)
        self._packages[self.package] = package(self.package,
                                               self.define('%{_arch}'),
                                               self)

    self.load_depth += 1

    # Save the current file context so nested loads can restore it.
    save_name = self.name
    save_lc = self.lc

    #
    # Locate the config file. Expand any macros then add the
    # extension. Check if the file exists, therefore directly
    # referenced. If not see if the file contains ':' or the path
    # separator. If it does split the path else use the standard config dir
    # path in the defaults.
    #
    exname = self.expand(name)

    #
    # Macro could add an extension.
    #
    if exname.endswith('.cfg'):
        configname = exname
    else:
        configname = '%s.cfg' % (exname)
        name = '%s.cfg' % (name)

    if ':' in configname:
        cfgname = path.basename(configname)
    else:
        cfgname = common_end(configname, name)

    if not path.exists(configname):
        if ':' in configname:
            configdirs = path.dirname(configname).split(':')
        else:
            configdirs = self.define('_configdir').split(':')
        for cp in configdirs:
            configname = path.join(path.abspath(cp), cfgname)
            if path.exists(configname):
                break
            configname = None
        if configname is None:
            raise error.general('no config file found: %s' % (cfgname))

    try:
        log.trace('config: %s: _open: %s' % (self.name,
                                             path.host(configname)))
        config = open(path.host(configname), 'r')
    except IOError as err:
        # Fix: 'except IOError, err:' is Python 2 only syntax; 'as'
        # works on Python 2.6+ and Python 3.
        raise error.general('error opening config file: %s' % \
                            (path.host(configname)))
#!/usr/bin/env python
"""Dealer -- SCM watcher tool.

Get current revision and send update notify.
"""
import sys
from os import path as op

from setuptools import setup, find_packages


def read(fname):
    """Return the contents of *fname* next to this file, or '' if missing."""
    try:
        return open(op.join(op.dirname(__file__), fname)).read()
    except IOError:
        return ''


NAME = 'dealer'
CURDIR = op.dirname(__file__)
MODULE = __import__(NAME)
README = op.join(CURDIR, 'README.rst')
REQUIREMENTS = open(op.join(CURDIR, 'requirements.txt')).readlines()

if sys.version_info < (2, 7):
    # importlib is only in the standard library from 2.7 on.
    REQUIREMENTS.append('importlib')

setup(
    name=NAME,
    version=MODULE.__version__,
    license=MODULE.__license__,
    author=MODULE.__author__,
    description=read('DESCRIPTION'),
    long_description=read('README.rst'),
    # Fix: ('Any') is a parenthesised str, not a tuple; a plain string
    # is the intended value.
    platforms='Any',
    keywords="mercurial git static revision django flask".split(),
    author_email='*****@*****.**',
    # Fix: the URL carried a stray leading space inside the string.
    url='http://github.com/klen/dealer',
    classifiers=[
        'Development Status :: 4 - Beta',
        'Intended Audience :: Developers',
        'Intended Audience :: System Administrators',
        'Natural Language :: Russian',
        'Natural Language :: English',
        'License :: OSI Approved :: BSD License',
        'Operating System :: OS Independent',
        'Programming Language :: Python',
        'Programming Language :: Python :: 3.3',
        'Environment :: Console',
    ],
    packages=['dealer', 'dealer.contrib', 'dealer.contrib.django'],
    install_requires=REQUIREMENTS,
)
from setuptools import setup, find_packages
from codecs import open
from os import path

# Directory containing this setup script; README.md lives beside it.
here = path.abspath(path.dirname(__file__))

# The README is used verbatim as the long description on PyPI.
with open(path.join(here, "README.md"), encoding="utf-8") as readme:
    long_description = readme.read()

# Trove classifiers describing maturity, license and supported Pythons.
CLASSIFIERS = [
    'Development Status :: 4 - Beta',
    'License :: OSI Approved :: MIT License',
    'Programming Language :: Python :: 2',
    'Programming Language :: Python :: 2.7',
]

# Runtime dependencies.
REQUIRES = [
    'requests>=2.7.0',
    'click>=4.0.0',
]

setup(
    name="fritzhome",
    version="1.0.0",
    description="Query information from your FRITZ!Box (mostly energy)",
    long_description=long_description,
    url="https://github.com/DerMitch/fritzbox-smarthome",
    author="Michael Mayr",
    author_email="*****@*****.**",
    license="MIT",
    classifiers=CLASSIFIERS,
    keywords="fritzbox smarthome avm energy",
    packages=["fritzhome"],
    install_requires=REQUIRES,
    entry_points={
        'console_scripts': [
            'fritzhome=fritzhome.__main__:cli',
        ],
    },
)
def load(self, name):
    """Locate and open the configuration file ``name`` (visible portion).

    On a top level load the parser state is reset and the default
    'main' package created. Macros in the name are expanded and a
    ``.cfg`` extension added when missing; the configured directories
    are searched when the file is not directly referenced. Raises
    ``error.general`` when the file cannot be found or opened.
    """

    def common_end(left, right):
        # Longest common suffix of the two strings.
        end = ''
        while len(left) and len(right):
            if left[-1] != right[-1]:
                return end
            end = left[-1] + end
            left = left[:-1]
            right = right[:-1]
        return end

    if self.load_depth == 0:
        # Top level load: reset error/position state and create the
        # default package keyed by the current architecture.
        self.in_error = False
        self.lc = 0
        self.name = name
        self.conditionals = {}
        self._packages = {}
        self.package = 'main'
        self._packages[self.package] = package(self.package,
                                               self.define('%{_arch}'),
                                               self)

    self.load_depth += 1

    # Save the current file context so nested loads can restore it.
    save_name = self.name
    save_lc = self.lc

    #
    # Locate the config file. Expand any macros then add the
    # extension. Check if the file exists, therefore directly
    # referenced. If not see if the file contains ':' or the path
    # separator. If it does split the path else use the standard config dir
    # path in the defaults.
    #
    exname = self.expand(name)

    #
    # Macro could add an extension.
    #
    if exname.endswith('.cfg'):
        configname = exname
    else:
        configname = '%s.cfg' % (exname)
        name = '%s.cfg' % (name)

    if ':' in configname:
        cfgname = path.basename(configname)
    else:
        cfgname = common_end(configname, name)

    if not path.exists(configname):
        if ':' in configname:
            configdirs = path.dirname(configname).split(':')
        else:
            configdirs = self.define('_configdir').split(':')
        for cp in configdirs:
            configname = path.join(path.abspath(cp), cfgname)
            if path.exists(configname):
                break
            configname = None
        if configname is None:
            raise error.general('no config file found: %s' % (cfgname))

    try:
        log.trace('config: %s: _open: %s' % (self.name,
                                             path.host(configname)))
        config = open(path.host(configname), 'r')
    except IOError as err:
        # Fix: 'except IOError, err:' is Python 2 only syntax; 'as'
        # works on Python 2.6+ and Python 3.
        raise error.general('error opening config file: %s' % \
                            (path.host(configname)))
def load(args, optargs = None, defaults = '%{_sbdir}/defaults.mc'):
    """
    Copy the defaults, get the host specific values and merge them
    overriding any matching defaults, then create an options object to
    handle the command line merging in any command line overrides. Finally
    post process the command line.
    """

    global host_windows

    #
    # The path to this command.
    #
    command_path = path.dirname(args[0])
    if len(command_path) == 0:
        command_path = '.'

    #
    # The command line contains the base defaults object all build objects copy
    # and modify by loading a configuration.
    #
    o = command_line(args,
                     optargs,
                     macros.macros(name = defaults, sbdir = command_path),
                     command_path)

    overrides = None
    if os.name == 'nt':
        try:
            import windows
            overrides = windows.load()
            host_windows = True
        except Exception:
            # Fix: a bare 'except:' also swallowed SystemExit and
            # KeyboardInterrupt; Exception keeps the same behaviour for
            # real errors without masking interrupts.
            raise error.general('failed to load Windows host support')
    elif os.name == 'posix':
        uname = os.uname()
        try:
            # Select the host support module from the uname sysname.
            if uname[0].startswith('CYGWIN_NT'):
                import windows
                overrides = windows.load()
            elif uname[0] == 'Darwin':
                import darwin
                overrides = darwin.load()
            elif uname[0] == 'FreeBSD':
                import freebsd
                overrides = freebsd.load()
            elif uname[0] == 'NetBSD':
                import netbsd
                overrides = netbsd.load()
            elif uname[0] == 'Linux':
                import linux
                overrides = linux.load()
            elif uname[0] == 'SunOS':
                import solaris
                overrides = solaris.load()
        except Exception:
            # Fix: narrowed from a bare 'except:' (see above).
            raise error.general('failed to load %s host support' % (uname[0]))
    else:
        raise error.general('unsupported host type; please add')
    if overrides is None:
        raise error.general('no hosts defaults found; please add')

    # Host specific values override the matching defaults.
    for k in overrides:
        o.defaults[k] = overrides[k]

    o.sb_git()
    o.process()
    o.post_process()

    return o
            # (fragment) tail of a method that begins before this chunk;
            # NOTE(review): indentation reconstructed — presumably the
            # two returns sit at different nesting levels, with the
            # formatted "Name <email>" returned only when both parts
            # were found. Confirm against the full source.
            _email = '%s <%s>' % (_name, _email)
            return _email
        return None

    def head(self):
        # Return the hash of the current HEAD commit, or '' when the
        # underlying 'git log -n 1' command fails or has no commit line.
        hash = ''
        ec, output = self._run(['log', '-n', '1'])
        if ec == 0:
            # First line of 'git log' output is 'commit <hash>'.
            l1 = output.split('\n')[0]
            if l1.startswith('commit '):
                hash = l1[len('commit '):]
        return hash

if __name__ == '__main__':
    # Ad-hoc smoke test: exercise the repo wrapper against the current
    # working directory and print each query's result.
    import os.path
    import sys
    defaults = path.join(path.dirname(path.dirname(path.shell(sys.argv[0]))),
                         'defaults.mc')
    opts = options.load(sys.argv, defaults=defaults)
    g = repo('.', opts)
    print('g.git_version():', g.git_version())
    print('g.valid():', g.valid())
    print('g.submodules():', g.submodules())
    print('g.status():', g.status())
    print('g.status():', g.status(True))
    print('g.dirty():', g.dirty())
    print('g.remotes():', g.remotes())
    print('g.email():', g.email())
    print('g.head():', g.head())
"""Setup.""" import path from setuptools import setup, find_packages base_dir = path.abspath(path.dirname(__file__)) with open(path.join(base_dir, 'README.md')) as desc: long_description = desc.read() with open(path.join(base_dir, 'LICENSE')) as desc: license = desc.read() with open(path.join(base_dir, 'version.py')) as version: exec(version.read()) setup( name='angelos_lab', version=__version__, # noqa F821 description='A laboratory environment for the Angelos project.', long_description=long_description, author=__author__, # noqa F821 author_email=__author_email__, # noqa F821 url=__url__, # noqa F821 license=license, packages=find_packages(exclude=('tests', 'docs')))
import random
import os
import path  # NOTE(review): probably meant 'from os import path'; a bare 'path' module is not stdlib — confirm

# Window geometry and frame rate.
WIDTH = 480
HEIGHT = 600
FPS = 30

#define colours
# NOTE(review): 225 looks like a typo for 255 (full-intensity channel) in
# WHITE/RED/GREEN/BLUE — confirm intended palette.
WHITE = (225, 225, 225)
BLACK = (0, 0, 0)
RED = (225, 0, 0)
GREEN = (0, 225, 0)
BLUE = (0, 0, 225)

#set up image folder
img_folder = path.join(path.dirname(__file__), 'img')

# NOTE(review): pygame is used below but never imported in this chunk —
# presumably imported elsewhere in the file.
class Player(pygame.sprite.Sprite):
    #sprite for player
    def __init__(self):
        #initialises sprite
        pygame.sprite.Sprite.__init__(self)
        # player_img is defined elsewhere in the file.
        self.image = player_img
        self.image.set_colorkey(BLACK)
        self.rect = self.image.get_rect()
        #rect = rectangle = sprite
        self.rect.centerx = (WIDTH/2)
        #centers sprite
        self.rect.bottom=HEIGHT-10
        self.speedx=0

    def shoot(self):
        # NOTE(review): method appears truncated at this chunk boundary;
        # the created Bullet is neither stored nor added to a group here.
        bullet = Bullet (self.rect.centerx, self.rect.top)
$NetBSD: patch-setup.py,v 1.1 2017/08/25 20:01:34 joerg Exp $ --- setup.py.orig 2017-08-25 16:01:40.820727909 +0000 +++ setup.py @@ -21,7 +21,7 @@ from os import path from setuptools import setup init_path = path.join(path.dirname(__file__), 'cairosvg', '__init__.py') -with open(init_path, 'r', encoding='utf-8') as fd: +with open(init_path, 'r') as fd: version = re.search("__version__ = '([^']+)'", fd.read().strip()).group(1) needs_pytest = {'pytest', 'test', 'ptr'}.intersection(sys.argv)
def _at():
    """Return the directory that contains this source file."""
    here = path.dirname(__file__)
    return here
def load(args, optargs=None, defaults='%{_sbdir}/defaults.mc', logfile=True):
    """
    Copy the defaults, get the host specific values and merge them
    overriding any matching defaults, then create an options object to
    handle the command line merging in any command line overrides. Finally
    post process the command line.
    """

    global host_windows
    global host_posix

    #
    # The path to this command.
    #
    command_path = path.dirname(args[0])
    if len(command_path) == 0:
        command_path = '.'

    #
    # The command line contains the base defaults object all build objects copy
    # and modify by loading a configuration.
    #
    o = command_line(args,
                     optargs,
                     macros.macros(name=defaults, sbdir=command_path),
                     command_path)

    overrides = None
    if os.name == 'nt':
        try:
            import windows
            overrides = windows.load()
            host_windows = True
            host_posix = False
        except Exception:
            # Fix: a bare 'except:' also swallowed SystemExit and
            # KeyboardInterrupt; Exception keeps the same behaviour for
            # real errors without masking interrupts.
            raise error.general('failed to load Windows host support')
    elif os.name == 'posix':
        uname = os.uname()
        try:
            # Select the host support module from the uname sysname.
            if uname[0].startswith('MINGW64_NT'):
                import windows
                overrides = windows.load()
                host_windows = True
            elif uname[0].startswith('CYGWIN_NT'):
                import windows
                overrides = windows.load()
            elif uname[0] == 'Darwin':
                import darwin
                overrides = darwin.load()
            elif uname[0] == 'FreeBSD':
                import freebsd
                overrides = freebsd.load()
            elif uname[0] == 'NetBSD':
                import netbsd
                overrides = netbsd.load()
            elif uname[0] == 'Linux':
                import linux
                overrides = linux.load()
            elif uname[0] == 'SunOS':
                import solaris
                overrides = solaris.load()
        except error.general as ge:
            # Preserve the detailed message from the host module.
            raise error.general('failed to load %s host support: %s' % (uname[0],
                                                                        ge))
        except Exception:
            # Fix: narrowed from a bare 'except:' (see above).
            raise error.general('failed to load %s host support' % (uname[0]))
    else:
        raise error.general('unsupported host type; please add')
    if overrides is None:
        raise error.general('no hosts defaults found; please add')

    # Host specific values override the matching defaults.
    for k in overrides:
        o.defaults[k] = overrides[k]

    o.sb_released()
    o.sb_git()
    o.rtems_options()
    o.pre_process()
    o.process()
    o.post_process(logfile)

    #
    # Load the release settings
    #
    version.load_release_settings(o.defaults)

    return o