def __init__(self,
             base_directory: Path,
             base_source_url_or_directory: str = "http://www.openslr.org/resources/12/",
             corpus_names: Iterable[str] = ("dev-clean", "dev-other", "test-clean", "test-other",
                                            "train-clean-100", "train-clean-360", "train-other-500"),
             tar_gz_extension: str = ".tar.gz",
             mel_frequency_count: int = 128,
             root_compressed_directory_name_to_skip: Optional[str] = "LibriSpeech/",
             subdirectory_depth: int = 3,
             allowed_characters: List[str] = frequent_characters_in_english,
             tags_to_ignore: Iterable[str] = list(),
             id_filter_regex=re.compile('[\s\S]*'),
             training_test_split: Callable[[List[LabeledExample]], Tuple[
                 List[LabeledExample], List[LabeledExample]]] = TrainingTestSplit.randomly_by_directory(.9)):
    """Download (if necessary) and index a LibriSpeech-style corpus.

    Downloads/unpacks each corpus archive under ``base_directory``, walks
    ``subdirectory_depth`` levels of directories, pairs audio files (.flac/.wav)
    with their transcription labels, and builds ``self.examples``.

    NOTE(review): ``training_test_split`` is stored nowhere in this method and
    never used in the visible body — presumably consumed elsewhere; confirm.
    """
    self.id_filter_regex = id_filter_regex
    self.tags_to_ignore = tags_to_ignore
    self.allowed_characters = allowed_characters
    self.subdirectory_depth = subdirectory_depth
    self.root_compressed_directory_name_to_skip = root_compressed_directory_name_to_skip
    self.base_directory = base_directory
    self.base_url_or_directory = base_source_url_or_directory
    self.tar_gz_extension = tar_gz_extension
    self.mel_frequency_count = mel_frequency_count
    self.corpus_names = corpus_names
    mkdir(base_directory)
    # One directory per corpus; archives are fetched/unpacked only once.
    self.corpus_directories = [self._download_and_unpack_if_not_yet_done(corpus_name=corpus_name)
                               for corpus_name in corpus_names]
    # Descend a fixed number of directory levels to reach the leaf dirs
    # that hold the audio and label files.
    directories = self.corpus_directories
    for i in range(self.subdirectory_depth):
        directories = [subdirectory
                       for directory in directories
                       for subdirectory in directory.iterdir()
                       if subdirectory.is_dir()]
    self.files = [file
                  for directory in directories
                  for file in directory.iterdir()
                  if file.is_file()]
    self.unfiltered_audio_files = [file for file in self.files
                                   if (file.name.endswith(".flac") or file.name.endswith(".wav"))]
    # Keep only audio whose id (file name sans extension) matches the filter.
    audio_files = [file for file in self.unfiltered_audio_files
                   if self.id_filter_regex.match(name_without_extension(file))]
    self.filtered_out_count = len(self.unfiltered_audio_files) - len(audio_files)
    labels_with_tags_by_id = self._extract_labels_by_id(self.files)
    # Record mismatches between audio files and labels for diagnostics.
    found_audio_ids = set(name_without_extension(f) for f in audio_files)
    found_label_ids = labels_with_tags_by_id.keys()
    self.audio_ids_without_label = list(found_audio_ids - found_label_ids)
    self.label_ids_without_audio = list(found_label_ids - found_audio_ids)

    def example(audio_file: Path) -> LabeledExample:
        # Labels are resolved lazily by id, with ignored tags stripped.
        return LabeledExample(audio_file,
                              label_from_id=lambda id: self._remove_tags_to_ignore(
                                  labels_with_tags_by_id[id]),
                              mel_frequency_count=self.mel_frequency_count,
                              original_label_with_tags_from_id=lambda id: labels_with_tags_by_id[id])

    # Only audio files with a matching label become examples, sorted by id.
    self.examples = sorted(
        [example(file) for file in audio_files
         if name_without_extension(file) in labels_with_tags_by_id.keys()],
        key=lambda x: x.id)
    self.examples_by_id = dict([(e.id, e) for e in self.examples])
def link_bin(options):
    """Mirror the module's bin/*.py scripts into module_bin/<name>."""
    dest = os.path.join("module_bin", options.name)
    tools.mkdir(dest, clean=False)
    # Drop stale links before relinking.
    stale = tools.get_glob([os.path.join(dest, "*.py")])
    for entry in stale:
        os.unlink(entry)
    src = os.path.join(options.source, "modules", options.name, "bin")
    tools.link_dir(src, dest, clean=False, match=["*.py"])
def run(RUN_TRAIN, RUN_TEST, RUN_TRAIN2, RUN_TEST2, RUN_SAVE):
    """Run the selected pipeline stages, in a fixed order."""
    tools.mkdir()
    stages = [
        (RUN_TRAIN, lambda: trainer()),
        (RUN_TEST, lambda: tester()),
        (RUN_TRAIN2, lambda: trainer(type_=2)),
        (RUN_TEST2, lambda: tester(type_=2)),
        (RUN_SAVE, lambda: tools.saver()),
    ]
    for enabled, stage in stages:
        if enabled:
            stage()
def link_py_apps(path):
    """Symlink the Python applications found under *path* into ./bin."""
    target = "bin"
    tools.mkdir(target, clean=False)
    tools.link_dir(path, target, clean=False, match=["*"],
                   filt=tools.filter_pyapps)
def main(argv=sys.argv):
    """Initialise the application from a config file.

    Usage: <prog> <config_uri>. Creates the static-files directory, runs the
    alembic migration for the Ziggurat tables (via a temporary alembic.ini),
    then creates the schema and inserts seed data.
    """
    if len(argv) != 2:
        usage(argv)
    config_uri = argv[1]
    setup_logging(config_uri)
    settings = get_appsettings(config_uri)
    mkdir(settings['static_files'])
    # Create Ziggurat tables
    alembic_ini_file = 'alembic.ini'
    if not os.path.exists(alembic_ini_file):
        alembic_ini = ALEMBIC_CONF.replace('{{db_url}}',
                                           settings['sqlalchemy.url'])
        # Context manager guarantees the handle is closed even on write error
        # (original used open/write/close without try/finally).
        with open(alembic_ini_file, 'w') as f:
            f.write(alembic_ini)
    bin_path = os.path.split(sys.executable)[0]
    alembic_bin = os.path.join(bin_path, 'alembic')
    command = '%s upgrade head' % alembic_bin
    os.system(command)
    os.remove(alembic_ini_file)
    # Insert data
    engine = engine_from_config(settings, 'sqlalchemy.')
    DBSession.configure(bind=engine)
    init_model()
    create_schemas(engine)
    Base.metadata.create_all(engine)
    initial_data.insert()
    transaction.commit()
def link_benchmark(options, module):
    """Link this module's benchmark/*.py scripts into benchmark/<name>."""
    dest = os.path.join("benchmark", options.name)
    tools.mkdir(dest, clean=False)
    stale_links = tools.get_glob([os.path.join(dest, "*.py")])
    for link in stale_links:
        os.unlink(link)
    tools.link_dir(os.path.join(module.path, "benchmark"), dest,
                   clean=False, match=["*.py"])
def link_swig(source):
    """Link each module's .i interfaces and swig.i-in stub into swig/."""
    dest = os.path.join("swig")
    tools.mkdir(dest)
    for module, g in tools.get_modules(source):
        # Shared destination dir: leave links from other modules alone.
        tools.link_dir(os.path.join(g, "pyext"), dest,
                       match=["*.i"], clean=False)
        has_include = os.path.exists(os.path.join(g, "pyext", "include"))
        if has_include:
            tools.link_dir(os.path.join(g, "pyext", "include"), dest,
                           match=["*.i"], clean=False)
        impl = os.path.join(dest, "IMP_%s.impl.i" % module)
        tools.link(os.path.join(g, "pyext", "swig.i-in"), impl)
def initPHPUnit(self, force=False):
    """Initialise the PHPUnit environment for this Moodle instance.

    Sets phpunit_dataroot/phpunit_prefix in the site config, then runs the
    PHPUnit init CLI (optionally dropping first when *force* is set).
    Raises Exception on unsupported Moodle versions or CLI failures.

    NOTE(review): Python 2 code — ``0777`` is a py2 octal literal, and the
    ``except ... as exception`` name is only visible after the block in py2.
    """
    if self.branch_compare(23, '<'):
        raise Exception('PHPUnit is only available from Moodle 2.3')
    # Set PHPUnit data root
    phpunit_dataroot = self.get('dataroot') + '_phpu'
    self.updateConfig('phpunit_dataroot', phpunit_dataroot)
    if not os.path.isdir(phpunit_dataroot):
        mkdir(phpunit_dataroot, 0777)
    # Set PHPUnit prefix
    phpunit_prefix = 'phpu_'
    self.updateConfig('phpunit_prefix', phpunit_prefix)
    result = (None, None, None)
    exception = None
    try:
        if force:
            # Drop existing PHPUnit tables before re-initialising.
            result = self.cli('/admin/tool/phpunit/cli/util.php',
                              args='--drop', stdout=None, stderr=None)
        result = self.cli('/admin/tool/phpunit/cli/init.php',
                          stdout=None, stderr=None)
    except Exception as exception:
        pass
    # result[0] is the CLI exit status; 129 means the phpunit binary is absent.
    if exception != None or result[0] > 0:
        if result[0] == 129:
            raise Exception('PHPUnit is not installed on your system')
        elif result[0] > 0:
            raise Exception('Something wrong with PHPUnit configuration')
        else:
            raise exception
def generate(self):
    """Generate a set of mixed quantum states and save them to ./Data/.

    For each group, builds ``mix_number`` pure-ish states (tensor products of
    ``space_number`` sub-states), then forms random convex mixtures of
    increasing rank and appends them to ``self.set``. The stacked result is
    split into real/imaginary channels and written as an .npy file.

    NOTE(review): source was whitespace-mangled; when ``space_number == 1``
    the state ``s`` stays the integer 0 under this reconstruction — confirm
    that space_number > 1 always holds in practice.
    """
    for i in range(int(self.size / self.mix_number)):
        state = []
        for j in range(self.mix_number):
            s = 0
            if self.space_number > 1:
                s = self.generate_sub_state()
                # Kronecker-product the remaining space_number-1 sub-states
                # via einsum + reshape.
                for number in range(self.space_number - 1):
                    s = np.reshape(
                        np.einsum('ij,kl->ikjl', s, self.generate_sub_state()),
                        [s.shape[0] * self.sub_dim, s.shape[1] * self.sub_dim])
            state.append(s)
        for j in range(self.mix_number):
            # Random convex weights over the first j+1 states.
            weight = np.random.random([j + 1])
            weight = weight / np.sum(weight)
            mix = np.zeros([self.sub_dim ** self.space_number,
                            self.sub_dim ** self.space_number])
            for k in range(j + 1):
                mix = mix + weight[k] * state[k]
            self.set.append(mix)
    Set = np.array(self.set)
    # Append a channel axis and stack real/imag parts along it.
    shape = list(Set.shape)
    shape.append(1)
    Set_r = np.reshape(np.real(Set), shape)
    Set_i = np.reshape(np.imag(Set), shape)
    Set_2 = np.concatenate((Set_r, Set_i), axis=-1)
    mkdir('./Data/')
    np.save('./Data/' + self.name + '.npy', Set_2)
def save(self, name_ontology):
    """Serialize the ontology's axiom collections as JSON files.

    Writes role_inclusion/role_chain/axioms/axioms_normalized/mapback JSON
    files under result-Ontologies-Less-Than-10000/<name_ontology>/data/.
    """
    path = f'result-Ontologies-Less-Than-10000/{name_ontology}/data/'
    mkdir(path)
    # (filename, payload) pairs replace five copy-pasted open/write/close
    # triples; 'with' guarantees the handles are closed even on error.
    outputs = [
        ('role_inclusion.json', self.axioms_RI),
        ('role_chain.json', self.axioms_RC),
        ('axioms.json', self.axioms),
        ('axioms_normalized.json', self.axioms_normalized),
        ('mapback.json', self.mapback),
    ]
    for filename, payload in outputs:
        with open(f'{path}{filename}', 'w') as fh:
            fh.write(json.dumps(payload))
def link_py_apps(options):
    """Link the module's runnable Python apps into ./bin."""
    source_bin = os.path.join(options.source, "modules", options.name, "bin")
    tools.mkdir("bin", clean=False)
    tools.link_dir(source_bin, "bin", clean=False, match=["*"],
                   filt=tools.filter_pyapps)
def generate_all_cpp(source):
    """Write src/<module>_all.cpp files that #include every .cpp of a module."""
    out_dir = os.path.join("src")
    tools.mkdir(out_dir)
    for module, g in tools.get_modules(source):
        cpps = (tools.get_glob([os.path.join(g, "src", "*.cpp")]) +
                tools.get_glob([os.path.join(g, "src", "internal", "*.cpp")]))
        cpps.sort()
        includes = ["#include <%s>" % os.path.abspath(c) for c in cpps]
        tools.rewrite(os.path.join(out_dir, module + "_all.cpp"),
                      "\n".join(includes) + '\n')
def main():
    """Entry point: run SWIG for the selected module and patch its wrappers."""
    (options, args) = parser.parse_args()
    swig_dir = os.path.abspath(os.path.join("src", "%s_swig" % options.module))
    tools.mkdir(swig_dir, clean=False)
    run_swig(swig_dir, options)
    # Patch wrap.cpp then wrap.h, each from its generated "-in" template.
    for stem in ("wrap.cpp", "wrap.h"):
        patch_file(os.path.join(swig_dir, stem + "-in"),
                   os.path.join(swig_dir, stem), options)
def run(RUN_TRAIN, RUN_TEST, RUN_TRAIN2, RUN_TEST2, RUN_SAVE):
    """Run the requested pipeline stages in order.

    NOTE(review): RUN_DENSECRF, GRID_SEARCH and LOAD_MAT_FILE are not
    parameters of this function; they must exist as globals at call time or
    the last two branches raise NameError — confirm against the caller.
    """
    tools.mkdir()
    if RUN_TRAIN:
        trainer()
    if RUN_TEST:
        tester()
    if RUN_TRAIN2:
        trainer(type_=2)
    if RUN_TEST2:
        tester(type_=2)
    if RUN_SAVE:
        tools.saver()
    if RUN_DENSECRF:
        crf_runner(LOAD_MAT_FILE, RUN_TRAIN2)
    if GRID_SEARCH:
        grid_search(LOAD_MAT_FILE, RUN_TRAIN2)
def starting():
    """Bootstrap the bot: logging, working dirs, handlers, scheduler."""
    log_format = '%(asctime)s %(levelname)-8s %(message)s'
    logging.basicConfig(format=log_format, level=logging.INFO,
                        datefmt='%Y-%m-%d %H:%M:%S')
    mkdir(['tmp', 'life', '../vote'])
    register_handlers()
    manager()
    scheduler.start()
    # logging.info returns None, matching the original's return value.
    logging.info("Initialized.")
    return
def main():
    """Generate SWIG wrappers for one module and apply local patches."""
    (options, args) = parser.parse_args()
    wrap_dir = os.path.abspath(os.path.join("src", "%s_swig" % options.module))
    tools.mkdir(wrap_dir, clean=False)
    run_swig(wrap_dir, options)
    for name in ["wrap.cpp", "wrap.h"]:
        template = os.path.join(wrap_dir, name + "-in")
        patch_file(template, os.path.join(wrap_dir, name), options)
def link_dox(source):
    """Create doxygen output dirs and link doc images from the source tree."""
    # Pre-create the doxygen/ref and doxygen/manual output directories.
    for subdir in ("ref", "manual"):
        target = os.path.join("doxygen", subdir)
        tools.mkdir(target)
    # NOTE(review): source was whitespace-mangled; the two link_dir calls are
    # assumed to run once each, after the loop — confirm against upstream.
    tools.link_dir(os.path.join(source, "doc", "ref"),
                   os.path.join("doc", "ref"),
                   match=["*.png", "*.pdf", "*.gif"], clean=False)
    tools.link_dir(os.path.join(source, "doc", "manual", "images"),
                   os.path.join("doc", "manual"),
                   match=["*.png", "*.pdf", "*.gif"], clean=False)
def test_mkdir(self):
    """tools.mkdir: refuses '/', creates new dirs, honours the mode bits."""
    # Cannot (re)create the filesystem root.
    self.assertFalse(tools.mkdir('/'))
    with TemporaryDirectory() as d:
        first = os.path.join(d, 'foo')
        self.assertTrue(tools.mkdir(first))
        for mode in (0o700, 0o644, 0o777):
            msg = 'new path should have octal permissions {0:#o}'.format(mode)
            candidate = os.path.join(d, '{0:#o}'.format(mode))
            self.assertTrue(tools.mkdir(candidate, mode), msg)
            actual = '{0:o}'.format(os.stat(candidate).st_mode & 0o777)
            self.assertEqual(actual, '{0:o}'.format(mode), msg)
def record() -> LabeledExample:
    """Record one WAV from the microphone, save its spectrogram, return it."""
    from labeled_example_plotter import LabeledExamplePlotter

    print("Wait in silence to begin recording; wait in silence to terminate")
    mkdir(recording_directory)
    stamp_name = "recording-{}".format(timestamp())
    target = recording_directory / "{}.wav".format(stamp_name)
    example = Recorder().record_to_file(target)
    LabeledExamplePlotter(example).save_spectrogram(recording_directory)
    return example
def generate_all_cpp(modules):
    """Emit one src/<name>_all.cpp per module, #including all its sources."""
    out_dir = os.path.join("src")
    tools.mkdir(out_dir)
    gen = tools.CPPFileGenerator()
    for module in modules:
        found = tools.get_glob([os.path.join(module.path, "src", "*.cpp")]) \
            + tools.get_glob([os.path.join(module.path, "src", "internal",
                                           "*.cpp")])
        found.sort()
        body = "\n".join("#include <%s>" % os.path.abspath(f)
                         for f in found) + '\n'
        gen.write(os.path.join(out_dir, module.name + "_all.cpp"), body)
def generate_all_cpp(source):
    """Write an aggregate <module>_all.cpp under src/ for each module."""
    dest = os.path.join("src")
    tools.mkdir(dest)
    for module, g in tools.get_modules(source):
        cpp_files = sorted(
            tools.get_glob([os.path.join(g, "src", "*.cpp")]) +
            tools.get_glob([os.path.join(g, "src", "internal", "*.cpp")]))
        content = "\n".join(["#include <%s>" % os.path.abspath(p)
                             for p in cpp_files]) + '\n'
        tools.rewrite(os.path.join(dest, module + "_all.cpp"), content)
def link_python(source):
    """Link each module's pyext/src python files into lib/IMP/<module>."""
    lib = os.path.join("lib")
    tools.mkdir(lib, clean=False)
    for module, g in tools.get_modules(source):
        dest = os.path.join(lib, "IMP", module)
        tools.mkdir(dest, clean=False)
        # Remove stale links, but keep the generated files.
        keep = ("__init__.py", "_version_check.py")
        for old in tools.get_glob([os.path.join(dest, "*.py")]):
            if os.path.split(old)[1] not in keep:
                os.unlink(old)
        tools.link_dir(os.path.join(g, "pyext", "src"), dest, clean=False)
def link_dox(source):
    """Link documentation images for each module and the top-level docs."""
    target = os.path.join("doxygen")
    tools.mkdir(target)
    for module, g in tools.get_modules(source):
        tools.link_dir(os.path.join(g, "doc"),
                       os.path.join("doc", "html", module),
                       match=["*.png", "*.pdf", "*.gif"], clean=False)
    # Top-level docs and the tutorial tree.
    tools.link_dir(os.path.join(source, "doc"), os.path.join("doc", "html"),
                   match=["*.png", "*.pdf", "*.gif"], clean=False)
    tools.link_dir(os.path.join(source, "doc", "tutorial"),
                   os.path.join("doc", "tutorial"),
                   match=["*.png", "*.pdf", "*.gif"], clean=False)
def link_headers(source):
    """Link every module's public and internal headers under include/IMP."""
    include = os.path.join("include")
    tools.mkdir(include)
    imp_root = os.path.join(include, "IMP")
    tools.mkdir(imp_root)
    for (module, g) in tools.get_modules(source):
        # Build-system artifact, not a real module.
        if module == "SConscript":
            continue
        module_dir = os.path.join(imp_root, module)
        tools.link_dir(os.path.join(g, "include"), module_dir, match=["*.h"])
        tools.link_dir(os.path.join(g, "include", "internal"),
                       os.path.join(module_dir, "internal"), match=["*.h"])
def make_version_check(options):
    """Write lib/IMP/<name>/_version_check.py for this module.

    The generated module raises RuntimeError at import time if the built C++
    library version does not match the Python side's expectation.
    """
    dir = os.path.join("lib", "IMP", options.name)
    tools.mkdir(dir, clean=False)
    outf = os.path.join(dir, "_version_check.py")
    # NOTE(review): source was whitespace-mangled; the template's internal
    # line layout below is reconstructed — confirm against upstream.
    template = """def check_version(myversion):
    def _check_one(name, expected, found):
        if expected != found:
            raise RuntimeError('Expected version '+expected+' but got '+ found \
+' when loading module '+name\
+'. Please make sure IMP is properly built and installed and that matching python and C++ libraries are used.')
    _check_one('%s', '%s', myversion)
"""
    tools.rewrite(outf, template % (options.name, get_version(options)))
def link_python(source):
    """Populate lib/IMP/<module> with links to each module's python sources."""
    tools.mkdir(os.path.join("lib"), clean=False)
    for module, g in tools.get_modules(source):
        moddir = os.path.join("lib", "IMP", module)
        tools.mkdir(moddir, clean=False)
        for stale in tools.get_glob([os.path.join(moddir, "*.py")]):
            base = os.path.split(stale)[1]
            # Generated files must survive relinking.
            if base == "__init__.py" or base == "_version_check.py":
                continue
            os.unlink(stale)
        tools.link_dir(os.path.join(g, "pyext", "src"), moddir, clean=False)
def link_benchmark(options):
    """Link the module's benchmark scripts into benchmark/<name>."""
    dest = os.path.join("benchmark", options.name)
    tools.mkdir(dest, clean=False)
    for stale in tools.get_glob([os.path.join(dest, "*.py")]):
        os.unlink(stale)
    src = os.path.join(options.source, "modules", options.name, "benchmark")
    tools.link_dir(src, dest, clean=False, match=["*.py"])
def make_version_check(options):
    """Write lib/IMP/<name>/_version_check.py for this module.

    NOTE(review): ``template % (version, version)`` substitutes the version
    string for BOTH placeholders, so the generated ``_check_one`` receives the
    version where a module *name* seems intended — confirm upstream intent.
    """
    dir = os.path.join("lib", "IMP", options.name)
    tools.mkdir(dir, clean=False)
    version = tools.get_module_version(options.name, options.source)
    outf = os.path.join(dir, "_version_check.py")
    # NOTE(review): whitespace-mangled source; template layout reconstructed.
    template = """def check_version(myversion):
    def _check_one(name, expected, found):
        if expected != found:
            message = "Expected version " + expected + " but got " + found + " when loading module " + name + ". Please make sure IMP is properly built and installed and that matching python and C++ libraries are used."
            raise RuntimeError(message)
    version = '%s'
    _check_one('%s', version, myversion)
"""
    tools.rewrite(outf, template % (version, version))
def make_version_check(options):
    """Write lib/IMP/<name>/_version_check.py for this module.

    NOTE(review): ``template % (version, version)`` fills BOTH placeholders
    with the version string; the first looks like it should be the module
    name — confirm upstream intent.
    """
    dir = os.path.join("lib", "IMP", options.name)
    tools.mkdir(dir, clean=False)
    version = tools.get_module_version(options.name, options.source)
    outf = os.path.join(dir, "_version_check.py")
    # NOTE(review): whitespace-mangled source; template layout reconstructed.
    template = """def check_version(myversion):
    def _check_one(name, expected, found):
        if expected != found:
            message = "Expected version " + expected + " but got " + found + " when loading module " + name + ". Please make sure IMP is properly built and installed and that matching python and C++ libraries are used."
            raise RuntimeError(message)
    version = '%s'
    _check_one('%s', version, myversion)
"""
    tools.rewrite(outf, template % (version, version))
def writeKnownHostsFile(key):
    """
    Write host key ``key`` into `~/.ssh/known_hosts`.

    Args:
        key (str): host key
    """
    sshDir = os.path.expanduser('~/.ssh')
    if not os.path.isdir(sshDir):
        # Private directory, as ssh requires.
        tools.mkdir(sshDir, 0o700)
    knownHostFile = os.path.join(sshDir, 'known_hosts')
    with open(knownHostFile, 'at') as f:
        logger.info('Write host key to {}'.format(knownHostFile))
        f.write(key + '\n')
def writeKnownHostsFile(key):
    """
    Write host key ``key`` into `~/.ssh/known_hosts`.

    Args:
        key (str): host key
    """
    sshDir = os.path.expanduser("~/.ssh")
    knownHostFile = os.path.join(sshDir, "known_hosts")
    if not os.path.isdir(sshDir):
        tools.mkdir(sshDir, 0o700)
    entry = key + "\n"
    with open(knownHostFile, "at") as handle:
        logger.info("Write host key to {}".format(knownHostFile))
        handle.write(entry)
def main():
    """Create alias headers so IMP.<module> is reachable under IMP.<alias>.

    Python 2 code (print statements). Generates a forwarding header for every
    public and internal header of the module, removes alias headers whose
    originals disappeared, and writes the all-module header.
    """
    (options, args) = parser.parse_args()
    if not os.path.exists(
            os.path.join(options.source, "modules", options.module)):
        print "Skipping alias as original module not found"
        return
    print "Setting up alias for module", options.module, "as", options.alias
    tools.mkdir("include/IMP/%s" % options.alias)
    tools.mkdir("include/IMP/%s/internal" % options.alias)
    var = {"module": options.module}
    if options.deprecate != "":
        # Emit a deprecation pragma pointing users at the canonical header.
        var["deprecate"] = "IMP%s_DEPRECATED_HEADER(%s, \"%s\")" % (
            options.module.upper(), options.deprecate,
            "Use the one in IMP/%s instead." % options.module)
    else:
        var["deprecate"] = ""
    # Empty alias means "alias into the top-level IMP namespace".
    if options.alias == "":
        var["namespacebegin"] = "namespace IMP {"
        var["namespaceend"] = "}"
        var["slashalias"] = ""
    else:
        var["namespacebegin"] = "namespace IMP { namespace %s {" % options.alias
        var["namespaceend"] = "} }"
        var["slashalias"] = "/" + options.alias
    for h in tools.get_glob(
            [os.path.join("include", "IMP", options.module, "*.h")]):
        if h.endswith("_config.h"):
            continue
        filename = os.path.split(h)[1]
        var["file"] = filename
        header = header_template % var
        tools.rewrite("include/IMP%s/%s" % (var["slashalias"], filename),
                      header)
    # Remove aliased header if the source header is gone
    for h in glob.glob("include/IMP%s/*.h" % var["slashalias"]):
        filename = os.path.split(h)[1]
        orig_filename = os.path.join("include", "IMP", options.module,
                                     filename)
        if not os.path.exists(orig_filename) \
                and not os.path.exists(h[:-2]):
            # Exclude all-module headers
            os.unlink(h)
    for h in tools.get_glob(
            [os.path.join("include", "IMP", options.module, "internal",
                          "*.h")]):
        filename = os.path.split(h)[1]
        var["file"] = filename
        header = internal_header_template % var
        tools.rewrite("include/IMP/%s/internal/%s" % (options.alias, filename),
                      header)
    allh = allh_template % var
    tools.rewrite("include/IMP%s.h" % var["slashalias"], allh)
def createDir(name, urls):
    """Create ./pic4/<name>/ and download every matching .jpg from *urls*."""
    basicPath = './pic4/'
    path = basicPath + name + '/'
    tools.mkdir(path)
    # Download the pictures.
    for url in urls:
        print(url)
        # Match an 8-character stem followed by ".jpg" anywhere in the URL.
        re1 = re.search(r'.{8}\.jpg', url)
        if re1:
            # NOTE(review): this rebinds the *name* parameter with the
            # matched file name (underscores stripped) — confirm intended.
            name = re1.group().replace('_', '')
            pathPic = path + name
            print('图片保存在:' + pathPic)
            tools.downPic(url, pathPic)
def setup_module(module, source, datapath):
    """Configure one module: resolve dependencies and write build metadata.

    Returns (ok, all_modules): ok is False (with an empty list) when any
    required dependency or required module is missing.
    """
    sys.stdout.write("Configuring module %s ..." % module)
    data = tools.get_module_description(source, module, datapath)
    # Any missing hard dependency disables the module entirely.
    for d in data["required_dependencies"]:
        if not tools.get_dependency_info(d, datapath)["ok"]:
            print("Required dependency %s not found" % d)
            write_no_ok(module)
            return False, []
    dependencies = data["required_dependencies"]
    unfound_dependencies = []
    # Optional dependencies are used when present, recorded when absent.
    for d in data["optional_dependencies"]:
        if tools.get_dependency_info(d, datapath)["ok"]:
            dependencies.append(d)
        else:
            unfound_dependencies.append(d)
    for d in data["required_modules"]:
        if not tools.get_module_info(d, datapath)["ok"]:
            print("Required module IMP.%s not available" % d)
            write_no_ok(module)
            return False, []
    modules = data["required_modules"]
    unfound_modules = []
    for d in data["optional_modules"]:
        if tools.get_module_info(d, datapath)["ok"]:
            modules.append(d)
        else:
            unfound_modules.append(d)
    all_modules = tools.get_dependent_modules(modules, datapath)
    # The kernel's files live directly under IMP/ rather than IMP/kernel.
    moddir = os.path.join('IMP', '' if module == 'kernel' else module)
    # SWIG interface files: the module's own .i files plus its macro headers.
    swig_includes = [os.path.split(x)[1] for x in tools.get_glob(
        [os.path.join(source, "modules", module, "pyext", "include",
                      "*.i")])] \
        + [os.path.join(moddir, os.path.split(x)[1]) for x in tools.get_glob(
            [os.path.join("include", moddir, "*_macros.h")])]
    swig_wrapper_includes = [
        os.path.join(moddir, "internal", os.path.split(x)[1])
        for x in tools.get_glob(
            [os.path.join(source, "modules", module, "include", "internal",
                          "swig*.h")])]
    tools.mkdir(os.path.join("src", module))
    tools.mkdir(os.path.join("src", module + "_swig"))
    write_ok(module, all_modules, unfound_modules,
             tools.get_dependent_dependencies(all_modules, dependencies,
                                              datapath),
             unfound_dependencies, swig_includes, swig_wrapper_includes)
    return True, all_modules
def link_py_apps(module):
    """Install each Python app from the module's bin/ as a launcher in ./bin.

    Instead of symlinking, writes a tiny wrapper script whose shebang points
    at the currently running interpreter, so the apps run under the same
    Python (2 or 3) used for the build.
    """
    path = os.path.join(module.path, "bin")
    tools.mkdir("bin", clean=False)
    bins = [b for b in glob.glob(os.path.join(path, '*'))
            if tools.filter_pyapps(b)]
    # rewrite Python shebang to use current version of Python (2 or 3)
    for source_bin in bins:
        # NOTE(review): whitespace-mangled source; the launcher template's
        # internal layout is reconstructed — confirm against upstream.
        contents = """#!%s
fname = '%s'
with open(fname) as fh:
    exec(compile(fh.read(), fname, 'exec'))
""" % (sys.executable, source_bin)
        dest_bin = os.path.join("bin", os.path.basename(source_bin))
        tools.rewrite(dest_bin, contents, verbose=False)
        os.chmod(dest_bin, 493)  # 493 = 0755, i.e. executable
def link_dox(source):
    """Link module/application docs into doxygen/ and doc/html.

    NOTE(review): whitespace-mangled source; loop extents reconstructed from
    which loop variables each call uses — confirm against upstream.
    """
    target = os.path.join("doxygen")
    tools.mkdir(target)
    for module, g in tools.get_modules(source):
        tools.link_dir(os.path.join(g, "doc"), os.path.join(target, module))
        tools.link_dir(os.path.join(g, "doc"), os.path.join("doc", "html"),
                       match=["*.png", "*.pdf"], clean=False)
        doxygenize_readme(os.path.join(g, "README.md"), "doxygen", module)
    for app, g in tools.get_applications(source):
        tools.link_dir(g, os.path.join(target, app))
        tools.link_dir(g, os.path.join("doc", "html"),
                       match=["*.png", "*.pdf"], clean=False)
        doxygenize_readme(os.path.join(g, "README.md"), "doxygen", app)
    # Top-level documentation tree.
    tools.link_dir(os.path.join(source, "doc"), os.path.join(target, "IMP"))
    tools.link_dir(os.path.join(source, "doc"), os.path.join("doc", "html"),
                   match=["*.png", "*.pdf"], clean=False)
def main():
    """Top-level build setup: clean, create metadata dirs, link source trees."""
    options, args = parser.parse_args()
    clean_pyc(options.source)
    tools.mkdir(os.path.join("data", "build_info"))
    tools.mkdir(os.path.join("cmake_tests"))
    disabled_list = options.disabled.replace(":", "\n")
    tools.rewrite(os.path.join("data", "build_info", "disabled"),
                  disabled_list)
    tools.setup_sorted_order(options.source, options.datapath)
    # Each linker mirrors one part of the source tree into the build tree.
    for linker in (link_headers, link_examples, link_swig,
                   link_python, link_data):
        linker(options.source)
    generate_tests(options.source, options.scons)
    generate_src_dirs(options.source)
def setup_module(module, finder):
    """Configure one module object: resolve dependencies, write metadata.

    Returns (ok, all_modules); ok is False with an empty list when a required
    dependency or required module is unavailable.
    """
    sys.stdout.write("Configuring module %s ..." % module.name)
    for d in module.required_dependencies:
        if not finder.get_dependency_info(d)["ok"]:
            print("Required dependency %s not found" % d)
            write_no_ok(module.name)
            return False, []
    # Copy ([:]) so appending optionals does not mutate the module's own list.
    dependencies = module.required_dependencies[:]
    unfound_dependencies = []
    for d in module.optional_dependencies:
        if finder.get_dependency_info(d)["ok"]:
            dependencies.append(d)
        else:
            unfound_dependencies.append(d)
    for d in module.required_modules:
        if not d.configured.ok:
            print("Required module IMP.%s not available" % d.name)
            write_no_ok(module.name)
            return False, []
    modules = module.required_modules[:]
    unfound_modules = []
    for d in module.optional_modules:
        if d.configured.ok:
            modules.append(d)
        else:
            unfound_modules.append(d)
    all_modules = finder.get_dependent_modules(modules)
    # The kernel's files live directly under IMP/ rather than IMP/kernel.
    moddir = os.path.join('IMP',
                          '' if module.name == 'kernel' else module.name)
    # SWIG interface files: the module's own .i files plus its macro headers.
    swig_includes = [os.path.split(x)[1] for x in tools.get_glob(
        [os.path.join(module.path, "pyext", "include", "*.i")])] \
        + [os.path.join(moddir, os.path.split(x)[1]) for x in tools.get_glob(
            [os.path.join("include", moddir, "*_macros.h")])]
    swig_wrapper_includes = [
        os.path.join(moddir, "internal", os.path.split(x)[1])
        for x in tools.get_glob(
            [os.path.join(module.path, "include", "internal", "swig*.h")])]
    tools.mkdir(os.path.join("src", module.name))
    tools.mkdir(os.path.join("src", module.name + "_swig"))
    write_ok(module, all_modules, unfound_modules,
             finder.get_dependent_dependencies(all_modules, dependencies),
             unfound_dependencies, swig_includes, swig_wrapper_includes,
             module.python_only)
    return True, all_modules
def download(self, fileCache=None, cacheDir=C.get('dirs.mdk')):
    """Download a plugin archive, honouring the local file cache.

    Returns the path to the downloaded/cached zip, or False when the MD5
    check of a fresh download fails. Raises ValueError when the plugin dict
    lacks downloadurl/component/branch.

    NOTE(review): Python 2 code (``0777`` literal); also note ``cacheDir``'s
    default is evaluated once at import time.
    """
    # Fall back to the configured cache policy when not specified.
    if fileCache == None:
        fileCache = C.get('plugins.fileCache')
    dest = os.path.abspath(os.path.expanduser(os.path.join(cacheDir,
                                                           'plugins')))
    if not fileCache:
        # No caching: download into a throw-away temp dir instead.
        dest = gettempdir()
    if not 'downloadurl' in self.keys():
        raise ValueError('Expecting the key downloadurl')
    elif not 'component' in self.keys():
        raise ValueError('Expecting the key component')
    elif not 'branch' in self.keys():
        raise ValueError('Expecting the key branch')
    dl = self.get('downloadurl')
    plugin = self.get('component')
    branch = self.get('branch')
    target = os.path.join(dest, '%s-%d.zip' % (plugin, branch))
    md5sum = self.get('downloadmd5')
    release = self.get('release', 'Unknown')
    if fileCache:
        if not os.path.isdir(dest):
            logging.debug('Creating directory %s' % (dest))
            tools.mkdir(dest, 0777)
        # Cache hit: reuse the file when the checksum matches (or none given).
        if os.path.isfile(target) and (md5sum == None
                                       or tools.md5file(target) == md5sum):
            logging.info('Found cached plugin file: %s'
                         % (os.path.basename(target)))
            return target
    logging.info('Downloading %s (%s)' % (plugin, release))
    if logging.getLogger().level <= logging.INFO:
        urlretrieve(dl, target, tools.downloadProcessHook)
        # Force a new line after the hook display
        logging.info('')
    else:
        urlretrieve(dl, target)
    # Highly memory inefficient MD5 check
    if md5sum and tools.md5file(target) != md5sum:
        os.remove(target)
        logging.warning('Bad MD5 sum on downloaded file')
        return False
    return target
def main():
    """Create alias headers so IMP.<module> is reachable under IMP.<alias>.

    Generates a forwarding header for every public and internal header of the
    module, removes alias headers whose originals have disappeared, and
    writes the all-module header.
    """
    (options, args) = parser.parse_args()
    if not os.path.exists(os.path.join(options.source, "modules",
                                       options.module)):
        print("Skipping alias as original module not found")
        return
    print("Setting up alias for module", options.module, "as", options.alias)
    tools.mkdir("include/IMP/%s" % options.alias)
    tools.mkdir("include/IMP/%s/internal" % options.alias)
    var = {"module": options.module}
    if options.deprecate != "":
        # Emit a deprecation pragma pointing users at the canonical header.
        var["deprecate"] = "IMP%s_DEPRECATED_HEADER(%s, \"%s\")" % (
            options.module.upper(), options.deprecate,
            "Use the one in IMP/%s instead." % options.module)
    else:
        var["deprecate"] = ""
    # Empty alias means "alias into the top-level IMP namespace".
    if options.alias == "":
        var["namespacebegin"] = "namespace IMP {"
        var["namespaceend"] = "}"
        var["slashalias"] = ""
    else:
        var["namespacebegin"] = "namespace IMP { namespace %s {" % options.alias
        var["namespaceend"] = "} }"
        var["slashalias"] = "/" + options.alias
    for h in tools.get_glob([os.path.join("include", "IMP", options.module,
                                          "*.h")]):
        if h.endswith("_config.h"):
            continue
        filename = os.path.split(h)[1]
        var["file"] = filename
        header = header_template % var
        tools.rewrite(
            "include/IMP%s/%s" % (var["slashalias"], filename), header)
    # Remove aliased header if the source header is gone
    for h in glob.glob("include/IMP%s/*.h" % var["slashalias"]):
        filename = os.path.split(h)[1]
        orig_filename = os.path.join("include", "IMP", options.module,
                                     filename)
        if not os.path.exists(orig_filename) \
                and not os.path.exists(h[:-2]):
            # Exclude all-module headers
            os.unlink(h)
    for h in tools.get_glob([os.path.join("include", "IMP", options.module,
                                          "internal", "*.h")]):
        filename = os.path.split(h)[1]
        var["file"] = filename
        header = internal_header_template % var
        tools.rewrite(
            "include/IMP/%s/internal/%s" % (options.alias, filename), header)
    allh = allh_template % var
    tools.rewrite("include/IMP%s.h" % var["slashalias"], allh)
def main(argv=sys.argv):
    """Initialise the application: ziggurat tables, static dir, seed data."""
    if len(argv) != 2:
        usage(argv)
    cfg = argv[1]
    setup_logging(cfg)
    settings = get_appsettings(cfg)
    ziggurat_init(settings)
    mkdir(settings['static_files'])
    # Insert data
    db_engine = engine_from_config(settings, 'sqlalchemy.')
    DBSession.configure(bind=db_engine)
    init_model()
    create_schemas(db_engine)
    Base.metadata.create_all(db_engine)
    initial_data.insert()
    transaction.commit()
def export(database_path, kernels, model, op, init_cuda):
    """Serialize kernels and model layers, then emit <arch>/<op>.hpp."""
    device, ctx, stream = init_cuda()
    blob = bytearray()
    # Kernel tensor: shape header (little-endian uint32s) then raw C-order bytes.
    blob.extend(struct.pack('<{}I'.format(kernels.ndim), *kernels.shape))
    blob.extend(kernels.tobytes(order='C'))
    # Model: layer count followed by each encoded layer.
    blob.extend(struct.pack('I', len(model.layers)))
    for layer in model.layers:
        encode(layer, blob)
    # Destination directory is keyed on the device's SM architecture.
    arch = 'sm_' + '_'.join(str(c) for c in device.compute_capability)
    out_dir = os.path.join(database_path, arch)
    mkdir(out_dir)
    with open(os.path.join(out_dir, op + '.hpp'), 'w') as fp:
        fp.write(cpp_file(arch, op, blob))
def link_swig(source):
    """Collect every module's SWIG interface files under ./swig."""
    target = os.path.join("swig")
    tools.mkdir(target)
    for module, g in tools.get_modules(source):
        pyext = os.path.join(g, "pyext")
        # All modules share one dir, so never clean out earlier links.
        tools.link_dir(pyext, target, match=["*.i"], clean=False)
        include = os.path.join(pyext, "include")
        if os.path.exists(include):
            tools.link_dir(include, target, match=["*.i"], clean=False)
        tools.link(os.path.join(pyext, "swig.i-in"),
                   os.path.join(target, "IMP_%s.impl.i" % module))
def link_dox(source):
    """Link module/application doc images plus the top-level doc tree."""
    tools.mkdir(os.path.join("doxygen"))
    for module, g in tools.get_modules(source):
        tools.link_dir(os.path.join(g, "doc"),
                       os.path.join("doc", "html", module),
                       match=["*.png", "*.pdf"], clean=False)
    for app, g in tools.get_applications(source):
        tools.link_dir(g, os.path.join("doc", "html", app),
                       match=["*.png", "*.pdf"], exclude=["README.md"],
                       clean=False)
    tools.link_dir(os.path.join(source, "doc"), os.path.join("doc", "html"),
                   match=["*.png", "*.pdf"], clean=False)
    tools.link_dir(os.path.join(source, "doc", "tutorial"),
                   os.path.join("doc", "tutorial"),
                   match=["*.png", "*.pdf"], clean=False)
def badSmpRm(path, output, alist):
    """Copy *path* to *output*, dropping the 0-based line numbers in *alist*.

    Args:
        path: input text file to filter.
        output: destination file path (always rewritten).
        alist: iterable of line indices (0-based) to delete.
    """
    # NOTE(review): mkdir receives the output *file* path, not its directory —
    # kept as-is for compatibility; confirm tools.mkdir handles this.
    tools.mkdir(output)
    skip = set(alist)  # O(1) membership instead of a list scan per line
    # Stream line-by-line instead of readlines(), which loaded the whole
    # file into memory; 'with' closes both handles (the originals also
    # called close() redundantly inside the with blocks).
    with open(path, 'r') as src, open(output, 'w') as dst:
        for line_no, line in enumerate(src):
            if line_no not in skip:
                dst.write(line)
def parse_and_encode_ngrams(extracted_it, simstring_dir, ids_dir, arg):
    """Build the simstring and ID databases from an iterator of (term, id).

    Each distinct term is inserted into the simstring DB exactly once, while
    every (term, id) pair is recorded in the ID DB.
    """
    # Create destination directories for the two databases
    mkdir(simstring_dir)
    mkdir(ids_dir)
    ss_db = SimstringDBWriter(simstring_dir, arg)
    ids_db = IDDB(ids_dir)
    # Tracks terms already written so duplicates are inserted only once.
    simstring_terms = set()
    for i, (term, id) in enumerate(extracted_it, start=1):
        if term not in simstring_terms:
            ss_db.insert(term)
            simstring_terms.add(term)
        ids_db.insert(term, id)
def link_headers(source):
    """Link each module's headers into include/IMP (kernel goes at the top)."""
    include_dir = os.path.join("include")
    tools.mkdir(include_dir)
    imp_dir = os.path.join(include_dir, "IMP")
    tools.mkdir(imp_dir)
    for (module, g) in tools.get_modules(source):
        # Build-system artifact, not a real module.
        if module == "SConscript":
            continue
        # The kernel's headers live directly under IMP/.
        modroot = os.path.join(imp_dir, '' if module == 'kernel' else module)
        tools.link_dir(os.path.join(g, "include"), modroot, match=["*.h"])
        tools.link_dir(os.path.join(g, "include", "internal"),
                       os.path.join(modroot, "internal"), match=["*.h"])
        # Some modules bundle their own eigen3 copy; expose it too. (ick)
        eigen = os.path.join(g, "include", "eigen3")
        if os.path.exists(eigen):
            tools.link_dir(eigen, os.path.join(modroot, "eigen3"),
                           match=["*"])
def setup_module(module, source, datapath):
    """Configure one module: resolve dependencies and write build metadata.

    Returns (ok, all_modules): ok is False (with an empty list) when any
    required dependency or required module is missing.
    """
    sys.stdout.write("Configuring module %s ..." % module)
    data = tools.get_module_description(source, module, datapath)
    # Any missing hard dependency disables the module entirely.
    for d in data["required_dependencies"]:
        if not tools.get_dependency_info(d, datapath)["ok"]:
            print("Required dependency %s not found" % d)
            write_no_ok(module)
            return False, []
    dependencies = data["required_dependencies"]
    unfound_dependencies = []
    # Optional dependencies are used when present, recorded when absent.
    for d in data["optional_dependencies"]:
        if tools.get_dependency_info(d, datapath)["ok"]:
            dependencies.append(d)
        else:
            unfound_dependencies.append(d)
    for d in data["required_modules"]:
        if not tools.get_module_info(d, datapath)["ok"]:
            print("Required module IMP.%s not available" % d)
            write_no_ok(module)
            return False, []
    modules = data["required_modules"]
    unfound_modules = []
    for d in data["optional_modules"]:
        if tools.get_module_info(d, datapath)["ok"]:
            modules.append(d)
        else:
            unfound_modules.append(d)
    all_modules = tools.get_dependent_modules(modules, datapath)
    # The kernel's files live directly under IMP/ rather than IMP/kernel.
    moddir = os.path.join('IMP', '' if module == 'kernel' else module)
    # SWIG interface files: the module's own .i files plus its macro headers.
    swig_includes = [os.path.split(x)[1]
                     for x in tools.get_glob([os.path.join(source, "modules",
                                                           module, "pyext",
                                                           "include",
                                                           "*.i")])]\
        + [os.path.join(moddir, os.path.split(x)[1])
           for x in tools.get_glob([os.path.join("include", moddir,
                                                 "*_macros.h")])]
    swig_wrapper_includes = [
        os.path.join(moddir, "internal", os.path.split(x)[1])
        for x in tools.get_glob([os.path.join(source, "modules", module,
                                              "include", "internal",
                                              "swig*.h")])]
    tools.mkdir(os.path.join("src", module))
    tools.mkdir(os.path.join("src", module + "_swig"))
    write_ok(module, all_modules, unfound_modules,
             tools.get_dependent_dependencies(
                 all_modules, dependencies, datapath),
             unfound_dependencies, swig_includes, swig_wrapper_includes)
    return True, all_modules
def warmup(self, fconfig, model, deterministic=True):
    """Load configuration, data handles, split indices and the model.

    Args:
        fconfig: path to the config file; the log file name is derived
            from it by replacing the last four characters with 'log'
            (assumes a 3-character extension plus dot — TODO confirm).
        model: object providing load(config) and build(deterministic=...).
        deterministic: forwarded to model.build(). Bug fix: the original
            ignored this argument and always passed True.
    """
    np.random.seed(2012310818)  # fixed seed for reproducible sampling
    config = get_config(fconfig)
    self.path = config['path']
    self.paraf = config['pfile']
    self.samples = config['samples']
    self.opt = config['opt']
    self.store = config['pstore']
    self.Arange = config['Arange']
    self.epochs = config['epochs']
    self.sliced = config['sliced']
    self.batchsize = config['batchsize']
    self.decay = config['decay']
    self.acc = config['acc']
    self.recoveru = config['update']
    self.logf = fconfig[:-4] + 'log'
    self.dat_patch = h5.File(config['path'] + 'dat_patch_'
                             + str(config['Arange']) + '.h5')
    # NOTE(review): this opens the same 'dat_patch_' file as above —
    # looks like a copy-paste slip ('dat_pmask_' was probably intended);
    # confirm against the data layout before changing.
    self.dat_pmask = h5.File(config['path'] + 'dat_patch_'
                             + str(config['Arange']) + '.h5')
    self.model = model
    self.model.load(config)
    # Bug fix: honor the caller's choice instead of hard-coding True.
    self.model.build(deterministic=deterministic)
    with open(config['path'] + 'parameter_' + str(config['Arange'])
              + '.json', 'r') as f:
        self.meta = json.load(f)
    # Pick the index file matching the slicing mode.
    with open(config['path']
              + ('sliced.json' if self.sliced else 'unsliced.json')) as f:
        self.dat_idx = json.load(f)
    self.train_idx = self.dat_idx['train']
    self.val_idx = self.dat_idx['val']
    self.test_idx = self.dat_idx['test']
    # 1-based indices of the enabled options.
    self.idx = [i + 1 for i, t in enumerate(self.opt) if t == 1]
    sops = ['segment', 'perspective', 'angle', 'mal']
    print(('%d training samples, %d validation samples, %d test samples'
           '...with option: '
           + ''.join([sops[i - 1] + ' ' for i in self.idx]))
          % tuple([len(self.train_idx), len(self.val_idx),
                   len(self.test_idx)]))
    self.tmp_dir = self.path + self.store
    mkdir(self.tmp_dir)
def setup_module(module, source, datapath):
    """Configure a module; return True on success, False when a hard
    requirement is missing.

    Bug fix: the Python-2 print statements were SyntaxErrors under
    Python 3; they are converted to print() calls (output is equivalent
    up to trailing whitespace). NOTE(review): an earlier, tuple-returning
    setup_module is also defined in this file — this later definition
    shadows it at import time; confirm which variant is intended.
    """
    # end=" " mirrors the Python-2 trailing-comma print (no newline).
    print("Configuring module", module, "...", end=" ")
    data = tools.get_module_description(source, module, datapath)
    # Required dependencies: abort on the first missing one.
    for d in data["required_dependencies"]:
        if not tools.get_dependency_info(d, datapath)["ok"]:
            print(d, "not found")
            write_no_ok(module)
            return False
    dependencies = data["required_dependencies"]
    unfound_dependencies = []
    for d in data["optional_dependencies"]:
        if tools.get_dependency_info(d, datapath)["ok"]:
            dependencies.append(d)
        else:
            unfound_dependencies.append(d)
    # Required modules: same abort-on-missing policy.
    for d in data["required_modules"]:
        if not tools.get_module_info(d, datapath)["ok"]:
            print("IMP." + d, "not found")
            write_no_ok(module)
            return False
    modules = data["required_modules"]
    unfound_modules = []
    for d in data["optional_modules"]:
        if tools.get_module_info(d, datapath)["ok"]:
            modules.append(d)
        else:
            unfound_modules.append(d)
    all_modules = tools.get_dependent_modules(modules, datapath)
    swig_includes = [os.path.split(x)[1]
                     for x in tools.get_glob(
                         [os.path.join(source, "modules", module,
                                       "pyext", "include", "*.i")])] \
        + ["IMP/" + module + "/" + os.path.split(x)[1]
           for x in tools.get_glob(
               [os.path.join("include", "IMP", module, "*_macros.h")])]
    swig_wrapper_includes = ["IMP/" + module + "/internal/"
                             + os.path.split(x)[1]
                             for x in tools.get_glob(
                                 [os.path.join(source, "modules", module,
                                               "include", "internal",
                                               "swig*.h")])]
    tools.mkdir(os.path.join("src", module))
    tools.mkdir(os.path.join("src", module + "_swig"))
    write_ok(module, all_modules, unfound_modules,
             tools.get_dependent_dependencies(all_modules, dependencies,
                                              datapath),
             unfound_dependencies, swig_includes, swig_wrapper_includes)
    return True
def uninstall(self):
    """Uninstall the instance.

    Wipes the dataroot contents (recreating an empty directory), drops
    the database if present, and removes config.php.

    Raises:
        Exception: if the instance is not installed.
    """
    if not self.isInstalled():
        raise Exception('The instance is not installed')

    # Delete the content in moodledata, then recreate the empty folder.
    dataroot = self.get('dataroot')
    if os.path.isdir(dataroot):
        logging.debug('Deleting dataroot content (%s)' % (dataroot))
        shutil.rmtree(dataroot)
        # Bug fix: 0777 is a Python-2 octal literal (SyntaxError on
        # Python 3); 0o777 keeps the same permission bits.
        mkdir(dataroot, 0o777)

    # Drop the database
    dbname = self.get('dbname')
    if self.dbo().dbexists(dbname):
        # Typo fix in the log message ("Droping").
        logging.debug('Dropping database (%s)' % (dbname))
        self.dbo().dropdb(dbname)

    # Remove the config file
    configFile = os.path.join(self.get('path'), 'config.php')
    if os.path.isfile(configFile):
        logging.debug('Deleting config.php')
        os.remove(configFile)
def __init__(self, cfg=None, *args, **kwargs):
    """Initialize the password cache daemon.

    Ensures the cache folder exists with restrictive permissions
    (raising PermissionError when it cannot be created), initializes
    the daemon base class with a tight umask, and prepares the
    keyring/user caches and the request FIFO.
    """
    self.config = cfg if cfg is not None else config.Config()

    cachePath = self.config.passwordCacheFolder()
    if not tools.mkdir(cachePath, 0o700):
        msg = 'Failed to create secure Password_Cache folder'
        logger.error(msg, self)
        raise PermissionError(msg)

    pid = self.config.passwordCachePid()
    super(Password_Cache, self).__init__(pid, umask=0o077, *args, **kwargs)

    self.dbKeyring = {}
    self.dbUsr = {}
    self.fifo = password_ipc.FIFO(self.config.passwordCacheFifo())
    self.keyringSupported = tools.keyringSupported()