import os

from fs.osfs import OSFS
from win32com.shell import shell, shellcon  # pywin32


class Recent(Files):
    def __init__(self):
        super(Recent, self).__init__()
        self._paths = []
        # http://python.6.n6.nabble.com/Access-Most-Recently-Used-MRU-entries-td1953541.html
        self.mru_path = shell.SHGetSpecialFolderPath(0, shellcon.CSIDL_RECENT, 0)
        self.mrufs = OSFS(self.mru_path)
        self.watcher = None

    def setup(self):
        self._update_path()
        self.watcher = self.mrufs.add_watcher(lambda e: self._update_path())

    def _update_path(self):
        self._paths = sorted(
            [os.path.join(self.mru_path, f) for f in self.mrufs.listdir()],
            key=os.path.getmtime,
            reverse=True,
        )
        self.path_list_changed()

    def teardown(self):
        if self.watcher:
            self.mrufs.del_watcher(self.watcher)

    @property
    def paths(self):
        return self._paths

    @property
    def name(self):
        return "re"

    def lit(self, *args, **kargs):
        return super(Recent, self).lit(*args, **kargs)
import os
from itertools import combinations

from fs.osfs import OSFS


def dir_sim(dirp, randombits, bits):
    """
    Find the similarity between documents of the same extension contained
    in the given directory path.

    If we are using random bits (i.e. md5 length), the helper is used.
    For each file contained in the directory: get its fingerprint and
    generate the comparison combinations, then for each combination whose
    extensions match, print the similarity between the two files.
    """
    if bits is not None:
        print('using bit len of %d' % bits)
    dir_fs = OSFS(dirp)
    file_fps = {}
    for fname in dir_fs.listdir(files_only=True):
        file_fps[fname] = file_fingerprint(os.path.join(dirp, fname),
                                           randombits, bits)
    dir_fs.close()
    combos = list(map(dict, combinations(file_fps.items(), 2)))
    if bits is None:
        dir_sim_rand_helper(file_fps, combos)
        return
    # Each combo is a two-item dict; unpacking it yields its two keys,
    # i.e. the two file names being compared.
    for k, v in combos:
        kext = os.path.splitext(k)[1]
        vext = os.path.splitext(v)[1]
        if kext == vext:
            print(k, v, cosine_sim(file_fps[k][:bits], file_fps[v][:bits]))
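# The helpers file_fingerprint, dir_sim_rand_helper, and cosine_sim are
# defined elsewhere in this project. As a rough illustration only, a minimal
# cosine similarity over two equal-length numeric (bit) sequences could look
# like the sketch below -- an assumption, not the original implementation:
import math


def cosine_sim(a, b):
    """Cosine similarity between two equal-length numeric sequences (sketch)."""
    dot = sum(x * y for x, y in zip(a, b))
    norm_a = math.sqrt(sum(x * x for x in a))
    norm_b = math.sqrt(sum(y * y for y in b))
    if norm_a == 0 or norm_b == 0:
        return 0.0
    return dot / (norm_a * norm_b)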
from fs.osfs import OSFS


def pyfs_scratch():
    # home_fs = OSFS("~/")
    home_fs = OSFS("/")
    # home_fs = OSFS("C:/")
    print(home_fs.listdir("/"))
    # print(home_fs.tree(max_levels=0, dirs_first=True))
    # The "details" namespace is required for info.size to be available.
    dir_obj_infos = home_fs.scandir("/", namespaces=["details"])
    for info in dir_obj_infos:
        if info.is_dir:
            print("[dir] {}".format(info.name))
        else:
            print("[file] {} {} MB".format(info.name,
                                           info.size / float(1024 * 1024)))
from fs.osfs import OSFS
from fs.errors import ResourceNotFoundError


def cleanup(source, destination_temp, standard):
    """Remove the source and temporary destination folders."""
    try:
        source_fs = OSFS('%s/%s' % (source, standard))
    except ResourceNotFoundError:
        return None
    destination_fs = OSFS(destination_temp)

    artifacts = source_fs.listdir(dirs_only=True)
    if '.git' in artifacts:
        artifacts.remove('.git')

    for artifact in artifacts:
        path = '%s/%s' % (artifact, standard)
        if destination_fs.exists(path):
            destination_fs.removedir(path, force=True)

    if destination_fs.exists(standard):
        destination_fs.removedir(standard, force=True)
from os import path as ospath

from fs.osfs import OSFS
from fs.errors import ResourceNotFoundError


def cleanup(build_path, source, destination_temp, standard):
    """Remove the source and temporary destination folders."""
    try:
        source_fs = OSFS(ospath.join(build_path, source, standard))
    except ResourceNotFoundError:
        return None
    destination_fs = OSFS(ospath.join(build_path, destination_temp))

    artifacts = source_fs.listdir(dirs_only=True)
    if '.git' in artifacts:
        artifacts.remove('.git')

    for artifact in artifacts:
        path = ospath.join(artifact, standard)
        if destination_fs.exists(path):
            destination_fs.removedir(path, force=True)

    if destination_fs.exists(standard):
        destination_fs.removedir(standard, force=True)
#!/usr/bin/env python
import sys

from fs.osfs import OSFS
from PIL import Image

img_fs = OSFS(sys.argv[1])

# Collect all 16x16 PNG icons from the given directory.
imgs = []
for path in img_fs.listdir(wildcard='*.png'):
    img = Image.open(img_fs.getsyspath(path))
    size = img.size[0]
    if size != 16:
        continue
    imgs.append((path, img))

# Stack the icons vertically into a single sprite sheet, recording the
# y-offset of each icon in sprites.txt.
sprite = Image.new('RGBA', (16, len(imgs) * 16))
imgs.sort(key=lambda i: i[0])

sprite_text_f = img_fs.open('sprites.txt', 'wt')
for i, (path, img) in enumerate(imgs):
    y = i * 16
    sprite.paste(img, (0, y))
    sprite_text_f.write("%i\t%s\n" % (y, path))
sprite_text_f.close()

sprite.save(img_fs.getsyspath('sprites.png'))
from fs.osfs import OSFS

home_fs = OSFS("~/")
home_fs.listdir('/')
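# PyFilesystem2 can also filter a directory listing directly instead of
# listing everything; a small example using filterdir (the pattern below is
# illustrative):
from fs.osfs import OSFS

with OSFS("~/") as home_fs:
    for info in home_fs.filterdir('/', files=['*.txt']):
        print(info.name)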
class Filesystem(drink.ListPage, PyFile):

    drink_name = "Server folder"
    local_path = ""
    mime = "folder"
    hidden_class = True
    default_action = "edit"
    path = ""

    editable_fields = drink.ListPage.editable_fields.copy()
    editable_fields.update({
        "local_path": drink.types.Text("Local folder path", group="a"),
    })

    def serialize(self):
        d = drink.ListPage.serialize(self, recurse=False)
        return d

    def __init__(self, name, rootpath):
        self.fd = None
        drink.ListPage.__init__(self, name, rootpath)
        self._make_fd()
        PyFile.__init__(self, self, rootpath, self.id, self.id, None)

    def _edit(self):
        r = drink.ListPage._edit(self)
        self.fd = None
        self._make_fd()
        if self.default_action == "edit" and self.fd:
            self.default_action = "view"
        return r

    def _make_fd(self):
        if self.local_path and not self.fd:
            try:
                self.fd = OSFS(self.local_path, thread_synchronize=True)
            except fs.errors.ResourceNotFoundError:
                self.fd = None

    view = PyFile.view

    def keys(self):
        if self.fd:
            try:
                return self.fd.listdir()
            except (fs.errors.ResourceNotFoundError,
                    fs.errors.PermissionDeniedError):
                return []
        else:
            return []

    iterkeys = keys

    def __getattr__(self, name):
        return drink.Model.__getattribute__(self, name)

    def __getitem__(self, name):
        if name in self._properties:
            raise KeyError()
        return PyFile(self, self.path, name, name, self.fd)
def log_folder():
    log_dir = log_folder_directories['0']
    folder = OSFS(log_dir)
    test_n = len(list(n for n in folder.listdir() if n.startswith('test')))
    return log_dir + "/test" + str(test_n + 1)
def summary_folder(name):
    logdir = summary_folder_directories['0'] + name
    folder = OSFS(logdir)
    test_n = len(list(n for n in folder.listdir() if n.startswith('test')))
    return logdir + "/test" + str(test_n + 1)
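# The two helpers above repeat the same "count existing '<prefix>N' entries,
# then return the path for N+1" pattern, which also reappears in later
# snippets. A shared sketch, assuming PyFilesystem2 and an existing base
# directory (the helper name is made up):
from fs.osfs import OSFS


def next_numbered_path(base_dir, prefix="test"):
    """Return '<base_dir>/<prefix><N+1>', counting entries named <prefix>*."""
    with OSFS(base_dir) as folder:
        n = sum(1 for name in folder.listdir('/') if name.startswith(prefix))
    return "{0}/{1}{2}".format(base_dir, prefix, n + 1)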
import pickle
import sys

from fs.osfs import OSFS
from fs.utils import movedir

try:
    src = sys.argv[1]
    dst = sys.argv[2]
except IndexError:
    src = '~/projects/linkstop/webapp/linkstop/media/faviconsx'
    dst = '~/projects/linkstop/webapp/linkstop/media/favicons'

src_fs = OSFS(src)
dst_fs = OSFS(dst)

count = 0
max_count = 2

for path in src_fs.listdir(dirs_only=True):
    icon_fs = src_fs.opendir(path)
    if icon_fs.isfile('scan.pik'):
        try:
            icon = pickle.load(icon_fs.open('scan.pik', 'rb'))
        except Exception as e:
            print("%s (%s)" % (str(e), path))
            continue
        normalized_url = icon['normalized_url']
        out_dir = hash_path(normalized_url) + '/' + url_to_filename(normalized_url)
        print(out_dir)
        dest_dir_fs = dst_fs.makeopendir(out_dir, recursive=True)
        movedir(icon_fs, dest_dir_fs, overwrite=True, ignore_errors=True)
import fs.copy as fscopy
from fs.osfs import OSFS
from fs.memoryfs import MemoryFS
from fs.zipfs import ZipFS

import requests
from clint.textui import puts, indent, progress

puts("Ren'Py setup")
with indent(2):
    cwdfs = OSFS(".")
    tempfs = MemoryFS()
    if "renpy.zip" not in cwdfs.listdir("/"):
        puts("Downloading Ren'Py")
        r = requests.get(
            "https://www.renpy.org/dl/6.99.12.4/renpy-6.99.12.4-sdk.zip",
            stream=True)
        r.raise_for_status()
        with cwdfs.open("renpy.zip", 'wb') as fd:
            total_length = int(r.headers.get('content-length'))
            for chunk in progress.bar(r.iter_content(chunk_size=1024),
                                      expected_size=(total_length / 1024) + 1):
                fd.write(chunk)
    puts("Extracting Ren'Py")
    with ZipFS("./renpy.zip") as zipfs:
        fscopy.copy_dir(zipfs, "renpy-6.99.12.4-sdk", tempfs, "renpy")
    cwdfs.remove("renpy.zip")
import os
import unittest

from fs.osfs import OSFS


class TestDatasetManager(unittest.TestCase):

    trash_dir = "./tests/resources/trash_data"

    def setUp(self):
        self.os = OSFS(".")

    def tearDown(self):
        for data in self.os.listdir(self.trash_dir):
            if data != ".keep":
                self.os.remove("{}/{}".format(self.trash_dir, data))
        self.os.close()

    def test_should_read_yaml_from_dir(self):
        expected = {
            "one_test": {
                "source": "http://source/teste",
                "description": "my little dataset"
            }
        }
        data = DatasetManager("./tests/resources/one_data")
        self.assertDictEqual(data.get_datasets(), expected)

    def test_should_read_multiple_yaml_from_dir(self):
        expected = {
            "one_test": {
                "source": "https://raw.githubusercontent.com/pcsanwald/kaggle-titanic/master/train.csv",
                "description": "my little dataset"
            },
            "two_test": {
                "source": "https://raw.githubusercontent.com/pcsanwald/kaggle-titanic/master/train.csv",
                "description": "my little dataset 2"
            }
        }
        data = DatasetManager("./tests/resources/multiple_data", fs=self.os)
        result = list(data.get_datasets().keys())
        result.sort()
        expected = ["one_test", "two_test"]
        self.assertListEqual(expected, result)

    def test_should_get_dataset(self):
        data = DatasetManager("./tests/resources/local_data")
        dataset = {
            "local_test": {
                "source": "./tests/resources/local_data/train.csv",
                "description": "my little dataset local"
            }
        }
        self.assertDictEqual(data.get_dataset("local_test"),
                             dataset.get("local_test"))

    def test_should_get_dataset_unknown(self):
        data = DatasetManager("./tests/resources/local_data")
        with self.assertRaises(IOError):
            data.get_dataset("unknown_test")

    def test_should_create_dataset(self):
        data = DatasetManager(self.trash_dir, fs=self.os)
        identifier = "data_name"
        dataset = {
            "identifier": identifier,
            "description": "description",
            "source": "/tmp/test.csv",
        }
        data.create_dataset(**dataset)
        loaded_datasets = data.get_datasets()
        dataset_config = loaded_datasets.get(identifier)
        self.assertTrue(
            self.os.isfile("{}/{}.yaml".format(self.trash_dir, identifier)))
        self.assertEqual(len(self.os.listdir(self.trash_dir)), 2)
        self.assertEqual(list(loaded_datasets.keys())[0], identifier)
        self.assertEqual(dataset_config.get("description"),
                         dataset["description"])
        self.assertEqual(dataset_config.get("source"), dataset["source"])

    def test_should_create_dataset_with_custom_data(self):
        data = DatasetManager(self.trash_dir, fs=self.os)
        identifier = "data_name_custom"
        dataset = {
            "identifier": identifier,
            "description": "description",
            "source": "/tmp/test.csv"
        }
        data.create_dataset(**dataset)
        self.assertTrue(
            self.os.isfile("{}/{}.yaml".format(self.trash_dir, identifier)))
        self.assertEqual(len(os.listdir(self.trash_dir)), 2)
        loaded_dataset = data.get_datasets()
        self.assertEqual(list(loaded_dataset.keys()), [identifier])
        datasource_configs = loaded_dataset.get(identifier)
        self.assertEqual(datasource_configs["description"],
                         dataset["description"])
        self.assertEqual(datasource_configs["source"], dataset["source"])

    def test_should_remove_dataset(self):
        data = DatasetManager(self.trash_dir, fs=self.os)
        identifier = "data_name"
        dataset = {
            "identifier": identifier,
            "description": "description",
            "source": "/tmp/test.csv"
        }
        data.create_dataset(**dataset)
        self.assertTrue(
            os.path.isfile("{}/{}.yaml".format(self.trash_dir, identifier)))
        self.assertEqual(len(os.listdir(self.trash_dir)), 2)
        data.remove_dataset(identifier)
        self.assertFalse(
            os.path.isfile("{}/{}.yaml".format(self.trash_dir, identifier)))
        self.assertEqual(len(os.listdir(self.trash_dir)), 1)

    def test_should_remove_unknown_dataset(self):
        data = DatasetManager("./tests/resources/local_data", fs=self.os)
        with self.assertRaises(IOError):
            data.remove_dataset("unknown_dataset")
class OdooAddonManager:
    """
    Class wrapping the OAM behaviour.

    Attributes
    ----------
    install_dir: OSFS
        The installation directory
    src_cache: dict
        A dictionary containing, for each source type supporting cache (git),
        the temporary location of the previously downloaded sources
    odoo_version: str
        Version of Odoo using the addons
    desc_version: str
        Version of the description file used to log changes
    modules_to_install: dict
        Modules to install as described in the YAML file
    verbose_level: str
        Level of details to print
    """
    install_dir: OSFS
    modules_to_install: Dict[str, Dict[str, Any]]
    src_cache: Dict[str, Dict[str, Any]]
    odoo_version: str
    desc_version: str = None
    verbose_level: str

    _tmp_dir: TempFS = None
    _hst_file: TextIOWrapper = None
    _chglog_file: TextIOWrapper = None

    def __init__(self, description_file: str = None,
                 install_directory: str = ".",
                 verbose_level: str = VERBOSE_NONE):
        self.install_dir = OSFS(install_directory)
        self.verbose_level = verbose_level
        self.src_cache = {
            "git": {},
        }
        if description_file:
            with open(description_file, "r") as description_file:
                install_data = yaml.load(description_file, Loader=yaml.Loader)
                self.modules_to_install = install_data.get("modules", [])
                self.odoo_version = install_data.get("odoo_version")
                self.desc_version = install_data.get("version")

    def __del__(self):
        self.install_dir.close()
        if self._tmp_dir:
            self._tmp_dir.close()
        if self._hst_file:
            self._hst_file.close()
        if self._chglog_file:
            self._chglog_file.close()

    @property
    def tmp_dir(self) -> TempFS:
        """
        The temporary directory used to download modules before installing
        them if needed.
        """
        if not self._tmp_dir:
            self._tmp_dir = TempFS(TEMP_DIR_NAME)
        return self._tmp_dir

    @property
    def history_file(self) -> TextIOWrapper:
        """
        The history file where the operations performed in the installation
        directory are logged.
        """
        if not self._hst_file:
            self._hst_file = open(
                self.install_dir.getsyspath(HISTORY_FILE_NAME), 'a+')
        return self._hst_file

    @property
    def changelog_file(self) -> TextIOWrapper:
        """
        The markdown changelog file listing changes in a human-readable
        format.
        """
        if not self._chglog_file:
            self._chglog_file = open(
                self.install_dir.getsyspath(CHANGELOG_FILE_NAME), "a+")
            self._chglog_file.seek(0)
            if not self._chglog_file.read():
                self._chglog_file.write("# CHANGELOG")
            self._chglog_file.seek(0)
        return self._chglog_file

    def install_all(self, force: bool = False):
        """
        Install all modules described in the description file.

        :param force: whether to overwrite installed modules or not
        """
        installed_modules = []
        if self.verbose_level == VERBOSE_NONE:
            with click.progressbar(self.modules_to_install) as modules:
                for module in modules:
                    if self.install(module, force):
                        installed_modules.append(module)
        else:
            for module in self.modules_to_install:
                if self.install(module, force):
                    installed_modules.append(module)
        # Installed modules are removed from the list to avoid being
        # processed twice, e.g. in case of a refresh
        for module in installed_modules:
            self.modules_to_install.pop(module)
        click.echo("{} module(s) installed.".format(len(installed_modules)))

    def install(self, module_name: str, force: bool = False) -> bool:
        """
        Install a single module from its source.

        :param module_name: Name of the module
        :param force: Whether to overwrite the module if it is already installed
        :return: Whether the module has been installed or not
        """
        success = False
        self.pretty_print(module_name, "Installing...", level=VERBOSE_FULL)
        source = self.modules_to_install[module_name]
        origin_name = source.get("origin_name", module_name)
        installed_version = self.get_module_version(module_name,
                                                    self.install_dir)
        if force or not installed_version:
            try:
                source_fs = self.fetch_module_from_source(module_name)
                self.install_from_fs(origin_name, source_fs,
                                     output_name=module_name)
                version = self.get_module_version(module_name,
                                                  self.install_dir)
                self.log(module_name, OPERATION_INSTALL, force=force,
                         extra=version)
                if not force:
                    self.log_md(module_name, OPERATION_INSTALL,
                                new_version=version)
                success = True
            except InvalidModuleError as err:
                self.pretty_print(module_name, err.message,
                                  status=LOG_STATUS_ERROR,
                                  level=VERBOSE_NONE)
            except pygit2.errors.GitError:
                self.pretty_print(
                    module_name,
                    "Installation failed - Could not fetch from Git repository.",
                    status=LOG_STATUS_ERROR, level=VERBOSE_NONE)
            except Exception as e:
                self.pretty_print(
                    module_name,
                    "Installation failed ({})".format(type(e).__name__),
                    status=LOG_STATUS_ERROR, level=VERBOSE_NONE)
        else:
            self.pretty_print(module_name,
                              "Already installed. Skipping installation.",
                              status=LOG_STATUS_WARNING,
                              level=VERBOSE_NORMAL)
        return success

    def update_all(self, force: bool = False):
        """
        Update all modules.

        :param force: Whether to skip version check or not. If True, modules
            are just replaced no matter if they are being downgraded or
            installed for the first time.
        """
        updated_modules = []
        if self.verbose_level == VERBOSE_NONE:
            with click.progressbar(self.modules_to_install) as modules:
                for module in modules:
                    if self.update(module, force):
                        updated_modules.append(module)
        else:
            for module in self.modules_to_install:
                if self.update(module, force):
                    updated_modules.append(module)
        # Updated modules are removed from the list to avoid being processed
        # twice in case of a refresh
        for module in updated_modules:
            self.modules_to_install.pop(module)
        click.echo("{} module(s) updated.".format(len(updated_modules)))

    def update(self, module_name: str, force: bool = False) -> bool:
        """
        Update a single module.

        :param module_name: Name of the module
        :param force: Whether to skip version check or not. If True, modules
            are just replaced no matter if they are being downgraded or
            installed for the first time.
        :return: Whether the module has been updated or not
        """
        success = False
        self.pretty_print(module_name, "Updating...", level=VERBOSE_FULL)
        installed_version = self.get_module_version(module_name,
                                                    self.install_dir)
        if force or installed_version:
            try:
                source_fs = self.fetch_module_from_source(module_name)
                origin_name = self.modules_to_install[module_name].get(
                    "origin_name", module_name)
                new_version = self.get_module_version(origin_name, source_fs)
                if force or version.parse(new_version) >= version.parse(
                        installed_version):
                    self.pretty_print(module_name,
                                      "Updating from {0} to {1}".format(
                                          installed_version, new_version),
                                      level=VERBOSE_FULL)
                    self.install_from_fs(origin_name, source_fs,
                                         output_name=module_name)
                    self.log(module_name, OPERATION_UPDATE, force=force,
                             extra="from {0} to {1}".format(
                                 installed_version, new_version))
                    if not force:
                        self.log_md(module_name, OPERATION_UPDATE,
                                    installed_version, new_version)
                    success = True
                else:
                    self.pretty_print(
                        module_name,
                        "Fetched version ({0}) is inferior to current version ({1}). Skipping update."
                        .format(new_version, installed_version),
                        status=LOG_STATUS_ERROR, level=VERBOSE_NORMAL)
            except InvalidModuleError as err:
                self.pretty_print(module_name, err.message,
                                  status=LOG_STATUS_ERROR,
                                  level=VERBOSE_NONE)
            except pygit2.errors.GitError:
                self.pretty_print(
                    module_name,
                    "Update failed - Could not fetch from Git repository.",
                    status=LOG_STATUS_ERROR, level=VERBOSE_NONE)
            except Exception as e:
                self.pretty_print(module_name,
                                  "Update failed ({})".format(
                                      type(e).__name__),
                                  status=LOG_STATUS_ERROR,
                                  level=VERBOSE_NONE)
        else:
            self.pretty_print(module_name,
                              "Not installed. Skipping update.",
                              status=LOG_STATUS_WARNING,
                              level=VERBOSE_NORMAL)
        return success

    def uninstall_all(self, auto_confirm=False):
        """
        Uninstall all modules that are installed but not present in the
        description file. Ask the user for confirmation.

        :param auto_confirm: Do not ask the user to confirm if True
        """
        installed_modules = self.get_installed_modules()
        modules_to_uninstall = set(installed_modules.keys()) - set(
            self.modules_to_install.keys())
        if not auto_confirm:
            click.echo("The following modules will be removed:")
            for module in modules_to_uninstall:
                click.echo(module)
            click.confirm('Do you want to continue?', abort=True)
        count = 0
        if self.verbose_level == VERBOSE_NONE:
            with click.progressbar(modules_to_uninstall) as modules:
                for module in modules:
                    count += self.uninstall(module)
        else:
            for module in modules_to_uninstall:
                count += self.uninstall(module)
        click.echo("{} module(s) removed.".format(count))

    def uninstall(self, module_name: str) -> bool:
        """
        Uninstall a single module if it is installed.

        :param module_name: Name of the module
        :return: Whether the module has been uninstalled or not
        """
        success = False
        if module_name in self.install_dir.listdir("."):
            self.pretty_print(module_name, "Uninstalling...",
                              level=VERBOSE_FULL)
            self.install_dir.removetree(module_name)
            success = True
            self.log(module_name, OPERATION_UNINSTALL)
            self.log_md(module_name, OPERATION_UNINSTALL)
            self.pretty_print(module_name, "Uninstalled.",
                              status=LOG_STATUS_OK, level=VERBOSE_NORMAL)
        else:
            self.pretty_print(module_name,
                              "Not installed. Skipping uninstall.",
                              status=LOG_STATUS_ERROR, level=VERBOSE_NORMAL)
        return success

    def get_installed_modules(self) -> Dict[str, str]:
        """
        Scan the installation directory to list currently installed modules.

        :return: A dictionary of module names as keys and their currently
            installed version as values
        """
        modules = {}
        for module in self.install_dir.scandir("."):
            if module.is_dir and "__manifest__.py" in self.install_dir.listdir(
                    module.name):
                manifest_file = self.install_dir.getsyspath(
                    join(module.name, "__manifest__.py"))
                with open(manifest_file, "r") as manifest:
                    modules[module.name] = ast.literal_eval(
                        manifest.read())["version"]
        return modules

    @staticmethod
    def get_module_version(module_name: str, directory: FS) -> str:
        """
        Get the version of the module in the given directory.

        :param module_name: name of the module
        :param directory: FS object pointing to the parent directory of the module
        :return: version of the module, or None if it is not present in the directory
        """
        version = None
        if module_name in directory.listdir("."):
            manifest = directory.readtext(join(module_name,
                                               "__manifest__.py"))
            version = ast.literal_eval(manifest)["version"]
        return version

    def fetch_module_from_source(self, module_name: str) -> FS:
        """
        Download a module from its source if needed and return the directory
        where it is located.

        :param module_name: Name of the module
        :return: An FS object pointing to the module location
        """
        source = self.modules_to_install[module_name]
        source_fs: FS
        if source["source_type"] == SOURCE_LOCAL_DIR:
            source_fs = OSFS(source["path"])
        elif source["source_type"] == SOURCE_LOCAL_ZIP:
            source_fs = ZipFS(source["path"])
        elif source["source_type"] == SOURCE_GIT:
            source_fs = self.download_from_git(
                module_name, source["url"],
                source.get("branch", self.odoo_version),
                source.get("path", "."))
        return source_fs

    def download_from_git(self, module_name: str, url: str, branch: str,
                          path: str = ".") -> OSFS:
        """
        Clone a git repository or find it in the source cache.

        :param module_name: name of the module being installed
        :param url: URL of the repository
        :param branch: branch of the desired module version
        :param path: path to the module inside the repository (defaults to '.')
        :return: an OSFS object pointing to the module location inside the repository
        """
        repo_dir_name = urlparse(url).path.replace("/", "_")
        if url in self.src_cache["git"]:
            self.pretty_print(module_name, "Repository found in cache",
                              level=VERBOSE_FULL)
            repo = self.src_cache["git"][url]
            repo.checkout("refs/remotes/origin/{}".format(branch))
        else:
            self.pretty_print(module_name, "Cloning repository",
                              level=VERBOSE_FULL)
            repo = pygit2.clone_repository(
                url, self.tmp_dir.getsyspath(repo_dir_name),
                checkout_branch=branch)
            self.src_cache["git"][url] = repo
        return OSFS(join(repo.workdir, path))

    def install_from_fs(self, name: str, source_fs: FS, path: str = ".",
                        output_name: str = None):
        """
        Copy a module directory from where it is located to the installation
        directory.

        :param name: Name of the module
        :param source_fs: FS object pointing to the source location
        :param path: Path to the module directory from the source location root
        :param output_name: Name to give to the module's directory at installation
        """
        path_to_module = join(path, name)
        if name not in source_fs.listdir(path):
            raise InvalidModuleError(
                name,
                "Module directory not found - Given path should be the parent directory")
        if "__manifest__.py" not in source_fs.listdir(path_to_module):
            raise InvalidModuleError(
                name,
                "Manifest not found - Given path should be the parent directory")
        self.pretty_print(output_name,
                          "Copying from {}".format(
                              source_fs.desc(path_to_module)),
                          level=VERBOSE_FULL)
        copy_dir(source_fs, path_to_module, self.install_dir,
                 output_name or name)
        self.pretty_print(output_name, "Installed and up to date.",
                          status=LOG_STATUS_OK, level=VERBOSE_NORMAL)

    def log(self, module_name: str, operation: str, force=False,
            extra: str = ""):
        """
        Log an operation in the history file.

        :param module_name: Name of the module
        :param operation: Type of the operation
        :param force: Whether the operation was performed with the force option or not
        :param extra: Extra information to log
        """
        log_line = "{0} - {1}{2}: {3} {4}\n".format(
            datetime.now().replace(microsecond=0), operation,
            " (forced)" if force else "", module_name, extra)
        self.history_file.write(log_line)

    def log_md(self, module: str, operation: str, old_version: str = None,
               new_version: str = None):
        """
        Log an operation in the markdown log file in a human-readable format.

        :param module: Name of the module
        :param operation: Type of the operation
        :param old_version: Overwritten version of the module, in case of an update
        :param new_version: New version of the module, in case of an installation/update
        """
        current_log_content = self.changelog_file.read()
        # Look for the section concerning the current version, or write a
        # scaffold if not found
        version = self.desc_version or datetime.today().strftime("%Y-%m-%d")
        log_index = current_log_content.find("## {}".format(version))
        if log_index >= 0:
            new_log_content = current_log_content[log_index:]
        else:
            new_log_content = "\n\n## {}\n\n**Added**\n\n\n**Updated**\n\n\n**Removed**\n\n".format(
                version)
            log_index = len(current_log_content)
        # Remove the previous log entry concerning the module
        if module in new_log_content:
            new_log_content = re.sub(r"\n.*{}.*".format(module), "",
                                     new_log_content)
        # Append the new log line under the right operation type
        if operation == OPERATION_INSTALL:
            index = new_log_content.find("**Updated**") - 2
            log_line = "\n * {0} ({1})".format(module, new_version)
        elif operation == OPERATION_UPDATE:
            index = new_log_content.find("**Removed**") - 2
            log_line = "\n * {0} ({1} from {2})".format(
                module, new_version, old_version)
        elif operation == OPERATION_UNINSTALL:
            index = len(new_log_content) - 1
            log_line = "\n * {0}".format(module)
        new_log_content = "{0}{1}{2}".format(new_log_content[:index],
                                             log_line,
                                             new_log_content[index:])
        # Overwrite the file with the updated logs
        old_log_content = current_log_content[:log_index]
        self.changelog_file.truncate()
        self.changelog_file.write(old_log_content + new_log_content)

    def list_external_dependencies(self, raw=False,
                                   modules: List[str] = None):
        """
        Show the external dependencies of all installed modules.

        :param raw: Whether to print only python dependencies in a
            'requirements.txt' format
        :param modules: If given, show dependencies of those modules only
        """
        dependencies = self.get_all_dependencies(modules=modules)
        if raw:
            for dep in dependencies.get("python", []):
                click.echo(dep)
        else:
            for type in dependencies:
                click.echo(type)
                for dep in dependencies[type]:
                    if type == "python":
                        dep_installed = self.check_python_dependency(dep)
                        click.echo("\t{0} {1}".format(
                            dep, "(OK)" if dep_installed else "(missing)"))
                    else:
                        click.echo("\t{}".format(dep))

    def install_missing_dependencies(self, modules: List[str] = None):
        """
        Install all missing dependencies.

        :param modules: If given, install dependencies of those modules only
        """
        dependencies = self.get_all_dependencies(modules=modules)
        self.install_python_dependencies(dependencies.get("python", []))

    def get_all_dependencies(self, modules: List[str] = None
                             ) -> Dict[str, List[str]]:
        """
        Get all missing dependencies of the installed modules.

        :param modules: If given, return dependencies of those modules only
        :return: A dictionary containing a list of dependencies for each type
        """
        # Filter installed modules to keep only the given ones
        modules = {mod: self.get_installed_modules()[mod] for mod in modules} if modules \
            else self.get_installed_modules()
        all_deps = {}
        for module in modules:
            module_deps = self.parse_dependencies(module, self.install_dir)
            for type, deps in module_deps.items():
                all_deps.setdefault(type, set()).update(set(deps))
        return all_deps

    @staticmethod
    def parse_dependencies(module_name: str,
                           directory: FS) -> Dict[str, List[str]]:
        """
        Retrieve external dependencies from a module's manifest.

        :param module_name: Name of the module
        :param directory: Location of the module
        :return: A dictionary containing a list of dependencies for each type
        """
        manifest = directory.readtext(join(module_name, "__manifest__.py"))
        manifest_dict = ast.literal_eval(manifest)
        return manifest_dict.get("external_dependencies", {})

    @staticmethod
    def check_python_dependency(dependency: str) -> bool:
        """
        Check whether a python dependency is satisfied, i.e. whether the
        python module is installed.

        :param dependency: Name of the python module
        :return: True if the module is installed, False otherwise
        """
        try:
            __import__(dependency)
        except ImportError:
            return False
        return True

    @staticmethod
    def install_python_dependencies(dependencies: List[str]):
        """
        Call pip to install the given python dependencies.

        :param dependencies: List of python modules to install
        """
        callable_pip.main("install", *dependencies)

    def pretty_print(self, module_name: str, message: str = "",
                     status: str = LOG_STATUS_PENDING, level: int = 0):
        """
        Format and print a log line to the console.

        :param module_name: Name of the module concerned
        :param message: Message to print
        :param status: Status of the log ('pending', 'ok', 'warning', 'error')
        :param level: Minimum verbose level at which to actually print the log (0, 1, 2)
        """
        if level <= self.verbose_level:
            if status == LOG_STATUS_OK:
                msg_color = "green"
            elif status == LOG_STATUS_WARNING:
                msg_color = "yellow"
            elif status == LOG_STATUS_ERROR:
                msg_color = "red"
            else:
                msg_color = "white"
            click.echo(
                click.style(module_name.ljust(30), fg="blue") +
                click.style(message, fg=msg_color))
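# A hypothetical driver for the class above; the YAML layout is inferred from
# __init__ and fetch_module_from_source, and the file name and module entry
# are made up for illustration:
#
#   # addons.yaml
#   odoo_version: "14.0"
#   version: "1.0.0"
#   modules:
#     web_responsive:
#       source_type: git
#       url: https://github.com/OCA/web
#
oam = OdooAddonManager(description_file="addons.yaml",
                       install_directory="./addons")
oam.install_all()
oam.list_external_dependencies()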
checkpoint = ModelCheckpoint(checkpoint_file, verbose=1, monitor="val_acc",
                             save_best_only=True, mode="auto")
mainsave = ModelCheckpoint(model_file, verbose=1, save_best_only=False,
                           mode="auto")
callbacks = [checkpoint, mainsave]

if tensorboard_enabled:
    log_folder = "./logs/" + app_name + "/"
    pathlib.Path(log_folder).mkdir(parents=True, exist_ok=True)
    folder = OSFS(log_folder)
    test_n = len(list(n for n in folder.listdir(".") if n.startswith("run")))
    this_test = log_folder + "run" + str(test_n + 1) + "/"
    pathlib.Path(this_test).mkdir(parents=True, exist_ok=True)
    tb = keras.callbacks.TensorBoard(log_dir=this_test, histogram_freq=1,
                                     batch_size=batch_size, write_graph=True,
                                     write_grads=True, write_images=False)
    callbacks.append(tb)

if fit:
    # Do the training.
    model.fit(x_train, y_train,
class VirtualHost(object):
    """
    Represents a single host. This class implements the commands that are
    host-specific, like pwd, ls, etc.
    """

    def __init__(self, params, network, fs_dir):
        self.hostname = params['hostname']
        self.ip_address = params['ip_address']
        self.network = network
        self.env = params['env']
        # Materialize as a list so membership tests and random choice also
        # work on Python 3, where map() returns a one-shot iterator.
        valid_ips = [str(ip) for ip in network[1:-1]]
        if self.ip_address is None:
            logger.error(
                'IP address for {} is not specified in the config file (or is "null")'
                .format(self.hostname))
            if not self._set_ip_from_previous_run(fs_dir, valid_ips):
                self.ip_address = get_random_item(valid_ips)
                logger.info('Assigned random IP {} to host {}'.format(
                    self.ip_address, self.hostname))
        else:
            if self.ip_address not in valid_ips:
                logger.error(
                    'IP Address {} for {} is not valid for the specified network'
                    .format(params['ip_address'], self.hostname))
                if not self._set_ip_from_previous_run(fs_dir, valid_ips):
                    self.ip_address = get_random_item(valid_ips)
                    logger.info('Assigned random IP {} to host {}'.format(
                        self.ip_address, self.hostname))
        self.valid_logins = params['valid_logins']
        self.logged_in = False
        self.current_user = None
        if params.get('default', False):
            self.default = True
        else:
            self.default = False
        self.filesystem = OSFS(os.path.join(
            fs_dir, '{}_{}'.format(self.hostname, self.ip_address)),
            create=True)
        self.working_path = '/'

    def authenticate(self, username, password):
        if self.valid_logins.get(username, None) == password:
            return True
        return False

    def login(self, username):
        logger.debug('User "{}" has logged into "{}" host'.format(
            username, self.hostname))
        self.logged_in = True
        self.current_user = username

    def logout(self):
        self.logged_in = False
        self.current_user = None

    @property
    def welcome(self):
        if self.filesystem.isfile('/etc/motd'):
            with self.filesystem.open('/etc/motd') as motd_file:
                return motd_file.read()
        else:
            return 'Welcome to {} server.'.format(self.hostname)

    @property
    def prompt(self):
        prompt = '{}@{}:{}$ '.format(self.current_user, self.hostname,
                                     self.working_path)
        return prompt

    def run_echo(self, params, shell):
        if not params:
            shell.writeline('')
        elif params[0].startswith('$') and len(params) == 1:
            var_name = params[0][1:]
            value = self.env.get(var_name, '')
            shell.writeline(value)
        elif '*' in params:
            params.remove('*')
            params.extend(self.filesystem.listdir())
            shell.writeline(' '.join(params))
        else:
            shell.writeline(' '.join(params))

    def run_pwd(self, params, shell):
        if params:
            shell.writeline('pwd: too many arguments')
        else:
            shell.writeline('{}'.format(self.working_path))

    def run_wget(self, params, shell):
        parser = Parser(add_help=False)
        parser.add_argument('-h', '--help', action='store_true',
                            default=False)
        parser.add_argument('-V', '--version', action='store_true',
                            default=False)
        parser.add_argument('-O', '--output-document')
        args, unparsed = parser.parse_known_args(params)
        if unparsed:
            url = unparsed[0]
        elif not args.help and not args.version:
            noparam_file_path = os.path.join(
                os.path.dirname(hornet.__file__), 'data', 'commands',
                'wget', 'no_param')
            self.send_data_from_file(noparam_file_path, shell)
            return
        if args.help:
            help_file_path = os.path.join(os.path.dirname(hornet.__file__),
                                          'data', 'commands', 'wget', 'help')
            self.send_data_from_file(help_file_path, shell)
            return
        if args.version:
            version_file_path = os.path.join(
                os.path.dirname(hornet.__file__), 'data', 'commands',
                'wget', 'version')
            self.send_data_from_file(version_file_path, shell)
            return
        wget_command = WgetCommand(url, self.working_path, self.filesystem,
                                   args, shell)
        wget_command.process()

    def run_ping(self, params, shell):
        options = [x for x in params if x.startswith('-')]
        if '-h' in options or len(params) == 0:
            help_file_path = os.path.join(os.path.dirname(hornet.__file__),
                                          'data', 'commands', 'ping', 'help')
            self.send_data_from_file(help_file_path, shell)
            return
        filtered_params = [p for p in params if not p.startswith('-')]
        ping_host = filtered_params[-1]
        logger.debug('Going to ping {}'.format(ping_host))
        ping_command = PingCommand(ping_host, shell)
        ping_command.process()

    def run_ifconfig(self, params, shell):
        if len(params) >= 2:
            shell.writeline('SIOCSIFFLAGS: Operation not permitted')
            return
        if params:
            parameter = params[0]
            if parameter == '--version':
                version_file_path = os.path.join(
                    os.path.dirname(hornet.__file__), 'data', 'commands',
                    'ifconfig', 'version')
                self.send_data_from_file(version_file_path, shell)
                logger.debug(
                    'Sending version string for ifconfig from {} file'.format(
                        version_file_path))
                return
            elif parameter == '--help' or parameter == '-h':
                help_file_path = os.path.join(
                    os.path.dirname(hornet.__file__), 'data', 'commands',
                    'ifconfig', 'help')
                self.send_data_from_file(help_file_path, shell)
                logger.debug(
                    'Sending help string for ifconfig from {} file'.format(
                        help_file_path))
                return
        output_template_path = os.path.join(
            os.path.dirname(hornet.__file__), 'data', 'commands', 'ifconfig',
            'output_template')
        ifconfig_command = IfconfigCommand(params, output_template_path,
                                           self.ip_address, self.network)
        output = ifconfig_command.process()
        shell.writeline(output)

    def run_ls(self, params, shell):
        paths = []
        other_params = []
        for p in params:
            if p.startswith('-'):
                other_params.append(p)
            else:
                paths.append(p)
        if not paths:  # List contents of working dir by default
            paths.append(self.working_path)
        parser = Parser(add_help=False)
        parser.add_argument('-a', '--all', action='store_true', default=False)
        parser.add_argument('-A', '--almost-all', action='store_true',
                            default=False)
        parser.add_argument('-d', '--directory', action='store_true',
                            default=False)
        parser.add_argument('-l', action='store_true', default=False)
        # We ignore these (for now), but still parse them ;-)
        parser.add_argument('-h', '--human-readable', action='store_true',
                            default=False)
        parser.add_argument('-b', '--escape', action='store_true',
                            default=False)
        parser.add_argument('--block-size')
        parser.add_argument('-B', '--ignore-backups', action='store_true',
                            default=False)
        parser.add_argument('-c', action='store_true', default=False)
        parser.add_argument('-C', action='store_true', default=False)
        parser.add_argument('--color')
        parser.add_argument('-D', '--dired', action='store_true',
                            default=False)
        parser.add_argument('-f', action='store_true', default=False)
        parser.add_argument('-F', '--classify', action='store_true',
                            default=False)
        parser.add_argument('--file-type', action='store_true',
                            default=False)
        parser.add_argument('--format')
        parser.add_argument('--full-time', action='store_true',
                            default=False)
        parser.add_argument('-g', action='store_true', default=False)
        parser.add_argument('--group-directories-first', action='store_true',
                            default=False)
        parser.add_argument('-G', '--no-group', action='store_true',
                            default=False)
        parser.add_argument('-H', '--dereference-command-line',
                            action='store_true', default=False)
        parser.add_argument('--dereference-command-line-symlink-to-dir',
                            action='store_true', default=False)
        parser.add_argument('--hide')
        parser.add_argument('--indicator-style')
        parser.add_argument('-i', '--inode', action='store_true',
                            default=False)
        parser.add_argument('-I', '--ignore')
        parser.add_argument('-k', '--kibibytes', action='store_true',
                            default=False)
        parser.add_argument('-L', '--deference', action='store_true',
                            default=False)
        parser.add_argument('-m', action='store_true', default=False)
        parser.add_argument('-n', '--numeric-uid-gid', action='store_true',
                            default=False)
        parser.add_argument('-N', '--literal', action='store_true',
                            default=False)
        parser.add_argument('-o', action='store_true', default=False)
        parser.add_argument('-p', action='store_true', default=False)
        parser.add_argument('-q', '--hide-control-chars',
                            action='store_true', default=False)
        parser.add_argument('--show-control-chars', action='store_true',
                            default=False)
        parser.add_argument('-Q', '--quote-name', action='store_true',
                            default=False)
        parser.add_argument('--quoting-style')
        parser.add_argument('-r', '--reverse', action='store_true',
                            default=False)
        parser.add_argument('-R', '--recursive', action='store_true',
                            default=False)
        parser.add_argument('-s', '--size', action='store_true',
                            default=False)
        parser.add_argument('-S', action='store_true', default=False)
        parser.add_argument('--sort')
        parser.add_argument('--time')
        parser.add_argument('--time-style')
        parser.add_argument('-t', action='store_true', default=False)
        parser.add_argument('-T', '--tabsize', default=False)
        parser.add_argument('-u', action='store_true', default=False)
        parser.add_argument('-U', action='store_true', default=False)
        parser.add_argument('-v', action='store_true', default=False)
        parser.add_argument('-w', '--width')
        parser.add_argument('-x', action='store_true', default=False)
        parser.add_argument('-X', action='store_true', default=False)
        parser.add_argument('-1', dest='one_per_line', action='store_true',
                            default=False)
        parser.add_argument('--help', action='store_true', default=False)
        parser.add_argument('--version', action='store_true', default=False)
        try:
            args = parser.parse_args(other_params)
        except ParseError:
            shell.writeline('ls: invalid options: "{}"'.format(
                ' '.join(params)))
            shell.writeline('Try \'ls --help\' for more information.')
            return
        if args.help:
            help_file_path = os.path.join(os.path.dirname(hornet.__file__),
                                          'data', 'commands', 'ls', 'help')
            logger.debug(
                'Sending help string from file {}'.format(help_file_path))
            self.send_data_from_file(help_file_path, shell)
            return
        if args.version:
            version_file_path = os.path.join(
                os.path.dirname(hornet.__file__), 'data', 'commands', 'ls',
                'version')
            logger.debug('Sending version string from file {}'.format(
                version_file_path))
            self.send_data_from_file(version_file_path, shell)
            return
        ls_cmd = LsCommand(args, paths, self.filesystem, self.working_path)
        output = ls_cmd.process()
        shell.writeline(output)

    def run_cd(self, params, shell):
        if len(params) == 0:
            params = ['/']
        cd_path = os.path.join(self.working_path, params[0])
        new_path_exists = False
        try:
            new_path_exists = self.filesystem.exists(cd_path)
        except BackReferenceError:
            logger.warn('Access to the external file system was attempted.')
            cd_path = '/'
            new_path_exists = True
        finally:
            if not new_path_exists:
                shell.writeline('cd: {}: No such file or directory'.format(
                    params[0]))
            else:
                self.working_path = os.path.normpath(cd_path)
                logger.debug(
                    'Working directory for host {} changed to {}'.format(
                        self.hostname, self.working_path))

    def run_uname(self, params, shell):
        if not params:
            shell.writeline('Linux')
            return
        buff = ''
        info = [
            'Linux', self.hostname, '3.13.0-37-generic',
            '#64-Ubuntu SMP Mon Sep 22 21:30:01 UTC 2014', 'i686', 'i686',
            'i686', 'GNU/Linux'
        ]
        parser = Parser(add_help=False)
        parser.add_argument('-a', '--all', default=False,
                            action='store_true')
        parser.add_argument('-s', '--kernel-name', default=False,
                            action='store_true')
        parser.add_argument('-n', '--nodename', default=False,
                            action='store_true')
        parser.add_argument('-r', '--kernel-release', default=False,
                            action='store_true')
        parser.add_argument('-v', '--kernel-version', default=False,
                            action='store_true')
        parser.add_argument('-m', '--kernel-machine', default=False,
                            action='store_true')
        parser.add_argument('-p', '--processor', default=False,
                            action='store_true')
        parser.add_argument('-i', '--hardware-platform', default=False,
                            action='store_true')
        parser.add_argument('-o', '--operating-system', default=False,
                            action='store_true')
        parser.add_argument('--help', default=False, action='store_true')
        parser.add_argument('--version', default=False, action='store_true')
        try:
            args = parser.parse_args(params)
        except ParseError:
            shell.writeline('uname: invalid options -- \'{}\''.format(
                ' '.join(params)))
            shell.writeline('Try \'uname --help\' for more information.')
            return
        if args.all:
            buff = ' '.join(info)
            shell.writeline(buff)
            return
        if args.help:
            help_file_path = os.path.join(os.path.dirname(hornet.__file__),
                                          'data', 'commands', 'uname',
                                          'help')
            self.send_data_from_file(help_file_path, shell)
            return
        if args.version:
            version_file_path = os.path.join(
                os.path.dirname(hornet.__file__), 'data', 'commands',
                'uname', 'version')
            self.send_data_from_file(version_file_path, shell)
            return
        if args.kernel_name:
            buff = buff + info[0] + ' '
        if args.nodename:
            buff = buff + self.hostname + ' '
        if args.kernel_release:
            buff = buff + info[2] + ' '
        if args.kernel_version:
            buff = buff + info[3] + ' '
        if args.kernel_machine:
            buff = buff + info[4] + ' '
        if args.processor:
            buff = buff + info[4] + ' '
        if args.hardware_platform:
            buff = buff + info[4] + ' '
        if args.operating_system:
            buff += 'GNU/Linux'
        shell.writeline(buff)

    def _set_ip_from_previous_run(self, fs_dir, valid_ips):  # pragma: no cover
        for dir_name in os.listdir(fs_dir):
            if dir_name.startswith(self.hostname + '_'):
                possible_ip = dir_name.split('_')[1]
                if possible_ip in valid_ips:
                    self.ip_address = possible_ip
                    logger.info('Assigned IP {} to host {}'.format(
                        self.ip_address, self.hostname))
                    return True
        return False

    @staticmethod
    def send_data_from_file(path, shell):
        with open(path, 'r') as infile:
            for line in infile:
                line = line.strip()
                shell.writeline(line)
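# Illustrative construction of a VirtualHost. The params layout mirrors the
# keys read in __init__; the network argument is assumed to be an indexable
# sequence of addresses (network[1:-1] then drops the network and broadcast
# addresses), and all values below are made up:
import ipaddress
import os

params = {
    'hostname': 'web01',
    'ip_address': '192.168.1.10',  # must fall inside the given network
    'env': {'HOME': '/root'},
    'valid_logins': {'admin': 'secret'},
    'default': True,
}
network = list(ipaddress.ip_network(u'192.168.1.0/24'))
os.makedirs('/tmp/vhosts', exist_ok=True)
host = VirtualHost(params, network, '/tmp/vhosts')
assert host.authenticate('admin', 'secret')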
    def run(self):
        parser = self.get_argparse()
        args = parser.parse_args(sys.argv[1:])

        if args.version is None:
            major, minor = __version__.split('.')[:2]
            version = "{}.{}".format(major, minor)
        else:
            version = args.version

        try:
            with open(expanduser(args.settings), 'rt') as f_ini:
                cfg = SettingsContainer.read_from_file(f_ini)
                print("Read settings from {}".format(args.settings))
        except IOError:
            cfg = SettingsContainer()

        from ..docgen.extracter import Extracter
        from ..docgen.builder import Builder
        from ..command import doc_project
        location = dirname(doc_project.__file__)

        extract_fs = OSFS(join('doccode', version), create=True)
        base_docs_fs = OSFS('text')
        languages = [d for d in base_docs_fs.listdir(dirs_only=True)
                     if len(d) == 2]

        def do_extract():
            print("Extracting docs v{}".format(version))
            utils.remove_all(extract_fs, '/')
            try:
                archive, context, doc = moya_build.build_server(
                    location, 'settings.ini')
            except Exception:
                raise

            extract_fs.makedir("site/docs", recursive=True)
            extract_fs.makedir("site/tags", recursive=True)
            #extract_fs.makedir("libs")

            with extract_fs.opendir('site/tags') as tags_fs:
                extracter = Extracter(archive, tags_fs)
                const_data = {}
                builtin_tags = []
                for namespace in self.builtin_namespaces:
                    xmlns = getattr(namespaces, namespace, None)
                    if xmlns is None:
                        raise ValueError(
                            "XML namespace '{}' is not in namespaces.py".format(
                                namespace))
                    namespace_tags = archive.registry.get_elements_in_xmlns(
                        xmlns).values()
                    builtin_tags.extend(namespace_tags)
                extracter.extract_tags(builtin_tags, const_data=const_data)

            for language in languages:
                with extract_fs.makeopendir("site/docs") as language_fs:
                    doc_extracter = Extracter(None, language_fs)
                    docs_fs = base_docs_fs.opendir(language)
                    doc_extracter.extract_site_docs(docs_fs, dirname=language)

        if args.extract:
            do_extract()

        if args.build:
            theme_path = cfg.get('paths', 'theme', None)
            dst_path = join('html', version)
            if theme_path is None:
                theme_fs = OSFS('theme')
            else:
                theme_fs = fsopendir(theme_path)

            output_path = cfg.get('paths', 'output', None)

            if output_path is None:
                output_base_fs = OSFS(dst_path, create=True)
            else:
                output_root_base_fs = fsopendir(output_path)
                output_base_fs = output_root_base_fs.makeopendir(
                    dst_path, recursive=True)
            #output_base_fs = OSFS(join('html', version), create=True)

            utils.remove_all(output_base_fs, '/')

            def do_build():
                print("Building docs v{}".format(version))
                lib_info = {}
                lib_paths = {}
                for long_name, lib in self.document_libs:
                    lib_info[long_name] = moya_build.get_lib_info(lib)
                    lib_paths[long_name] = output_base_fs.getsyspath(
                        join('libs', long_name, 'index.html'))
                for language in languages:
                    docs_fs = base_docs_fs.makeopendir(language)
                    output_fs = output_base_fs.makeopendir(language)
                    utils.remove_all(output_fs, '/')

                    with extract_fs.opendir("site") as extract_site_fs:
                        builder = Builder(extract_site_fs, output_fs,
                                          theme_fs)
                        from ..tools import timer
                        with timer('render time'):
                            builder.build({"libs": lib_info,
                                           "lib_paths": lib_paths})

                # output_base_fs.makedir("libs", allow_recreate=True)
                # for long_name, lib in self.document_libs:
                #     source_path = extract_fs.getsyspath(join("libs", long_name))
                #     output_path = output_base_fs.getsyspath('libs')
                #     cmd_template = 'moya --debug doc build {} --theme libtheme --source "{}" --output "{}"'
                #     cmd = cmd_template.format(lib, source_path, output_path)
                #     os.system(cmd)

            def extract_build():
                do_extract()
                do_build()

            do_build()

            if not args.nobrowser:
                import webbrowser
                webbrowser.open(output_base_fs.getsyspath('en/index.html'))

            if args.watch:
                print("Watching for changes...")
                watcher = ReloadChangeWatcher(base_docs_fs, extract_build)
                while 1:
                    try:
                        time.sleep(0.1)
                    except KeyboardInterrupt:
                        break

        return 0
class Timeline(object):
    """A timeline is a sequence of timestamped events."""

    def __init__(self, path, name, max_events=None):
        self.path = path
        self.name = name
        self.fs = OSFS(path, create=True)
        self.max_events = max_events

    def __repr__(self):
        return "Timeline({!r}, {!r}, max_events={!r})".format(
            self.path, self.name, self.max_events)

    def new_event(self, event_type, timestamp=None, *args, **kwargs):
        """Create and return an event, to be used as a context manager"""
        if self.max_events is not None:
            size = len(self.fs.listdir(wildcard="*.json"))
            if size >= self.max_events:
                raise TimelineFullError(
                    "The timeline has reached its maximum size")
        if timestamp is None:
            timestamp = int(time() * 1000.0)
        try:
            event_cls = _event_registry[event_type]
        except KeyError:
            raise UnknownEventError("No event type '{}'".format(event_type))
        # Make an event id that we can be confident is unique
        token = str(randint(0, 2 ** 31))
        event_id = kwargs.pop('event_id', None) or "{}_{}_{}".format(
            event_type, timestamp, token)
        event = event_cls(self, event_id, timestamp, *args, **kwargs)
        log.debug('new event {!r}'.format(event))
        return event

    # 'bytes=None' is assumed here: the body checks 'bytes is None' in the
    # fall-through branch, which implies a keyword parameter of that name.
    def new_photo(self, file, filename=None, ext=None, bytes=None, **kwargs):
        """Create a new photo object"""
        event = self.new_event('IMAGE', **kwargs)
        if hasattr(file, 'getvalue'):
            bytes = file.getvalue()
        elif file is not None:
            if isinstance(file, text_type):
                with open(file, 'rb') as f:
                    bytes = f.read()
            else:
                bytes = file.read()
        else:
            if bytes is None:
                raise ValueError("A value for 'file' or 'bytes' is required")
        event.attach_bytes(bytes, name='photo', filename=filename, ext=ext)
        return event

    def get_events(self, sort=True):
        """Get all accumulated events"""
        events = []
        for event_filename in self.fs.listdir(wildcard="*.json"):
            with self.fs.open(event_filename, 'rb') as f:
                event = loads(f.read().decode('utf-8'))
                events.append(event)
        if sort:
            # sort by timestamp
            events.sort(key=itemgetter('timestamp'))
        return events

    def clear_all(self):
        """Clear all stored events"""
        for filename in self.fs.listdir(wildcard="*.json"):
            try:
                self.fs.remove(filename)
            except FSError:
                pass

    def clear_events(self, event_ids):
        """Clear any events that have been processed"""
        for event_id in event_ids:
            filename = "{}.json".format(event_id)
            try:
                self.fs.remove(filename)
            except FSError:
                pass

    def _write_event(self, event_id, event):
        if hasattr(event, 'to_data'):
            event = event.to_data()
        event['event_id'] = event_id
        event_json = dumps(event, indent=4).encode('utf-8')
        filename = "{}.json".format(event_id)
        with self.fs.open(filename, 'wb') as f:
            f.write(event_json)
class Timeline(object):
    """A timeline is a sequence of timestamped events."""

    def __init__(self, path, name, max_events=None):
        self.path = path
        self.name = name
        self.fs = OSFS(path, create=True)
        self.max_events = max_events

    def __repr__(self):
        return "Timeline({!r}, {!r}, max_events={!r})".format(
            self.path, self.name, self.max_events)

    def new_event(self, event_type, timestamp=None, *args, **kwargs):
        """Create and return an event, to be used as a context manager"""
        if self.max_events is not None:
            size = len(self.fs.listdir(wildcard="*.json"))
            if size >= self.max_events:
                raise TimelineFullError(
                    "The timeline has reached its maximum size")
        if timestamp is None:
            timestamp = int(time() * 1000.0)
        try:
            event_cls = _event_registry[event_type]
        except KeyError:
            raise UnknownEventError("No event type '{}'".format(event_type))
        # Make an event id that we can be confident is unique
        token = str(randint(0, 2**31))
        event_id = "{}_{}_{}".format(event_type, timestamp, token)
        event = event_cls(self, event_id, timestamp, *args, **kwargs)
        log.debug('new event {!r}'.format(event))
        return event

    # 'bytes=None' is assumed here for the same reason as in the previous
    # variant: the body checks 'bytes is None' in the fall-through branch.
    def new_photo(self, file, filename=None, ext=None, bytes=None, **kwargs):
        """Create a new photo object"""
        event = self.new_event('IMAGE', **kwargs)
        if hasattr(file, 'getvalue'):
            bytes = file.getvalue()
        elif file is not None:
            if isinstance(file, basestring):
                with open(file, 'rb') as f:
                    bytes = f.read()
            else:
                bytes = file.read()
        else:
            if bytes is None:
                raise ValueError("A value for 'file' or 'bytes' is required")
        event.attach_bytes(bytes, name='photo', filename=filename, ext=ext)
        return event

    def get_events(self, sort=True):
        """Get all accumulated events"""
        events = []
        for event_filename in self.fs.listdir(wildcard="*.json"):
            with self.fs.open(event_filename, 'rb') as f:
                event = loads(f.read())
                events.append(event)
        if sort:
            # sort by timestamp
            events.sort(key=itemgetter('timestamp'))
        return events

    def clear_all(self):
        """Clear all stored events"""
        for filename in self.fs.listdir(wildcard="*.json"):
            try:
                self.fs.remove(filename)
            except FSError:
                pass

    def clear_events(self, event_ids):
        """Clear any events that have been processed"""
        for event_id in event_ids:
            filename = "{}.json".format(event_id)
            try:
                self.fs.remove(filename)
            except FSError:
                pass

    def _write_event(self, event_id, event):
        if hasattr(event, 'to_data'):
            event = event.to_data()
        event['event_id'] = event_id
        event_json = dumps(event, indent=4)
        filename = "{}.json".format(event_id)
        with self.fs.open(filename, 'wb') as f:
            f.write(event_json)
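# Minimal usage sketch for the Timeline variants above. Event types and the
# event registry are defined elsewhere, and the path is illustrative:
timeline = Timeline('/tmp/timeline', 'demo', max_events=100)
events = timeline.get_events()  # read everything accumulated so far
timeline.clear_events([e['event_id'] for e in events])  # then acknowledge it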
# Write in summaries
tf.summary.scalar("loss", loss)
tf.summary.scalar("accuracy", accuracy)
for var in tf.trainable_variables():
    tf.summary.histogram(var.name, var)
for grad, var in grads:
    tf.summary.histogram(var.name + '/gradient', grad)

# Merge all summaries
merged_summary_op = tf.summary.merge_all()

training_epochs = 100

folder = OSFS(FLAGS.test_dir)
test_n = len(list(n for n in folder.listdir('') if n.startswith('test')))
this_test = FLAGS.test_dir + "/test" + str(test_n + 1)

with tf.Session() as sess:
    sess.run(init)
    summary_writer = tf.summary.FileWriter(this_test,
                                           graph=tf.get_default_graph())
    lr_ = 0.0001
    for epoch in range(training_epochs):
        sess.run(iterator.initializer,
                 feed_dict={features_placeholder: X_train,
                            labels_placeholder: Labels})
        train_error = 0
        ac = 0
        # Integer division gives the number of full batches per epoch
        # (np.floor takes a single argument, so it could not have been
        # intended here).
        n_batches = len(X_train) // batch_size
        for e in range(n_batches):