def handle_inspect(**kwargs):
    """
    Handle inspect actions.

    Args:
        **kwargs (dict): Arguments
    """
    default_args = {
        'input': None
    }
    default_args.update(kwargs)
    args = dict_to_namedtuple(default_args)

    # split input path
    dirname, basename = os.path.split(unicode(args.input))

    # open input filesystem
    try:
        in_fs = OSFS(dirname)
        if in_fs.isfile(basename):
            inspect_file(in_fs.getsyspath(basename))
        elif in_fs.isdir(basename):
            inspect_dir(in_fs.opendir(basename))
    except CreateFailed:
        console.error('Input {} does not exist.'.format(args.input))
        return
def create_free_editorslf(name):
    """
    Creates a BufferedSlfFS exclusively from the contents of the editor directory.

    The python files inside the editor directory are responsible for creating the
    STI images and adding them to the SLF. Each file should contain the function:

    ```
    def add_to_free_editorslf(source_fs, target_fs):
        pass
    ```

    source_fs is the source OSFS (editor directory)
    target_fs is the target BufferedSlfFS
    """
    target_fs = BufferedSlfFS()
    target_fs.library_name = name or "Free editor.slf"
    target_fs.library_path = "editor\\"
    target_fs.version = 0x0200  # 2.0
    target_fs.sort = 0  # BufferedSlfFS does not guarantee that the entries are sorted

    source_fs = OSFS('editor')
    for path in source_fs.walkfiles():
        if path.endswith(".py"):
            # run python file inside the editor directory
            name = ("editor" + path)[:-3].replace("/", ".")
            spec = spec_from_file_location(name, source_fs.getsyspath(path))
            module = module_from_spec(spec)
            spec.loader.exec_module(module)
            module.add_to_free_editorslf(source_fs, target_fs)

    for path in sorted(target_fs.walkfiles()):
        print(path)
    return target_fs
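Each plugin file that create_free_editorslf picks up only has to expose add_to_free_editorslf(source_fs, target_fs). Below is a minimal sketch of such a plugin; the file names are invented, and it assumes BufferedSlfFS accepts the standard PyFilesystem write calls (building a real STI image is out of scope here).

# editor/free_placeholder.py -- hypothetical plugin picked up by create_free_editorslf().
def add_to_free_editorslf(source_fs, target_fs):
    # source_fs: OSFS opened on the editor directory
    # target_fs: the BufferedSlfFS being assembled
    # Copy a raw asset into the SLF as-is (assumes getcontents/setcontents are supported).
    data = source_fs.getcontents("placeholder.sti")
    target_fs.setcontents("PLACEHOLDER.STI", data)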
def run(self):
    args = self.args
    device_class = args.device_class

    conf_path = constants.CONF_PATH
    if not os.path.exists(conf_path):
        sys.stderr.write('{} does not exist.\n'.format(conf_path))
        sys.stderr.write("please run 'dataplicity init' first\n")
        return -1

    print("reading conf from {}".format(conf_path))
    cfg = settings.read(conf_path)

    serial = cfg.get('device', 'serial')
    auth_token = cfg.get('device', 'auth')
    server_url = cfg.get('server', 'url', constants.SERVER_URL)

    remote = jsonrpc.JSONRPC(server_url)

    print("downloading firmware...")
    with remote.batch() as batch:
        batch.call_with_id('register_result',
                           'device.register',
                           auth_token=auth_token,
                           name=args.name or serial,
                           serial=serial,
                           device_class_name=device_class)
        batch.call_with_id('auth_result',
                           'device.check_auth',
                           device_class=device_class,
                           serial=serial,
                           auth_token=auth_token)
        batch.call_with_id('firmware_result', 'device.get_firmware')
    batch.get_result('register_result')
    batch.get_result('auth_result')
    fw = batch.get_result('firmware_result')

    if not fw['firmware']:
        sys.stderr.write('no firmware available!\n')
        return -1

    version = fw['version']
    firmware_bin = b64decode(fw['firmware'])
    firmware_file = BytesIO(firmware_bin)
    firmware_fs = ZipFS(firmware_file)
    dst_fs = OSFS(constants.FIRMWARE_PATH, create=True)

    firmware.install(device_class, version, firmware_fs, dst_fs)

    fw_path = dst_fs.getsyspath('/')
    print("installed firmware {} to {}".format(version, fw_path))

    firmware.activate(device_class, version, dst_fs)
    print("activated {}".format(version))
def run(self):
    args = self.args
    device_class = args.device_class

    conf_path = constants.CONF_PATH
    if not os.path.exists(conf_path):
        sys.stderr.write('{} does not exist.\n'.format(conf_path))
        sys.stderr.write("please run 'dataplicity init' first\n")
        return -1

    print "reading conf from {}".format(conf_path)
    cfg = settings.read(conf_path)

    serial = cfg.get('device', 'serial')
    auth_token = cfg.get('device', 'auth')
    server_url = cfg.get('server', 'url', constants.SERVER_URL)

    remote = jsonrpc.JSONRPC(server_url)

    print "downloading firmware..."
    with remote.batch() as batch:
        batch.call_with_id('register_result',
                           'device.register',
                           auth_token=auth_token,
                           name=args.name or serial,
                           serial=serial,
                           device_class_name=device_class)
        batch.call_with_id('auth_result',
                           'device.check_auth',
                           device_class=device_class,
                           serial=serial,
                           auth_token=auth_token)
        batch.call_with_id('firmware_result', 'device.get_firmware')
    batch.get_result('register_result')
    batch.get_result('auth_result')
    fw = batch.get_result('firmware_result')

    if not fw['firmware']:
        sys.stderr.write('no firmware available!\n')
        return -1

    version = fw['version']
    firmware_bin = b64decode(fw['firmware'])
    firmware_file = StringIO(firmware_bin)
    firmware_fs = ZipFS(firmware_file)
    dst_fs = OSFS(constants.FIRMWARE_PATH, create=True)

    firmware.install(device_class, version, firmware_fs, dst_fs)

    fw_path = dst_fs.getsyspath('/')
    print "installed firmware {} to {}".format(version, fw_path)

    firmware.activate(device_class, version, dst_fs)
    print "activated {}".format(version)
def eggifySingle(srcFS, src, destFS, dest, config=None):
    """
    Eggify single source to single destination.

    Args:
        src (basestring)
        dest (basestring)

    Raises:
        MissingDestinationException
    """
    if dest is None:
        raise MissingDestinationException()

    if config is None:
        config = {}

    if src.startswith("/") or src[1] == ":":
        head, tail = os.path.split(src)
        srcFS = OSFS(head)
        src = tail

    if srcFS.isfile(unicode(src)):
        assertFS(destFS.getsyspath(unicode(dest)))

        workingDir = srcFS.getsyspath(unicode("/"))
        devnull = open(os.devnull, 'w')

        cmd = ["python", src, "bdist_egg"]
        if "purge" in config.keys() and config["purge"]:
            cmd.append("--exclude-source-files")
        subprocess.check_call(cmd, cwd=workingDir, stdout=devnull, stderr=devnull)

        if srcFS.isdir(unicode("dist")):
            distFS = srcFS.opendir(unicode("dist"))
            for name in reversed(sorted(distFS.listdir("/"))):
                if name.endswith(".egg"):
                    destEggFS = destFS.opendir(unicode(dest))

                    # remove existing eggs
                    removeOldEggs(destEggFS, name)

                    eggSrcPath = distFS.getsyspath(unicode(name))
                    eggDestPath = destEggFS.getsyspath(unicode(name))
                    copy_file(distFS, unicode(name), destEggFS, unicode(name))
                    print "copied {} to {}".format(eggSrcPath, eggDestPath)
                    break
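A hedged example of calling eggifySingle directly; the paths are illustrative, and it assumes assertFS creates the destination directory when it is missing, as the implementation above relies on.

from fs.osfs import OSFS

# Hypothetical: build mylib's egg from ./mylib/setup.py and drop it into ./eggs/mylib,
# stripping source files from the egg.
srcFS = OSFS(u"mylib")               # directory containing setup.py
destFS = OSFS(u"eggs", create=True)
eggifySingle(srcFS, u"setup.py", destFS, u"mylib", config={"purge": True})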
def handle_preview(**kwargs):
    """
    Handle preview actions.

    Args:
        **kwargs (dict): Arguments
    """
    default_args = {
        'input': None,
        'output': None,
        'prefix': None,
        'layer': None,
        'num_threads': None,
        'multithreading': 1
    }
    default_args.update(kwargs)
    args = dict_to_namedtuple(default_args)

    # open output filesystem
    out_fs = assure_fs(args.output)

    # join layer
    layer = None
    if args.layer:
        layer = ' '.join(args.layer)

    # split input path
    dirname, basename = os.path.split(unicode(args.input))

    # open input filesystem
    try:
        in_fs = OSFS(dirname)
        if in_fs.isfile(basename):
            filename, extension = os.path.splitext(basename)

            # prepend prefix to filename
            if args.prefix:
                filename = args.prefix + filename

            out_name = unicode(filename + '.jpg')
            preview_file(in_fs.getsyspath(basename), out_fs.getsyspath(out_name), layer)
        elif in_fs.isdir(basename):
            preview_dir(in_fs.opendir(basename), out_fs,
                        args.num_threads, bool(args.multithreading),
                        prefix=args.prefix, layer=layer)
    except CreateFailed:
        console.error('Input {} does not exist.'.format(args.input))
        return
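A hedged invocation of handle_preview; the paths, prefix and layer name are illustrative. layer is passed as a list because the handler joins it with spaces.

# Hypothetical: render JPEG previews of everything under assets/textures into 'previews'.
handle_preview(input='assets/textures',
               output='previews',
               prefix='prev_',
               layer=['diffuse'],
               multithreading=1)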
def build(fs, settings_path="settings.ini", rebuild=False, archive=None, master_settings=None):
    """Build a project"""
    if isinstance(fs, string_types):
        if '://' in fs:
            fs = fsopendir(fs)
        else:
            fs = OSFS(fs)

    if isinstance(settings_path, string_types):
        settings_path = [settings_path]

    if archive is None:
        archive = Archive(fs)

    context = Context()

    syspath = fs.getsyspath('/', allow_none=True)
    cwd = os.getcwd()
    if syspath is not None:
        os.chdir(syspath)

    try:
        root = context.root
        root['libs'] = archive.libs
        root['apps'] = archive.apps
        root['fs'] = FSWrapper(fs)

        log.debug("reading settings from {}".format(textual_list(settings_path)))
        archive.cfg = SettingsContainer.read(fs, settings_path, master=master_settings)
        root['settings'] = SettingsContainer.from_dict(archive.cfg['settings'])

        startup_path = archive.cfg.get('project', 'startup')
        docs_location = archive.cfg.get('project', 'location')

        archive.init_settings()
        root['console'] = archive.console
        root['debug'] = archive.debug
        root['_rebuild'] = rebuild

        parser = Parser(archive, fs.opendir(docs_location), startup_path)
        doc = parser.parse()
        if doc is None:
            raise errors.StartupFailedError('unable to parse "{}"'.format(startup_path))

        archive.build(doc, fs=fs)

        return archive, context, doc
    finally:
        os.chdir(cwd)
def cmd_makepreviews(self, *params, **options):
    PREVIEW_ICON_SIZE = 32
    WIDTH_COUNT = 11
    BORDER = 5
    ICON_DIMENSIONS = (BORDER*2 + PREVIEW_ICON_SIZE)
    preview_width = ICON_DIMENSIONS * WIDTH_COUNT

    media_fs = OSFS(settings.MEDIA_ROOT)
    media_fs.makedir('iconsetpreviews', allow_recreate=True)
    previews_fs = media_fs.opendir('iconsetpreviews')

    for catalog in IconCatalog.objects.all():
        for category in catalog.get_categories():
            filename = "%s.%s.jpg" % (catalog.name, category)

            icons = catalog.icon_set.filter(category=category).order_by('name')
            num_icons = icons.count()

            icons_height_count = (num_icons + WIDTH_COUNT-1) // WIDTH_COUNT
            preview_height = icons_height_count * ICON_DIMENSIONS

            preview_img = Image.new('RGB', (preview_width, preview_height), (255, 255, 255))
            print preview_width, preview_height

            for i, icon in enumerate(icons):
                y, x = divmod(i, WIDTH_COUNT)
                pth = icon.path.replace('[SIZE]', str(PREVIEW_ICON_SIZE))
                icon_pth = media_fs.getsyspath(pth)
                img = Image.open(icon_pth)
                if img.size[0] != img.size[1]:
                    img = img.crop((0, 0, PREVIEW_ICON_SIZE, PREVIEW_ICON_SIZE))
                try:
                    preview_img.paste(img, (x*ICON_DIMENSIONS+BORDER, y*ICON_DIMENSIONS+BORDER), img)
                except ValueError:
                    preview_img.paste(img, (x*ICON_DIMENSIONS+BORDER, y*ICON_DIMENSIONS+BORDER))

            sys_filename = previews_fs.getsyspath(filename)
            print sys_filename
            preview_img.save(previews_fs.getsyspath(filename), quality=75)
def build(fs, settings_path="settings.ini", rebuild=False, archive=None, master_settings=None):
    """Build a project"""
    if isinstance(fs, string_types):
        if '://' in fs:
            fs = fsopendir(fs)
        else:
            fs = OSFS(fs)

    if archive is None:
        archive = Archive(fs)

    context = Context()

    syspath = fs.getsyspath('/', allow_none=True)
    cwd = os.getcwd()
    if syspath is not None:
        os.chdir(syspath)

    try:
        root = context.root
        root['libs'] = archive.libs
        root['apps'] = archive.apps
        root['fs'] = FSWrapper(fs)

        archive.cfg = SettingsContainer.read(fs, settings_path, master=master_settings)
        root['settings'] = SettingsContainer.from_dict(archive.cfg['settings'])

        startup_path = archive.cfg.get('project', 'startup')
        docs_location = archive.cfg.get('project', 'location')

        archive.init_settings()
        root['console'] = archive.console
        root['debug'] = archive.debug
        root['_rebuild'] = rebuild

        parser = Parser(archive, fs.opendir(docs_location), startup_path)
        doc = parser.parse()

        archive.build(doc, fs=fs)

        return archive, context, doc
    finally:
        os.chdir(cwd)
def upload(self, filepath, service_path, remove=False):
    '''
    "Upload" a file to a service

    This copies a file from the local filesystem into the ``DataService``'s
    filesystem. If ``remove==True``, the file is moved rather than copied.
    If ``filepath`` and ``service_path`` paths are the same, ``upload``
    deletes the file if ``remove==True`` and returns.

    Parameters
    ----------
    filepath : str
        Relative or absolute path to the file to be uploaded on the user's
        filesystem

    service_path : str
        Path to the destination for the file on the ``DataService``'s
        filesystem

    remove : bool
        If true, the file is moved rather than copied
    '''
    local = OSFS(os.path.dirname(filepath))

    # Skip if source and dest are the same
    if self.fs.hassyspath(service_path) and (
            self.fs.getsyspath(service_path) == local.getsyspath(
                os.path.basename(filepath))):
        if remove:
            os.remove(filepath)
        return

    if not self.fs.isdir(fs.path.dirname(service_path)):
        self.fs.makedir(
            fs.path.dirname(service_path),
            recursive=True,
            allow_recreate=True)

    if remove:
        fs.utils.movefile(local, os.path.basename(filepath), self.fs, service_path)
    else:
        fs.utils.copyfile(local, os.path.basename(filepath), self.fs, service_path)
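A hedged usage sketch, assuming service is an instance of the data-service class exposing this method (its fs attribute points at the service filesystem); the paths are illustrative.

service.upload('/tmp/results.csv', 'runs/2024/results.csv')           # copy into the service
service.upload('/tmp/scratch.log', 'logs/scratch.log', remove=True)   # move instead of copy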
def deploy(self):
    """Deploy latest firmware"""
    self.log.info("requesting firmware...")
    with self.remote.batch() as batch:
        batch.call_with_id('register_result',
                           'device.register',
                           auth_token=self.auth_token,
                           name=self.name or self.serial,
                           serial=self.serial,
                           device_class_name=self.device_class)
        batch.call_with_id('auth_result',
                           'device.check_auth',
                           device_class=self.device_class,
                           serial=self.serial,
                           auth_token=self.auth_token)
        batch.call_with_id('firmware_result', 'device.get_firmware')

    try:
        batch.get_result('register_result')
    except Exception as e:
        self.log.warning(e)

    batch.get_result('auth_result')
    fw = batch.get_result('firmware_result')

    if not fw['firmware']:
        self.log.warning('no firmware available!')
        return False

    version = fw['version']
    firmware_bin = b64decode(fw['firmware'])
    firmware_file = StringIO(firmware_bin)
    firmware_fs = ZipFS(firmware_file)
    dst_fs = OSFS(constants.FIRMWARE_PATH, create=True)

    firmware.install(self.device_class, version, firmware_fs, dst_fs)

    fw_path = dst_fs.getsyspath('/')
    self.log.info("installed firmware {:010} to {}".format(version, fw_path))

    firmware.activate(self.device_class, version, dst_fs)
    self.log.info("activated firmware {:010}".format(version))
def main(arguments):
    cwdFS = OSFS(unicode("."))
    try:
        # my packages
        sys.path.append(cwdFS.getsyspath(unicode(".")))
        forgeconfig = importlib.import_module("forgeconfig")
        if hasattr(forgeconfig, "config"):
            config = getattr(forgeconfig, "config")
            for argument in arguments:
                if hasattr(forgeconfig, argument):
                    method = getattr(forgeconfig, argument)
                    method(config)()
    except ImportError as e:
        raise e
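main expects a forgeconfig module on the current path that exposes a config object plus one attribute per command, each taking the config and returning a zero-argument callable. A minimal sketch of such a module; the names and behaviour are invented.

# forgeconfig.py -- hypothetical configuration module discovered by main().
config = {"name": "demo", "out_dir": "build"}

def clean(config):
    # Return the callable that main() will invoke as method(config)().
    def run():
        print("cleaning {}".format(config["out_dir"]))
    return run

# main(["clean"]) would then call clean(config)().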
def reduce_path(carry_fs, value):
    """
    Reduce path by opening or creating each segment and returning the last.

    Args:
        carry_fs (fs): Opened filesystem
        value (str): Next path segment

    Returns:
        fs
    """
    if not isinstance(carry_fs, FS):
        carry_fs = OSFS(carry_fs + separator)

    if not carry_fs.isdir(value):
        carry_fs.makedirs(value)

    # open next carry_fs
    next_carry_fs = OSFS(carry_fs.getsyspath(value))

    # close carry_fs
    carry_fs.close()

    return next_carry_fs
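Because the first argument may be either a path string or an already-opened FS, the helper composes directly with functools.reduce. A small sketch under that assumption; the segment names are made up, and separator is the module-level constant the function already relies on.

from functools import reduce

# Hypothetical: open (creating as needed) ./build/output/images one segment at a time,
# ending with an OSFS on the deepest directory.
segments = ["build", "output", "images"]
leaf_fs = reduce(reduce_path, segments, ".")
print(leaf_fs.getsyspath("/"))
leaf_fs.close()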
def cmd_makeadminpreviews(self, *params, **options):
    try:
        iconset = params[0]
    except IndexError:
        iconset = ''

    icon_fs = OSFS(settings.MEDIA_ROOT).opendir('iconsets')
    if params:
        icon_fs = icon_fs.opendir(params[0])

    done_dirs = set()
    for path in icon_fs.walkfiles(wildcard='*.png'):
        dirpath = dirname(path)
        png_path = icon_fs.getsyspath(path)
        img = Image.open(png_path).convert('RGBA')
        background_img = Image.new('RGB', img.size, (255, 255, 255))
        background_img.paste(img, None, img)
        new_path = os.path.splitext(png_path)[0] + '.jpg'
        background_img.save(new_path)
        if dirpath not in done_dirs:
            print "Generating admin previews in %s/*" % dirpath
            done_dirs.add(dirpath)
#!/usr/bin/env python

import sys

from fs.osfs import OSFS
import Image

img_fs = OSFS(sys.argv[1])

imgs = []
for path in img_fs.listdir(wildcard='*.png'):
    img = Image.open(img_fs.getsyspath(path))
    size = img.size[0]
    if size != 16:
        continue
    imgs.append((path, img))

sprite = Image.new('RGBA', (16, len(imgs)*16))

imgs.sort(key=lambda i: i[0])

sprite_text_f = img_fs.open('sprites.txt', 'wt')

for i, (path, img) in enumerate(imgs):
    y = i*16
    sprite.paste(img, (0, y))
    sprite_text_f.write("%i\t%s\n" % (y, path))

sprite.save(img_fs.getsyspath('sprites.png'))
# dfs.append(_df)
# pandas.concat(dfs, sort=True).to_csv("links.csv", index=False, encoding="utf8")

import sys

import pandas
import fs.path
from fs.osfs import OSFS

if __name__ == '__main__':
    _folder = sys.argv[1]
    t = {"toi": 1001094, "hindu": 3113377, "indian_express": 92956}
    ofs = OSFS("archives/%s" % _folder)
    tc = t[_folder]
    c = 0
    for f in ofs.walk.files(filter=["*manifest.csv"]):
        _p = ofs.getsyspath(f)
        folder = fs.path.combine(fs.path.dirname(_p), "_pages")
        _fs = OSFS(folder, create=True)
        try:
            _df = pandas.read_csv(_p)
        except pandas.errors.EmptyDataError:
            continue
        tasks = []
        for row in _df.to_dict(orient="records"):
            c += 1
            if not row['link'].startswith("http"):
                continue
def run(self):
    parser = self.get_argparse()
    args = parser.parse_args(sys.argv[1:])

    if args.version is None:
        major, minor = __version__.split('.')[:2]
        version = "{}.{}".format(major, minor)
    else:
        version = args.version

    try:
        with open(expanduser(args.settings), 'rt') as f_ini:
            cfg = SettingsContainer.read_from_file(f_ini)
            print("Read settings from {}".format(args.settings))
    except IOError:
        cfg = SettingsContainer()

    from ..docgen.extracter import Extracter
    from ..docgen.builder import Builder
    from ..command import doc_project
    location = dirname(doc_project.__file__)

    extract_fs = OSFS(join('doccode', version), create=True)
    base_docs_fs = OSFS('text')
    languages = [d for d in base_docs_fs.listdir(dirs_only=True) if len(d) == 2]

    def do_extract():
        print("Extracting docs v{}".format(version))
        utils.remove_all(extract_fs, '/')
        try:
            archive, context, doc = moya_build.build_server(location, 'settings.ini')
        except Exception:
            raise
            return -1

        extract_fs.makedir("site/docs", recursive=True)
        extract_fs.makedir("site/tags", recursive=True)
        #extract_fs.makedir("libs")

        with extract_fs.opendir('site/tags') as tags_fs:
            extracter = Extracter(archive, tags_fs)
            const_data = {}
            builtin_tags = []
            for namespace in self.builtin_namespaces:
                xmlns = getattr(namespaces, namespace, None)
                if xmlns is None:
                    raise ValueError("XML namespace '{}' is not in namespaces.py".format(namespace))
                namespace_tags = archive.registry.get_elements_in_xmlns(xmlns).values()
                builtin_tags.extend(namespace_tags)

            extracter.extract_tags(builtin_tags, const_data=const_data)

        for language in languages:
            with extract_fs.makeopendir("site/docs") as language_fs:
                doc_extracter = Extracter(None, language_fs)
                docs_fs = base_docs_fs.opendir(language)
                doc_extracter.extract_site_docs(docs_fs, dirname=language)

    if args.extract:
        do_extract()

    if args.build:
        theme_path = cfg.get('paths', 'theme', None)
        dst_path = join('html', version)
        if theme_path is None:
            theme_fs = OSFS('theme')
        else:
            theme_fs = fsopendir(theme_path)

        output_path = cfg.get('paths', 'output', None)

        if output_path is None:
            output_base_fs = OSFS(dst_path, create=True)
        else:
            output_root_base_fs = fsopendir(output_path)
            output_base_fs = output_root_base_fs.makeopendir(dst_path, recursive=True)

        #output_base_fs = OSFS(join('html', version), create=True)
        utils.remove_all(output_base_fs, '/')

        def do_build():
            print("Building docs v{}".format(version))
            lib_info = {}
            lib_paths = {}
            for long_name, lib in self.document_libs:
                lib_info[long_name] = moya_build.get_lib_info(lib)
                lib_paths[long_name] = output_base_fs.getsyspath(join('libs', long_name, 'index.html'))

            for language in languages:
                docs_fs = base_docs_fs.makeopendir(language)
                output_fs = output_base_fs.makeopendir(language)
                utils.remove_all(output_fs, '/')

                with extract_fs.opendir("site") as extract_site_fs:
                    builder = Builder(extract_site_fs, output_fs, theme_fs)
                    from ..tools import timer
                    with timer('render time'):
                        builder.build({"libs": lib_info,
                                       "lib_paths": lib_paths})

            # output_base_fs.makedir("libs", allow_recreate=True)
            # for long_name, lib in self.document_libs:
            #     source_path = extract_fs.getsyspath(join("libs", long_name))
            #     output_path = output_base_fs.getsyspath('libs')
            #     cmd_template = 'moya --debug doc build {} --theme libtheme --source "{}" --output "{}"'
            #     cmd = cmd_template.format(lib, source_path, output_path)
            #     os.system(cmd)

        def extract_build():
            do_extract()
            do_build()

        do_build()

        if not args.nobrowser:
            import webbrowser
            webbrowser.open(output_base_fs.getsyspath('en/index.html'))

        if args.watch:
            print("Watching for changes...")
            watcher = ReloadChangeWatcher(base_docs_fs, extract_build)
            while 1:
                try:
                    time.sleep(0.1)
                except:
                    break

    return 0
def handle_rechannel(**kwargs):
    """
    Handle rechannel actions.

    Args:
        **kwargs (dict): Arguments
    """
    default_args = {
        'input': None,
        'output': None,
        'prefix': None,
        'map': None,
        'num_threads': None,
        'multithreading': 1
    }
    default_args.update(kwargs)
    args = dict_to_namedtuple(default_args)

    # open output filesystem
    out_fs = assure_fs(args.output)

    # split map path
    dirname, basename = os.path.split(unicode(args.map))
    try:
        map_fs = OSFS(dirname)
        if map_fs.isfile(basename):
            with map_fs.open(basename) as file_handle:
                try:
                    layer_map = json.loads(file_handle.read())
                except Exception as error:
                    console.error(error)
                    return
        else:
            console.error('Map {} does not exist.'.format(args.map))
            return
    except CreateFailed:
        console.error('Map parent directory {} does not exist.'.format(args.map))
        return

    # split input path
    dirname, basename = os.path.split(unicode(args.input))

    # open input filesystem
    try:
        in_fs = OSFS(dirname)
        if in_fs.isfile(basename):
            # prepend prefix to basename
            if args.prefix:
                basename = args.prefix + basename
            rechannel_file(in_fs.getsyspath(basename), out_fs.getsyspath(basename), layer_map)
        elif in_fs.isdir(basename):
            rechannel_dir(in_fs.opendir(basename), out_fs, layer_map,
                          args.num_threads, bool(args.multithreading),
                          prefix=args.prefix)
    except CreateFailed:
        console.error('Input {} does not exist.'.format(args.input))
        return
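A hedged invocation of handle_rechannel; the paths are illustrative, and maps/map.json must contain whatever layer mapping rechannel_file expects, since its schema is not shown here.

# Hypothetical: rechannel everything under assets/textures into 'out' using maps/map.json.
handle_rechannel(input='assets/textures',
                 output='out',
                 map='maps/map.json',
                 prefix='rc_',
                 multithreading=1)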
def build(
    fs,
    settings_path="settings.ini",
    rebuild=False,
    archive=None,
    strict=False,
    master_settings=None,
    test_build=False,
    develop=False,
):
    """Build a project"""
    if isinstance(fs, string_types):
        if "://" in fs:
            fs = open_fs(fs)
        else:
            fs = OSFS(fs)

    if isinstance(settings_path, string_types):
        settings_path = [settings_path]

    try:
        syspath = fs.getsyspath("/")
    except NoSysPath:
        syspath = None

    cwd = os.getcwd()
    if syspath is not None:
        os.chdir(syspath)

    try:
        log.debug("reading settings from {}".format(textual_list(settings_path)))
        cfg = SettingsContainer.read(fs, settings_path, master=master_settings)

        if "customize" in cfg:
            customize_location = cfg.get("customize", "location")
            if customize_location:
                settings_path = cfg.get("customize", "settings", "settings.ini")
                startup_log.info("customizing '%s'", customize_location)
                customize_fs = open_fs(cfg.get("customize", "location"))
                cfg = SettingsContainer.read(customize_fs, settings_path, master=cfg)

                overlay_fs = MultiFS()
                overlay_fs.add_fs("project", fs)
                overlay_fs.add_fs("custom", customize_fs, write=True)
                fs = overlay_fs

                try:
                    syspath = fs.getsyspath("/")
                except NoSysPath:
                    pass
                else:
                    os.chdir(syspath)

        if archive is None:
            archive = Archive(fs, strict=strict, test_build=test_build, develop=develop)

        context = Context()
        archive.cfg = cfg

        root = context.root
        root["libs"] = archive.libs
        root["apps"] = archive.apps
        root["fs"] = FSWrapper(fs)
        root["settings"] = SettingsContainer.from_dict(archive.cfg["settings"])

        startup_path = archive.cfg.get("project", "startup")
        docs_location = archive.cfg.get("project", "location")

        archive.init_settings()
        root["console"] = archive.console
        root["debug"] = archive.debug
        root["_rebuild"] = rebuild

        parser = Parser(archive, fs.opendir(docs_location), startup_path)
        doc = parser.parse()
        if doc is None:
            raise errors.StartupFailedError('unable to parse "{}"'.format(startup_path))

        archive.build(doc, fs=fs)

        return fs, archive, context, doc
    finally:
        os.chdir(cwd)
        gc.collect()
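Unlike the earlier variants, this version returns (fs, archive, context, doc). A minimal, hypothetical call against a local project directory; the path is illustrative and settings.ini is the default settings file name.

project_fs, archive, context, doc = build("./myproject", settings_path="settings.ini")
print(archive.cfg.get("project", "startup"))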
class OdooAddonManager:
    """
    Class wrapping the OAM behaviour

    Attributes
    ----------
    install_dir: OSFS
        The installation directory
    src_cache: dict
        A dictionary containing, for each source type supporting cache (git),
        the temporary location of the previously downloaded sources
    odoo_version: str
        Version of Odoo using the addons
    desc_version: str
        Version of the description file used to log changes
    modules_to_install: dict
        Modules to install as described in the YAML file
    verbose_level: str
        Level of details to print
    """
    install_dir: OSFS
    modules_to_install: Dict[str, Dict[str, Any]]
    src_cache: Dict[str, Dict[str, Any]]
    odoo_version: str
    desc_version: str = None
    verbose_level: str
    _tmp_dir: TempFS = None
    _hst_file: TextIOWrapper = None
    _chglog_file: TextIOWrapper = None

    def __init__(self, description_file: str = None, install_directory: str = ".",
                 verbose_level: str = VERBOSE_NONE):
        self.install_dir = OSFS(install_directory)
        self.verbose_level = verbose_level
        self.src_cache = {
            "git": {},
        }

        if description_file:
            with open(description_file, "r") as description_file:
                install_data = yaml.load(description_file, Loader=yaml.Loader)
                self.modules_to_install = install_data.get("modules", [])
                self.odoo_version = install_data.get("odoo_version")
                self.desc_version = install_data.get("version")

    def __del__(self):
        self.install_dir.close()
        if self._tmp_dir:
            self._tmp_dir.close()
        if self._hst_file:
            self._hst_file.close()
        if self._chglog_file:
            self._chglog_file.close()

    @property
    def tmp_dir(self) -> TempFS:
        """
        The temporary directory used to download modules before installing them if needed.
        """
        if not self._tmp_dir:
            self._tmp_dir = TempFS(TEMP_DIR_NAME)
        return self._tmp_dir

    @property
    def history_file(self) -> TextIOWrapper:
        """
        The history file where the operations performed in the installation directory are logged.
        """
        if not self._hst_file:
            self._hst_file = open(self.install_dir.getsyspath(HISTORY_FILE_NAME), 'a+')
        return self._hst_file

    @property
    def changelog_file(self) -> TextIOWrapper:
        """
        The markdown changelog file listing changes in a human-readable format.
        """
        if not self._chglog_file:
            self._chglog_file = open(self.install_dir.getsyspath(CHANGELOG_FILE_NAME), "a+")
            self._chglog_file.seek(0)
            if not self._chglog_file.read():
                self._chglog_file.write("# CHANGELOG")
            self._chglog_file.seek(0)
        return self._chglog_file

    def install_all(self, force: bool = False):
        """
        Install all modules described in the description file.

        :param force: whether to overwrite installed modules or not
        """
        installed_modules = []
        if self.verbose_level == VERBOSE_NONE:
            with click.progressbar(self.modules_to_install) as modules:
                for module in modules:
                    if self.install(module, force):
                        installed_modules.append(module)
        else:
            for module in self.modules_to_install:
                if self.install(module, force):
                    installed_modules.append(module)

        # Modules installed are removed from the list to avoid being processed twice e.g. in case of a refresh
        for module in installed_modules:
            self.modules_to_install.pop(module)

        click.echo("{} module(s) installed.".format(len(installed_modules)))

    def install(self, module_name: str, force: bool = False) -> bool:
        """
        Install a single module from its source.

        :param module_name: Name of the module
        :param force: Whether to overwrite the module if it is already installed
        :return: Whether the module has been installed or not
        """
        success = False
        self.pretty_print(module_name, "Installing...", level=VERBOSE_FULL)
        source = self.modules_to_install[module_name]
        origin_name = source.get("origin_name", module_name)
        installed_version = self.get_module_version(module_name, self.install_dir)
        if force or not installed_version:
            try:
                source_fs = self.fetch_module_from_source(module_name)
                self.install_from_fs(origin_name, source_fs, output_name=module_name)
                version = self.get_module_version(module_name, self.install_dir)
                self.log(module_name, OPERATION_INSTALL, force=force, extra=version)
                if not force:
                    self.log_md(module_name, OPERATION_INSTALL, new_version=version)
                success = True
            except InvalidModuleError as err:
                self.pretty_print(module_name, err.message,
                                  status=LOG_STATUS_ERROR, level=VERBOSE_NONE)
            except pygit2.errors.GitError:
                self.pretty_print(module_name,
                                  "Installation failed - Could not fetch from Git repository.",
                                  status=LOG_STATUS_ERROR, level=VERBOSE_NONE)
            except Exception as e:
                self.pretty_print(module_name,
                                  "Installation failed ({})".format(type(e).__name__),
                                  status=LOG_STATUS_ERROR, level=VERBOSE_NONE)
        else:
            self.pretty_print(module_name, "Already installed. Skipping installation.",
                              status=LOG_STATUS_WARNING, level=VERBOSE_NORMAL)

        return success

    def update_all(self, force: bool = False):
        """
        Update all modules.

        :param force: Whether to skip version check or not. If True, modules are just replaced
            no matter if they are being downgraded or installed for the first time.
        """
        updated_modules = []
        if self.verbose_level == VERBOSE_NONE:
            with click.progressbar(self.modules_to_install) as modules:
                for module in modules:
                    if self.update(module, force):
                        updated_modules.append(module)
        else:
            for module in self.modules_to_install:
                if self.update(module, force):
                    updated_modules.append(module)

        # Modules updated are removed from the list to avoid being processed twice in case of a refresh
        for module in updated_modules:
            self.modules_to_install.pop(module)

        click.echo("{} module(s) updated.".format(len(updated_modules)))

    def update(self, module_name: str, force: bool = False) -> bool:
        """
        Update a single module.

        :param module_name: Name of the module
        :param force: Whether to skip version check or not. If True, modules are just replaced
            no matter if they are being downgraded or installed for the first time.
        :return: Whether the module has been updated or not
        """
        success = False
        self.pretty_print(module_name, "Updating...", level=VERBOSE_FULL)
        installed_version = self.get_module_version(module_name, self.install_dir)
        if force or installed_version:
            try:
                source_fs = self.fetch_module_from_source(module_name)
                origin_name = self.modules_to_install[module_name].get("origin_name", module_name)
                new_version = self.get_module_version(origin_name, source_fs)
                if force or version.parse(new_version) >= version.parse(installed_version):
                    self.pretty_print(module_name,
                                      "Updating from {0} to {1}".format(installed_version, new_version),
                                      level=VERBOSE_FULL)
                    self.install_from_fs(origin_name, source_fs, output_name=module_name)
                    self.log(module_name, OPERATION_UPDATE, force=force,
                             extra="from {0} to {1}".format(installed_version, new_version))
                    if not force:
                        self.log_md(module_name, OPERATION_UPDATE, installed_version, new_version)
                    success = True
                else:
                    self.pretty_print(module_name,
                                      "Fetched version ({0}) is inferior to current version ({1}). "
                                      "Skipping update.".format(new_version, installed_version),
                                      status=LOG_STATUS_ERROR, level=VERBOSE_NORMAL)
            except InvalidModuleError as err:
                self.pretty_print(module_name, err.message,
                                  status=LOG_STATUS_ERROR, level=VERBOSE_NONE)
            except pygit2.errors.GitError:
                self.pretty_print(module_name,
                                  "Update failed - Could not fetch from Git repository.",
                                  status=LOG_STATUS_ERROR, level=VERBOSE_NONE)
            except Exception as e:
                self.pretty_print(module_name,
                                  "Update failed ({})".format(type(e).__name__),
                                  status=LOG_STATUS_ERROR, level=VERBOSE_NONE)
        else:
            self.pretty_print(module_name, "Not installed. Skipping update.",
                              status=LOG_STATUS_WARNING, level=VERBOSE_NORMAL)

        return success

    def uninstall_all(self, auto_confirm=False):
        """
        Uninstall all modules that are installed but not present in the description file.
        Ask confirmation to the user.

        :param auto_confirm: Do not ask the user to confirm if True
        """
        installed_modules = self.get_installed_modules()
        modules_to_uninstall = set(installed_modules.keys()) - set(self.modules_to_install.keys())
        if not auto_confirm:
            click.echo("The following modules will be removed:")
            for module in modules_to_uninstall:
                click.echo(module)
            click.confirm('Do you want to continue?', abort=True)

        count = 0
        if self.verbose_level == VERBOSE_NONE:
            with click.progressbar(modules_to_uninstall) as modules:
                for module in modules:
                    count += self.uninstall(module)
        else:
            for module in modules_to_uninstall:
                count += self.uninstall(module)

        click.echo("{} module(s) removed.".format(count))

    def uninstall(self, module_name: str) -> bool:
        """
        Uninstall a single module if it is installed.

        :param module_name: Name of the module
        :return: Whether the module has been uninstalled or not
        """
        success = False
        if module_name in self.install_dir.listdir("."):
            self.pretty_print(module_name, "Uninstalling...", level=VERBOSE_FULL)
            self.install_dir.removetree(module_name)
            success = True
            self.log(module_name, OPERATION_UNINSTALL)
            self.log_md(module_name, OPERATION_UNINSTALL)
            self.pretty_print(module_name, "Uninstalled.", status=LOG_STATUS_OK, level=VERBOSE_NORMAL)
        else:
            self.pretty_print(module_name, "Not installed. Skipping uninstall.",
                              status=LOG_STATUS_ERROR, level=VERBOSE_NORMAL)

        return success

    def get_installed_modules(self) -> Dict[str, str]:
        """
        Scan installation directory to list currently installed modules.

        :return: A dictionary of module names as keys and their currently installed version as values
        """
        modules = {}
        for module in self.install_dir.scandir("."):
            if module.is_dir and "__manifest__.py" in self.install_dir.listdir(module.name):
                manifest_file = self.install_dir.getsyspath(join(module.name, "__manifest__.py"))
                with open(manifest_file, "r") as manifest:
                    modules[module.name] = ast.literal_eval(manifest.read())["version"]
        return modules

    @staticmethod
    def get_module_version(module_name: str, directory: FS) -> str:
        """
        Get the version of the module in the given directory.

        :param module_name: name of the module
        :param directory: FS object pointing to the parent directory of the module
        :return: version of the module or None if it is not present in the directory
        """
        version = None
        if module_name in directory.listdir("."):
            manifest = directory.readtext(join(module_name, "__manifest__.py"))
            version = ast.literal_eval(manifest)["version"]
        return version

    def fetch_module_from_source(self, module_name: str) -> FS:
        """
        Download a module from its source if needed and return the directory where it is located.

        :param module_name: Name of the module
        :return: An FS object pointing to the module location
        """
        source = self.modules_to_install[module_name]
        source_fs: FS
        if source["source_type"] == SOURCE_LOCAL_DIR:
            source_fs = OSFS(source["path"])
        elif source["source_type"] == SOURCE_LOCAL_ZIP:
            source_fs = ZipFS(source["path"])
        elif source["source_type"] == SOURCE_GIT:
            source_fs = self.download_from_git(module_name,
                                               source["url"],
                                               source.get("branch", self.odoo_version),
                                               source.get("path", "."))

        return source_fs

    def download_from_git(self, module_name: str, url: str, branch: str, path: str = ".") -> OSFS:
        """
        Clone a git repository or find it in the source cache.

        :param module_name: name of the module being installed
        :param url: URL of the repository
        :param branch: branch of the desired module version
        :param path: path to the module inside the repository (default to '.')
        :return: an OSFS object pointing to the module location inside the repository
        """
        repo_dir_name = urlparse(url).path.replace("/", "_")
        if url in self.src_cache["git"]:
            self.pretty_print(module_name, "Repository found in cache", level=VERBOSE_FULL)
            repo = self.src_cache["git"][url]
            repo.checkout("refs/remotes/origin/{}".format(branch))
        else:
            self.pretty_print(module_name, "Cloning repository", level=VERBOSE_FULL)
            repo = pygit2.clone_repository(url,
                                           self.tmp_dir.getsyspath(repo_dir_name),
                                           checkout_branch=branch)
            self.src_cache["git"][url] = repo

        return OSFS(join(repo.workdir, path))

    def install_from_fs(self, name: str, source_fs: FS, path: str = ".", output_name: str = None):
        """
        Copy a module directory from where it is located to the installation directory.

        :param name: Name of the module
        :param source_fs: FS object pointing to the source location
        :param path: Path to the module directory from the source location root
        :param output_name: Name to give to the module's directory at installation
        """
        path_to_module = join(path, name)
        if name not in source_fs.listdir(path):
            raise InvalidModuleError(
                name, "Module directory not found - Given path should be the parent directory")
        if "__manifest__.py" not in source_fs.listdir(path_to_module):
            raise InvalidModuleError(
                name, "Manifest not found - Given path should be the parent directory")

        self.pretty_print(output_name, "Copying from {}".format(source_fs.desc(path_to_module)),
                          level=VERBOSE_FULL)
        copy_dir(source_fs, path_to_module, self.install_dir, output_name or name)
        self.pretty_print(output_name, "Installed and up to date.",
                          status=LOG_STATUS_OK, level=VERBOSE_NORMAL)

    def log(self, module_name: str, operation: str, force=False, extra: str = ""):
        """
        Log an operation in the history file.

        :param module_name: Name of the module
        :param operation: Type of the operation
        :param force: Whether the operation was performed with the force option or not
        :param extra: Extra information to log
        """
        log_line = "{0} - {1}{2}: {3} {4}\n".format(datetime.now().replace(microsecond=0),
                                                    operation,
                                                    " (forced)" if force else "",
                                                    module_name,
                                                    extra)
        self.history_file.write(log_line)

    def log_md(self, module: str, operation: str, old_version: str = None, new_version: str = None):
        """
        Log an operation in the markdown log file in human-readable format.

        :param module: Name of the module
        :param operation: Type of the operation
        :param old_version: Overwritten version of the module, in case of an update
        :param new_version: New version of the module, in case of an installation/update
        """
        current_log_content = self.changelog_file.read()

        # Look for the section concerning the current version, or write a scaffold if not found
        version = self.desc_version or datetime.today().strftime("%Y-%m-%d")
        log_index = current_log_content.find("## {}".format(version))
        if log_index >= 0:
            new_log_content = current_log_content[log_index:]
        else:
            new_log_content = "\n\n## {}\n\n**Added**\n\n\n**Updated**\n\n\n**Removed**\n\n".format(version)
            log_index = len(current_log_content)

        # Remove previous log entry concerning the module
        if module in new_log_content:
            new_log_content = re.sub(r"\n.*{}.*".format(module), "", new_log_content)

        # Append the new log line under the right operation type
        if operation == OPERATION_INSTALL:
            index = new_log_content.find("**Updated**") - 2
            log_line = "\n * {0} ({1})".format(module, new_version)
        elif operation == OPERATION_UPDATE:
            index = new_log_content.find("**Removed**") - 2
            log_line = "\n * {0} ({1} from {2})".format(module, new_version, old_version)
        elif operation == OPERATION_UNINSTALL:
            index = len(new_log_content) - 1
            log_line = "\n * {0}".format(module)
        new_log_content = "{0}{1}{2}".format(new_log_content[:index], log_line, new_log_content[index:])

        # Overwrite file with the updated logs
        old_log_content = current_log_content[:log_index]
        self.changelog_file.truncate()
        self.changelog_file.write(old_log_content + new_log_content)

    def list_external_dependencies(self, raw=False, modules: List[str] = None):
        """
        Show external dependencies of all installed modules.

        :param raw: Whether to print only python dependencies in a 'requirements.txt' format
        :param modules: If given, show dependencies of those modules only
        """
        dependencies = self.get_all_dependencies(modules=modules)
        if raw:
            for dep in dependencies.get("python", []):
                click.echo(dep)
        else:
            for type in dependencies:
                click.echo(type)
                for dep in dependencies[type]:
                    if type == "python":
                        dep_installed = self.check_python_dependency(dep)
                        click.echo("\t{0} {1}".format(dep, "(OK)" if dep_installed else "(missing)"))
                    else:
                        click.echo("\t{}".format(dep))

    def install_missing_dependencies(self, modules: List[str] = None):
        """
        Install all missing dependencies.

        :param modules: If given, install dependencies of those modules only
        """
        dependencies = self.get_all_dependencies(modules=modules)
        self.install_python_dependencies(dependencies.get("python", []))

    def get_all_dependencies(self, modules: List[str] = None) -> Dict[str, List[str]]:
        """
        Get all missing dependencies from the installed modules.

        :param modules: If given, return dependencies of those modules only
        :return: A dictionary containing a list of dependencies for each type
        """
        # Filter installed modules to keep the ones given
        modules = {mod: self.get_installed_modules()[mod] for mod in modules} if modules \
            else self.get_installed_modules()
        all_deps = {}
        for module in modules:
            module_deps = self.parse_dependencies(module, self.install_dir)
            for type, deps in module_deps.items():
                all_deps.setdefault(type, set()).update(set(deps))

        return all_deps

    @staticmethod
    def parse_dependencies(module_name: str, directory: FS) -> Dict[str, List[str]]:
        """
        Retrieve external dependencies from a module's manifest.

        :param module_name: Name of the module
        :param directory: Location of the module
        :return: A dictionary containing a list of dependencies for each type
        """
        manifest = directory.readtext(join(module_name, "__manifest__.py"))
        manifest_dict = ast.literal_eval(manifest)
        return manifest_dict.get("external_dependencies", {})

    @staticmethod
    def check_python_dependency(dependency: str) -> bool:
        """
        Check if a python dependency is satisfied i.e. if the python module is installed.

        :param dependency: Name of the python module
        :return: True if the module is installed, False otherwise
        """
        try:
            __import__(dependency)
        except ImportError:
            return False
        return True

    @staticmethod
    def install_python_dependencies(dependencies: List[str]):
        """
        Call pip to install the given python dependencies.

        :param dependencies: List of python modules to install
        """
        callable_pip.main("install", *dependencies)

    def pretty_print(self, module_name: str, message: str = "",
                     status: str = LOG_STATUS_PENDING, level: int = 0):
        """
        Format and print a log to the console.

        :param module_name: Name of the module concerned
        :param message: Message to print
        :param status: Status of the log ('pending', 'ok', 'warning', 'error')
        :param level: Minimum verbose level to actually print the log (0, 1, 2)
        """
        if level <= self.verbose_level:
            if status == LOG_STATUS_OK:
                msg_color = "green"
            elif status == LOG_STATUS_WARNING:
                msg_color = "yellow"
            elif status == LOG_STATUS_ERROR:
                msg_color = "red"
            else:
                msg_color = "white"

            click.echo(click.style(module_name.ljust(30), fg="blue") +
                       click.style(message, fg=msg_color))
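A hedged end-to-end sketch of driving OdooAddonManager from a script; the description file name and install directory are illustrative, and VERBOSE_NORMAL is one of the module-level verbosity constants referenced above.

# Hypothetical: install everything listed in oam.yml into ./addons,
# then report external dependencies.
manager = OdooAddonManager(description_file="oam.yml",
                           install_directory="./addons",
                           verbose_level=VERBOSE_NORMAL)
manager.install_all(force=False)
manager.list_external_dependencies()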