def create_workflow_workspace(org, user, workflow_uuid):
    """Create analysis and workflow workspaces.

    A directory structure will be created where `/:analysis_uuid` represents
    the analysis workspace and `/:analysis_uuid/workspace` the workflow
    workspace.

    :param org: Organization which user is part of.
    :param user: Workspaces owner.
    :param workflow_uuid: Analysis UUID.
    :return: Workflow and analysis workspace path.
    """
    reana_fs = fs.open_fs(app.config['SHARED_VOLUME_PATH'])
    analysis_workspace = fs_path.join(get_user_analyses_dir(org, user),
                                      workflow_uuid)
    if not reana_fs.exists(analysis_workspace):
        reana_fs.makedirs(analysis_workspace)

    workflow_workspace = fs_path.join(analysis_workspace, 'workspace')
    if not reana_fs.exists(workflow_workspace):
        reana_fs.makedirs(workflow_workspace)
        reana_fs.makedirs(
            fs_path.join(analysis_workspace,
                         app.config['INPUTS_RELATIVE_PATH']))
        reana_fs.makedirs(
            fs_path.join(analysis_workspace,
                         app.config['OUTPUTS_RELATIVE_PATH']))
        reana_fs.makedirs(
            fs_path.join(analysis_workspace,
                         app.config['CODE_RELATIVE_PATH']))
    return workflow_workspace, analysis_workspace
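# Usage sketch (not part of the original source): the layout that
# create_workflow_workspace() is expected to produce, reproduced on an
# in-memory filesystem. The analysis path and the 'inputs'/'outputs'/'code'
# names are assumptions standing in for get_user_analyses_dir() and the
# app.config keys above.
import fs
import fs.path as fs_path

with fs.open_fs("mem://") as shared:
    analysis_workspace = fs_path.join("org/user", "1234-abcd")
    shared.makedirs(fs_path.join(analysis_workspace, "workspace"))
    for sub in ("inputs", "outputs", "code"):
        shared.makedirs(fs_path.join(analysis_workspace, sub))
    shared.tree()  # prints the resulting workspace layout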
def test_search():
    fullFS = FullFS()
    directory = f'testgoogledrivefs_{uuid4()}'
    fullFS.makedir(directory)

    filename = f'searchtestfilename_{uuid4()}'
    fullFS.touch(join(directory, filename))
    nameResults = list(fullFS.search(NameEquals(filename)))
    assert len(nameResults) == 1
    assert nameResults[0].name == filename

    textFilename = f'searchtestfilename_{uuid4()}.txt'
    with fullFS.open(join(directory, textFilename), 'w') as f:
        f.write('Some text')
    mimeTypeResults = list(
        fullFS.search(
            And(MimeTypeEquals('text/plain'), NameEquals(textFilename))))
    assert len(mimeTypeResults) == 1
    assert mimeTypeResults[0].name == textFilename

    mimeTypeResultsFail = list(
        fullFS.search(
            And(MimeTypeEquals('application/pdf'), NameEquals(textFilename))))
    assert len(mimeTypeResultsFail) == 0

    fullFS.removetree(directory)
def test_create(self):
    directory = join("home", self.user, "test", "directory")
    base = "ftp://*****:*****@{}:{}/foo".format(self.server.host, self.server.port)
    url = "{}/{}".format(base, directory)

    # Make sure a non-existent directory raises `CreateFailed`
    with self.assertRaises(errors.CreateFailed):
        ftp_fs = open_fs(url)

    # Open with `create` and try touching a file
    with open_fs(url, create=True) as ftp_fs:
        ftp_fs.touch("foo")

    # Open the base filesystem and check the subdirectory exists
    with open_fs(base) as ftp_fs:
        self.assertTrue(ftp_fs.isdir(directory))
        self.assertTrue(ftp_fs.isfile(join(directory, "foo")))

    # Open without `create` and check the file exists
    with open_fs(url) as ftp_fs:
        self.assertTrue(ftp_fs.isfile("foo"))

    # Open with `create` again and check this does not wipe the file
    with open_fs(url, create=True) as ftp_fs:
        self.assertTrue(ftp_fs.isfile("foo"))
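# A local sketch of the create/CreateFailed behaviour exercised by the FTP
# test above, using an osfs:// URL so it runs without a server; the temp
# path is illustrative only.
import tempfile
from fs import open_fs
from fs.errors import CreateFailed

root = tempfile.mkdtemp()
url = "osfs://" + root + "/missing/dir"
try:
    open_fs(url)  # the directory does not exist yet
except CreateFailed:
    pass
with open_fs(url, create=True) as demo_fs:  # create=True makes it
    demo_fs.touch("foo")
with open_fs(url) as demo_fs:  # now it opens without create
    assert demo_fs.isfile("foo")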
def get_analysis_files_dir(workflow, file_type, action='list'):
    """Given a workflow and a file type, returns path to the file type dir."""
    analysis_workspace = get_analysis_dir(workflow)
    if action == 'list':
        return fs_path.join(analysis_workspace,
                            app.config['ALLOWED_LIST_DIRECTORIES'][file_type])
    elif action == 'seed':
        return fs_path.join(analysis_workspace,
                            app.config['ALLOWED_SEED_DIRECTORIES'][file_type])
def falta_caps(self):
    base = self.pathbar.text()
    with read_only(OSFS(base)) as ff:
        proces = []
        folds = []
        for i in ff.scandir('/'):
            if i.is_dir:
                folds.append(i)
        for i in folds:
            path = join('/', i.name)
            try:
                for j in ff.scandir(path):
                    if j.is_file and splitext(
                            j.name)[1].lower() in video_formats:
                        proces.append((j, i))
            except (PermissionError, DirectoryExpected) as e:
                self.loggin.emit("Access denied to " + join(base, i.name),
                                 ERROR)
    folds = {}
    for filee, fold in proces:
        fold = fold.name
        filee = filee.name
        self.loggin.emit("Processing: " + join(base, fold), INFORMATION)
        try:
            pp = parse2(filee)
            if pp.error:
                pp = parse(filee)
        except Exception as e:
            self.loggin.emit(
                "Error processing: " + join(base, fold, filee), WARNING)
            self.loggin.emit(str(e), ERROR)
            continue
        t1 = transform(pp.title)
        if not pp.episode:
            continue
        t2 = pp.episode
        if fold in folds:
            if t1 in folds[fold]:
                folds[fold][t1].add(int(t2))
            else:
                tt = best_ed(t1, folds[fold].keys(), gap=2)
                if tt in folds[fold]:
                    folds[fold][tt].add(int(t2))
                else:
                    folds[fold][tt] = set()
                    folds[fold][tt].add(int(t2))
        else:
            folds[fold] = {}
            folds[fold][t1] = set()
            folds[fold][t1].add(int(t2))
    # for i in folds.keys():
    #     for j in folds[i]:
    #         folds[i][j] = list(set(folds[i][j]))
    self.caps_list = folds
def test_multiversioned_cowfs(self) -> None:
    base_partial_path = join(self.temp_dir, "base")
    fs = MultiversionedOSFS.create_suffixed(base_partial_path, create=True)
    rw_partial_path = join(self.temp_dir, "next")
    cowfs = MultiversionedCOWFS.create_cowfs_suffixed(fs, rw_partial_path,
                                                      recreate=True)
    self.assertTrue(cowfs.is_multiversioned_fs())
    names = OSFS(self.temp_dir).listdir("/")
    self.assertEqual({"base-mv", "next-deltas-mv"}, set(names))
def build_static(self, *args, **options):
    """
    Builds the static files directory as well as robots.txt and favicon.ico
    """
    logger.debug("Building static directory")
    if self.verbosity > 1:
        self.stdout.write("Building static directory")
    management.call_command("collectstatic", interactive=False, verbosity=0)

    # Set the target directory inside the filesystem.
    target_dir = path.join(self.build_dir, settings.STATIC_URL.lstrip('/'))
    target_dir = smart_text(target_dir)

    exclude_dirs = getattr(settings, 'BAKERY_STATIC_EXCLUDE_DIRS', None)
    if not exclude_dirs:
        # explicitly set to None to make sure we don't get an empty list/tuple
        exclude_dirs = None

    if os.path.exists(self.static_root) and settings.STATIC_URL:
        if getattr(settings, 'BAKERY_GZIP', False):
            self.copytree_and_gzip(self.static_root, target_dir, exclude_dirs)
        # if gzip isn't enabled, just copy the tree straight over
        else:
            logger.debug("Copying {}{} to {}{}".format(
                "osfs://", self.static_root, self.fs_name, target_dir))
            walker = Walker(exclude_dirs=exclude_dirs)
            copy.copy_dir("osfs:///", self.static_root, self.fs, target_dir,
                          walker=walker)

    # If they exist in the static directory, copy the robots.txt
    # and favicon.ico files down to the root so they will work
    # on the live website.
    robots_src = path.join(target_dir, 'robots.txt')
    if self.fs.exists(robots_src):
        robots_target = path.join(self.build_dir, 'robots.txt')
        logger.debug("Copying {}{} to {}{}".format(
            self.fs_name, robots_src, self.fs_name, robots_target))
        self.fs.copy(robots_src, robots_target)

    favicon_src = path.join(target_dir, 'favicon.ico')
    if self.fs.exists(favicon_src):
        favicon_target = path.join(self.build_dir, 'favicon.ico')
        logger.debug("Copying {}{} to {}{}".format(
            self.fs_name, favicon_src, self.fs_name, favicon_target))
        self.fs.copy(favicon_src, favicon_target)
def get_build_path(self, obj):
    """
    Used to determine where to build the detail page. Override this if you
    would like your detail page at a different location. By default it will
    be built at get_url() + "index.html"
    """
    target_path = path.join(settings.BUILD_DIR, self.get_url(obj).lstrip('/'))
    if not self.fs.exists(target_path):
        logger.debug("Creating {}".format(target_path))
        self.fs.makedirs(target_path)
    return path.join(target_path, 'index.html')
def get_build_path(self):
    """
    Used to determine where to build the page. Override this if you would
    like your page at a different location. By default it will be built at
    self.get_url() + "/index.html"
    """
    target_path = path.join(settings.BUILD_DIR, self.get_url().lstrip('/'))
    if not self.fs.exists(target_path):
        logger.debug("Creating {}".format(target_path))
        self.fs.makedirs(target_path)
    return path.join(target_path, 'index.html')
def build_ui_caps(self):
    dirr = self.dirr
    with OSFS(dirr) as f:
        data = make_temp_fs(f)
    self.vfs = data
    capsmap = {}
    vfs = self.vfs
    # vfs.tree()
    for path, _, files in vfs.walk():
        for i in files:
            dd = {}
            nn = i.name
            pp = rename(nn)
            dd['fixname'] = nn
            dd['cap'] = pp.episode
            dd['season'] = pp.season
            opth = vfs.gettext(join(path, nn))
            oon = split(opth)[1]
            dd['original'] = oon
            dd['ext'] = pp.ext.lower()
            dd['vpath'] = join(path, nn)
            dd['state'] = True
            dd['fold'] = split(path)[1]
            capsmap[oon] = dd
    self.capsmap = capsmap
    nonly = self.move.checkedId() == 3
    li = self.li
    lic = li.count()
    cps = list(capsmap.values())
    cpl = len(cps)
    if cpl <= lic:
        for n, i in enumerate(cps):
            name = i['fixname']
            if nonly:
                name = i['cap'] + i['ext']
            ll = li.item(n)
            ll.setText(name + "\t" + i['original'])
        for i in range(lic - cpl):
            ll = li.takeItem(0)
            del ll
    else:
        for i in range(lic):
            name = cps[i]['fixname']
            if nonly:
                name = cps[i]['cap'] + cps[i]['ext']
            ll = li.item(i)
            ll.setText(name + "\t" + cps[i]['original'])
        for i in range(cpl - lic):
            name = cps[lic + i]['fixname']
            if nonly:
                name = cps[lic + i]['cap'] + cps[lic + i]['ext']
            li.addItem(name + "\t" + cps[lic + i]['original'])
def ftp_move_to(self, item):
    txt = item.text()
    txt2 = normpath(join(self.pathbarftp.text(), txt))
    self.li.clear()
    if txt2 != '/':
        normpath(join(txt2, '..'))
        item = QListWidgetItem(
            qta.icon('fa5s.folder-open', color='orange'), '..', self.li)
        self.li.addItem(item)
    for i in self.ftpm.list_dir(txt2):
        item = QListWidgetItem(
            qta.icon('fa5s.folder-open', color='orange'), i, self.li)
        self.li.addItem(item)
    self.pathbarftp.setText(txt2)
def collect_pdf(self, acron, issue_folder, pack_name):
    walker = Walker(filter=["*" + pack_name + "*.pdf"], max_depth=2)
    pdf_path = path.join(self.pdf_fs.root_path, acron, issue_folder)
    for pdf in walker.files(fs.open_fs(pdf_path)):
        pdf_path = path.join(acron, issue_folder, path.basename(pdf))
        target_pdf_path = path.join(acron, issue_folder, pack_name,
                                    self.rename_pdf_trans_filename(pdf))
        self.copy(pdf_path, target_pdf_path, src_fs=self.pdf_fs)
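# Sketch of the Walker pattern used by collect_pdf(): glob filters select
# matching files while walking a filesystem. The fixture names here are
# made up for illustration.
import fs
from fs.walk import Walker

demo = fs.open_fs("mem://")
demo.makedir("sub")
demo.touch("pack1-trans.pdf")
demo.touch("sub/pack1-extra.pdf")
demo.touch("other.xml")
walker = Walker(filter=["*pack1*.pdf"], max_depth=2)
print(sorted(walker.files(demo)))  # ['/pack1-trans.pdf', '/sub/pack1-extra.pdf']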
def test_move_file_fs_urls(self):
    # create a temp dir to work on
    with open_fs("temp://") as tmp:
        path = tmp.getsyspath("/")
        tmp.makedir("subdir_src")
        tmp.writetext("subdir_src/file.txt", "Content")
        tmp.makedir("subdir_dst")
        fs.move.move_file(
            "osfs://" + join(path, "subdir_src"),
            "file.txt",
            "osfs://" + join(path, "subdir_dst"),
            "target.txt",
        )
        self.assertFalse(tmp.exists("subdir_src/file.txt"))
        self.assertEqual(tmp.readtext("subdir_dst/target.txt"), "Content")
def test_make_mv_deltas(self) -> None:
    with make_mv_osfs(join(self.temp_dir, "foo")) as base_fs, make_mv_deltas(
            base_fs, join(self.temp_dir, "bar")) as deltas_fs:
        names = OSFS(self.temp_dir).walk.dirs("/")
        self.assertEqual(
            {
                "/foo-mv",
                "/bar-deltas-mv",
                "/bar-deltas-mv/additions",
                "/bar-deltas-mv/deletions",
            },
            set(names),
        )
def get_project_doc_folder(projectfolder):
    # TODO replace with doc_folder from configs?
    projectfolder = os.path.realpath(projectfolder)
    projectfolder = os.path.relpath(projectfolder, PROJECTS_ROOT)
    pth = path.join(projectfolder, 'doc')
    try:
        if not refdoc_fs.exists(pth):
            refdoc_fs.makedir(pth, recursive=True)
        if not refdoc_fs.exists(path.join(pth, 'confdocs')):
            refdoc_fs.makedir(path.join(pth, 'confdocs'), recursive=True)
    except PermissionDeniedError:
        logger.warning("Permission denied when creating folder for " +
                       projectfolder)
        return None
    return pth
def collect_img(self, acron, issue_folder, pack_name):
    walker = Walker(filter=["*" + pack_name + "*"], max_depth=2,
                    exclude_dirs=["html"])
    img_path = path.join(self.img_fs.root_path, acron, issue_folder)
    for img in walker.files(fs.open_fs(img_path)):
        img_path = path.join(acron, issue_folder, path.basename(img))
        target_img_path = path.join(acron, issue_folder, pack_name,
                                    path.basename(img))
        self.copy(img_path, target_img_path, src_fs=self.img_fs)
def _itemsFromPath(self, path):
    pathIdMap = {'/': _rootMetadata}
    ipath = iteratepath(path)

    pathSoFar = '/'
    parentId = self.rootId

    if self.rootId is not None:
        # if we have been given a `rootId` then get the info for this
        # directory and set it as the root directory's metadata.
        rootMetadata = self._drive.files().get(
            fileId=self.rootId,
            fields=_ALL_FIELDS,
            **self._file_kwargs,
        ).execute()
        if rootMetadata is None:
            return pathIdMap
        pathIdMap['/'] = rootMetadata

    for childName in ipath:
        pathSoFar = join(pathSoFar, childName)
        metadata = self._childByName(parentId, childName)
        if metadata is None:
            break
        pathIdMap[pathSoFar] = metadata
        parentId = metadata['id']

    return pathIdMap
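# How the path walk above proceeds: iteratepath() splits a path into its
# components, and join() rebuilds the running path one level at a time
# (a standalone sketch; no Drive access involved).
from fs.path import iteratepath, join

path_so_far = "/"
for child_name in iteratepath("/reports/2024/q1"):
    path_so_far = join(path_so_far, child_name)
    print(path_so_far)
# /reports
# /reports/2024
# /reports/2024/q1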
def download_from_git(self, module_name: str, url: str, branch: str,
                      path: str = ".") -> OSFS:
    """
    Clone a git repository or find it in the source cache.

    :param module_name: name of the module being installed
    :param url: URL of the repository
    :param branch: branch of the desired module version
    :param path: path to the module inside the repository (defaults to '.')
    :return: an OSFS object pointing to the module location inside the
        repository
    """
    repo_dir_name = urlparse(url).path.replace("/", "_")
    if url in self.src_cache["git"]:
        self.pretty_print(module_name, "Repository found in cache",
                          level=VERBOSE_FULL)
        repo = self.src_cache["git"][url]
        repo.checkout("refs/remotes/origin/{}".format(branch))
    else:
        self.pretty_print(module_name, "Cloning repository",
                          level=VERBOSE_FULL)
        repo = pygit2.clone_repository(
            url, self.tmp_dir.getsyspath(repo_dir_name),
            checkout_branch=branch)
        self.src_cache["git"][url] = repo
    return OSFS(join(repo.workdir, path))
def compile_fragment_files(
        self,
        write_fs: FS,
        found_fragments: Iterable[FoundFragment]) -> List[str]:
    """
    Compile fragment files into `parent_dir`.
    """
    outputs = []
    for version_fs, filename in found_fragments:
        try:
            fragment = self.load_fragment(version_fs.readtext(filename))
            fragment_type = fragment.get('type')
            showcontent = self.config.fragment_types.get(
                fragment_type, {}).get('showcontent', True)
            section = fragment.get('section') or None
            rendered_content = render_fragment(
                fragment,
                showcontent,
                self.config.changelog_output_type)
            if rendered_content.strip():
                filename_stem = splitext(basename(filename))[0]
                output_path = join(*filter(None, [
                    section,
                    '{}.{}'.format(filename_stem, fragment_type)]))
                log.info('Compiling {} -> {}'.format(
                    version_fs.getsyspath(filename),
                    write_fs.getsyspath(output_path)))
                parent_dir = dirname(output_path)
                if parent_dir:
                    write_fs.makedirs(parent_dir, recreate=True)
                write_fs.writetext(output_path, rendered_content)
                outputs.append(output_path)
        except Exception:
            raise FragmentCompilationError(filename)
    return outputs
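# The join(*filter(None, ...)) idiom above drops the optional section
# cleanly: both sectioned and unsectioned fragments produce valid
# relative paths.
from fs.path import join

print(join(*filter(None, ["fixes", "123.bugfix"])))  # fixes/123.bugfix
print(join(*filter(None, [None, "123.bugfix"])))     # 123.bugfix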
def lid_to_dir(lid: LID) -> str:
    """
    Convert a LID to a directory path.
    """
    dir_parts = _lid_to_parts(lid)
    dir_parts.insert(0, "/")
    return join(*dir_parts)
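# Illustration only: _lid_to_parts() is project-specific, so a hypothetical
# stand-in splitter is used here to show how the leading "/" makes join()
# return an absolute directory path.
from fs.path import join

def _lid_to_parts_demo(lid_str):
    # hypothetical: keep the segments after the 'urn:nasa:pds' prefix
    return lid_str.split(":")[3:]

dir_parts = _lid_to_parts_demo("urn:nasa:pds:bundle:collection")
dir_parts.insert(0, "/")
print(join(*dir_parts))  # /bundle/collection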
def _get_directory(self, path, content, format, *, type=None):
    self.log.debug('_get_directory(%s)', path)
    path = self.fs.validatepath(path)
    d = self.fs.getdetails(path)
    if not d.is_dir:
        raise HTTPError(404, '"%s" not a directory', path)
    model = _base_model(*fspath.split(path))
    model['type'] = 'directory'
    model['size'] = None
    model['format'] = None
    model['created'], model['last_modified'] = _created_modified(d)
    if content:
        model['content'] = []
        model['format'] = 'json'
        for item in self.fs.scandir(path, ['basic', 'details']):
            child_path = fspath.join(path, item.name)
            if item.is_dir:
                model['content'].append(
                    self._get_directory(child_path, False, None))
            if item.is_file:
                model['content'].append(
                    self._get_file(child_path, False, format))
    return model
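# The scandir() call above requests the 'details' namespace so sizes and
# timestamps are populated on each Info object; a minimal sketch.
import fs

demo = fs.open_fs("mem://")
demo.makedir("nb")
demo.writetext("nb/hello.txt", "hi")
for item in demo.scandir("/nb", namespaces=["basic", "details"]):
    print(item.name, item.is_file, item.size)  # hello.txt True 2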
def add_parent(self, path, parent_dir):
    _log.info(f"add_parent: {path} -> {parent_dir}")
    _CheckPath(path)
    _CheckPath(parent_dir)
    with self._lock:
        targetPath = join(parent_dir, basename(path))
        idsFromPath = self._itemsFromPath(targetPath)
        # don't allow violation of our requirement to keep filename unique
        # inside new directory
        if targetPath in idsFromPath:
            raise FileExists(targetPath)
        parentDirItem = idsFromPath.get(parent_dir)
        if parentDirItem is None:
            raise ResourceNotFound(parent_dir)
        if parentDirItem["mimeType"] != _folderMimeType:
            raise DirectoryExpected(parent_dir)
        sourceItem = self._itemFromPath(path)
        if sourceItem is None:
            raise ResourceNotFound(path)
        self.drive.files().update(
            fileId=sourceItem["id"],
            addParents=parentDirItem["id"],
            body={}).execute(num_retries=self.retryCount)
def format_directory(path, levels):
    # type: (Text, List[bool]) -> None
    """Recursive directory function."""
    try:
        directory = sorted(
            fs.filterdir(path, exclude_dirs=exclude, files=filter),
            key=sort_key_dirs_first if dirs_first else sort_key,  # type: ignore
        )
    except Exception as error:
        prefix = ("".join(indent if last else line_indent
                          for last in levels) + char_corner + char_line)
        write("{} {}".format(
            format_prefix(prefix),
            format_error("error ({})".format(error))))
        return
    _last = len(directory) - 1
    for i, info in enumerate(directory):
        is_last_entry = i == _last
        counts["dirs" if info.is_dir else "files"] += 1
        prefix = "".join(indent if last else line_indent for last in levels)
        prefix += char_corner if is_last_entry else char_newnode
        if info.is_dir:
            write("{} {}".format(format_prefix(prefix + char_line),
                                 format_dirname(info.name)))
            if max_levels is None or len(levels) < max_levels:
                format_directory(join(path, info.name),
                                 levels + [is_last_entry])
        else:
            write("{} {}".format(format_prefix(prefix + char_line),
                                 format_filename(info.name)))
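# filterdir() is what gives the tree its include/exclude behaviour: glob
# patterns apply to file names and directory names independently (sketch
# with made-up fixture names).
import fs

demo = fs.open_fs("mem://")
demo.makedir("pkg")
demo.makedir("build")
demo.touch("README.md")
demo.touch("notes.txt")
names = sorted(info.name for info in demo.filterdir(
    "/", exclude_dirs=["build"], files=["*.md"]))
print(names)  # ['README.md', 'pkg'] -- 'build' and 'notes.txt' filtered out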
def run(self):
    if self.check_acrons():
        for acron in self.acrons:
            logging.info("Process acronym: %s" % acron)
            walker = Walker(filter=["*.xml"], exclude=["*.*.xml"])
            acron_folder = path.join(self.xml_fs.root_path, acron)
            for xml in walker.files(fs.open_fs(acron_folder)):
                if len(path.iteratepath(xml)) == 2:
                    logging.info("Process XML: %s" % xml)
                    issue_folder, pack_name = self.collect_xml(acron, xml)
                    self.collect_pdf(acron, issue_folder, pack_name)
                    self.collect_img(acron, issue_folder, pack_name)
    else:
        return False
def build_static_directory(self, obj):
    """
    Builds an object's static subdirectory.
    """
    # The location of static files in the dynamic page directory
    source_dir = os.path.join(obj.page_directory_path, 'static')
    # The location in the build directory where we want to copy them
    target_dir = path.join(
        bigbuild.get_build_directory(),
        obj.get_static_url().lstrip("/")
    )
    # An internal django-bakery trick to gzip them if we need to
    if settings.BAKERY_GZIP:
        cmd = Build()
        cmd.set_options()
        cmd.copytree_and_gzip(source_dir, target_dir)
    else:
        # Or a more vanilla way of copying the files with Python
        logger.debug("Copying {}{} to {}{}".format(
            "osfs://", source_dir, self.fs_name, target_dir))
        copy.copy_dir("osfs:///", smart_text(source_dir), self.fs,
                      smart_text(target_dir))
def get_build_path(self):
    build_path = ''
    if self.get_paginate_by(self.queryset) and self.kwargs['page']:
        build_path = self.get_page_build_path()
    else:
        build_path = path.join(self.build_folder, self.build_path)
    return build_path
def test_pathjoin(self):
    tests = [
        ("", "a", "a"),
        ("a", "a", "a/a"),
        ("a/b", "../c", "a/c"),
        ("a/b/../c", "d", "a/c/d"),
        ("/a/b/c", "d", "/a/b/c/d"),
        ("/a/b/c", "../../../d", "/d"),
        ("a", "b", "c", "a/b/c"),
        ("a/b/c", "../d", "c", "a/b/d/c"),
        ("a/b/c", "../d", "/a", "/a"),
        ("aaa", "bbb/ccc", "aaa/bbb/ccc"),
        ("aaa", "bbb\\ccc", "aaa/bbb\\ccc"),
        ("aaa", "bbb", "ccc", "/aaa", "eee", "/aaa/eee"),
        ("a/b", "./d", "e", "a/b/d/e"),
        ("/", "/", "/"),
        ("/", "", "/"),
        ("a/\N{GREEK SMALL LETTER BETA}", "c",
         "a/\N{GREEK SMALL LETTER BETA}/c"),
    ]
    for testpaths in tests:
        paths = testpaths[:-1]
        result = testpaths[-1]
        self.assertEqual(join(*paths), result)

    self.assertRaises(ValueError, join, "..")
    self.assertRaises(ValueError, join, "../")
    self.assertRaises(ValueError, join, "/..")
    self.assertRaises(ValueError, join, "./../")
    self.assertRaises(ValueError, join, "a/b", "../../..")
    self.assertRaises(ValueError, join, "a/b/../../../d")
def save(self, incoming_file, filename, unique_name=True, with_checksum=True,
         chunksize=65536):
    """Store the incoming file."""
    if unique_name:
        filename = self.unique_filename(filename)

    fs_file = self.storage.open(filename, 'wb')

    checksum = None
    m = hashlib.md5()

    f_bytes = incoming_file.read(chunksize)
    while f_bytes:
        fs_file.write(f_bytes)
        if with_checksum:
            m.update(f_bytes)
        f_bytes = incoming_file.read(chunksize)

    fs_file.close()
    checksum = m.hexdigest()

    # Create complete file path and return it
    return (
        path.join(self.fs_path, filename),
        self.storage.getsize(filename),
        checksum,
        with_checksum,
    )
def open_fs(self, fs_url, parse_result, writeable, create, cwd):
    # pylint: disable=no-self-use
    path_parts = iteratepath(parse_result.resource)
    bucket_name = path_parts[0]
    root_path = join(*path_parts[1:])

    if not bucket_name:
        raise OpenerError("invalid bucket name in '{}'".format(fs_url))

    if parse_result.params.get("strict") == "False":
        strict = False
    else:
        strict = True

    client = Client()

    project = parse_result.params.get("project")
    if project:
        client.project = project

    api_endpoint = parse_result.params.get("api_endpoint")
    if api_endpoint:
        client.client_options = {"api_endpoint": api_endpoint}

    return GCSFS(bucket_name, root_path=root_path, create=create,
                 client=client, strict=strict)
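# How the opener splits a URL resource into bucket and root (runs without
# any GCS credentials): the first component is the bucket, the rest is the
# root path inside it.
from fs.path import iteratepath, join

path_parts = iteratepath("my-bucket/analyses/2024")
print(path_parts[0])          # my-bucket
print(join(*path_parts[1:]))  # analyses/2024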
def last(self, base):
    with read_only(OSFS(base)) as ff:
        proces = []
        folds = []
        for i in ff.scandir('/'):
            if i.is_dir:
                folds.append(i)
        for i in folds:
            path = join('/', i.name)
            try:
                for j in ff.scandir(path):
                    if j.is_file and splitext(
                            j.name)[1].lower() in video_formats:
                        proces.append((j, i))
            except (PermissionError, DirectoryExpected) as e:
                self.logger.emit("Access denied to " + join(base, i.name),
                                 ERROR)
    folds = {}
    for filee, fold in proces:
        fold = fold.name
        filee = filee.name
        try:
            pp = parse2(filee)
            if pp.error:
                pp = parse(filee)
        except Exception as e:
            self.logger.emit(
                "Error processing: " + join(base, fold, filee), WARNING)
            self.logger.emit(str(e), ERROR)
            continue
        t1 = transform(pp.title)
        if not pp.episode:
            continue
        t2 = pp.episode
        if t1 in folds:
            if folds[t1] < int(t2):
                folds[t1] = int(t2)
        else:
            tt = best_ed(t1, folds.keys())
            if tt in folds:
                if folds[tt] < int(t2):
                    folds[tt] = int(t2)
            else:
                folds[tt] = int(t2)
    self.caps_list = folds
def install_packages(self, output_fs, selected_packages, application=None):
    """Install packages"""
    download_fs = TempFS()
    install_packages = []
    for index, (_, select_package) in enumerate(selected_packages):
        app_name = self.args.app or select_package["name"].split(
            ".", 1)[-1].replace(".", "")
        _install = self.download_package(
            download_fs,
            select_package,
            app=app_name if index == 0 else None,
            mount=self.args.mount if index == 0 else None,
        )
        install_packages.append(_install)

    installed = []
    if application:
        cfg = application.archive.cfg
    else:
        cfg = build.read_config(self.location, self.args.settings)

    changed_server = False
    for _package in install_packages:
        _changed_server, _installed_packages = self.install_package(
            download_fs, output_fs, _package, cfg=cfg)
        installed.extend(_installed_packages)
        changed_server = changed_server or _changed_server

    table = []
    for _package, mount in installed:
        table.append([
            Cell("{name}".format(**_package), fg="magenta", bold=True),
            Cell("{version}".format(**_package)),
            Cell(_package["location"], fg="blue", bold=True),
            Cell(mount or "", fg="cyan", bold=True),
        ])
    if table:
        self.console.table(table, ["package", "version", "location", "mount"])

    if application is not None:
        archive = application.archive
        logic_location = archive.cfg.get("project", "location")
        server_xml = archive.cfg.get("project", "startup")
        server_xml = archive.project_fs.getsyspath(
            join(logic_location, server_xml))
        if changed_server:
            self.console.text(
                "moya-pm modified '{}' -- please check changes".format(
                    server_xml),
                fg="green",
                bold="yes",
            )
def DeleteDirectory(self, FileName, DokanFileInfo):
    FileName = self._dokanpath2pyfs(FileName)
    for nm in self.fs.listdir(FileName):
        if not self._is_pending_delete(join(FileName, nm)):
            return STATUS_DIRECTORY_NOT_EMPTY
    self._pending_delete.add(FileName)
    # the actual delete takes place in self.CloseFile()
    return STATUS_SUCCESS
def parse(self, fs_url, default_fs_name=None, writeable=False,
          create_dir=False):
    """Parses a FS url and returns an FS object and a path within that FS
    object (if indicated in the path). A tuple of (<FS instance>, <path>)
    is returned.

    :param fs_url: an FS url
    :param default_fs_name: the default FS to use if none is indicated
        (default is OSFS)
    :param writeable: if True, a writeable FS will be returned
    :param create_dir: if True, then the directory in the FS will be created
    """
    orig_url = fs_url
    match = self.split_segments(fs_url)
    if match:
        fs_name, fs_url, _, path = match.groups()
        path = path or ''
        fs_url = fs_url or ''
        if ':' in fs_name:
            fs_name, sub_protocol = fs_name.split(':', 1)
            fs_url = '%s://%s' % (sub_protocol, fs_url)
        if '!' in path:
            paths = path.split('!')
            path = paths.pop()
            fs_url = '%s!%s' % (fs_url, '!'.join(paths))
        fs_name = fs_name or self.default_opener
    else:
        fs_name = default_fs_name or self.default_opener
        fs_url = _expand_syspath(fs_url)
        path = ''

    fs_name, fs_name_params = _parse_name(fs_name)
    opener = self.get_opener(fs_name)

    if fs_url is None:
        raise OpenerError("Unable to parse '%s'" % orig_url)

    fs, fs_path = opener.get_fs(self, fs_name, fs_name_params, fs_url,
                                writeable, create_dir)

    if fs_path and iswildcard(fs_path):
        pathname, resourcename = pathsplit(fs_path or '')
        if pathname:
            fs = fs.opendir(pathname)
        return fs, resourcename

    fs_path = join(fs_path, path)

    if create_dir and fs_path:
        fs.makedir(fs_path, allow_recreate=True)

    pathname, resourcename = pathsplit(fs_path or '')
    if pathname and resourcename:
        fs = fs.opendir(pathname)
        fs_path = resourcename

    return fs, fs_path or ''
def save(self, incoming_file, filename, chunk=None, chunks=None):
    """Save one chunk of an incoming file."""
    try:
        # Generate chunked file name
        chunk = int(chunk)
        chunks = int(chunks)
    except (ValueError, TypeError):
        raise UploadError("Invalid chunk value: %s" % chunk)

    # Store chunk
    chunk_filename = self.chunk_filename(filename, chunks, chunk)
    res = super(ChunkedDepositionStorage, self).save(
        incoming_file,
        chunk_filename,
        unique_name=False,
        with_checksum=False,
    )

    # Only merge files on the last chunk
    if chunk != chunks - 1:
        return res

    # Get the chunks
    file_chunks = self.storage.listdir(
        wildcard=self.chunk_filename(filename, chunks, '*')
    )
    file_chunks.sort(key=lambda x: int(x.split("_")[-1]))

    # Write the chunks into one file
    filename = self.unique_filename(filename)
    fs_file = self.storage.open(filename, 'wb')
    m = hashlib.md5()

    for c in file_chunks:
        fs_c = self.storage.open(c, 'rb')
        f_bytes = fs_c.read(65536)
        while f_bytes:
            fs_file.write(f_bytes)
            m.update(f_bytes)
            f_bytes = fs_c.read(65536)
        fs_c.close()
        # Remove each chunk right after appending to main file, to
        # minimize storage usage.
        self.storage.remove(c)

    fs_file.close()
    checksum = m.hexdigest()

    return (
        path.join(self.fs_path, filename),
        self.storage.getsize(filename),
        checksum,
        True
    )
def gen_pcb_pdf(projfolder, force=False):
    """
    Generates a PDF file of the PCB layers for the PCB provided by the gEDA
    project.

    The pcb file is the one listed in the gEDA project file, and the pcbname
    is the one specified in the :mod:`tendril.gedaif.conffile.ConfigsFile`.
    This function does not use jinja2 and latex. It relies on
    :func:`tendril.gedaif.pcb.conv_pcb2pdf` instead.

    :param projfolder: The gEDA project folder.
    :type projfolder: str
    :param force: Regenerate even if up-to-date.
    :type force: bool
    :return: The output file path.

    .. rubric:: Paths

    * Output File : ``<project_doc_folder>/<pcbname>-pcb.pdf``
    * Source Files : The project's `.pcb` file.

    """
    configfile = conffile.ConfigsFile(projfolder)
    gpf = projfile.GedaProjectFile(configfile.projectfolder)
    pcb_mtime = fsutils.get_file_mtime(
        os.path.join(configfile.projectfolder, 'pcb', gpf.pcbfile + '.pcb')
    )
    if pcb_mtime is None:
        logger.warning("PCB does not seem to exist for : " + projfolder)
        return
    docfolder = get_project_doc_folder(projfolder)
    pdffile = path.join(docfolder, configfile.pcbname + '-pcb.pdf')
    outf_mtime = fsutils.get_file_mtime(pdffile, fs=refdoc_fs)

    if not force and outf_mtime is not None and outf_mtime > pcb_mtime:
        logger.debug('Skipping up-to-date ' + pdffile)
        return pdffile

    logger.info('Regenerating ' + pdffile + os.linesep +
                'Last modified : ' + str(pcb_mtime) +
                '; Last Created : ' + str(outf_mtime))

    workspace_folder = workspace_fs.getsyspath(path.dirname(pdffile))
    workspace_fs.makedir(path.dirname(pdffile),
                         recursive=True, allow_recreate=True)
    pcb.conv_pcb2pdf(
        os.path.join(configfile.projectfolder, 'pcb', gpf.pcbfile + '.pcb'),
        workspace_folder, configfile.pcbname
    )
    copyfile(workspace_fs, pdffile, refdoc_fs, pdffile, overwrite=True)
    return pdffile
def get_img_list(projfolder, cardname=None):
    """
    Returns a list of :class:`docstore.ExposedDocument` instances, pointing
    to the generated renders for the gEDA project or card specified by the
    parameters.

    Currently, the ``cardname`` parameter is ignored, since no configuration
    specific images are generated.

    :param projfolder: The gEDA project folder.
    :param cardname: The cardname.
    :return: list of :class:`ExposedDocument`

    """
    configfile = conffile.ConfigsFile(projfolder)
    gpf = projfile.GedaProjectFile(configfile.projectfolder)
    namebase = configfile.pcbname
    project_doc_folder = get_project_doc_folder(projfolder)
    if not project_doc_folder:
        return []
    project_img_folder = os.path.join(project_doc_folder, os.pardir, 'img')
    rval = [
        ExposedDocument(
            namebase + ' PCB Top View',
            path.join(project_img_folder, gpf.pcbfile + '.top.png'),
            refdoc_fs
        ),
        ExposedDocument(
            namebase + ' PCB Bottom View',
            path.join(project_img_folder, gpf.pcbfile + '.bottom.png'),
            refdoc_fs
        ),
        ExposedDocument(
            namebase + ' PCB Layers',
            path.join(project_img_folder, gpf.pcbfile + '.devel.png'),
            refdoc_fs
        )
    ]
    # Filter with a comprehension instead of removing from the list while
    # iterating over it, which would skip entries.
    rval = [img for img in rval if img.exists]
    return rval
def prep_directory(self, target_dir):
    """
    Prepares a new directory to store the file at the provided path,
    if needed.
    """
    dirname = path.dirname(target_dir)
    if dirname:
        dirname = path.join(settings.BUILD_DIR, dirname)
        if not self.fs.exists(dirname):
            logger.debug("Creating directory at {}{}".format(self.fs_name,
                                                             dirname))
            self.fs.makedirs(dirname)
def gen_cobom_csv(projfolder, namebase, force=False):
    """
    Generates a CSV file in the
    :mod:`tendril.boms.outputbase.CompositeOutputBom` format, including the
    BOMs of all the defined configurations of the project.

    This function uses a :mod:`csv.writer` instead of rendering a jinja2
    template. It also generates configdocs for all the defined
    configurations of the project, using :func:`gen_confpdf`.

    :param projfolder: The gEDA project folder.
    :type projfolder: str
    :param namebase: The project name.
    :type namebase: str
    :param force: Regenerate even if up-to-date.
    :type force: bool
    :return: The output file path.

    .. rubric:: Paths

    * Output Files : ``<project_doc_folder>/confdocs/conf-boms.csv``
    * Also triggers : :func:`gen_confpdf` for all listed configurations.
    * Source Files : The project's schematic folder.

    """
    gpf = projfile.GedaProjectFile(projfolder)
    configfile = conffile.ConfigsFile(projfolder)
    sch_mtime = fsutils.get_folder_mtime(gpf.schfolder)

    docfolder = get_project_doc_folder(projfolder)
    cobom_csv_path = path.join(docfolder, 'confdocs', 'conf-boms.csv')
    outf_mtime = fsutils.get_file_mtime(cobom_csv_path, fs=refdoc_fs)

    if not force and outf_mtime is not None and outf_mtime > sch_mtime:
        logger.debug('Skipping up-to-date ' + cobom_csv_path)
        return cobom_csv_path

    logger.info('Regenerating ' + cobom_csv_path + os.linesep +
                'Last modified : ' + str(sch_mtime) +
                '; Last Created : ' + str(outf_mtime))

    bomlist = []
    for cfn in configfile.configuration_names:
        gen_confpdf(projfolder, cfn, namebase, force=force)
        lbom = boms_electronics.import_pcb(projfolder)
        lobom = lbom.create_output_bom(cfn)
        bomlist.append(lobom)

    cobom = boms_outputbase.CompositeOutputBom(bomlist)

    with refdoc_fs.open(cobom_csv_path, 'wb') as f:
        writer = csv.writer(f)
        writer.writerow(['device'] +
                        [x.configname for x in cobom.descriptors])
        for line in cobom.lines:
            writer.writerow([line.ident] + line.columns)
def build_static(self, *args, **options):
    """
    Builds the static files directory as well as robots.txt and favicon.ico
    """
    logger.debug("Building static directory")
    if self.verbosity > 1:
        self.stdout.write("Building static directory")
    management.call_command(
        "collectstatic", interactive=False, verbosity=0
    )

    # Set the target directory inside the filesystem.
    target_dir = path.join(
        self.build_dir, settings.STATIC_URL.lstrip('/')
    )
    target_dir = smart_text(target_dir)

    if os.path.exists(self.static_root) and settings.STATIC_URL:
        if getattr(settings, 'BAKERY_GZIP', False):
            self.copytree_and_gzip(self.static_root, target_dir)
        # if gzip isn't enabled, just copy the tree straight over
        else:
            logger.debug("Copying {}{} to {}{}".format(
                "osfs://", self.static_root, self.fs_name, target_dir))
            copy.copy_dir("osfs:///", self.static_root, self.fs, target_dir)

    # If they exist in the static directory, copy the robots.txt
    # and favicon.ico files down to the root so they will work
    # on the live website.
    robots_src = path.join(target_dir, 'robots.txt')
    if self.fs.exists(robots_src):
        robots_target = path.join(self.build_dir, 'robots.txt')
        logger.debug("Copying {}{} to {}{}".format(
            self.fs_name, robots_src, self.fs_name, robots_target))
        self.fs.copy(robots_src, robots_target)

    favicon_src = path.join(target_dir, 'favicon.ico')
    if self.fs.exists(favicon_src):
        favicon_target = path.join(self.build_dir, 'favicon.ico')
        logger.debug("Copying {}{} to {}{}".format(
            self.fs_name, favicon_src, self.fs_name, favicon_target))
        self.fs.copy(favicon_src, favicon_target)
def build_media(self):
    """
    Build the media files.
    """
    logger.debug("Building media directory")
    if self.verbosity > 1:
        self.stdout.write("Building media directory")
    if os.path.exists(self.media_root) and settings.MEDIA_URL:
        target_dir = path.join(self.fs_name, self.build_dir,
                               settings.MEDIA_URL.lstrip('/'))
        logger.debug("Copying {}{} to {}{}".format(
            "osfs://", self.media_root, self.fs_name, target_dir))
        copy.copy_dir("osfs:///", smart_text(self.media_root), self.fs,
                      smart_text(target_dir))
def insert_document(sno, docpath, series):
    fname = os.path.split(docpath)[1]
    if not fname.startswith(sno) and \
            not os.path.splitext(fname)[0].endswith(sno):
        fname = sno + "-" + fname
    if series is None:
        series = serialnos.get_series(sno)
    storepath = path.join(series, fname)
    if not docstore_fs.exists(path.dirname(storepath)):
        docstore_fs.makedir(path.dirname(storepath), recursive=True)
    copyfile(local_fs, docpath, docstore_fs, storepath)
    return storepath
def gen_schpdf(projfolder, namebase, force=False):
    """
    Generates a PDF file of all the project schematics listed in the gEDA
    project file.

    This function does not use jinja2 and latex. It relies on
    :func:`tendril.gedaif.gschem.conv_gsch2pdf` instead.

    :param projfolder: The gEDA project folder.
    :type projfolder: str
    :param namebase: The project name.
    :type namebase: str
    :param force: Regenerate even if up-to-date.
    :type force: bool
    :return: The output file path.

    .. rubric:: Paths

    * Output File : ``<project_doc_folder>/<namebase>-schematic.pdf``
    * Source Files : The project's schematic folder.

    """
    gpf = projfile.GedaProjectFile(projfolder)
    sch_mtime = fsutils.get_folder_mtime(gpf.schfolder)
    configfile = conffile.ConfigsFile(projfolder)
    docfolder = get_project_doc_folder(projfolder)
    schpdfpath = path.join(docfolder, namebase + '-schematic.pdf')
    outf_mtime = fsutils.get_file_mtime(schpdfpath, fs=refdoc_fs)

    if not force and outf_mtime is not None and outf_mtime > sch_mtime:
        logger.debug('Skipping up-to-date ' + schpdfpath)
        return schpdfpath

    logger.info('Regenerating ' + schpdfpath + os.linesep +
                'Last modified : ' + str(sch_mtime) +
                '; Last Created : ' + str(outf_mtime))

    if configfile.rawconfig is not None:
        workspace_outpath = workspace_fs.getsyspath(schpdfpath)
        workspace_folder = workspace_fs.getsyspath(path.dirname(schpdfpath))
        workspace_fs.makedir(path.dirname(schpdfpath),
                             recursive=True, allow_recreate=True)
        pdffiles = []
        for schematic in gpf.schfiles:
            schfile = os.path.normpath(projfolder +
                                       '/schematic/' + schematic)
            pdffile = gschem.conv_gsch2pdf(schfile, workspace_folder)
            pdffiles.append(pdffile)
        pdf.merge_pdf(pdffiles, workspace_outpath)
        for pdffile in pdffiles:
            os.remove(pdffile)
        copyfile(workspace_fs, schpdfpath, refdoc_fs, schpdfpath,
                 overwrite=True)
        return schpdfpath
def extract(self, archive, lib_name):
    args = self.args
    from ...docgen.extracter import Extracter
    if getattr(args, 'extract', None) is None:
        extract_fs = TempFS('moyadoc-{}'.format(lib_name))
    else:
        extract_fs = self.get_fs(join(args.extract, lib_name))
    extracter = Extracter(archive, extract_fs)
    extracter.extract_lib(lib_name)
    return extract_fs
def compile_fs_template(fs, template_text, data=None, path=None):
    """Compile a fs template structure in to a filesystem object"""
    if data is None:
        data = {}
    template = Template(template_text)
    template.re_special = re.compile(
        r'\{\{\%((?:\".*?\"|\'.*?\'|.|\s)*?)\%\}\}|(\{\{\#)|(\#\}\})')
    context = Context({"data": data}, re_sub=r'\$\{\{(.*?)\}\}')
    with context.frame("data"):
        fs_template = template.render(context)

    out_type = None
    out_filename = None
    file_lines = []

    def write_file(filename, file_type):
        if filename:
            if file_type.lower() == "text":
                with fs.open(filename, 'wt') as f:
                    f.write('\n'.join(file_lines) + '\n')
            elif file_type.lower() == "wraptext":
                import textwrap
                with fs.open(filename, 'wt') as f:
                    for line in file_lines:
                        f.write('\n'.join(textwrap.wrap(line, 79)) + '\n')
            elif file_type.lower() == "bin":
                with fs.open(filename, 'wb') as f:
                    for line in file_lines:
                        # decode pairs of hex digits into raw bytes
                        chunk = bytearray(int(a + b, 16)
                                          for a, b in zip(line[::2],
                                                          line[1::2]))
                        f.write(chunk)
        del file_lines[:]

    for line in fs_template.splitlines():
        line = line.rstrip()
        if line.startswith('@'):
            # out_path = out_filename
            write_file(out_filename, out_type)
            out_filename = None
            out_type, path_spec = line[1:].split(' ', 1)
            if path:
                path_spec = join(path, relpath(path_spec))
            if path_spec.endswith('/'):
                fs.makedir(path_spec, allow_recreate=True, recursive=True)
                out_filename = None
            else:
                fs.makedir(dirname(path_spec), allow_recreate=True,
                           recursive=True)
                out_filename = path_spec
            continue
        if out_filename:
            file_lines.append(line)

    if out_filename:
        write_file(out_filename, out_type)
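# A minimal template in the @-directive format consumed above (assumed
# syntax: '@<type> <path>' headers followed by content lines, with
# '${{...}}' substitutions resolved against `data`). The usage below is a
# sketch and needs this module's Template/Context imports to run.
EXAMPLE_TEMPLATE = """\
@text readme.txt
Hello ${{name}}
@text notes/todo.txt
- write docs
"""

# from fs.memoryfs import MemoryFS
# out_fs = MemoryFS()
# compile_fs_template(out_fs, EXAMPLE_TEMPLATE, data={"name": "world"})
# print(out_fs.readtext("readme.txt"))  # expected: Hello world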
def do_build(dataplicity_path):
    """Build firmware in project directory"""
    with fsopendir(dataplicity_path) as src_fs:
        version = firmware.get_version(src_fs)

        print("Building version {:010}...".format(version))

        filename = "firmware-{}.zip".format(version)
        firmware_path = join('__firmware__', filename)
        src_fs.makedir('__firmware__', allow_recreate=True)

        with src_fs.open(firmware_path, 'wb') as zip_file:
            dst_fs = ZipFS(zip_file, 'w')
            firmware.build(src_fs, dst_fs)
            dst_fs.close()

        size = src_fs.getsize(firmware_path)
        print("Wrote {} ({:,} bytes)".format(firmware_path, size))
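# The same write-a-zip-into-a-parent-filesystem pattern, sketched with the
# current pyfilesystem2 API (the snippet above uses the older fs 0.x calls);
# the names here are illustrative.
from fs.memoryfs import MemoryFS
from fs.zipfs import ZipFS

parent = MemoryFS()
parent.makedir("__firmware__")
with parent.open("__firmware__/firmware-1.zip", "wb") as zip_file:
    with ZipFS(zip_file, write=True) as zip_fs:
        zip_fs.writetext("manifest.txt", "version=1\n")
print(parent.getsize("__firmware__/firmware-1.zip"))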
def __init__(self, deposition_id):
    """Initialize storage."""
    self.fs_path = path.join(
        cfg['DEPOSIT_STORAGEDIR'],
        str(deposition_id)
    )
def gen_confdoc(projfolder, configname, force=False):
    """
    Generate a PDF documenting a single configuration of the project. The
    document should include a reasonably thorough representation of the
    contents of the configuration related sections of the
    :mod:`tendril.gedaif.conffile.ConfigsFile`.

    :param projfolder: The gEDA project folder
    :type projfolder: str
    :param configname: The configuration name for which the BOM should be
                       generated.
    :type configname: str
    :param force: Regenerate even if up-to-date.
    :type force: bool
    :return: The output file path.

    .. rubric:: Paths

    * Output File : ``<project_doc_folder>/confdocs/<configname>-doc.pdf``
    * Source Files : The project's schematic folder.

    .. rubric:: Template Used

    ``tendril/dox/templates/projects/geda-conf-doc.tex``
    (:download:`Included version
    <../../tendril/dox/templates/projects/geda-conf-doc.tex>`)

    .. rubric:: Stage Keys Provided

    .. list-table::

        * - ``configname``
          - The name of the configuration (a card or cable name).
        * - ``desc``
          - The description of the configuration.
        * - ``pcbname``
          - The name of the base PCB.
        * - ``obom``
          - An :mod:`tendril.boms.outputbase.OutputBom` instance

    """
    gpf = projfile.GedaProjectFile(projfolder)
    sch_mtime = fsutils.get_folder_mtime(gpf.schfolder)
    docfolder = get_project_doc_folder(projfolder)
    outpath = path.join(docfolder, 'confdocs', configname + '-doc.pdf')
    outf_mtime = fsutils.get_file_mtime(outpath, fs=refdoc_fs)

    if not force and outf_mtime is not None and outf_mtime > sch_mtime:
        logger.debug('Skipping up-to-date ' + outpath)
        return outpath

    logger.info('Regenerating ' + outpath + os.linesep +
                'Last modified : ' + str(sch_mtime) +
                '; Last Created : ' + str(outf_mtime))

    bom = boms_electronics.import_pcb(projfolder)
    obom = bom.create_output_bom(configname)
    group_oboms = bom.get_group_boms(configname)
    stage = {'configname': obom.descriptor.configname,
             'pcbname': obom.descriptor.pcbname,
             'bom': bom,
             'obom': obom,
             'group_oboms': group_oboms}

    config = obom.descriptor.configurations.configuration(configname)
    stage['desc'] = config['desc']

    template = 'projects/geda-conf-doc.tex'
    workspace_outpath = workspace_fs.getsyspath(outpath)
    workspace_fs.makedir(path.dirname(outpath),
                         recursive=True, allow_recreate=True)
    render.render_pdf(stage, template, workspace_outpath)
    copyfile(workspace_fs, outpath, refdoc_fs, outpath, overwrite=True)
    return outpath
def get_fs(self, path):
    if path is None:
        path = join(get_moya_dir(), './documentation')
    fs = fsopendir(path, create_dir=True)
    fs.dir_mode = int('777', 8)
    return fs
def gen_confpdf(projfolder, configname, namebase, force=False):
    """
    Generates a PDF file of the documentation for a specific configuration
    of a project. It uses other document generator functions to make the
    various parts of the master document and then merges them.

    :param projfolder: The gEDA project folder.
    :type projfolder: str
    :param configname: The name of the configuration.
    :type configname: str
    :param namebase: The project name.
    :type namebase: str
    :param force: Regenerate even if up-to-date.
    :type force: bool
    :return: The output file path.

    .. rubric:: Paths

    * Output File : ``<project_doc_folder>/confdocs/<configname>.pdf``
    * Source Files : The project's schematic folder.

    .. rubric:: Included Documents

    * Configuration BOM, generated by :func:`gen_confbom`
    * (Full) Schematic PDF, generated by :func:`gen_schpdf`

    .. todo:: It may be useful to rebuild the schematics after removing all
              the unpopulated components. This is a fairly involved process,
              and is deferred until later.

    """
    gpf = projfile.GedaProjectFile(projfolder)
    sch_mtime = fsutils.get_folder_mtime(gpf.schfolder)
    docfolder = get_project_doc_folder(projfolder)
    confdocfile = path.join(docfolder, 'confdocs', configname + '.pdf')
    outf_mtime = fsutils.get_file_mtime(confdocfile, fs=refdoc_fs)

    if not force and outf_mtime is not None and outf_mtime > sch_mtime:
        logger.debug('Skipping up-to-date ' + confdocfile)
        return confdocfile

    logger.info('Regenerating ' + confdocfile + os.linesep +
                'Last modified : ' + str(sch_mtime) +
                '; Last Created : ' + str(outf_mtime))

    pdffiles = [gen_confbom(projfolder, configname, force),
                gen_confdoc(projfolder, configname, force),
                gen_schpdf(projfolder, namebase, force)]

    for p in pdffiles:
        if p and not workspace_fs.exists(p):
            workspace_fs.makedir(path.dirname(p),
                                 recursive=True, allow_recreate=True)
            copyfile(refdoc_fs, p, workspace_fs, p)

    workspace_pdffiles = [workspace_fs.getsyspath(x)
                          for x in pdffiles if x is not None]

    workspace_outpath = workspace_fs.getsyspath(confdocfile)
    workspace_fs.makedir(path.dirname(confdocfile),
                         recursive=True, allow_recreate=True)

    pdf.merge_pdf(workspace_pdffiles, workspace_outpath)
    copyfile(workspace_fs, confdocfile, refdoc_fs, confdocfile,
             overwrite=True)
    return confdocfile
def run(self):
    parser = self.get_argparse()
    args = parser.parse_args(sys.argv[1:])

    if args.version is None:
        major, minor = __version__.split('.')[:2]
        version = "{}.{}".format(major, minor)
    else:
        version = args.version

    try:
        with open(expanduser(args.settings), 'rt') as f_ini:
            cfg = SettingsContainer.read_from_file(f_ini)
            print("Read settings from {}".format(args.settings))
    except IOError:
        cfg = SettingsContainer()

    from ..docgen.extracter import Extracter
    from ..docgen.builder import Builder
    from ..command import doc_project
    location = dirname(doc_project.__file__)

    extract_fs = OSFS(join('doccode', version), create=True)
    base_docs_fs = OSFS('text')
    languages = [d for d in base_docs_fs.listdir(dirs_only=True)
                 if len(d) == 2]

    def do_extract():
        print("Extracting docs v{}".format(version))
        utils.remove_all(extract_fs, '/')
        archive, context, doc = moya_build.build_server(
            location, 'settings.ini')
        extract_fs.makedir("site/docs", recursive=True)
        extract_fs.makedir("site/tags", recursive=True)
        # extract_fs.makedir("libs")

        with extract_fs.opendir('site/tags') as tags_fs:
            extracter = Extracter(archive, tags_fs)
            const_data = {}
            builtin_tags = []
            for namespace in self.builtin_namespaces:
                xmlns = getattr(namespaces, namespace, None)
                if xmlns is None:
                    raise ValueError(
                        "XML namespace '{}' is not in namespaces.py".format(
                            namespace))
                namespace_tags = archive.registry.get_elements_in_xmlns(
                    xmlns).values()
                builtin_tags.extend(namespace_tags)
            extracter.extract_tags(builtin_tags, const_data=const_data)

        for language in languages:
            with extract_fs.makeopendir("site/docs") as language_fs:
                doc_extracter = Extracter(None, language_fs)
                docs_fs = base_docs_fs.opendir(language)
                doc_extracter.extract_site_docs(docs_fs, dirname=language)

    if args.extract:
        do_extract()

    if args.build:
        theme_path = cfg.get('paths', 'theme', None)
        dst_path = join('html', version)
        if theme_path is None:
            theme_fs = OSFS('theme')
        else:
            theme_fs = fsopendir(theme_path)

        output_path = cfg.get('paths', 'output', None)

        if output_path is None:
            output_base_fs = OSFS(dst_path, create=True)
        else:
            output_root_base_fs = fsopendir(output_path)
            output_base_fs = output_root_base_fs.makeopendir(
                dst_path, recursive=True)
        # output_base_fs = OSFS(join('html', version), create=True)
        utils.remove_all(output_base_fs, '/')

        def do_build():
            print("Building docs v{}".format(version))
            lib_info = {}
            lib_paths = {}
            for long_name, lib in self.document_libs:
                lib_info[long_name] = moya_build.get_lib_info(lib)
                lib_paths[long_name] = output_base_fs.getsyspath(
                    join('libs', long_name, 'index.html'))
            for language in languages:
                docs_fs = base_docs_fs.makeopendir(language)
                output_fs = output_base_fs.makeopendir(language)
                utils.remove_all(output_fs, '/')

                with extract_fs.opendir("site") as extract_site_fs:
                    builder = Builder(extract_site_fs, output_fs, theme_fs)
                    from ..tools import timer
                    with timer('render time'):
                        builder.build({"libs": lib_info,
                                       "lib_paths": lib_paths})

            # output_base_fs.makedir("libs", allow_recreate=True)
            # for long_name, lib in self.document_libs:
            #     source_path = extract_fs.getsyspath(join("libs", long_name))
            #     output_path = output_base_fs.getsyspath('libs')
            #     cmd_template = ('moya --debug doc build {} --theme libtheme '
            #                     '--source "{}" --output "{}"')
            #     cmd = cmd_template.format(lib, source_path, output_path)
            #     os.system(cmd)

        def extract_build():
            do_extract()
            do_build()

        do_build()

        if not args.nobrowser:
            import webbrowser
            webbrowser.open(output_base_fs.getsyspath('en/index.html'))

        if args.watch:
            print("Watching for changes...")
            watcher = ReloadChangeWatcher(base_docs_fs, extract_build)
            while 1:
                try:
                    time.sleep(0.1)
                except:
                    break

    return 0
def get_docs_list(projfolder, cardname=None):
    """
    Returns a list of :class:`docstore.ExposedDocument` instances, pointing
    to the documentation linked to the gEDA project or card specified by
    the parameters.

    If the ``cardname`` is not specified, the documents linked to the base
    PCB only are returned. If the ``cardname`` is specified, the documents
    defining the specific configuration only are returned.

    :param projfolder: The gEDA project folder.
    :param cardname: The cardname.
    :return: list of :class:`ExposedDocument`

    """
    configfile = conffile.ConfigsFile(projfolder)
    namebase = configfile.pcbname
    is_cable = False
    if namebase is None:
        try:
            namebase = configfile.rawconfig['cblname']
            is_cable = True
        except KeyError:
            logger.error("Project does not have a known identifier. "
                         "Skipping : " + projfolder)
            return
    project_doc_folder = get_project_doc_folder(projfolder)
    if not project_doc_folder:
        return []
    if not cardname:
        # Get all docs linked to the project
        rval = [ExposedDocument('Project Master Doc',
                                path.join(project_doc_folder,
                                          namebase + '-masterdoc.pdf'),
                                refdoc_fs),
                ExposedDocument(namebase + ' Schematic (Full)',
                                path.join(project_doc_folder,
                                          namebase + '-schematic.pdf'),
                                refdoc_fs),
                ExposedDocument('Composite Bom (All Configs)',
                                path.join(project_doc_folder, 'confdocs',
                                          'conf-boms.csv'),
                                refdoc_fs),
                ]
        if is_cable:
            return rval
        gpf = projfile.GedaProjectFile(configfile.projectfolder)
        rval.extend([ExposedDocument(namebase + ' PCB Layers',
                                     path.join(project_doc_folder,
                                               namebase + '-pcb.pdf'),
                                     refdoc_fs),
                     ExposedDocument(namebase + ' PCB Pricing',
                                     path.join(project_doc_folder,
                                               namebase + '-pricing.pdf'),
                                     refdoc_fs),
                     ExposedDocument(namebase + ' PCB DXF',
                                     path.join(project_doc_folder, os.pardir,
                                               configfile.pcbname + '.dxf'),
                                     refdoc_fs),
                     ExposedDocument(namebase + ' PCB Gerber',
                                     path.join(project_doc_folder, os.pardir,
                                               gpf.pcbfile + '-gerber.zip'),
                                     refdoc_fs),
                     ])
        return rval
    else:
        cardname = cardname.strip()
        rval = [ExposedDocument(cardname + ' Doc',
                                path.join(project_doc_folder, 'confdocs',
                                          cardname + '.pdf'),
                                refdoc_fs),
                ExposedDocument(cardname + ' Reference BOM',
                                path.join(project_doc_folder, 'confdocs',
                                          cardname + '-bom.pdf'),
                                refdoc_fs),
                ExposedDocument(cardname + ' Schematic (Full)',
                                path.join(project_doc_folder,
                                          namebase + '-schematic.pdf'),
                                refdoc_fs),
                ExposedDocument('Composite Bom (All Configs)',
                                path.join(project_doc_folder, 'confdocs',
                                          'conf-boms.csv'),
                                refdoc_fs),
                ExposedDocument('Project Master Doc',
                                path.join(project_doc_folder,
                                          namebase + '-masterdoc.pdf'),
                                refdoc_fs),
                ]
        return rval
def build_queryset(self):
    logger.debug("Building %s" % self.build_path)
    self.request = self.create_request(self.build_path)
    self.prep_directory(self.build_path)
    target_path = path.join(settings.BUILD_DIR, self.build_path)
    self.build_file(target_path, self.get_content())
def gen_pcbpricing(projfolder, namebase, force=False):
    """
    Generates a PDF file with the pricing of the (bare) PCB provided by the
    gEDA project.

    The pcb file is the one listed in the gEDA project file, and the pcbname
    is the one specified in the :mod:`tendril.gedaif.conffile.ConfigsFile`.
    The pricing information is read out from the PCB's ``sourcing.yaml``
    file, which in turn is intended to be created by sourcing modules.

    .. todo:: This function presently uses
        :func:`tendril.dox.render.render_lineplot`, which is marked for
        deprecation. It should be rewritten to use the
        :func:`tendril.dox.render.make_graph` route instead.

    :param projfolder: The gEDA project folder.
    :type projfolder: str
    :param namebase: The project name.
    :type namebase: str
    :param force: Regenerate even if up-to-date.
    :type force: bool
    :return: The output file path.

    .. rubric:: Paths

    * Output File : ``<project_doc_folder>/<namebase>-pricing.pdf``
    * Source Files : ``<projectfolder>/pcb/sourcing.yaml``

    """
    gpf = projfile.GedaProjectFile(projfolder)
    pcbpricingfp = os.path.join(
        gpf.configsfile.projectfolder, 'pcb', 'sourcing.yaml'
    )
    pcbpricing_mtime = fsutils.get_file_mtime(pcbpricingfp)

    if not os.path.exists(pcbpricingfp):
        return None

    docfolder = get_project_doc_folder(projfolder)
    plotfile = path.join(docfolder, namebase + '-pricing.pdf')
    outf_mtime = fsutils.get_file_mtime(plotfile, fs=refdoc_fs)

    if not force and outf_mtime is not None and outf_mtime > pcbpricing_mtime:
        logger.debug('Skipping up-to-date ' + plotfile)
        return plotfile

    logger.info('Regenerating ' + plotfile + os.linesep +
                'Last modified : ' + str(pcbpricing_mtime) +
                '; Last Created : ' + str(outf_mtime))

    with open(pcbpricingfp, 'r') as f:
        data = yaml.load(f)

    workspace_outpath = workspace_fs.getsyspath(plotfile)
    workspace_folder = workspace_fs.getsyspath(path.dirname(plotfile))
    workspace_fs.makedir(path.dirname(plotfile),
                         recursive=True, allow_recreate=True)

    plot1file = os.path.join(workspace_folder, namebase + '-1pricing.pdf')
    plot2file = os.path.join(workspace_folder, namebase + '-2pricing.pdf')

    pltnote = "This pricing refers to the bare PCB only. " \
              "See the corresponding Config Docs for Card Pricing"

    plt1data = {key: data['pricing'][key]
                for key in data['pricing'].keys() if key <= 10}
    plt1title = gpf.configsfile.configdata['pcbname']
    plt1title += " PCB Unit Price vs Order Quantity (Low Quantity)"
    plot1file = render.render_lineplot(
        plot1file, plt1data, plt1title, pltnote
    )

    if max(data['pricing'].keys()) > 10:
        plt2data = {key: data['pricing'][key]
                    for key in data['pricing'].keys() if key > 10}
        plt2title = gpf.configsfile.configdata['pcbname']
        plt2title += " PCB Unit Price vs Order Quantity (Production Quantity)"
        plot2file = render.render_lineplot(
            plot2file, plt2data, plt2title, pltnote
        )
        pdf.merge_pdf([plot1file, plot2file], workspace_outpath)
        os.remove(plot2file)
    else:
        shutil.copyfile(plot1file, workspace_outpath)
        os.remove(plot1file)
    copyfile(workspace_fs, plotfile, refdoc_fs, plotfile, overwrite=True)
    return plotfile
def testOpen(self):
    filename = path.join(self.temp_dir, 'foo.txt')
    file_object = opener.open(filename, 'wb')
    file_object.close()
    self.assertTrue(file_object.closed)
def run(self):
    log.setLevel(logging.DEBUG)
    args = self.args
    username = args.username
    password = args.password
    if username is None:
        # NOTE: the redacted span below is preserved from the source; the
        # original prompts for credentials and resolves dataplicity_path here.
        username = raw_input('username: '******'password: '******'dataplicity path is {}'.format(dataplicity_path))

    if args.build:
        do_build(dataplicity_path)

    with fsopendir(dataplicity_path) as src_fs:
        version = firmware.get_version(src_fs)
        filename = "firmware-{}.zip".format(version)
        firmware_path = join('__firmware__', filename)
        try:
            firmware_contents = src_fs.getcontents(firmware_path, 'rb')
        except ResourceNotFoundError:
            print("{} is missing, you can build firmware with "
                  "'dataplicity build'".format(firmware_path))
            return -1

    firmware_b64 = b64encode(firmware_contents)

    client = self.app.make_client(log, create_m2m=False)
    conf = client.conf
    remote = client.remote

    device_class_name = conf.get('device', 'class')
    # serial = conf.get('device', 'serial')

    ui = firmware.get_ui(fsopendir(dataplicity_path))

    remote = self.app.make_client(
        log, create_m2m=False,
        conf="/etc/dataplicity/dataplicity.conf").remote

    print("uploading firmware...")
    with remote.batch() as batch:
        # batch.call_with_id('auth_result',
        #                    'device.check_auth',
        #                    device_class=device_class_name,
        #                    serial=client.serial,
        #                    auth_token=client.auth_token)
        batch.call_with_id("publish_result",
                           "device.publish",
                           device_class=device_class_name,
                           version=version,
                           firmware_b64=firmware_b64,
                           ui=ui,
                           username=username,
                           password=password,
                           company=company,
                           replace=args.replace)
        # batch.get_result('auth_result')
    try:
        publish_result = batch.get_result('publish_result')
    except JSONRPCError as e:
        if e.code == ErrorCodes.FIRMWARE_EXISTS:
            print("Firmware {:010} exists!\n"
                  "Bump the version number in firmware.conf or use "
                  "--replace to overwrite".format(version))
            return -1
        raise

    print("visit {} to manage firmware".format(publish_result['url']))

    if args.bump:
        with fsopendir(dataplicity_path) as src_fs:
            firmware.bump(src_fs)
def gen_masterdoc(projfolder, namebase, force=False):
    """
    Generates a PDF file of the project's Master documentation. It uses
    other document generator functions to make the various parts of the
    master document and then merges them.

    .. note:: Due to the way groups and motifs are handled, an unconfigured
              BOM is somewhat meaningless. Therefore, no BOM is included in
              the masterdoc.

    :param projfolder: The gEDA project folder.
    :type projfolder: str
    :param namebase: The project name.
    :type namebase: str
    :param force: Regenerate even if up-to-date.
    :type force: bool
    :return: The output file path.

    .. rubric:: Paths

    * Output File : ``<project_doc_folder>/<namebase>-masterdoc.pdf``
    * Source Files : The project's schematic folder.

    .. rubric:: Included Documents

    * Config Documentation, generated by :func:`gen_configsdoc`
    * Schematic PDF, generated by :func:`gen_schpdf`

    """
    gpf = projfile.GedaProjectFile(projfolder)
    sch_mtime = fsutils.get_folder_mtime(gpf.schfolder)
    docfolder = get_project_doc_folder(projfolder)
    masterdocfile = path.join(docfolder, namebase + '-masterdoc.pdf')
    outf_mtime = fsutils.get_file_mtime(masterdocfile, fs=refdoc_fs)

    if not force and outf_mtime is not None and outf_mtime > sch_mtime:
        logger.debug('Skipping up-to-date ' + masterdocfile)
        return masterdocfile

    logger.info('Regenerating ' + masterdocfile + os.linesep +
                'Last modified : ' + str(sch_mtime) +
                '; Last Created : ' + str(outf_mtime))

    pdffiles = [gen_configsdoc(projfolder, namebase, force=False),
                gen_schpdf(projfolder, namebase, force=False)]

    for p in pdffiles:
        if p and not workspace_fs.exists(p):
            workspace_fs.makedir(path.dirname(p),
                                 recursive=True, allow_recreate=True)
            copyfile(refdoc_fs, p, workspace_fs, p)

    workspace_pdffiles = [workspace_fs.getsyspath(x)
                          for x in pdffiles if x is not None]

    workspace_outpath = workspace_fs.getsyspath(masterdocfile)
    workspace_fs.makedir(path.dirname(masterdocfile),
                         recursive=True, allow_recreate=True)

    pdf.merge_pdf(workspace_pdffiles, workspace_outpath)
    copyfile(workspace_fs, masterdocfile, refdoc_fs, masterdocfile,
             overwrite=True)
    return masterdocfile