def run_update(args):
    m = MetapackCliMemo(args, downloader)

    m.doc  # Trigger an error if the doc can't be found

    if m.args.schemas:
        update_schemas(m)

    if m.args.schema_properties:
        update_schema_props(m)

    if m.args.clean_properties:
        clean_properties(m)

    if m.args.alt_name:
        move_alt_names(m)

    if m.args.categories:
        update_categories(m)

    if m.args.descriptions:
        update_descriptions(m)

    if m.args.giturl:
        from metapack.cli.core import add_giturl
        add_giturl(m.doc, force=True)
        write_doc(m.doc)

    if m.args.promote:
        update_promote(m)

    if m.args.custom_update:
        update_custom(m)

    if m.args.files:
        add_files(m)

    if m.mtfile_url.scheme == 'file' and m.args.name:
        mod_version = m.args.version if m.args.version \
            else '+' if m.args.increment \
            else False

        update_name(m.mt_file, fail_on_missing=True, force=m.args.force,
                    mod_version=mod_version)

    if m.args.coverage:
        update_coverage(m)

    if m.args.touch:
        touch_metadata(m)

    if m.args.semantic:
        to_semantic_version(m)
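
# A minimal sketch (a hypothetical helper, not part of the CLI) of the
# mod_version selection in run_update: the chained conditional resolves to
# an explicit version string, to '+' to request an increment, or to False
# to leave the version unchanged.
def _pick_mod_version(version, increment):
    # ('2.1', False) -> '2.1'; (None, True) -> '+'; (None, False) -> False
    return version if version else '+' if increment else False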
def new_cmd(args):
    downloader = Downloader.get_instance()

    m = MetapackCliMemo(args, downloader)

    if m.args.eda:
        write_eda_notebook(m)
    elif m.args.new_notebook:
        write_notebook(m)
    elif m.args.metatab:
        write_metatab_notebook(m)
def run_url_add(args):
    """Add a resource entry, downloading and intuiting the file, replacing
    entries with the same reference"""

    m = MetapackCliMemo(args, downloader)

    update_name(m.mt_file, fail_on_missing=False, report_unchanged=False)

    if isinstance(m.mt_file, MetapackDoc):
        doc = m.mt_file
    else:
        doc = MetapackDoc(m.mt_file)

    if 'Resources' not in doc:
        doc.new_section('Resources')

    doc['Resources'].args = [
        e for e in set(doc['Resources'].args
                       + ['Name', 'StartLine', 'HeaderLines', 'Encoding'])
        if e
    ]

    seen_names = set()

    u = parse_app_url(args.url)

    # Web and file URLs don't list the same way.
    if u.proto == 'file':
        entries = u.list()
    else:
        entries = [ssu for su in u.list() for ssu in su.list()]

    errors = []

    for e in entries:
        if not add_single_resource(doc, e, cache=m.cache,
                                   seen_names=seen_names):
            errors.append(e)

    if errors:
        prt()
        warn("Found, but failed to add these urls:")
        for e in errors:
            print(' ', e)

    write_doc(doc)
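
# A sketch of the two-level listing above (an assumption based on how the
# comprehension is written, not on documented rowgenerators behavior): a
# remote URL may list container entries, each of which lists its own
# sub-resources, so both levels are flattened into one sequence:
#
#     u.list()         -> [container1, container2, ...]
#     container.list() -> [entry1, entry2, ...]
#     entries          -> [entry1, entry2, entry3, ...]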
def run_url_scrape(args):
    m = MetapackCliMemo(args, downloader)

    from metapack.util import scrape_urls_from_web_page

    doc = m.doc
    url = m.args.url

    # Record the page the URLs were scraped from
    doc['Resources'].new_term('DownloadPage', url)

    d = scrape_urls_from_web_page(url)

    if d.get('error'):
        err(d.get('error'))

    new_resources = 0
    new_documentation = 0

    if not args.no_resources:
        for k, v in d['sources'].items():
            u = parse_app_url(v['url'])
            t = doc['Resources'].new_term('DataFile', v['url'],
                                          name=u.fspath.stem,
                                          description=v.get('description'))
            new_resources += 1

            if args.verbose:
                prt(t, t.props)

    if not args.no_docs:
        for k, v in d['external_documentation'].items():
            term_name = classify_url(v['url'])
            u = parse_app_url(v['url'])
            t = doc['Documentation'].new_term(term_name, v['url'],
                                              name=u.fspath.stem,
                                              description=v.get('description'))
            new_documentation += 1

            if args.verbose:
                prt(t, t.props)

    prt("Added {} resource and {} documentation terms".format(
        new_resources, new_documentation))

    if not args.dry_run:
        write_doc(doc)
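
# The loops above assume scrape_urls_from_web_page returns a dict shaped
# roughly like this (a sketch inferred from the usage here, not the
# authoritative contract of metapack.util):
#
#     {
#         'error': None,
#         'sources': {
#             'data': {'url': 'http://example.com/data.csv',
#                      'description': 'Primary data file'},
#         },
#         'external_documentation': {
#             'about': {'url': 'http://example.com/about.html',
#                       'description': 'Dataset documentation'},
#         },
#     }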
def touch_cmd(args):
    # Always turn the cache off; won't update otherwise
    downloader.use_cache = False
    args.no_cache = True

    m = MetapackCliMemo(args, downloader)

    if m.args.build:
        build(m)

    if args.compare:
        compare_hashes(m)  # This will exit, so nothing after it will run

    if args.write_hashes:
        write_hashes(m)

    if args.read_hashes:
        read_hashes(m)
def _build_cmd(args):
    from rowgenerators.rowpipe.exceptions import TooManyCastingErrors

    downloader.set_callback(build_downloader_callback)

    m = MetapackCliMemo(args, downloader)

    if m.args.profile:
        from metatab.s3 import set_s3_profile
        set_s3_profile(m.args.profile)

    if m.args.clean_cache:
        clean_cache('metapack')

    try:
        changes = metatab_derived_handler(m)
        prt(f"{changes} changes")
    except TooManyCastingErrors as e:
        prt('Casting Errors:')
        for error in e.errors:
            prt(error)

        if m.args.exceptions:
            raise e
        else:
            err(e)
    except Exception as e:
        if m.args.exceptions:
            raise e
        else:
            err(e)

    clean_cache(m.cache)

    return changes
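
# _build_cmd returns the change count reported by metatab_derived_handler;
# make_cmd below uses that truthy/falsy value both as "the build changed"
# and as "worth uploading" when it assigns
# do_uploads = build_is_different = _build_cmd(ns).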
def make_cmd(args):
    # Always turn the cache off; builds won't update otherwise
    downloader = Downloader.get_instance()

    m = MetapackCliMemo(args, downloader)

    sb, reason = should_build(m)

    print(f"🛠️ {m.doc.name}")

    if not sb:
        print(f"☑️ Not building; {reason}")
        return

    print(f"⚙️ Building: {reason}")

    if args.dry_run:
        return

    if not any([args.build, args.s3, args.wordpress_site]):
        args.build = True

    if args.build:
        downloader = Downloader.get_instance()
        downloader.use_cache = False

        ns = mk_ns(args, 'build', '-X', '-F', '-f', '-z' if args.zip else '')
        do_uploads = build_is_different = _build_cmd(ns)

        downloader.reset_callback()
    else:
        build_is_different = False
        do_uploads = True

    if do_uploads and args.s3:
        ns = mk_ns(args, 's3')
        ns.s3 = args.s3

        if args.profile:
            ns.profile = args.profile

        ns.metatabfile = args.metatabfile

        run_s3(ns)

    if do_uploads and args.wordpress_site:
        try:
            from metapack_wp.wp import run_wp

            ns = mk_ns(args, 'wp')
            ns.site_name = args.wordpress_site
            ns.source = args.metatabfile
            ns.group = args.group
            ns.tag = args.tag

            run_wp(ns)
        except ModuleNotFoundError as e:
            warn(f"Can't publish to Wordpress: {str(e)}")

    m = MetapackCliMemo(args, downloader)

    # If the build changed, update the version for the next build
    if build_is_different:
        # Increment the version number
        m.doc.update_name(mod_version='+')
        print(f"➕ Incremented version to {m.doc.name}")

        write_hashes(m)
        m.doc.write()  # Updates modified time
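
# A sketch of the namespace plumbing above (illustrative; mk_ns is this
# module's own helper, and its behavior is assumed from the call sites):
# mk_ns appears to build an argparse-style Namespace for a subcommand, after
# which make_cmd attaches extra attributes directly before delegating:
#
#     ns = mk_ns(args, 's3')            # namespace for the 's3' subcommand
#     ns.s3 = args.s3                   # attach the bucket URL
#     ns.metatabfile = args.metatabfile
#     run_s3(ns)                        # same entry point the CLI would use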