def __init__(self, root, module_list, version=None, base_url=None):
    """Collect everything needed to render the documentation pages.

    root -- path to the SDK root directory
    module_list -- modules whose documentation will be generated
    version -- SDK version string to display; when omitted, the current
        SDK version is looked up at call time
    base_url -- optional base href inserted into generated pages
    """
    # Resolve the default here rather than in the signature: a default of
    # get_versions()["version"] would be evaluated once at import time and
    # frozen for the life of the process.
    if version is None:
        version = get_versions()["version"]
    self.root = root
    self.module_list = module_list
    self.version = version
    self.pkg_cfg = packaging.build_pkg_cfg(root)
    self.packages_json = packaging.build_pkg_index(self.pkg_cfg)
    self.base_page = self._create_base_page(root, base_url)
def generate_static_docs(env_root, override_version=None):
    """Regenerate the docs from scratch and bundle them into a tarball.

    env_root -- SDK environment root
    override_version -- version string to stamp into the docs; defaults to
        the current SDK version, looked up at call time rather than at
        import time

    Returns the name of the written archive (TGZ_FILENAME).
    """
    if override_version is None:
        override_version = get_versions()["version"]
    clean_generated_docs(get_sdk_docs_path(env_root))
    # Send progress output to an in-memory buffer so generation is silent.
    generate_docs(env_root, override_version, stdout=StringIO.StringIO())
    tgz = tarfile.open(TGZ_FILENAME, 'w:gz')
    try:
        # Store the generated docs tree under the archive name "doc".
        tgz.add(get_sdk_docs_path(env_root), "doc")
    finally:
        # Close even if add() fails, so we don't leak the handle or leave
        # a half-written archive open.
        tgz.close()
    return TGZ_FILENAME
def __init__(self, root, module_list, version=None, base_url=None):
    """Collect everything needed to render the documentation pages.

    root -- path to the SDK root directory
    module_list -- modules whose documentation will be generated
    version -- SDK version string to display; when omitted, the current
        SDK version is looked up at call time
    base_url -- optional base href inserted into generated pages
    """
    # A signature default of get_versions()["version"] is evaluated once
    # at import time; compute it lazily instead.
    if version is None:
        version = get_versions()["version"]
    self.root = root
    self.module_list = module_list
    self.version = version
    self.pkg_cfg = packaging.build_pkg_cfg(root)
    self.packages_json = packaging.build_pkg_index(self.pkg_cfg)
    self.base_page = self._create_base_page(root, base_url)
def generate_named_file(env_root, filename):
    """Render the documentation for one named file.

    Files under the packages tree get API docs; files under the
    dev-guide source tree get guide docs. Any other path raises
    ValueError.
    """
    web_docs = webdocs.WebDocs(env_root, get_versions()["version"],
                               get_base_url(env_root))
    abs_path = os.path.abspath(filename)
    # Pick the generator that matches where the file lives.
    if abs_path.startswith(os.path.join(env_root, 'packages')):
        generator = generate_api_doc
    elif abs_path.startswith(os.path.join(get_sdk_docs_path(env_root),
                                          'dev-guide-source')):
        generator = generate_guide_doc
    else:
        raise ValueError("Not a valid path to a documentation file")
    doc_html, dest_dir, filename = generator(env_root, abs_path, web_docs)
    write_file(env_root, doc_html, dest_dir, filename, False)
def _create_base_page(self, root, base_url):
    """Build the HTML for the docs index page.

    Reads the index template, then splices in the base href (when one was
    supplied), the SDK version, and the third-party / high-level /
    low-level package summary lists at their marked insertion points.
    """
    base_page = unicode(open(root + INDEX_PAGE, "r").read(), "utf8")
    if base_url:
        base_tag = 'href="' + base_url + '"'
        base_page = insert_after(base_page, BASE_URL_INSERTION_POINT, base_tag)
    # Use the version stored by __init__ instead of recomputing
    # get_versions()["version"] here, which silently ignored an
    # explicitly requested version.
    base_page = insert_after(base_page, VERSION_INSERTION_POINT,
                             "Version " + self.version)
    third_party_summaries = self._create_package_summaries(
        self.packages_json, is_third_party)
    base_page = insert_after(base_page, THIRD_PARTY_PACKAGE_SUMMARIES,
                             third_party_summaries)
    high_level_summaries = self._create_package_summaries(
        self.packages_json, is_high_level)
    base_page = insert_after(base_page, HIGH_LEVEL_PACKAGE_SUMMARIES,
                             high_level_summaries)
    low_level_summaries = self._create_package_summaries(
        self.packages_json, is_low_level)
    base_page = insert_after(base_page, LOW_LEVEL_PACKAGE_SUMMARIES,
                             low_level_summaries)
    return base_page
def generate_named_file(env_root, filename_and_path):
    """Render the documentation for one source file.

    Module sources under doc/module-source get module docs; files under
    the dev-guide source tree get dev-guide docs. Any other path raises
    ValueError.
    """
    web_docs = webdocs.WebDocs(env_root, get_versions()["version"],
                               get_base_url(env_root))
    abs_path = os.path.abspath(filename_and_path)
    containing_dir, doc_filename = os.path.split(abs_path)
    if abs_path.startswith(os.path.join(env_root, 'doc', 'module-source')):
        source_root = os.sep.join([env_root, "doc", "module-source"])
        info = ModuleInfo(source_root, containing_dir, doc_filename)
        write_module_doc(env_root, web_docs, info, False)
        return
    if abs_path.startswith(os.path.join(get_sdk_docs_path(env_root),
                                        'dev-guide-source')):
        source_root = os.sep.join([env_root, "doc", "dev-guide-source"])
        info = DevGuideItemInfo(source_root, containing_dir, doc_filename)
        write_devguide_doc(env_root, web_docs, info, False)
        return
    raise ValueError("Not a valid path to a documentation file")
def generate_named_file(env_root, filename_and_path):
    """Render the documentation for one source file.

    Module sources under doc/module-source get module docs; files under
    the dev-guide source tree get dev-guide docs. Any other path raises
    ValueError.
    """
    module_list = get_module_list(env_root)
    web_docs = webdocs.WebDocs(env_root, module_list,
                               get_versions()["version"],
                               get_base_url(env_root))
    abs_path = os.path.abspath(filename_and_path)
    containing_dir, doc_filename = os.path.split(abs_path)
    if abs_path.startswith(os.path.join(env_root, 'doc', 'module-source')):
        source_root = os.sep.join([env_root, "doc", "module-source"])
        info = ModuleInfo(env_root, source_root, containing_dir, doc_filename)
        write_module_doc(env_root, web_docs, info, False)
        return
    if abs_path.startswith(os.path.join(get_sdk_docs_path(env_root),
                                        'dev-guide-source')):
        source_root = os.sep.join([env_root, "doc", "dev-guide-source"])
        info = DevGuideItemInfo(env_root, source_root, containing_dir,
                                doc_filename)
        write_devguide_doc(env_root, web_docs, info, False)
        return
    raise ValueError("Not a valid path to a documentation file")
def _create_base_page(self, root, base_url):
    """Build the HTML for the docs index page.

    Splices the optional base href, the SDK version, and the high- and
    low-level package summaries into the index template.
    """
    base_page = unicode(open(root + INDEX_PAGE, 'r').read(), 'utf8')
    # base_url defaults to None in __init__; only emit a base href when
    # one was actually supplied — the unconditional concatenation here
    # previously raised TypeError for the default case. (The sibling
    # revisions of this method all guard the same way.)
    if base_url:
        base_tag = 'href="' + base_url + '"'
        base_page = insert_after(base_page, BASE_URL_INSERTION_POINT, base_tag)
    sdk_version = get_versions()["version"]
    base_page = insert_after(base_page, VERSION_INSERTION_POINT,
                             "Version " + sdk_version)
    high_level_summaries = self._create_package_summaries(
        self.packages_json, is_high_level)
    base_page = insert_after(base_page, HIGH_LEVEL_PACKAGE_SUMMARIES,
                             high_level_summaries)
    low_level_summaries = self._create_package_summaries(
        self.packages_json, is_low_level)
    base_page = insert_after(base_page, LOW_LEVEL_PACKAGE_SUMMARIES,
                             low_level_summaries)
    return base_page
def _create_base_page(self, root, base_url):
    """Build the HTML for the docs index page.

    Splices the optional base href, the SDK version, and the high- and
    low-level module summaries into the index template.
    """
    base_page = unicode(open(root + INDEX_PAGE, 'r').read(), 'utf8')
    if base_url:
        base_tag = 'href="' + base_url + '"'
        base_page = insert_after(base_page, BASE_URL_INSERTION_POINT, base_tag)
    sdk_version = get_versions()["version"]
    base_page = insert_after(base_page, VERSION_INSERTION_POINT,
                             "Version " + sdk_version)
    module_list = get_module_list(os.sep.join([root, "doc", "module-source"]))
    # Insert one module-summary block per level, in template order.
    for level, insertion_point in (("high", HIGH_LEVEL_MODULE_SUMMARIES),
                                   ("low", LOW_LEVEL_MODULE_SUMMARIES)):
        selected = [info for info in module_list if info.level() == level]
        base_page = insert_after(base_page, insertion_point,
                                 self._make_module_text(selected))
    return base_page
def generate_named_file(env_root, filename):
    """Render the documentation for one named file.

    Files under the packages tree get API docs; files under the
    dev-guide source tree get guide docs. Any other path raises
    ValueError.
    """
    web_docs = webdocs.WebDocs(env_root, get_versions()["version"],
                               get_base_url(env_root))
    abs_path = os.path.abspath(filename)
    packages_prefix = os.path.join(env_root, 'packages')
    if abs_path.startswith(packages_prefix):
        result = generate_api_doc(env_root, abs_path, web_docs)
    else:
        guide_prefix = os.path.join(get_sdk_docs_path(env_root),
                                    'dev-guide-source')
        if not abs_path.startswith(guide_prefix):
            raise ValueError("Not a valid path to a documentation file")
        result = generate_guide_doc(env_root, abs_path, web_docs)
    doc_html, dest_dir, filename = result
    write_file(env_root, doc_html, dest_dir, filename, False)
def _create_base_page(self, root, base_url):
    """Build the HTML for the docs index page.

    Splices the optional base href, the SDK version, and the high- and
    low-level package summaries into the index template.
    """
    base_page = unicode(open(root + INDEX_PAGE, 'r').read(), 'utf8')
    if base_url:
        base_tag = 'href="' + base_url + '"'
        base_page = insert_after(base_page, BASE_URL_INSERTION_POINT, base_tag)
    sdk_version = get_versions()["version"]
    base_page = insert_after(base_page, VERSION_INSERTION_POINT,
                             "Version " + sdk_version)
    # Insert one package-summary block per level, in template order.
    for selector, insertion_point in (
            (is_high_level, HIGH_LEVEL_PACKAGE_SUMMARIES),
            (is_low_level, LOW_LEVEL_PACKAGE_SUMMARIES)):
        summaries = self._create_package_summaries(self.packages_json,
                                                   selector)
        base_page = insert_after(base_page, insertion_point, summaries)
    return base_page
def generate_docs(env_root, version=None, base_url=None, stdout=sys.stdout):
    """Generate the SDK documentation if it is missing or stale.

    env_root -- SDK environment root
    version -- version string to stamp into the docs; defaults to the
        current SDK version, looked up at call time rather than frozen
        at import time
    base_url -- optional base href for generated pages
    stdout -- stream that receives progress messages

    Returns the URL of the generated index page.
    """
    if version is None:
        version = get_versions()["version"]
    docs_dir = get_sdk_docs_path(env_root)
    digest_path = os.path.join(docs_dir, DIGEST)
    # if the generated docs don't exist, generate everything
    if not os.path.exists(os.path.join(docs_dir, "dev-guide")):
        print >>stdout, "Generating documentation..."
        generate_docs_from_scratch(env_root, version, base_url)
        current_status = calculate_current_status(env_root)
        with open(digest_path, "w") as digest_file:
            digest_file.write(current_status)
    else:
        current_status = calculate_current_status(env_root)
        docs_are_up_to_date = False
        if os.path.exists(digest_path):
            # Docs are current when the stored digest matches the one
            # computed from the present source tree.
            with open(digest_path, "r") as digest_file:
                docs_are_up_to_date = current_status == digest_file.read()
        # if the docs are not up to date, regenerate everything
        if not docs_are_up_to_date:
            print >>stdout, "Regenerating documentation..."
            generate_docs_from_scratch(env_root, version, base_url)
            with open(digest_path, "w") as digest_file:
                digest_file.write(current_status)
    return get_base_url(env_root) + "index.html"
def run(arguments=sys.argv[1:], target_cfg=None, pkg_cfg=None, defaults=None,
        env_root=os.environ.get('CUDDLEFISH_ROOT'), stdout=sys.stdout):
    """Entry point for the cfx command-line tool.

    Parses the command line (re-parsing after merging extra arguments
    from the local config), dispatches the simple commands (init and the
    test* family), then for xpi/test/run prepares the package
    configuration, dependency list and module manifest.

    NOTE(review): this view ends right after manifest generation; the
    remainder of run() lies outside the visible source chunk.
    """
    versions = get_versions()
    sdk_version = versions["version"]
    display_version = "Add-on SDK %s (%s)" % (sdk_version, versions["full"])
    parser_kwargs = dict(arguments=arguments,
                         global_options=global_options,
                         parser_groups=parser_groups,
                         usage=usage,
                         version=display_version,
                         defaults=defaults)
    (options, args) = parse_args(**parser_kwargs)
    config_args = get_config_args(options.config, env_root)
    # reparse configs with arguments from local.json
    if config_args:
        parser_kwargs['arguments'] += config_args
        (options, args) = parse_args(**parser_kwargs)
    command = args[0]
    # Simple commands: each one does its work and returns immediately.
    if command == "init":
        initializer(env_root, args)
        return
    if command == "testpkgs":
        test_all_packages(env_root, defaults=options.__dict__)
        return
    elif command == "testaddons":
        test_all_testaddons(env_root, defaults=options.__dict__)
        return
    elif command == "testex":
        test_all_examples(env_root, defaults=options.__dict__)
        return
    elif command == "testall":
        test_all(env_root, defaults=options.__dict__)
        return
    elif command == "testcfx":
        if options.filter:
            print >> sys.stderr, "The filter option is not valid with the testcfx command"
            return
        test_cfx(env_root, options.verbose)
        return
    elif command not in ["xpi", "test", "run"]:
        print >> sys.stderr, "Unknown command: %s" % command
        print >> sys.stderr, "Try using '--help' for assistance."
        sys.exit(1)
    # Everything below serves the xpi/test/run commands.
    target_cfg_json = None
    if not target_cfg:
        # Locate the package: either the given --pkgdir, or the nearest
        # ancestor of the current directory holding a package.json.
        if not options.pkgdir:
            options.pkgdir = find_parent_package(os.getcwd())
            if not options.pkgdir:
                print >> sys.stderr, ("cannot find 'package.json' in the"
                                      " current directory or any parent.")
                sys.exit(1)
        else:
            options.pkgdir = os.path.abspath(options.pkgdir)
        if not os.path.exists(os.path.join(options.pkgdir, 'package.json')):
            print >> sys.stderr, ("cannot find 'package.json' in"
                                  " %s." % options.pkgdir)
            sys.exit(1)
        target_cfg_json = os.path.join(options.pkgdir, 'package.json')
        target_cfg = packaging.get_config_in_dir(options.pkgdir)
    # --manifest-overload entries override keys from package.json.
    if options.manifest_overload:
        for k, v in packaging.load_json_file(
                options.manifest_overload).items():
            target_cfg[k] = v
    # At this point, we're either building an XPI or running Jetpack code in
    # a Mozilla application (which includes running tests).
    use_main = False
    inherited_options = ['verbose', 'enable_e10s', 'parseable',
                         'check_memory', 'abort_on_missing']
    enforce_timeouts = False
    if command == "xpi":
        use_main = True
    elif command == "test":
        if 'tests' not in target_cfg:
            target_cfg['tests'] = []
        inherited_options.extend(['iterations', 'filter', 'profileMemory',
                                  'stopOnError'])
        enforce_timeouts = True
    elif command == "run":
        use_main = True
    else:
        # Unknown commands were rejected above, so this is unreachable.
        assert 0, "shouldn't get here"
    if use_main and 'main' not in target_cfg:
        # If the user supplies a template dir, then the main
        # program may be contained in the template.
        if not options.templatedir:
            print >> sys.stderr, "package.json does not have a 'main' entry."
            sys.exit(1)
    if not pkg_cfg:
        pkg_cfg = packaging.build_config(env_root, target_cfg,
                                         options.packagepath)
    target = target_cfg.name
    # TODO: Consider keeping a cache of dynamic UUIDs, based
    # on absolute filesystem pathname, in the root directory
    # or something.
    if command in ('xpi', 'run'):
        from cuddlefish.preflight import preflight_config
        if target_cfg_json:
            config_was_ok, modified = preflight_config(target_cfg,
                                                       target_cfg_json)
            if not config_was_ok:
                if modified:
                    # we need to re-read package.json . The safest approach
                    # is to re-run the "cfx xpi"/"cfx run" command.
                    print >> sys.stderr, (
                        "package.json modified: please re-run"
                        " 'cfx %s'" % command)
                else:
                    print >> sys.stderr, ("package.json needs modification:"
                                          " please update it and then re-run"
                                          " 'cfx %s'" % command)
                sys.exit(1)
        # if we make it this far, we have a JID
    else:
        assert command == "test"
    jid = buildJID(target_cfg)
    # Build the list of packages whose dependencies must be resolved.
    targets = [target]
    if command == "test":
        targets.append(options.test_runner_pkg)
    extra_packages = []
    if options.extra_packages:
        extra_packages = options.extra_packages.split(",")
    if extra_packages:
        targets.extend(extra_packages)
        target_cfg.extra_dependencies = extra_packages
    deps = packaging.get_deps_for_targets(pkg_cfg, targets)
    from cuddlefish.manifest import build_manifest, ModuleNotFoundError, \
         BadChromeMarkerError
    # Figure out what loader files should be scanned. This is normally
    # computed inside packaging.generate_build_for_target(), by the first
    # dependent package that defines a "loader" property in its package.json.
    # This property is interpreted as a filename relative to the top of that
    # file, and stored as a path in build.loader . generate_build_for_target()
    # cannot be called yet (it needs the list of used_deps that
    # build_manifest() computes, but build_manifest() needs the list of
    # loader files that it computes). We could duplicate or factor out this
    # build.loader logic, but that would be messy, so instead we hard-code
    # the choice of loader for manifest-generation purposes. In practice,
    # this means that alternative loaders probably won't work with
    # --strip-xpi.
    assert packaging.DEFAULT_LOADER == "addon-sdk"
    assert pkg_cfg.packages[
        "addon-sdk"].loader == "lib/sdk/loader/cuddlefish.js"
    cuddlefish_js_path = os.path.join(pkg_cfg.packages["addon-sdk"].root_dir,
                                      "lib", "sdk", "loader", "cuddlefish.js")
    loader_modules = [("addon-sdk", "lib", "sdk/loader/cuddlefish",
                       cuddlefish_js_path)]
    scan_tests = command == "test"
    test_filter_re = None
    if scan_tests and options.filter:
        test_filter_re = options.filter
        # "module:test" filters scan on the module part only.
        if ":" in options.filter:
            test_filter_re = options.filter.split(":")[0]
    try:
        manifest = build_manifest(target_cfg, pkg_cfg, deps,
                                  scan_tests, test_filter_re, loader_modules,
                                  abort_on_missing=options.abort_on_missing)
    except ModuleNotFoundError, e:
        print str(e)
        sys.exit(1)
def run(arguments=sys.argv[1:], target_cfg=None, pkg_cfg=None, defaults=None,
        env_root=os.environ.get('CUDDLEFISH_ROOT'), stdout=sys.stdout):
    """Entry point for the cfx command-line tool.

    Parses the command line (re-parsing after merging extra arguments
    from the local config), dispatches the simple commands (init, the
    test* family, docs, sdocs), then for xpi/test/run prepares the
    package configuration, dependency list and module manifest.

    NOTE(review): this view ends right after manifest generation; the
    remainder of run() lies outside the visible source chunk.
    """
    versions = get_versions()
    sdk_version = versions["version"]
    display_version = "Add-on SDK %s (%s)" % (sdk_version, versions["full"])
    parser_kwargs = dict(arguments=arguments,
                         global_options=global_options,
                         parser_groups=parser_groups,
                         usage=usage,
                         version=display_version,
                         defaults=defaults)
    (options, args) = parse_args(**parser_kwargs)
    config_args = get_config_args(options.config, env_root);
    # reparse configs with arguments from local.json
    if config_args:
        parser_kwargs['arguments'] += config_args
        (options, args) = parse_args(**parser_kwargs)
    command = args[0]
    # Simple commands: each one does its work and returns immediately.
    if command == "init":
        initializer(env_root, args)
        return
    if command == "testpkgs":
        test_all_packages(env_root, defaults=options.__dict__)
        return
    elif command == "testex":
        test_all_examples(env_root, defaults=options.__dict__)
        return
    elif command == "testall":
        test_all(env_root, defaults=options.__dict__)
        return
    elif command == "testcfx":
        test_cfx(env_root, options.verbose)
        return
    elif command == "docs":
        from cuddlefish.docs import generate
        # With a filename argument, build docs for that one file;
        # otherwise build the whole local documentation set.
        if len(args) > 1:
            docs_home = generate.generate_named_file(env_root,
                                                     filename=args[1])
        else:
            docs_home = generate.generate_local_docs(env_root)
        webbrowser.open(docs_home)
        return
    elif command == "sdocs":
        from cuddlefish.docs import generate
        filename = generate.generate_static_docs(env_root)
        print >>stdout, "Wrote %s." % filename
        return
    # Everything below serves the xpi/test/run commands.
    target_cfg_json = None
    if not target_cfg:
        # Locate the package: either the given --pkgdir, or the nearest
        # ancestor of the current directory holding a package.json.
        if not options.pkgdir:
            options.pkgdir = find_parent_package(os.getcwd())
            if not options.pkgdir:
                print >>sys.stderr, ("cannot find 'package.json' in the"
                                     " current directory or any parent.")
                sys.exit(1)
        else:
            options.pkgdir = os.path.abspath(options.pkgdir)
        if not os.path.exists(os.path.join(options.pkgdir, 'package.json')):
            print >>sys.stderr, ("cannot find 'package.json' in"
                                 " %s." % options.pkgdir)
            sys.exit(1)
        target_cfg_json = os.path.join(options.pkgdir, 'package.json')
        target_cfg = packaging.get_config_in_dir(options.pkgdir)
    # At this point, we're either building an XPI or running Jetpack code in
    # a Mozilla application (which includes running tests).
    use_main = False
    inherited_options = ['verbose', 'enable_e10s']
    enforce_timeouts = False
    if command == "xpi":
        use_main = True
    elif command == "test":
        if 'tests' not in target_cfg:
            target_cfg['tests'] = []
        inherited_options.extend(['iterations', 'filter', 'profileMemory',
                                  'stopOnError'])
        enforce_timeouts = True
    elif command == "run":
        use_main = True
    else:
        print >>sys.stderr, "Unknown command: %s" % command
        print >>sys.stderr, "Try using '--help' for assistance."
        sys.exit(1)
    if use_main and 'main' not in target_cfg:
        # If the user supplies a template dir, then the main
        # program may be contained in the template.
        if not options.templatedir:
            print >>sys.stderr, "package.json does not have a 'main' entry."
            sys.exit(1)
    if not pkg_cfg:
        pkg_cfg = packaging.build_config(env_root, target_cfg,
                                         options.packagepath)
    target = target_cfg.name
    # TODO: Consider keeping a cache of dynamic UUIDs, based
    # on absolute filesystem pathname, in the root directory
    # or something.
    if command in ('xpi', 'run'):
        from cuddlefish.preflight import preflight_config
        if target_cfg_json:
            config_was_ok, modified = preflight_config(target_cfg,
                                                       target_cfg_json)
            if not config_was_ok:
                if modified:
                    # we need to re-read package.json . The safest approach
                    # is to re-run the "cfx xpi"/"cfx run" command.
                    print >>sys.stderr, ("package.json modified: please re-run"
                                         " 'cfx %s'" % command)
                else:
                    print >>sys.stderr, ("package.json needs modification:"
                                         " please update it and then re-run"
                                         " 'cfx %s'" % command)
                sys.exit(1)
        # if we make it this far, we have a JID
    else:
        assert command == "test"
    jid = buildJID(target_cfg)
    # Build the list of packages whose dependencies must be resolved.
    targets = [target]
    if command == "test":
        targets.append(options.test_runner_pkg)
    extra_packages = []
    if options.extra_packages:
        extra_packages = options.extra_packages.split(",")
    if extra_packages:
        targets.extend(extra_packages)
        target_cfg.extra_dependencies = extra_packages
    deps = packaging.get_deps_for_targets(pkg_cfg, targets)
    from cuddlefish.manifest import build_manifest, ModuleNotFoundError
    # Figure out what loader files should be scanned. This is normally
    # computed inside packaging.generate_build_for_target(), by the first
    # dependent package that defines a "loader" property in its package.json.
    # This property is interpreted as a filename relative to the top of that
    # file, and stored as a path in build.loader . generate_build_for_target()
    # cannot be called yet (it needs the list of used_deps that
    # build_manifest() computes, but build_manifest() needs the list of
    # loader files that it computes). We could duplicate or factor out this
    # build.loader logic, but that would be messy, so instead we hard-code
    # the choice of loader for manifest-generation purposes. In practice,
    # this means that alternative loaders probably won't work with
    # --strip-xpi.
    assert packaging.DEFAULT_LOADER == "api-utils"
    assert pkg_cfg.packages["api-utils"].loader == "lib/cuddlefish.js"
    cuddlefish_js_path = os.path.join(pkg_cfg.packages["api-utils"].root_dir,
                                      "lib", "cuddlefish.js")
    loader_modules = [("api-utils", "lib", "cuddlefish", cuddlefish_js_path)]
    scan_tests = command == "test"
    test_filter_re = None
    if scan_tests and options.filter:
        test_filter_re = options.filter
        # "module:test" filters scan on the module part only.
        if ":" in options.filter:
            test_filter_re = options.filter.split(":")[0]
    try:
        manifest = build_manifest(target_cfg, pkg_cfg, deps,
                                  scan_tests, test_filter_re, loader_modules)
    except ModuleNotFoundError, e:
        print str(e)
        sys.exit(1)
def generate_local_docs(env_root):
    """Generate the full local documentation set for env_root.

    Convenience wrapper around generate_docs() using the current SDK
    version and the local base URL; returns what generate_docs returns.
    """
    version = get_versions()["version"]
    base_url = get_base_url(env_root)
    return generate_docs(env_root, version, base_url)
def test_current_version(self):
    """The SDK can compute its own version string.

    We don't care what the version is, merely that it can be computed:
    it must be a non-empty str.
    """
    version = get_versions()["version"]
    # assertTrue replaces the deprecated failUnless alias; behavior is
    # identical.
    self.assertTrue(isinstance(version, str), (version, type(version)))
    self.assertTrue(len(version) > 0, version)