def check_generate_is_skipped(self, test_root, files_to_expect, initial_digest):
    """Run doc generation and verify it was effectively skipped.

    Asserts that every expected file still exists under <test_root>/doc and
    that the stored digest in status.md5 is unchanged from initial_digest
    (i.e. nothing was regenerated).

    Parameters:
      test_root -- root directory of the test package tree.
      files_to_expect -- list of path-component tuples, joined under doc/.
      initial_digest -- digest string expected to still be in status.md5.
    """
    generate.generate_docs(test_root, stdout=StringIO.StringIO())
    docs_root = os.path.join(test_root, "doc")
    for file_to_expect in files_to_expect:
        path = os.path.join(docs_root, *file_to_expect)
        # include the path in the failure message (matches the sibling
        # check_generate_regenerate_cycle helper)
        self.assertTrue(os.path.exists(path), path + " not found")
    # use a context manager so the digest file handle is not leaked, and
    # assertEqual for an informative failure message
    with open(os.path.join(docs_root, "status.md5"), "r") as digest_file:
        self.assertEqual(initial_digest, digest_file.read())
def check_generate_regenerate_cycle(self, test_root, files_to_expect, initial_digest=None):
    """Generate docs, verify the output, then verify regeneration is a no-op.

    First runs doc generation and asserts every expected file was produced
    (and, if initial_digest is given, that the stored digest changed).
    Then runs generation a second time via check_generate_is_skipped to
    confirm nothing is rewritten when the content is unchanged.

    Parameters:
      test_root -- root directory of the test package tree.
      files_to_expect -- list of path-component tuples, joined under doc/.
      initial_digest -- optional digest from a previous run; when given,
        the new digest must differ from it.

    Returns the digest produced by this generation pass.
    """
    # test that if we generate, files are getting generated
    generate.generate_docs(test_root, stdout=StringIO.StringIO())
    docs_root = os.path.join(test_root, "doc")
    for file_to_expect in files_to_expect:
        path = os.path.join(docs_root, *file_to_expect)
        # BUG FIX: original message was path + "not found" (missing space)
        self.assertTrue(os.path.exists(path), path + " not found")
    # read status.md5 once (original opened/read it twice and leaked both
    # handles); a context manager guarantees the file is closed
    with open(os.path.join(docs_root, "status.md5"), "r") as digest_file:
        new_digest = digest_file.read()
    if initial_digest:
        self.assertNotEqual(initial_digest, new_digest)
    # and that if we regenerate, nothing changes...
    self.check_generate_is_skipped(test_root, files_to_expect, new_digest)
    return new_digest
def check_generate_is_skipped(self, test_root, files_to_expect, initial_digest):
    """Run doc generation and verify it was effectively skipped.

    Asserts that every expected file still exists under <test_root>/doc and
    that the stored digest in status.md5 is unchanged from initial_digest.

    NOTE(review): this is a duplicate of an identical definition earlier in
    this file; the later def silently shadows the earlier one at class
    creation time. One of the two should be removed.
    """
    generate.generate_docs(test_root, stdout=StringIO.StringIO())
    docs_root = os.path.join(test_root, "doc")
    for file_to_expect in files_to_expect:
        path = os.path.join(docs_root, *file_to_expect)
        self.assertTrue(os.path.exists(path), path + " not found")
    # use a context manager so the digest file handle is not leaked, and
    # assertEqual for an informative failure message
    with open(os.path.join(docs_root, "status.md5"), "r") as digest_file:
        self.assertEqual(initial_digest, digest_file.read())
def run(arguments=sys.argv[1:], target_cfg=None, pkg_cfg=None,
        defaults=None, env_root=os.environ.get('CUDDLEFISH_ROOT'),
        stdout=sys.stdout):
    """Main cfx command dispatcher.

    Parses command-line arguments, handles the simple subcommands
    (init, testpkgs, testex, testall, testcfx, docs, sdocs) directly and
    returns, and otherwise prepares package configuration, add-on ID, and
    module manifest for the xpi/run/test commands.

    Parameters:
      arguments -- argv-style argument list (defaults to sys.argv[1:]).
      target_cfg -- pre-built target package config; if None it is loaded
        from the package.json found via --pkgdir or the cwd's parents.
      pkg_cfg -- pre-built packaging config; if None it is computed here.
      defaults -- default option values handed to the argument parser.
      env_root -- SDK root directory (defaults to $CUDDLEFISH_ROOT).
      stdout -- stream for normal output (injectable for tests).

    NOTE(review): the function appears to continue beyond the end of this
    chunk; only the visible portion is documented here.
    """
    parser_kwargs = dict(arguments=arguments,
                         global_options=global_options,
                         parser_groups=parser_groups,
                         usage=usage,
                         defaults=defaults)
    (options, args) = parse_args(**parser_kwargs)
    config_args = get_config_args(options.config, env_root);
    # reparse configs with arguments from local.json
    if config_args:
        parser_kwargs['arguments'] += config_args
        (options, args) = parse_args(**parser_kwargs)
    # The first positional argument selects the subcommand.
    command = args[0]
    if command == "init":
        initializer(env_root, args)
        return
    if command == "testpkgs":
        test_all_packages(env_root, defaults=options.__dict__)
        return
    elif command == "testex":
        test_all_examples(env_root, defaults=options.__dict__)
        return
    elif command == "testall":
        test_all(env_root, defaults=options.__dict__)
        return
    elif command == "testcfx":
        test_cfx(env_root, options.verbose)
        return
    elif command == "docs":
        from cuddlefish.docs import generate
        if len(args) > 1:
            docs_home = generate.generate_docs(env_root, filename=args[1])
        else:
            docs_home = generate.generate_docs(env_root)
        webbrowser.open(docs_home)
        return
    elif command == "sdocs":
        from cuddlefish.docs import generate
        # TODO: Allow user to change this filename via cmd line.
        filename = generate.generate_static_docs(env_root,
                                                 base_url=options.baseurl)
        print >>stdout, "Wrote %s." % filename
        return

    # Everything below is for the xpi/run/test commands, which need a
    # target package configuration.
    target_cfg_json = None
    if not target_cfg:
        if not options.pkgdir:
            # No --pkgdir given: search upward from the cwd for package.json.
            options.pkgdir = find_parent_package(os.getcwd())
            if not options.pkgdir:
                print >>sys.stderr, ("cannot find 'package.json' in the"
                                     " current directory or any parent.")
                sys.exit(1)
        else:
            options.pkgdir = os.path.abspath(options.pkgdir)
        if not os.path.exists(os.path.join(options.pkgdir, 'package.json')):
            print >>sys.stderr, ("cannot find 'package.json' in"
                                 " %s." % options.pkgdir)
            sys.exit(1)

        target_cfg_json = os.path.join(options.pkgdir, 'package.json')
        target_cfg = packaging.get_config_in_dir(options.pkgdir)

    # At this point, we're either building an XPI or running Jetpack code in
    # a Mozilla application (which includes running tests).

    use_main = False
    inherited_options = ['verbose', 'enable_e10s']
    enforce_timeouts = False

    if command == "xpi":
        use_main = True
    elif command == "test":
        if 'tests' not in target_cfg:
            target_cfg['tests'] = []
        inherited_options.extend(['iterations', 'filter', 'profileMemory'])
        enforce_timeouts = True
    elif command == "run":
        use_main = True
    else:
        print >>sys.stderr, "Unknown command: %s" % command
        print >>sys.stderr, "Try using '--help' for assistance."
        sys.exit(1)

    if use_main and 'main' not in target_cfg:
        # If the user supplies a template dir, then the main
        # program may be contained in the template.
        if not options.templatedir:
            print >>sys.stderr, "package.json does not have a 'main' entry."
            sys.exit(1)

    if not pkg_cfg:
        pkg_cfg = packaging.build_config(env_root, target_cfg,
                                         options.packagepath)

    target = target_cfg.name

    # the harness_guid is used for an XPCOM class ID. We use the
    # JetpackID for the add-on ID and the XPCOM contract ID.
    if "harnessClassID" in target_cfg:
        # For the sake of non-bootstrapped extensions, we allow to specify the
        # classID of harness' XPCOM component in package.json. This makes it
        # possible to register the component using a static chrome.manifest file
        harness_guid = target_cfg["harnessClassID"]
    else:
        import uuid
        harness_guid = str(uuid.uuid4())

    # TODO: Consider keeping a cache of dynamic UUIDs, based
    # on absolute filesystem pathname, in the root directory
    # or something.
    if command in ('xpi', 'run'):
        from cuddlefish.preflight import preflight_config
        if target_cfg_json:
            config_was_ok, modified = preflight_config(target_cfg,
                                                       target_cfg_json)
            if not config_was_ok:
                if modified:
                    # we need to re-read package.json . The safest approach
                    # is to re-run the "cfx xpi"/"cfx run" command.
                    print >>sys.stderr, ("package.json modified: please re-run"
                                         " 'cfx %s'" % command)
                else:
                    print >>sys.stderr, ("package.json needs modification:"
                                         " please update it and then re-run"
                                         " 'cfx %s'" % command)
                sys.exit(1)
        # if we make it this far, we have a JID
    else:
        assert command == "test"

    # Derive the add-on ID (jid): an explicit "id" from package.json wins,
    # otherwise fall back to the harness GUID; bare names get "@jetpack"
    # appended so the result is a valid add-on ID.
    if "id" in target_cfg:
        jid = target_cfg["id"]
    else:
        jid = harness_guid
    if not ("@" in jid or jid.startswith("{")):
        jid = jid + "@jetpack"
    unique_prefix = '%s-' % jid # used for resource: URLs
    bundle_id = jid

    # the resource: URL's prefix is treated too much like a DNS hostname
    unique_prefix = unique_prefix.lower()
    unique_prefix = unique_prefix.replace("@", "-at-")
    unique_prefix = unique_prefix.replace(".", "-dot-")

    targets = [target]
    if command == "test":
        targets.append(options.test_runner_pkg)

    extra_packages = []
    if options.extra_packages:
        extra_packages = options.extra_packages.split(",")
    if extra_packages:
        targets.extend(extra_packages)
        target_cfg.extra_dependencies = extra_packages

    deps = packaging.get_deps_for_targets(pkg_cfg, targets)

    from cuddlefish.manifest import build_manifest, ModuleNotFoundError
    uri_prefix = "resource://%s" % unique_prefix
    # Figure out what loader files should be scanned. This is normally
    # computed inside packaging.generate_build_for_target(), by the first
    # dependent package that defines a "loader" property in its package.json.
    # This property is interpreted as a filename relative to the top of that
    # file, and stored as a URI in build.loader . generate_build_for_target()
    # cannot be called yet (it needs the list of used_deps that
    # build_manifest() computes, but build_manifest() needs the list of
    # loader files that it computes). We could duplicate or factor out this
    # build.loader logic, but that would be messy, so instead we hard-code
    # the choice of loader for manifest-generation purposes. In practice,
    # this means that alternative loaders probably won't work with
    # --strip-xpi.
    assert packaging.DEFAULT_LOADER == "api-utils"
    assert pkg_cfg.packages["api-utils"].loader == "lib/cuddlefish.js"
    cuddlefish_js_path = os.path.join(pkg_cfg.packages["api-utils"].root_dir,
                                      "lib", "cuddlefish.js")
    loader_modules = [("api-utils", "lib", "cuddlefish", cuddlefish_js_path)]
    # Only scan for test modules when running the test command.
    scan_tests = command == "test"
    try:
        manifest = build_manifest(target_cfg, pkg_cfg, deps,
                                  uri_prefix, scan_tests, loader_modules)
    except ModuleNotFoundError, e:
        print str(e)
        sys.exit(1)