def run(self):
    try:
        manifest = Manifest(Manifest.locate_file(self.config.cwd))
        self.install(manifest)
    except MissingNpmShrinkwrap as e:
        Log.error(e.message)
        sys.exit(1)

def __init__(self, manifest_path, cert_path):
    rhnSQL.initDB()
    self.manifest = Manifest(manifest_path)

    # Satellite 5 certificate
    c = open(cert_path, 'r')
    try:
        self.sat5_cert = SatelliteCert()
        content = c.read()
        self.sat5_cert.load(content)
    finally:
        if c is not None:
            c.close()

    # Channel families metadata
    f = open(constants.CHANNEL_FAMILY_MAPPING_PATH, 'r')
    try:
        self.families = json.load(f)
    finally:
        if f is not None:
            f.close()

    # product to family mapping
    p = open(constants.PRODUCT_FAMILY_MAPPING_PATH, 'r')
    try:
        self.products = json.load(p)
    finally:
        if p is not None:
            p.close()

    self.families_to_import = []

def build_manifest(
    docker_obj,
    skip_fs_dump,
    kml,
    output,
    envs,
    cmd,
) -> Manifest:
    manifest = Manifest()
    manifest.runtime.entry = (docker_obj['Config']['Entrypoint'] or []) + (docker_obj['Config']['Cmd'] or []) + cmd
    manifest.runtime.envs = docker_obj['Config']['Env'] + envs
    manifest.runtime.working_directory = docker_obj['Config']['WorkingDir']
    manifest.linux_configuration.options, manifest.runtime.enabled_init_options = get_linux_options()
    if manifest.runtime.enabled_init_options[0] == '/init':
        print("Entry command cannot use /init", file=sys.stderr)
        exit(1)
    manifest.linux_configuration.kml = kml
    manifest.filesystem = dump_fs(docker_obj['RepoTags'][0], skip_fs_dump, output)
    return manifest

def show_main():
    doc = '''Usage: oxt-pkg-show [options] <package-path>

    --help          Print this screen.
    '''
    from docopt import docopt
    args = docopt(doc)
    logging.basicConfig(level=logging.INFO)

    package_path = args['<package-path>']

    with open_storage(package_path) as pkg:
        with resolve_path(pkg, MANIFEST_PATH).open() as f:
            manifest = Manifest()
            manifest.load(f)
        with resolve_path(pkg, DESCRIPTION_PATH).open() as f:
            description = Description.parse(f)

        from description import print_human_readable
        print_human_readable(description, pkg)

        for path in manifest:
            item = manifest[path]
            print path, item['media-type'],
            node = resolve_path(pkg, path)
            if node:
                print '-- OK'
            else:
                print '-- MISSING'

def __init__(self, repodir):
    Manifest.__init__(self, repodir)

    gitdir = os.path.join(repodir, 'manifest.git')
    config = GitConfig.ForRepository(gitdir = gitdir)

    if config.GetBoolean('repo.mirror'):
        worktree = os.path.join(repodir, 'manifest')
        relpath = None
    else:
        worktree = self.topdir
        relpath = '.'

    self.manifestProject = MetaProject(self, '__manifest__',
                                       gitdir = gitdir,
                                       worktree = worktree,
                                       relpath = relpath)
    self._modules = GitConfig(os.path.join(worktree, '.gitmodules'),
                              pickleFile = os.path.join(
                                  repodir, '.repopickle_gitmodules'
                              ))
    self._review = GitConfig(os.path.join(worktree, '.review'),
                             pickleFile = os.path.join(
                                 repodir, '.repopickle_review'
                             ))
    self._Unload()

def __init__(self, repodir):
    Manifest.__init__(self, repodir)

    gitdir = os.path.join(repodir, 'manifest.git')
    config = GitConfig.ForRepository(gitdir=gitdir)

    if config.GetBoolean('repo.mirror'):
        worktree = os.path.join(repodir, 'manifest')
        relpath = None
    else:
        worktree = self.topdir
        relpath = '.'

    self.manifestProject = MetaProject(self, '__manifest__',
                                       gitdir=gitdir,
                                       worktree=worktree,
                                       relpath=relpath)
    self._modules = GitConfig(os.path.join(worktree, '.gitmodules'),
                              pickleFile=os.path.join(
                                  repodir, '.repopickle_gitmodules'))
    self._review = GitConfig(os.path.join(worktree, '.review'),
                             pickleFile=os.path.join(
                                 repodir, '.repopickle_review'))
    self._Unload()

def test_min_max_diff_with_diff_at_muliples_levels(self):
    m1 = ManifestFileParser().build("""\
1foo
2bar
    1xyzzy
        1blah
        2zyxxy
    3diff
3baz
""".split("\n"))
    m2 = ManifestFileParser().build("""\
1foo
2bar
    1xyzzy
        1blah
        2diff
        2zyxxy
3baz
4diff
    1diff
""".split("\n"))
    self.assertEqual(list(Manifest.diff(m1, m2)), [
        (None, "2bar/1xyzzy/2diff"),
        ("2bar/3diff", None),
        (None, "4diff"),
    ])
    self.assertEqual(list(Manifest.diff(m1, m2, recursive = True)), [
        (None, "2bar/1xyzzy/2diff"),
        ("2bar/3diff", None),
        (None, "4diff"),
        (None, "4diff/1diff"),
    ])

def test_diff_two_files_vs_file_and_empty_subdir(self):
    m1 = Manifest_from_walking_unpacked_tar("two_files.tar")
    m2 = Manifest_from_walking_unpacked_tar("file_and_empty_subdir.tar")
    self.assertEqual(list(Manifest.diff(m1, m2)), [
        ("bar", None),
        (None, "file"),
        ("foo", None),
        (None, "subdir")])
    self.assertEqual(list(Manifest.diff(m2, m1)), [
        (None, "bar"),
        ("file", None),
        (None, "foo"),
        ("subdir", None)])

def set_frame_color(m: Manifest, value: str):
    try:
        m.setup_frame(Color(value))
        return True
    except Exception as e:
        print(e, '- hex or rgb are acceptable')
        return False

def initiate_manifest(self):
    """
    Initialize the manifest and validate it.
    :return: None
    """
    self._manifest = Manifest(self._source_manifest_file)
    self._manifest.validate_manifest()

def test_diff_like(self):
    for t in TEST_TARS:
        m1 = Manifest_from_walking_unpacked_tar(t)
        m2 = Manifest_from_walking_unpacked_tar(t)
        self.assertEqual(list(Manifest.diff(m1, m2)), [])
        self.assertEqual(list(Manifest.diff(m2, m1)), [])
        self.assertEqual(m1, m2)
        self.assertEqual(m2, m1)

def run(self, runner_param_dic=dict(), debug=False):
    cases = []
    for name in self.model_names:
        tokens = name.split('_')
        load_name = name + ".pt"
        algo_name, env_name, last_score = tokens[0:3]
        train, check_interval, reward_scale = tokens[3:6]
        algo_params = dict()
        for token in tokens[6:-1]:
            k, v = token.split('=')
            k = Manifest.get_param_full(k)
            try:
                algo_params[k] = int(v)
            except:
                algo_params[k] = float(v)
        algo = Manifest.get_algo_class(algo_name)
        algop = Manifest.get_param_class(algo)(**algo_params)
        runner = Manifest.get_runner_class(algo)
        cases.append((env_name, runner, algop, algo_params, load_name))

    for env, runner, algop, algo_params, load_name in cases:
        print(f'\t[ {env}, {runner} ]\n parameters {algo_params.items()}\n')
        runnerp = None
        if debug:
            runnerp = RunnerParams(train=False, save_net=False, load_net=True,
                                   target_score=9999.0, load_name=load_name,
                                   name_postfix=str(algop), check_interval=1,
                                   max_video=3, save_check_log=False,
                                   save_step_log=True, print_interval=1,
                                   video_record_interval=1, max_episode=1000)
        else:
            runnerp = RunnerParams(train=False, save_net=False, load_net=True,
                                   target_score=9999.0, load_name=load_name,
                                   name_postfix=str(algop), check_interval=1,
                                   max_video=0, save_check_log=False,
                                   save_step_log=True, print_interval=0,
                                   video_record_interval=0, max_episode=1000)
        runner_param_tmp = {**runnerp.__dict__, **runner_param_dic}
        runnerp = RunnerParams(**runner_param_tmp)
        runner(env, algop, runnerp).run()
    print('All finished')

def save_manifest(m: Manifest, path=None):
    if not path:
        path = input("manifest path/name > ")
    if not path:
        path = '.'
    if not os.path.exists(path):
        os.makedirs(path)
    path = os.path.join(path, 'manifest.json')
    m.save(path)

def getRPath(self):
    if not self.manifest:
        return False
    manifest = Manifest(self.manifest)
    path_ = self.build_path + "/gen/" + "/".join(
        manifest.getPackageName().split(".")) + "/R.java"
    if path.exists(path_):
        return path_
    return False

def __init__(self, repodir):
    Manifest.__init__(self, repodir)
    self._manifestFile = os.path.join(repodir, MANIFEST_FILE_NAME)
    self.manifestProject = MetaProject(self, 'manifests',
        gitdir = os.path.join(repodir, 'manifests.git'),
        worktree = os.path.join(repodir, 'manifests'))
    self._Unload()

def test_GetBucketName(self):
    m = Manifest(self.writeTestJsonFile({
        "ProjectName": "projectname",
        "BucketName": "myBucketName",
        "Documents": [],
        "InstanceJobs": []
    }))
    self.assertEqual(m.GetBucketName(), "myBucketName")

def test_append_dependencies_to_tree_dicts_invalid_version_json(self):
    dep_json = {
        'buffer': {
            'version': False
        }
    }
    with self.assertRaises(RuntimeError) as context:
        Manifest.append_dependencies_to_tree(tree(), dep_json)

class Application(object):
    def __init__(self, s3, manifestPath, localWorkingDir):
        self.manifestPath = manifestPath
        self.manifest = Manifest(manifestPath)
        self.s3interface = S3Interface(s3, self.manifest.GetBucketName(),
                                       localWorkingDir)
        metafac = InstanceMetadataFactory(self.manifest)
        self.instanceManager = InstanceManager(self.s3interface, self.manifest,
                                               metafac)
        self.manifestKey = "/".join([self.manifest.GetS3KeyPrefix(),
                                     "manifest.json"])

    def downloadS3Document(self, documentName):
        logging.info("downloading specified document '{0}' from s3 bucket {1}"
                     .format(documentName, self.s3interface.bucketName))
        filteredDocs = list(
            self.manifest.GetS3Documents(filter = {"Name": documentName}))
        if len(filteredDocs) == 0:
            raise ValueError("specified document {0} not found".format(documentName))
        elif filteredDocs[0]["Direction"] != "AWSToLocal":
            raise ValueError("specified document not marked AWSToLocal")
        elif len(filteredDocs) > 1:
            raise ValueError("manifest error")
        doc = filteredDocs[0]
        self.s3interface.downloadCompressed(self.manifest.GetS3KeyPrefix(),
                                            documentName,
                                            os.path.abspath(doc["LocalPath"]))

    def downloadS3Documents(self):
        logging.info("downloading files from s3 bucket {0}".format(
            self.s3interface.bucketName))
        for doc in self.manifest.GetS3Documents(filter = {"Direction": "AWSToLocal"}):
            self.s3interface.downloadCompressed(self.manifest.GetS3KeyPrefix(),
                                                doc["Name"],
                                                os.path.abspath(doc["LocalPath"]))
        logging.info("downloading finished")

    def downloadLogs(self, outputdir):
        logging.info("downloading instance logs s3 bucket {0}".format(
            self.s3interface.bucketName))
        for j in self.manifest.GetJobs():
            self.instanceManager.downloadInstanceLog(j["Id"], outputdir)

    def uploadS3Documents(self):
        logging.info("uploading files to s3 bucket {0}".format(
            self.s3interface.bucketName))
        logging.info("uploading manifest {0} to {1}".format(self.manifestPath,
                                                            self.manifestKey))
        self.s3interface.uploadFile(self.manifestPath, self.manifestKey)
        for doc in self.manifest.GetS3Documents(filter = {"Direction": "LocalToAWS"}):
            self.s3interface.uploadCompressed(self.manifest.GetS3KeyPrefix(),
                                              doc["Name"],
                                              os.path.abspath(doc["LocalPath"]))
        logging.info("uploading finished")

    def runInstances(self, ec2, instanceConfig):
        ec2interface = EC2Interface(
            ec2,
            instanceConfig["BootStrapperConfig"]["WorkingDirectory"],
            self.manifest,
            self.manifestKey,
            self.instanceManager,
            instanceConfig["BootStrapperConfig"]["PythonPath"],
            instanceConfig["BootStrapperConfig"]["BootStrapScriptPath"],
            instanceConfig["BootStrapperConfig"]["LineBreak"],
            instanceConfig["BootStrapperConfig"]["BootstrapCommands"])
        ec2interface.launchInstances(instanceConfig["EC2Config"]["InstanceConfig"])
        logging.info("ec2 launch finished")

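# A minimal usage sketch for the Application class above; it is not part of
# the original sources. It assumes boto3 clients satisfy the `s3`/`ec2`
# parameters and that "instanceconfig.json" is a hypothetical config file
# containing the keys read in runInstances(); adjust both to whatever
# S3Interface/EC2Interface actually expect.
import json
import logging

import boto3  # assumption: boto3 provides the s3/ec2 handles

logging.basicConfig(level=logging.INFO)

app = Application(boto3.client("s3"), "manifest.json", "/tmp/manifest-work")
app.uploadS3Documents()
with open("instanceconfig.json") as cfg:  # hypothetical file name
    instanceConfig = json.load(cfg)
app.runInstances(boto3.client("ec2"), instanceConfig)
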
def set_toolbar_color(m: Manifest, value: str):
    if not value:
        color = m.colors['frame'].add_light(1, 0.1)
    else:
        try:
            color = Color(value)
        except Exception as e:
            print(e, '- hex or rgb are acceptable')
            return False
    m.setup_toolbar(color)
    return True

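# A small illustrative helper, not from the original project, chaining the
# color setters above. It assumes `m` is a Manifest from the same codebase as
# set_frame_color/set_toolbar_color; an empty toolbar value falls back to a
# lightened frame color, as implemented in set_toolbar_color.
def apply_theme(m: Manifest, frame_value: str, toolbar_value: str = '') -> bool:
    # Set the frame first, since the toolbar fallback reads m.colors['frame'].
    return set_frame_color(m, frame_value) and set_toolbar_color(m, toolbar_value)
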
def run(self, **kwargs):
    """
    QThread function that executes the class in a separate thread
    :param kwargs:
    :return: None
    """
    install_type = kwargs.get("install_type", "install")
    try:
        self.set_tasks.emit(3)

        # Checks which type of manifest to load
        if install_type == "install":
            # Installs will load the bundled install.man
            self._zip.extract_all(self._install_temp)
            man_path = os.path.join(self._install_temp, "install.man")
            self._man = Manifest(man_path)
        else:
            # Uninstalls will attempt to load the uninstall.man in the installation directory
            man_path = os.path.join(
                self._dirs.get_render_farming_install(), "uninstall.man")
            self._man = Manifest(man_path)
        self._add()

        # Translates the manifest to InstallerItems
        self._man_translated = ManifestTranslator(self._dirs, self._man)
        self._add()

        # Retrieves these InstallerItems
        self._items = self._man_translated.get_items()
        self._add()

        # Checks which functions to run
        if install_type == "install":
            self.run_installation()
        elif install_type == "upgrade":
            self.run_uninstallation()
        else:
            self.run_uninstallation()
            self.run_cleaner()

    # Catches errors and prints them to the UI rather than crashing
    except (IOError, OSError, RuntimeError, WindowsError, ManifestError) as e:
        self.print_error.emit(str(e))
        print(self._man_translated)
    else:
        # If no Errors, set UI to complete
        self.complete.emit()
    finally:
        # Always terminate the QThread
        self.terminate()

def test_remove_application():
    # create a pair of applications in the manifest
    m = Manifest()
    m.create('manifest.json', 'test-guid', 'test-app-0', 'test-description',
             'overview....', 'jesseward', 'utility')
    m.add_application('test-guid', 'test-app-1', 'test-description',
                      'overview....', 'jesseward', 'utility')
    assert len(m.applications()) == 2

    # remove index 0 / application test-app-0
    m.remove_application('test-app-0')
    assert len(m.applications()) == 1

def test_diff_unlike(self):
    shifted = TEST_TARS[:]
    shifted.append(shifted.pop(0))
    for t1, t2 in zip(TEST_TARS, shifted):
        m1 = Manifest_from_walking_unpacked_tar(t1)
        m2 = Manifest_from_walking_unpacked_tar(t2)
        self.assertTrue(list(Manifest.diff(m1, m2)))
        self.assertTrue(list(Manifest.diff(m2, m1)))
        self.assertNotEqual(m1, m2)
        self.assertNotEqual(m2, m1)
        self.assertEqual(len(list(Manifest.diff(m1, m2))),
                         len(list(Manifest.diff(m2, m1))))

def get_repo_commit(manifest_file):
    """
    build {repository: commit-id} dictionary, return the dict.
    """
    manifest = Manifest(manifest_file)
    manifest.validate_manifest()
    repo_commit_dict = {}
    for repo in manifest.repositories:
        repo_name = common.strip_suffix(os.path.basename(repo["repository"]), ".git")
        commit_id = repo["commit-id"]
        repo_commit_dict[repo_name] = commit_id
    print "[DEBUG] manifest repo_commit dict:", repo_commit_dict
    return repo_commit_dict

def test_ValidateDirectionReturnsExpectedValue(self):
    m = Manifest(self.writeTestJsonFile({
        "ProjectName": "projectname",
        "BucketName": "myBucketName",
        "Documents": [],
        "InstanceJobs": []
    }))
    self.assertFalse(m.validateDirection("a"))
    self.assertTrue(m.validateDirection("LocalToAWS"))
    self.assertTrue(m.validateDirection("AWSToLocal"))
    self.assertTrue(m.validateDirection("Static"))

def add_case(self, env, algo, algo_param_dic=None):
    if not algo_param_dic:
        algo_param_dic = dict()
    algo_param_class = Manifest.get_param_class(algo)
    algo_param_dic = {
        **self.default_hyperparam(env, algo).__dict__,
        **algo_param_dic
    }
    algo_param = algo_param_class(**algo_param_dic)
    algo_runner = Manifest.get_runner_class(algo)
    self._testcases += [(env, algo_runner, algo_param)]

def __init__(self, _uuid_project, _id_device, _version, _host_broker,
             _callback_on_receive_update, _private_key=None, _public_key=None,
             _delivery_type='Push', _debug=True):
    self.host_broker = _host_broker
    self.debug = _debug
    self.delivery_type = _delivery_type
    self.id_device = _id_device
    self.private_key = _private_key
    self.public_key = _public_key
    self.security = Security()
    self.message_incoming = bytes()
    self.do_decrypt = False
    self.aes_random_key = ''
    _id_on_broker = "FotaSuit-" + _uuid_project + "-" + self.id_device
    self.mqtt_client = MQTTClient(_id_on_broker, self.host_broker)
    self.mqtt_client.DEBUG = self.debug
    self.mqtt_client.set_callback(self.publish_received)
    self.update_file_size = 0
    self.update_file_index = 0
    self.update_file_handle = 0
    self.memory = Memory(self.debug)
    _next_partition = self.memory.get_next_partition_name()
    self.callback_on_receive_update = _callback_on_receive_update

    if not (self.delivery_type == 'Push' or self.delivery_type == 'Pull'):
        raise ValueError("'type' variable not supported. Try 'Pull' or 'Push'.")

    while not self.connect_on_broker(True):
        self.print_debug("trying connection with broker...")
        time.sleep(3)

    self.manifest = Manifest(_next_partition, _public_key)
    self.manifest.load(_uuid_project, _version)

    files = os.listdir()
    if '_updated.iota' in files:  # have an update?
        # notify the upgrade
        _version = str(self.manifest.version)
        _msg = '{"idDevice":"' + self.id_device + '", "uuidProject":"' + _uuid_project + '"'
        _msg += ', "version":' + _version + ', "date": ""}'  # TODO: insert other information in the message, like the date
        self.publish_on_topic(_version, "updated", _msg)
        os.remove('_updated.iota')

    self.subscribe_task = "manifest"  # waiting for manifest file
    self.print_debug("initialized.")

def test_nonrecursive(self):
    m1 = self.mfp.build(["bar", "foo", " bar", " foo"])
    m2 = self.mfp.build(["foo", " foo", "xyzzy"])
    m3 = self.mfp.build(["foo", " bar", "  baz", " foo", "  foo"])
    self.assertEqual(list(Manifest.merge(m1, m2, m3)), [
        ("bar", None, None),
        ("foo", "foo", "foo"),
        ("foo/bar", None, "foo/bar"),
        (None, None, "foo/bar/baz"),
        ("foo/foo", "foo/foo", "foo/foo"),
        (None, None, "foo/foo/foo"),
        (None, "xyzzy", None)])

    # now without recursion
    self.assertEqual(list(Manifest.merge(m1, m2, m3, recursive = False)), [
        ("bar", None, None),
        ("foo", "foo", "foo"),
        (None, "xyzzy", None)])

    # and finally with selective recursion (only recurse into "foo"s)
    actual = []
    gen = Manifest.merge(m1, m2, m3, recursive = False)
    try:
        t = next(gen)
        while True:
            actual.append(t)
            paths = [p for p in t if p is not None]
            self.assertTrue(paths)
            path = paths[0]
            self.assertEqual([path] * len(paths), paths)
            try:
                last_component = path.rsplit("/", 1)[1]
            except:
                last_component = path
            if last_component == "foo":
                t = gen.send(True)
            else:
                t = next(gen)
    except StopIteration:
        pass
    self.assertEqual(actual, [
        ("bar", None, None),
        ("foo", "foo", "foo"),
        ("foo/bar", None, "foo/bar"),
        ("foo/foo", "foo/foo", "foo/foo"),
        (None, None, "foo/foo/foo"),
        (None, "xyzzy", None)])

def test_errorThrownOnMissingRequiredS3DataInJob(self):
    self.assertRaises(
        ValueError,
        lambda: Manifest(self.writeTestJsonFile({
            "ProjectName": "testProject",
            "BucketName": "bucket",
            "Documents": [
                {
                    "Name": "document",
                    "Direction": "AWSToLocal",
                    "LocalPath": "mylocalPath",
                    "AWSInstancePath": "awsinstancepath"
                },
            ],
            "InstanceJobs": [{
                "Id": 1,
                "RequiredS3Data": ["a_missing_document"],
                "Commands": [{"Command": "run.exe", "Args": []}]
            }]
        })))

def create_manifest():
    manifest: Manifest = Manifest(context=context,
                                  manifest_files=manifest_files,
                                  resource_factory=MockResource)
    assert manifest.manifest_files == manifest_files
    assert manifest.context == context
    return manifest

def _read_manifest(self):
    try:
        with self._fetcher.retrieve_raw_file(_common._MANIFEST_NAME) as manifest_file:
            self.manifest = Manifest(manifest_file)
            self.fqrn = self.manifest.repository_name
    except FileNotFoundInRepository, e:
        raise RepositoryNotFound(self._storage_location)

def get_modules(self, modules_dir, auto_install=True):
    print '[DEBUG] Detecting modules in %s' % modules_dir
    modules = []

    if auto_install:
        parent_dir = os.path.dirname(os.path.abspath(modules_dir))
        self.auto_install_modules(parent_dir)

    if not os.path.exists(modules_dir):
        return modules

    for platform in os.listdir(modules_dir):
        platform_dir = os.path.join(modules_dir, platform)
        if not os.path.isdir(platform_dir):
            continue
        if platform in ['osx', 'win32', 'linux']:
            continue  # skip desktop modules

        # iterate through the platform directory so we can get versioned modules too
        for root, dirs, files in os.walk(platform_dir):
            dirs.sort(reverse=True)
            for module_dir in dirs:
                module_dir = os.path.join(root, module_dir)
                manifest_file = os.path.join(module_dir, 'manifest')
                if not os.path.exists(manifest_file):
                    continue

                manifest = Manifest(manifest_file)
                print '[DEBUG] Detected module for %s: %s %s @ %s' % (
                    manifest.platform, manifest.moduleid, manifest.version, module_dir)
                modules.append(Module(module_dir, manifest))

    return modules

def __init__(self, path_data: Path, skins, name, cap_name, uuid_s, texts):
    if uuid_s is None:
        self.uuid = uuid4()
    else:
        self.uuid = uuid_s
    self.skins = skins
    self.texts = texts
    self.name = name
    self.cap_name = cap_name
    self.manifest = Manifest(path_data, self.uuid, Manifest.SKINS,
                             name=self.cap_name)

def handle_manifest(self, manifest_path):
    """
    Initialize the manifest and validate it.
    :param manifest_path: the path of the manifest file
    :return: None
    """
    try:
        self._manifest = Manifest(manifest_path)
        self._manifest.validate_manifest()
    except KeyError as error:
        print "Failed to create a Manifest instance for the manifest file {0} \nERROR:\n{1}"\
              .format(manifest_path, error.message)
        sys.exit(1)

    for repo in self._manifest.repositories:
        repo['directory-name'] = self.directory_for_repo(repo)

def main(): """Main function for invoking the bootstrap process :raises Exception: When the invoking user is not root and --dry-run isn't specified """ # Get the commandline arguments opts = get_opts() # Require root privileges, except when doing a dry-run where they aren't needed import os if os.geteuid() != 0 and not opts['--dry-run']: raise Exception('This program requires root privileges.') # Set up logging setup_loggers(opts) # Load the manifest from manifest import Manifest manifest = Manifest(path=opts['MANIFEST']) # Everything has been set up, begin the bootstrapping process run(manifest, debug=opts['--debug'], pause_on_error=opts['--pause-on-error'], dry_run=opts['--dry-run'])
def write_downstream_parameter_file(build_directory, manifest_file, is_official_release, parameter_file):
    try:
        params = {}
        # Add rackhd version to downstream parameters
        rackhd_repo_dir = os.path.join(build_directory, "RackHD")
        version_generator = VersionGenerator(rackhd_repo_dir)
        rackhd_version = version_generator.generate_package_version(is_official_release)
        if rackhd_version != None:
            params['RACKHD_VERSION'] = rackhd_version
        else:
            raise RuntimeError("Version of {0} is None. Maybe the repository doesn't contain debian directory ".format(rackhd_repo_dir))

        # Add the commit of repository RackHD/RackHD to downstream parameters
        manifest = Manifest(manifest_file)
        # commit of repository RackHD/RackHD
        rackhd_commit = ''
        for repo in manifest.repositories:
            repository = repo['repository'].lower()
            if repository.endswith('rackhd') or repository.endswith('rackhd.git'):
                rackhd_commit = repo['commit-id']

        if rackhd_commit != '':
            params['RACKHD_COMMIT'] = rackhd_commit
        else:
            raise RuntimeError("commit-id of RackHD is None. Please check the manifest {0}".format(manifest_file))

        # Write downstream parameters to downstream parameter file.
        common.write_parameters(parameter_file, params)
    except Exception, e:
        raise RuntimeError("Failed to write downstream parameter file \ndue to {0}".format(e))

def test_max_diff_two_files_vs_files_at_many_levels(self):
    m1 = Manifest_from_walking_unpacked_tar("two_files.tar")
    m2 = Manifest_from_walking_unpacked_tar("files_at_many_levels.tar")
    self.assertEqual(list(Manifest.diff(m1, m2, recursive = True)), [
        (None, "baz"),
        (None, "baz/bar"),
        (None, "baz/baz"),
        (None, "baz/baz/bar"),
        (None, "baz/baz/baz"),
        (None, "baz/baz/foo"),
        (None, "baz/foo")])

def __init__(self, dest, branch, builddir, git_credential=None, force=False, jobs=1):
    """
    Generate a new manifest according to the manifest sample: manifest.json

    _dest_manifest_file: the path of new manifest
    _branch: the branch name
    _force: overwrite the destination if it exists.
    _builddir: the destination for checked out repositories.
    _jobs: number of parallel jobs to run. The number is related to the compute architecture, multi-core processors...
    :return: None
    """
    self._dest_manifest_file = dest
    self._branch = branch
    self._builddir = builddir
    self._force = force
    self._jobs = jobs
    self._manifest = Manifest.instance_of_sample()
    self.repo_operator = RepoOperator(git_credential)
    self.check_builddir()

def wrap_manifest_file(self, file_path):
    """
    Generate the manifest file
    """
    try:
        all_prs = self.get_all_related_prs(self.__repo, self.__merge_commit_sha, self.__pr_number)
        under_test_prs = self.get_under_test_prs()
        # instance of manifest template
        manifest = Manifest.instance_of_sample("manifest-pr-gate.json")

        # wrap with pr
        repo_url_list = [repo["repository"] for repo in manifest.repositories]
        for pr in all_prs:
            repo, sha1, _ = pr
            repo_url = "https://github.com/{0}.git".format(repo)
            # normalize the repo_url case, so the url is completely consistent with the repo in the manifest
            repo_url = [url for url in repo_url_list if url.lower() == repo_url][0]
            if repo in under_test_prs:
                manifest.update_manifest(repo_url, "", sha1, True)
            else:
                manifest.update_manifest(repo_url, "", sha1, False)

        # fill in blank commit with latest commit sha
        for repo in manifest.repositories:
            if 'commit-id' in repo and repo['commit-id'] == "":
                repo_name = "/".join(repo["repository"][:-4].split("/")[3:])
                latest_commit = self.get_latest_commit(repo_name, self.__target_branch)
                repo["commit-id"] = latest_commit

        manifest.validate_manifest()
        manifest.dump_to_json_file(file_path)
    except Exception as error:
        print "ERROR occurred in parsing manifest: {0}".format(error)
        sys.exit(1)

def validate_manifest_files(self, *args):
    """
    validate several manifest files
    For example: validate_manifest_files(file1, file2) or validate_manifest_files(file1, file2, file3)
    """
    validate_result = True
    for filename in args:
        try:
            manifest = Manifest(filename)
            manifest.validate_manifest()
            print "manifest file {0} is valid".format(filename)
        except KeyError as error:
            print "Failed to validate manifest file {0}".format(filename)
            print "\nERROR: \n{0}".format(error.message)
            validate_result = False
    return validate_result

def __init__(self, dest, builddir, git_credential=None, force=False, jobs=1):
    self._jenkins_author = config.gitbit_identity["username"]
    self._dest_manifest_file = dest
    self._builddir = builddir
    self._force = force
    self._jobs = jobs
    self._manifest = Manifest.instance_of_sample()
    self.repo_operator = RepoOperator(git_credential)
    self.check_builddir()

def test_custom_key(self):
    m1 = self.mfp.build(["1foo", "2bar", "3baz"])
    m2 = self.mfp.build(["abc", "def", "ghi"])
    m3 = self.mfp.build(["123", "456", "789"])
    self.assertEqual(
        list(Manifest.merge(m1, m2, m3, key = lambda px: True)), [
            ("1foo", "abc", "123"),
            ("2bar", "def", "456"),
            ("3baz", "ghi", "789")])

def update_manifest_repo(self, dir_name, repo_commit_message):
    """
    Update manifest repository based on its contents and user arguments.
    :param dir_name: The directory of the repository
    :return: if the repo is updated, return the updated manifest file path and the manifest object;
             otherwise, return None, None
    """
    if self.__manifest_file is not None:
        path_name = os.path.join(dir_name, self.__manifest_file)
        if os.path.isfile(path_name):
            try:
                manifest = Manifest(path_name, self.__git_credentials)
                manifest.update_manifest(self.__repo, self.__branch, self.__commit)
                if manifest.changed:
                    manifest.write_manifest_file(path_name, self.__dryrun)
                    return path_name, manifest
                else:
                    print "No changes to {0}".format(manifest.name)
            except KeyError as error:
                self.cleanup_and_exit("Failed to create a Manifest instance for the manifest file {0}\nError:{1}"\
                                      .format(self.__manifest_file, error.message), 1)
            except RuntimeError as error:
                self.cleanup_and_exit("Failed to update manifest repo\nError:{0}".format(error.message), 1)
    else:
        for item in os.listdir(dir_name):
            path_name = os.path.join(dir_name, item)
            if os.path.isfile(path_name):
                try:
                    manifest = Manifest(path_name, self.__git_credentials)
                    manifest.update_manifest(self.__repo, self.__branch, self.__commit)
                    if manifest.changed:
                        manifest.write_manifest_file(path_name, self.__dryrun)
                        return path_name, manifest
                    else:
                        print "No changes to {0}".format(manifest.name)
                except KeyError as error:
                    self.cleanup_and_exit("Failed to create a Manifest instance for the manifest file {0}\nError:{1}"\
                                          .format(path_name, error.message), 1)
                except RuntimeError as error:
                    self.cleanup_and_exit("Failed to update manifest repo\nError:{0}".format(error.message), 1)
    return None, None

def main(args):
    global android_sdk
    # command platform project_dir
    command = args[1]
    platform = args[2]
    project_dir = os.path.expanduser(args[3])
    manifest = Manifest(os.path.join(project_dir, 'manifest'))
    error = False

    if is_android(platform):
        android_sdk = AndroidSDK(manifest.get_property('android.sdk'), 4)

    if command == 'run':
        def run_callback(gen_project_dir):
            script = os.path.abspath(os.path.join(template_dir, '..', platform, 'builder.py'))
            script_args = [script, 'run', gen_project_dir]
            if is_android(platform):
                script_args.append(android_sdk.get_android_sdk())

            rc = run_python(script_args)

            # run the project
            if rc == 1:
                if is_ios(platform):
                    error = os.path.join(gen_project_dir, 'build', 'iphone', 'build', 'build.log')
                    print "[ERROR] Build Failed. See: %s" % os.path.abspath(error)
                else:
                    print "[ERROR] Build Failed."

        stage(platform, project_dir, manifest, run_callback)
    elif command == 'run-emulator':
        if is_android(platform):
            def run_emulator_callback(gen_project_dir):
                script = os.path.abspath(os.path.join(template_dir, '..', platform, 'builder.py'))
                run_python([script, 'run-emulator', gen_project_dir, android_sdk.get_android_sdk()])

            stage(platform, project_dir, manifest, run_emulator_callback)

    if error:
        sys.exit(1)
    else:
        sys.exit(0)

def main():
    # parse arguments
    args = parse_command_line(sys.argv[1:])

    try:
        manifest = Manifest(args.manifest)
        manifest.validate_manifest()
    except KeyError as error:
        print "Failed to create a Manifest instance for the manifest file {0} \nERROR:\n{1}"\
              .format(args.manifest, error.message)
        sys.exit(1)

    if args.publish:
        if args.git_credential:
            repo_operator = RepoOperator(args.git_credential)
        else:
            print "Error occurred when getting the credential for the submodule update"
            sys.exit(1)
    else:
        repo_operator = RepoOperator(args.git_credential)

    if os.path.isdir(args.build_dir):
        print args.build_dir
        for filename in os.listdir(args.build_dir):
            try:
                repo_dir = os.path.join(args.build_dir, filename)
                repo_operator.submodule_init(repo_dir)
                repo_operator.submodule_update(repo_dir)
                submodules_list = repo_operator.get_current_submodule(repo_dir)
                if len(submodules_list) == 0:
                    continue
                for key in submodules_list:
                    commit_id = get_manifest_commit_id(key, manifest)
                    if commit_id != None:
                        sub_dir = repo_dir + "/" + key
                        repo_operator.checkout_to_commit(sub_dir, commit_id)
                if args.publish:
                    print "start to publish update submodule in {0}".format(repo_dir)
                    commit_message = "update submodule for new commit {0}".format(args.version)
                    repo_operator.push_repo_changes(repo_dir, commit_message)
            except Exception, e:
                print "Failed to update submodule of {0} due to {1}".format(filename, e)
                sys.exit(1)

def init_main():
    doc = '''Usage: oxt-pkg-init [options] <package-path>

    --help          Print this screen.
    '''
    from docopt import docopt
    args = docopt(doc)
    logging.basicConfig(level=logging.INFO)

    package_path = args['<package-path>']

    manifest = Manifest()
    description = Description()

    with open_storage(package_path, 'w') as stg:
        with makedirs_to_file(stg, MANIFEST_PATH).open('w') as f:
            manifest.dump(f)
        with makedirs_to_file(stg, DESCRIPTION_PATH).open('w') as f:
            description.write(f)

def check_main():
    doc = '''Usage: oxt-pkg-check [options] <package-path>

    --help          Print this screen.
    '''
    from docopt import docopt
    args = docopt(doc)
    logging.basicConfig(level=logging.INFO)

    package_path = args['<package-path>']

    with open_storage(package_path) as pkg:
        with resolve_path(pkg, MANIFEST_PATH).open() as f:
            manifest = Manifest()
            manifest.load(f)
        with resolve_path(pkg, DESCRIPTION_PATH).open() as f:
            description = Description.parse(f)

        missing = dict()
        for path in manifest:
            node = resolve_path(pkg, path)
            if node is None:
                missing[path] = MANIFEST_PATH
        for path in description.required_files():
            node = resolve_path(pkg, path)
            if node is None:
                missing[path] = DESCRIPTION_PATH

        if missing:
            for path in sorted(missing):
                referer = missing[path]
                logger.error('%s: MISSING (referred in %s)', path, referer)
            raise SystemExit(1)
        else:
            logger.info('%s: OK, identifier=%s, version=%s',
                        package_path, description.identifier, description.version)

def test_append_dependencies_to_tree_dicts(self):
    dep_json = {
        'buffer': {
            'version': '4.0.0',
            'dependencies': {
                'bar': {
                    'version': '0.0.1',
                    'dependencies': {
                        'biz': {
                            'version': '0.0.2'
                        },
                        'fiz': {
                            'version': '9.0.0',
                            'from': 'git+http://github.com/fiz',
                            'resolved': 'git+http://github.com/fiz#abc123'
                        }
                    },
                }
            }
        },
        'io': {
            'version': '0.0.1'
        }
    }
    deps = tree()
    Manifest.append_dependencies_to_tree(deps, dep_json)

    expected = tree()
    buffer_str = 'buffer===4.0.0===https://registry.npmjs.org/buffer/-/buffer-4.0.0.tgz'
    bar_str = 'bar===0.0.1===https://registry.npmjs.org/bar/-/bar-0.0.1.tgz'
    biz_str = 'biz===0.0.2===https://registry.npmjs.org/biz/-/biz-0.0.2.tgz'
    io_str = 'io===0.0.1===https://registry.npmjs.org/io/-/io-0.0.1.tgz'
    fiz_str = 'fiz===abc123===git+http://github.com/fiz#abc123'
    expected[buffer_str][bar_str][biz_str] = tree()
    expected[buffer_str][bar_str][fiz_str] = tree()
    expected[io_str] = tree()

    self.assertEqual(deps, expected)

def test_entries_w_direct_attrs(self):
    m = Manifest()
    m.add(["foo"], {"uid": 1234, "gid": 321})
    m.add(["foo", "bar"], {"size": 123, "mode": 0o040755})
    m.add(["foo", "bar", "baz"], {"xyzzy": "z", "a": "b", "mode": 0o100644})
    s = StringIO()
    ManifestFileWriter().write(m, s, indent = " ")
    self.assertEqual(s.getvalue(), """\
foo {gid: 321, uid: 1234}
 bar {mode: 0o040755, size: 123}
  baz {a: b, mode: 0o100644, xyzzy: z}
""")

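# A hedged sketch (not from the original test suite) showing the same write
# path with a wider indent; it assumes Manifest.add() and
# ManifestFileWriter().write() behave exactly as exercised in the test above,
# and the entry names are illustrative.
from io import StringIO

m = Manifest()
m.add(["docs"], {"uid": 1000})
m.add(["docs", "readme.txt"], {"size": 42})
buf = StringIO()
ManifestFileWriter().write(m, buf, indent="    ")  # four spaces per nesting level
print(buf.getvalue())
# expected output (assuming attrs are written sorted by key, as in the test):
# docs {uid: 1000}
#     readme.txt {size: 42}
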
def download_manifest_file(self):
    """
    Download the manifest json files.
    Return the directory name which stores the manifest. The directory is temporary
    and is deleted in the cleanup_and_exit function.
    :return: A string containing the name of the folder where the manifest file was downloaded.
    """
    directory_name = tempfile.mkdtemp()
    if os.path.isdir(directory_name):
        pass
        # For now the 'upload to bintray' script is separate from this script,
        # so this directory shouldn't be deleted before uploading.
        # The code below will be enabled when the bintray upload functions are folded into this script.
        # self.__cleanup_directories.append(directory_name)
    else:
        self.cleanup_and_exit("Failed to make temporary directory for the repository: {0}".format(url), 1)

    try:
        url = "/".join([self.__manifest_download_url, self.__manifest_file])
        dest_dir = "/".join([directory_name, self.__manifest_file])
        if os.environ['BINTRAY_USERNAME'] and os.environ['BINTRAY_API_KEY']:
            print "Requests bintray with token"
            auth = (os.environ['BINTRAY_USERNAME'].strip(), os.environ['BINTRAY_API_KEY'].strip())
            resp = requests.get(url, auth=auth)
        else:
            print "Requests without token"
            resp = requests.get(url)

        if resp.ok:
            with open(dest_dir, "wb") as file_handle:
                file_handle.write(resp.content)
        elif resp.status_code == 404:
            # If there's no manifest file on the bintray server, init an empty one
            print "can't find manifest in remote server, will use template manifest"
            Manifest.instance_of_sample().dump_to_json_file(dest_dir)
        else:
            print "Unknown error, {0}".format(resp.status_code)
        return directory_name
    except RuntimeError as error:
        self.cleanup_and_exit(error, 1)

def __init__(self, manifest_path):
    rhnSQL.initDB()
    self.manifest = Manifest(manifest_path)

    self.sat5_cert = SatelliteCert()
    self.sat5_cert.load(self.manifest.get_satellite_certificate())

    verify_mappings()

    # Channel families metadata
    f = open(constants.CHANNEL_FAMILY_MAPPING_PATH, 'r')
    try:
        self.families = json.load(f)
    finally:
        if f is not None:
            f.close()

    self.families_to_import = []

def __init__(self, manifest_path, cert_path):
    rhnSQL.initDB()
    self.manifest = Manifest(manifest_path)

    # Satellite 5 certificate
    with open(cert_path, 'r') as f:
        self.sat5_cert = SatelliteCert()
        content = f.read()
        self.sat5_cert.load(content)

    # Channel families metadata
    with open(constants.CHANNEL_FAMILY_MAPPING_PATH, 'r') as f:
        self.families = json.load(f)

    with open(constants.PRODUCT_FAMILY_MAPPING_PATH, 'r') as f:
        self.products = json.load(f)

    self.families_to_import = []

def test_diff_empties(self):
    m1 = Manifest()
    m2 = ManifestFileParser().build([""])
    m3 = Manifest_from_walking_unpacked_tar("empty.tar")
    self.assertEqual(list(Manifest.diff(m1, m2)), [])
    self.assertEqual(list(Manifest.diff(m1, m3)), [])
    self.assertEqual(list(Manifest.diff(m2, m1)), [])
    self.assertEqual(list(Manifest.diff(m2, m3)), [])
    self.assertEqual(list(Manifest.diff(m3, m1)), [])
    self.assertEqual(list(Manifest.diff(m3, m2)), [])
    self.assertEqual(m1, m2)
    self.assertEqual(m2, m3)
    self.assertEqual(m3, m1)

def build_from(package_path, src_folder, manifest_path=None,
               description_path=None, files=[], excludes=[],
               storage_factory=ZipFileStorage):
    if manifest_path:
        with file(manifest_path) as f:
            manifest = Manifest()
            manifest.load(f)
    else:
        node = resolve_path(src_folder, MANIFEST_PATH)
        if node:
            with node.open() as f:
                manifest = Manifest()
                manifest.load(f)
        else:
            logger.error('%s: not found' % MANIFEST_PATH)
            raise IOError('%s: not found' % MANIFEST_PATH)

    if description_path:
        with file(description_path) as f:
            description = Description.parse(f)
    else:
        node = resolve_path(src_folder, DESCRIPTION_PATH)
        if node:
            with node.open() as f:
                description = Description.parse(f)
        else:
            raise IOError('%s: not found' % DESCRIPTION_PATH)

    package_path = make_output_path(package_path, description)

    package_files = dict()

    from itertools import chain
    required_files = chain(manifest, description.required_files())
    for path in required_files:
        node = resolve_path(src_folder, path)
        if node is None:
            raise IOError('%s: not found' % path)
        package_files[path] = node

    files = ((path, resolve_path(src_folder, path)) for path in files)
    files = expand_folders(files)
    files = exclude_files(excludes, files)
    package_files.update(files)

    return build(package_path, manifest, description, package_files,
                 storage_factory=storage_factory)

def __init__(self, dest, branch, builddir, git_credential, force=False, jobs=1):
    """
    Generate a new manifest according to the manifest sample: manifest.json

    _dest_manifest_file: the path of new manifest
    _branch: the branch name
    _force: overwrite the destination if it exists.
    _builddir: the destination for checked out repositories.
    _jobs: number of parallel jobs to run. The number is related to the compute architecture, multi-core processors...
    :return: None
    """
    self._dest_manifest_file = dest
    self._branch = branch
    self._builddir = builddir
    self._force = force
    self._jobs = jobs
    self._manifest = Manifest.instance_of_sample()
    self.repo_operator = RepoOperator(git_credential)
    self.check_builddir()