def make_package(cls, rule):
    """@override

    Build a system package from a rule providing either a 'packager'
    command or a 'data' directory, stage the control script, shared
    files and control yaml, then import the result into the repository.

    Args:
        rule (dict): package rule; must contain 'name' and either
            'packager' (command run to populate the package dir) or
            'data' (path copied into the package). 'control' optionally
            points at the control script.

    Returns:
        tuple(string, string): the package name and the imported version.

    Raises:
        Error: the packager command fails, the data path or control
            script does not exist, or neither 'data' nor 'packager' is
            given.
    """
    name = rule['name']
    workingdir = PkgUtils.create_working_dir(name)
    if 'packager' in rule and rule['packager']:
        data_path = os.path.join(workingdir, name)
        # create the data_path directory (was `mkdir -p` through a shell
        # with an interpolated path; os.makedirs avoids the shell and any
        # quoting issues)
        os.makedirs(data_path, exist_ok=True)
        # run the packager command to setup the package
        cmd = '%s %s' % (rule['packager'], data_path)
        ret = subprocess.call(cmd, shell=True)
        if ret != 0:
            raise Error(TermColor.ColorStr(
                'failed to execute command %s for rule %s' % (cmd, name),
                'RED'))
    elif 'data' in rule and rule['data']:
        data_path = rule['data']
        # ensure the file exists
        if not os.path.exists(data_path):
            raise Error(TermColor.ColorStr(
                'data path does NOT exist: %s' % data_path, 'RED'))
        # cannot use -L or it will complain about non-referent symlinks
        subprocess.check_call(['/usr/bin/rsync', '-a', data_path, workingdir])
    else:
        raise Error(TermColor.ColorStr(
            'data OR packager field must be specified for rule %s' % name,
            'RED'))
    # copy the control script. assume it lives in the data directory if not
    # specified; otherwise rule['control'] holds the control script location
    control_path = rule['control'] if 'control' in rule \
        else os.path.join(data_path, Flags.ARGS.pkg_sys_ctrl_name)
    if not os.path.exists(control_path):
        raise Error(TermColor.ColorStr(
            'control script %s does not exist for rule %s' %
            (control_path, name), 'RED'))
    ctrl_dest_path = os.path.join(workingdir, Flags.ARGS.pkg_sys_ctrl_name)
    shutil.copy2(control_path, ctrl_dest_path)
    # rwxr-xr-- : the control script must be executable in the package
    os.chmod(ctrl_dest_path, 0o754)
    # copy the shared files
    CopyShared.copy(workingdir)
    # create the yaml read by the control script
    ydata = {
        'name': name,
        # the data directory in the package
        'subdir': FileUtils.UnixBasename(data_path),
    }
    yaml_path = os.path.join(workingdir, Flags.ARGS.pkg_sys_ctrl_yaml_name)
    with open(yaml_path, 'w') as f:
        yaml.dump(ydata, f, default_flow_style=False)
    # import the package
    packages = Packages(host=Flags.ARGS.pkg_host,
                        user=Flags.ARGS.pkg_user,
                        root=Flags.ARGS.pkg_repo)
    if Flags.ARGS.pkg_version_prefix:
        return name, packages.f_import(workingdir, name,
                                       Flags.ARGS.pkg_version_prefix)
    return name, packages.f_import(workingdir, name)
def make_packages(cls, packages):
    """builds a set of package rules and returns the dict from package
    name to package version

    Args:
        packages (list) - list of package rules to build

    Returns:
        dict from package name to package version

    Raises:
        Error - one or more of the packages could not be created
    """
    pkg_prefix = Packages.get_valid_package_prefix(
        GitUtil.get_latest_commit()[0:6])
    # unique temp file the build writes name -> version entries into.
    # random.randint requires int bounds; 1e5 is a float and raises a
    # TypeError on Python 3.12+, so use 10**5.
    tmp_fn = '%d_%d' % (int(time.time()), random.randint(1, 10**5))
    cmd = 'flash --pkg_version_prefix=%s --pkg_version_path=%s run %s' % \
        (pkg_prefix, tmp_fn, ' '.join(packages))
    process = subprocess.Popen(['/bin/bash', '-c', cmd])
    process.wait()
    if process.returncode != 0:
        if os.path.exists(tmp_fn):
            # delete the temporary package file if it exists
            os.remove(tmp_fn)
        raise Error('one or more of the packages cannot be created')
    # construct the release yaml from the temp package list
    with open(tmp_fn, 'r') as f:
        packages = yaml.safe_load(f)
    os.remove(tmp_fn)  # delete the temporary package file
    return packages
def make_packages(cls, packages):
    """builds a set of package rules and returns the dict from package
    name to package version

    Args:
        packages (list) - list of package rules to build

    Returns:
        dict from package name to package version

    Raises:
        Error - one or more of the packages could not be created
    """
    pkg_prefix = Packages.get_valid_package_prefix(
        GitUtil.get_latest_commit()[0:6])
    # name a unique temporary file for the build to record versions in.
    # NOTE: random.randint needs int bounds — the previous 1e5 was a
    # float, which modern Python rejects.
    tmp_fn = '%d_%d' % (int(time.time()), random.randint(1, 10**5))
    cmd = 'flash --pkg_version_prefix=%s --pkg_version_path=%s run %s' % \
        (pkg_prefix, tmp_fn, ' '.join(packages))
    process = subprocess.Popen(['/bin/bash', '-c', cmd])
    process.wait()
    if process.returncode != 0:
        # clean up the temporary package file before reporting the failure
        if os.path.exists(tmp_fn):
            os.remove(tmp_fn)
        raise Error('one or more of the packages cannot be created')
    # construct the release yaml from the temp package list
    with open(tmp_fn, 'r') as f:
        packages = yaml.safe_load(f)
    # delete the temporary package file
    os.remove(tmp_fn)
    return packages
def make_package(cls, rule):
    """@override

    Build a binary package: compile the rule's build target, stage the
    binary plus the loop/control scripts and control yaml into a working
    directory, and import the result into the package repository.

    Args:
        rule (dict): package rule; must contain 'name', 'rule' (the build
            target) and 'ctrl' (path to the yaml control file, relative
            to the source root).

    Returns:
        tuple(string, string): the package name and the imported version.

    Raises:
        Error: 'rule' or 'ctrl' is missing/empty, or the build fails.
    """
    name = rule['name']
    if 'rule' not in rule or not rule['rule']:
        err = 'no rule field for %s' % name
        TermColor.Error(err)
        raise Error(err)
    if 'ctrl' not in rule or not rule['ctrl']:
        err = 'no yaml ctrl field for %s' % name
        TermColor.Error(err)
        raise Error(err)
    # build the target. Argument list instead of a shell string so a rule
    # name containing shell metacharacters cannot be misinterpreted.
    subprocess.check_call(['flash', 'build', rule['rule']])
    # clean up the old package and create the working directory
    workingdir = PkgUtils.create_working_dir(name)
    # collect the files to stage: source path -> name inside the package
    files = {
        # the binary path
        os.path.join(FileUtils.GetBinDir(), rule['rule']):
            os.path.basename(rule['rule']),
        # the loop script
        os.path.join(FileUtils.GetSrcRoot(), Flags.ARGS.loop_script_path):
            'loop',
        # the control script
        os.path.join(FileUtils.GetSrcRoot(), Flags.ARGS.pkg_bin_ctrl_path):
            'control',
        # the yaml file
        os.path.join(FileUtils.GetSrcRoot(), rule['ctrl']): 'control.yaml',
    }
    # copy the files
    for src, dest in files.items():
        shutil.copy2(src, os.path.join(workingdir, dest))
    # copy the shared files
    CopyShared.copy(workingdir)
    # import the package
    packages = Packages(host=Flags.ARGS.pkg_host,
                        user=Flags.ARGS.pkg_user,
                        root=Flags.ARGS.pkg_repo)
    if Flags.ARGS.pkg_version_prefix:
        return name, packages.f_import(workingdir, name,
                                       Flags.ARGS.pkg_version_prefix)
    return name, packages.f_import(workingdir, name)
def make_package(cls, rule):
    """@override

    Stage the rule's data directory (minus compiled python files) into a
    fresh working directory together with the control machinery, then
    import it as a package.
    """
    name = rule['name']

    def _abort(msg):
        # print the error in color, then raise it
        TermColor.Error(msg)
        raise Error(msg)

    # a non-empty string 'data' field is required
    data_field = rule.get('data')
    if not data_field or not isinstance(data_field, str):
        _abort('invalid data field for rule %s' % name)
    data_path = data_field
    # the path itself must exist on disk
    if not os.path.exists(data_path):
        _abort('data path does NOT exist: %s' % data_path)
    packages = Packages(host=Flags.ARGS.pkg_host,
                        user=Flags.ARGS.pkg_user,
                        root=Flags.ARGS.pkg_repo)
    workingdir = PkgUtils.create_working_dir(name)
    # copy the payload into the working directory, excluding compiled
    # python files (-aHL keeps permissions/hardlinks, follows symlinks)
    rsync_cmd = ['/usr/bin/rsync', '-aHL', '--exclude', '*.pyc',
                 data_path, workingdir]
    subprocess.check_call(rsync_cmd)
    # install the control script and make it executable (rwxr-xr--)
    ctrl_path = os.path.join(workingdir, Flags.ARGS.pkg_ctrl_name)
    shutil.copy2(Flags.ARGS.pkg_ctrl_path, ctrl_path)
    os.chmod(ctrl_path, 0o754)
    # the shared control utility scripts
    CopyShared.copy(workingdir)
    # the yaml definition the control script reads
    cls._write_control_yaml(name, data_path, workingdir)
    # import the package, optionally under the requested version prefix
    version_prefix = Flags.ARGS.pkg_version_prefix
    if version_prefix:
        return name, packages.f_import(workingdir, name, version_prefix)
    return name, packages.f_import(workingdir, name)
def make_package(cls, rule):
    """@override

    Stage the rule's data directory (minus compiled python files) into a
    working directory with the control script machinery, then import it
    as a package.

    Args:
        rule (dict): package rule; must contain 'name' and a non-empty
            string 'data' field pointing at an existing path.

    Returns:
        tuple(string, string): the package name and the imported version.

    Raises:
        Error: the 'data' field is missing/invalid or the path does not
            exist.
    """
    name = rule['name']
    # ensure a data path is specified.
    # `basestring` is Python 2 only — use `str` (consistent with the
    # sibling implementation of this method in this file).
    if 'data' not in rule or \
        not rule['data'] or \
        not isinstance(rule['data'], str):
        err = 'invalid data field for rule %s' % name
        TermColor.Error(err)
        raise Error(err)
    data_path = rule['data']
    # ensure the file exists
    if not os.path.exists(data_path):
        err = 'data path does NOT exist: %s' % data_path
        TermColor.Error(err)
        raise Error(err)
    packages = Packages(host=Flags.ARGS.pkg_host,
                        user=Flags.ARGS.pkg_user,
                        root=Flags.ARGS.pkg_repo)
    workingdir = PkgUtils.create_working_dir(name)
    # copy the package to the working directory AND create the working
    # directory AND exclude specific invalid files
    subprocess.check_call(['/usr/bin/rsync', '-aHL', '--exclude', '*.pyc',
                           data_path, workingdir])
    # copy the control script and make it executable.
    # `0754` is Python 2 octal syntax (a SyntaxError in Python 3) — the
    # modern spelling is 0o754 (rwxr-xr--).
    ctrl_path = os.path.join(workingdir, Flags.ARGS.pkg_ctrl_name)
    shutil.copy2(Flags.ARGS.pkg_ctrl_path, ctrl_path)
    os.chmod(ctrl_path, 0o754)
    # the shared control utility scripts
    CopyShared.copy(workingdir)
    # create the yaml definition for the control script
    cls._write_control_yaml(name, data_path, workingdir)
    # import the package
    if Flags.ARGS.pkg_version_prefix:
        return name, packages.f_import(workingdir, name,
                                       Flags.ARGS.pkg_version_prefix)
    return name, packages.f_import(workingdir, name)
def run(self):
    """build and create the package

    Returns:
        tuple(string, string) the package name followed by the package
        version name
    """
    # default the version prefix from the latest git commit hash
    if not Flags.ARGS.pkg_version_prefix:
        commit = GitUtil.get_latest_commit()
        Flags.ARGS.pkg_version_prefix = \
            Packages.get_valid_package_prefix(commit[0:6])
    version = self._packager.make_package(self._rule)
    # append a "name: version" yaml entry to the version file, if requested
    version_path = Flags.ARGS.pkg_version_path
    if version_path:
        pkg_name, pkg_version = version
        entry = yaml.safe_dump({pkg_name: pkg_version},
                               default_flow_style=False)
        with open(version_path, 'a') as f:
            f.write('%s' % entry)
    return version
def make_package(cls, rule):
    """@override

    Build a system package from a rule supplying either a 'packager'
    command or a 'data' directory. Stages the payload, control script,
    shared files and control yaml, then imports the package.

    Args:
        rule (dict): package rule; must contain 'name' and either
            'packager' or 'data'. 'control' optionally names the control
            script; otherwise it is looked up inside the data directory.

    Returns:
        tuple(string, string): the package name and the imported version.

    Raises:
        Error: the packager command fails, the data path or control
            script is missing, or neither 'data' nor 'packager' is set.
    """
    name = rule['name']
    workingdir = PkgUtils.create_working_dir(name)
    if 'packager' in rule and rule['packager']:
        data_path = os.path.join(workingdir, name)
        # create the data_path directory directly — no need to shell out
        # to `mkdir -p` with an interpolated (unquoted) path
        os.makedirs(data_path, exist_ok=True)
        # run the packager command to setup the package
        cmd = '%s %s' % (rule['packager'], data_path)
        ret = subprocess.call(cmd, shell=True)
        if ret != 0:
            raise Error(
                TermColor.ColorStr(
                    'failed to execute command %s for rule %s' % (cmd, name),
                    'RED'))
    elif 'data' in rule and rule['data']:
        data_path = rule['data']
        # ensure the file exists
        if not os.path.exists(data_path):
            raise Error(
                TermColor.ColorStr(
                    'data path does NOT exist: %s' % data_path, 'RED'))
        # cannot use -L or it will complain about non-referent symlinks
        subprocess.check_call(
            ['/usr/bin/rsync', '-a', data_path, workingdir])
    else:
        raise Error(
            TermColor.ColorStr(
                'data OR packager field must be specified for rule %s' % name,
                'RED'))
    # copy the control script. assume it is in the data directory if not
    # specified; otherwise rule['control'] holds its location
    control_path = rule['control'] if 'control' in rule \
        else os.path.join(data_path, Flags.ARGS.pkg_sys_ctrl_name)
    if not os.path.exists(control_path):
        raise Error(TermColor.ColorStr(
            'control script %s does not exist for rule %s' %
            (control_path, name), 'RED'))
    ctrl_dest_path = os.path.join(workingdir, Flags.ARGS.pkg_sys_ctrl_name)
    shutil.copy2(control_path, ctrl_dest_path)
    # rwxr-xr-- : the control script must be executable
    os.chmod(ctrl_dest_path, 0o754)
    # copy the shared files
    CopyShared.copy(workingdir)
    # create the yaml consumed by the control script
    ydata = {
        'name': name,
        # the data directory in the package
        'subdir': FileUtils.UnixBasename(data_path),
    }
    yaml_path = os.path.join(workingdir, Flags.ARGS.pkg_sys_ctrl_yaml_name)
    with open(yaml_path, 'w') as f:
        yaml.dump(ydata, f, default_flow_style=False)
    # import the package
    packages = Packages(host=Flags.ARGS.pkg_host,
                        user=Flags.ARGS.pkg_user,
                        root=Flags.ARGS.pkg_repo)
    if Flags.ARGS.pkg_version_prefix:
        return name, packages.f_import(workingdir, name,
                                       Flags.ARGS.pkg_version_prefix)
    return name, packages.f_import(workingdir, name)