Example #1
  def WorkHorse(cls, rules):
    """Runs the workhorse for the command.

    Args:
      rules: list: List of rules to be handled.

    Return:
      (list, list): Returns a tuple of lists in the form
          (successful_rules, failed_rules) specifying rules that succeeded and
          ones that failed.
    """
    (successful_build, failed_build) = Builder.WorkHorse(rules)

    # All our binaries assume they will be run from the source root.
    os.chdir(FileUtils.GetSrcRoot())

    pipe_output = len(successful_build) > 1
    args = zip(itertools.repeat(cls), itertools.repeat('_RunSingeRule'),
               successful_build, itertools.repeat(pipe_output))
    rule_res = ExecUtils.ExecuteParallel(args, Flags.ARGS.pool_size)
    successful_run = []; failed_run = []
    for (res, rule) in rule_res:
      if res == 1:
        successful_run += [rule]
      elif res == -1:
        failed_run += [rule]

    return (successful_run, failed_build + failed_run)
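For context, ExecUtils.ExecuteParallel itself is not shown in these examples. A minimal sketch of the calling convention the snippet above assumes (each args tuple is (class, method name, *method args), fanned out over a worker pool, with the per-call return values collected into a list) could look like this; the pool type and names are assumptions, not the real pylib implementation:

    # Hedged sketch of the assumed ExecuteParallel contract; the real
    # pylib implementation is not shown in these examples.
    from multiprocessing.pool import ThreadPool

    def _invoke(packed):
        # packed is (cls, method_name, *method_args), e.g.
        # (cls, '_RunSingeRule', rule, pipe_output).
        cls, method_name = packed[0], packed[1]
        return getattr(cls, method_name)(*packed[2:])

    def execute_parallel(args, pool_size):
        # Returns the list of per-call results, e.g. (res, rule) tuples.
        with ThreadPool(pool_size) as pool:
            return pool.map(_invoke, list(args))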
Example #2
    def WriteMakefile(cls, spec, makefile):
        src_root = FileUtils.GetSrcRoot()
        name = spec['name']
        with open(makefile, 'w') as f:
            interface_files = {
                x
                for x in spec.get('src', set()) if x.endswith('.i')
            }
            for interface_file in interface_files:
                wrapper_file = cls.__GetWrapperFileName(interface_file)
                gen_out_dir = cls.__GetGenModuleDir(interface_file)
                target_lib = cls.__GetLibFileName(interface_file, name)
                target_lib_link = os.path.join(
                    gen_out_dir,
                    '_%s.so' % cls.__GetGenModuleName(interface_file))
                #target_lib = target_lib.replace('.so', '_swig.so') # TODO(KK) fix this hack
                assert gen_out_dir.startswith(
                    src_root)  # otherwise we will loop forever
                print("""
%(wrapper_file)s: %(interface_file)s
\t@mkdir -p %(gen_out_dir)s
\t@ln -s -f %(target_lib)s %(target_lib_link)s
\t@swig -c++ -python -o $@ -outdir %(gen_out_dir)s $<
\t@p=%(gen_out_dir)s; while [[ $$p != $(SRCROOT) ]]; do\
  touch $$p/__init__.py; \
  p=`dirname $$p`; \
done
""" % locals(),
                      file=f)
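For illustration, one rendered rule, with hypothetical paths (GetSrcRoot() assumed to be /src and the swig output tree assumed to live under /src/.gen/swig), would look roughly like:

    /src/.gen/swig/pylib/math/ops_wrap.cc: /src/pylib/math/ops.i
        @mkdir -p /src/.gen/swig/pylib/math
        @ln -s -f /src/lib/_ops_swig.so /src/.gen/swig/pylib/math/_ops.so
        @swig -c++ -python -o $@ -outdir /src/.gen/swig/pylib/math $<
        @p=/src/.gen/swig/pylib/math; while [[ $$p != $(SRCROOT) ]]; do  touch $$p/__init__.py;   p=`dirname $$p`; done

The trailing while loop walks from the output directory up to $(SRCROOT), touching an __init__.py at each level so python treats the generated tree as importable packages; this is also why the assert on gen_out_dir matters, since a directory outside src_root would never reach $(SRCROOT) via `dirname` and the loop would not terminate.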
Example #3
def get_username_and_password(service,
                              section=None,
                              dir='static_data/push/auto/credentials'):
    """
  Parse config file named `service`.cfg in dir.

  Args:
    service (string): Name of service to get credentials for. This should match
      a file in dir (with a .cfg extension).
    section (string): Section of the config file to use. Defaults to the same
      as `service`.
    dir (string): Directory (relative to source root) to look in for the
      config file.

  Returns:
    (username, password) for the requested service

  Raises:
    ValueError if file does not exist
    ConfigParser.NoSectionError if section does not exist
    ConfigParser.NoOptionError if username or password is
      not in the requested section
  """
    config_parser = ConfigParser()
    files_read = config_parser.read(
        os.path.join(FileUtils.GetSrcRoot(), dir, '%s.cfg' % service))
    if not files_read:
        raise ValueError('No config file found for %s in %s' % (service, dir))
    username = config_parser.get(section or service, 'username')
    password = config_parser.get(section or service, 'password')
    return username, password
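A hedged usage sketch; the 'github' service name and 'deploy' section below are made up, and any real call must match an actual .cfg file and section under the credentials dir:

    # Reads static_data/push/auto/credentials/github.cfg, section [github].
    username, password = get_username_and_password('github')
    # Same file, but credentials from a different section, [deploy].
    username, password = get_username_and_password('github', section='deploy')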
Example #4
def __get_cookie_salt():
    CONFIG_FILE = 'static_data/push/auto/credentials/web.cfg'
    config_parser = ConfigParser()
    files_read = config_parser.read(
        os.path.join(FileUtils.GetSrcRoot(), CONFIG_FILE))
    if not files_read:
        raise ValueError('config file not found: %s' % CONFIG_FILE)
    return config_parser.get('web', 'user_cookie_salt')
Example #5
  def make_package(cls, rule):
    """@override"""
    name = rule['name']
    if 'rule' not in rule or not rule['rule']:
      err = 'no rule field for %s' % name
      TermColor.Error(err)
      raise Error(err)
    if 'ctrl' not in rule or not rule['ctrl']:
      err = 'no yaml ctrl field for %s' % name
      TermColor.Error(err)
      raise Error(err)
    subprocess.check_call('flash build %s' % rule['rule'], shell=True)
    # Clean up the old package and create the working directory.
    workingdir = PkgUtils.create_working_dir(name)
    # Collect the files to ship: maps source path -> file name in the package.
    files = {}
    # The binary path.
    files[os.path.join(FileUtils.GetBinDir(), rule['rule'])] = \
      os.path.basename(rule['rule'])
    # The loop script.
    files[os.path.join(FileUtils.GetSrcRoot(),
                       Flags.ARGS.loop_script_path)] = 'loop'
    # The control script.
    files[os.path.join(FileUtils.GetSrcRoot(),
                       Flags.ARGS.pkg_bin_ctrl_path)] = 'control'
    # The yaml file.
    files[os.path.join(FileUtils.GetSrcRoot(),
                       rule['ctrl'])] = 'control.yaml'
    # Copy the files into the working directory.
    for src, dest in files.items():
      shutil.copy2(src, os.path.join(workingdir, dest))
    # Copy the shared files.
    CopyShared.copy(workingdir)
    # Connect to the package repository.
    packages = Packages(host=Flags.ARGS.pkg_host,
                        user=Flags.ARGS.pkg_user,
                        root=Flags.ARGS.pkg_repo)
    # Import the package.
    if Flags.ARGS.pkg_version_prefix:
      return name, packages.f_import(workingdir, name,
                                     Flags.ARGS.pkg_version_prefix)
    else:
      return name, packages.f_import(workingdir, name)
Example #6
    def __GetGenModuleDir(cls, src):
        """Returns the python module name for the src.

    Args:
      src: string: The interface file for which the output is generated.

    Return:
      string: The directory name where the generated module will be outputted.
    """
        return os.path.dirname(
            src.replace(FileUtils.GetSrcRoot(), cls.GetSwigOutDir()))
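A worked illustration of the path rewrite; all concrete paths here are assumptions:

    # FileUtils.GetSrcRoot() -> '/src/'
    # cls.GetSwigOutDir()    -> '/src/.gen/swig/'
    # src = '/src/pylib/math/ops.i'
    # __GetGenModuleDir(src) -> '/src/.gen/swig/pylib/math'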
Example #7
    def RuleRelativeName(cls, rule):
        """Returns the relative name for the rule w.r.t the src dir.
    Args:
      rule: string: The rule for which the relative name is required.

    Return:
      string: The relative name of the rule.
    """
        if not rule: return None
        return os.path.relpath(cls.RuleNormalizedName(rule),
                               FileUtils.GetSrcRoot())
Example #8
  def __GetOutFileName(cls, src, out_suffix):
    """Returns the output file name for the src.

    Args:
      src: string: The src file for which the output is generated.
      out_suffix: string: The suffix used to replace .proto in the src.

    Return:
      string: The output file name.
    """
    return (src.replace(FileUtils.GetSrcRoot(), cls.GetProtoBufOutDir())
            .replace('.proto', out_suffix))
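A worked illustration; the directory values are assumptions:

    # FileUtils.GetSrcRoot()  -> '/src/'
    # cls.GetProtoBufOutDir() -> '/src/.gen/proto/'
    # __GetOutFileName('/src/pylib/msg.proto', '.pb.cc')
    #                         -> '/src/.gen/proto/pylib/msg.pb.cc'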
Example #9
    def RuleNormalizedName(cls, rule):
        """Returns the normalized name for the rule.
    Args:
      rule: string: The rule to normalize.

    Return:
      string: The normalized name of the rule.
    """
        if rule.startswith(FileUtils.GetSrcRoot()):
            return os.path.normpath(rule)

        rules_file = cls.GetRulesFileForRule(rule)
        if rules_file:
            return os.path.join(os.path.dirname(rules_file),
                                os.path.basename(rule))

        # This does not have a rules file. Generally this happens for src files.
        abs_path = FileUtils.GetAbsPathForFile(rule)
        if abs_path: return abs_path

        return rule
Example #10
    def GenMainMakeFile(self):
        """Generates the main make file."""
        with open(self.GetMakeFileName(), 'w') as f:
            f.write('SRCROOT = %s\n' % FileUtils.GetSrcRoot())
            f.write('BINDIR = %s\n' % FileUtils.GetBinDir())
            f.write('AUTO_MAKEFILE_CC = %s\n' % self.GetAutoMakeFileName('cc'))
            f.write('AUTO_MAKEFILE_JS = %s\n' % self.GetAutoMakeFileName('js'))
            f.write('AUTO_MAKEFILE_NG = %s\n' % self.GetAutoMakeFileName('ng'))
            f.write('AUTO_MAKEFILE_NGE2E = %s\n' %
                    self.GetAutoMakeFileName('nge2e'))
            f.write('AUTO_MAKEFILE_PKG = %s\n' % self.GetAutoMakeFileName('pkg'))
            f.write('AUTO_MAKEFILE_PKG_BIN = %s\n' %
                    self.GetAutoMakeFileName('pkg_bin'))
            f.write('AUTO_MAKEFILE_PKG_SYS = %s\n' %
                    self.GetAutoMakeFileName('pkg_sys'))
            f.write('AUTO_MAKEFILE_PY = %s\n' % self.GetAutoMakeFileName('py'))
            f.write('AUTO_MAKEFILE_SWIG = %s\n' %
                    self.GetAutoMakeFileName('swig'))
            f.write('PROTOBUFDIR = %s\n' % ProtoRules.GetProtoBufBaseDir())
            f.write('PROTOBUFOUTDIR = %s\n' % ProtoRules.GetProtoBufOutDir())
            f.write('SWIGBUFOUTDIR = %s\n' % SwigRules.GetSwigOutDir())
            f.write('\n')

            makefile_template = os.path.join(
                os.path.dirname(
                    os.path.abspath(inspect.getfile(inspect.currentframe()))),
                self.GetMakeFileTemplate())

            f.write(
                '###############################################################\n'
            )
            f.write('# Template from: %s\n' % makefile_template)
            f.write(
                '###############################################################\n'
            )
            with open(makefile_template) as template:
                f.write(template.read())
            f.write(
                '\n###############################################################\n'
            )
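With hypothetical paths (and made-up auto-makefile names), the head of the generated file would read roughly:

    SRCROOT = /src
    BINDIR = /src/bin
    AUTO_MAKEFILE_CC = /src/.gen/makefiles/auto_cc.mk
    AUTO_MAKEFILE_JS = /src/.gen/makefiles/auto_js.mk
    ...
    SWIGBUFOUTDIR = /src/.gen/swig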
Example #11
    def GetAllENVVars(self):
        """Returns all the relevant env vars for the given node.

    Returns:
      dict {string, string}: The dictionary of IDs to values.
    """
        # Add all other relevant dirs.
        res = {}
        res['PIPELINE_ID'] = self.pipeline_id()
        res['PIPELINE_DATE'] = self.pipeline_date()
        res['PIPELINE_SRC_ROOT'] = FileUtils.GetSrcRoot()
        res['PIPELINE_BASE_DIR'] = self.pipeline_base_dir()
        res['PIPELINE_UTILS_DIR'] = self.pipeline_utils_dir()

        if self.pipeline_bin_dir():
            res['PIPELINE_BIN_DIR'] = self.pipeline_bin_dir()
        if self.pipeline_output_dir():
            res['PIPELINE_OUT_ROOT'] = self.pipeline_output_dir()
        if self.pipeline_log_dir():
            res['PIPELINE_LOG_DIR'] = self.pipeline_log_dir()
        if self.pipeline_publish_dir():
            res['PIPELINE_PUBLISH_DIR'] = self.pipeline_publish_dir()
        return res
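A hedged usage sketch; node stands for an instance of the enclosing class, and the command is made up:

    import os
    import subprocess

    # Run a pipeline step with the node's env vars layered over the current env.
    env = dict(os.environ)
    env.update(node.GetAllENVVars())
    subprocess.check_call(['./run_step.sh'], env=env)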
Example #12
import json
import os

from pylib.file.file_utils import FileUtils

from web.server.environment import IS_PRODUCTION

DEFAULT_INSTANCE_CONFIG_PATH = os.path.join(
    os.getenv('ZENYSIS_SRC_ROOT', FileUtils.GetSrcRoot()),
    'instance_config.json')


# Verbose name is preferred in this case.
# pylint:disable=C0103
def load_instance_configuration_from_file(
        instance_config_path=DEFAULT_INSTANCE_CONFIG_PATH):
    # Instance config is global and loaded only once. It lives at the root of
    # the source tree.
    if instance_config_path and os.path.isfile(instance_config_path):
        with open(instance_config_path, 'r') as f:
            return json.load(f)

    error_msg = 'Instance config file does not exist: %s' % DEFAULT_INSTANCE_CONFIG_PATH
    if IS_PRODUCTION:
        raise IOError(error_msg)

    print(error_msg)
    return {}
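A hedged usage sketch; the 'deployment_name' key is an assumption about the instance_config.json schema, not something these examples define:

    instance_config = load_instance_configuration_from_file()
    deployment = instance_config.get('deployment_name', 'unknown')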
Example #13
  def GetProtoBufBaseDir(cls):
    """Returns the protobuf base dir."""
    return os.path.join(FileUtils.GetSrcRoot(),
                        'third_party/protocol_buffer/latest')
Example #14
  def GetPyInstaller(cls):
    """Returns the path to the pyinstaller script."""
    return os.path.join(FileUtils.GetSrcRoot(),
                        'third_party/pyinstaller/latest/pyinstaller.py')
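A hedged usage sketch; PyRules is a hypothetical name for the enclosing class and pylib/tool/main.py is a made-up entry point:

    import subprocess

    # pyinstaller.py is a script, so drive it through the interpreter.
    subprocess.check_call(
        ['python', PyRules.GetPyInstaller(), 'pylib/tool/main.py'])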
Example #15
  def WorkHorse(cls, tasks):
    """Runs the workhorse for the command.

    Args:
      tasks: OrderedDict {int, set(string)}: Dict from priority to set of tasks to execute at the
          priority. Note: the dict is ordered by priority.

    Return:
      (list, list): Returns a tuple of lists in the form
          (successful_tasks, failed_tasks) specifying tasks that succeeded and
          ones that failed.
    """
    start = time.time()

    # All our binaries assume they will be run from the source root.
    os.chdir(FileUtils.GetSrcRoot())
    cls._CreateDirsForTasks(tasks)

    successful_run = []; failed_run = []
    aborted_task = None

    # NOTE(stephen): Storing task dir status and task out dir status separately since
    # pipelines do not always have an out dir defined.
    dirs_status = {}
    out_dirs_status = {}
    for set_tasks in tasks.values():
      if aborted_task:
        failed_run += set_tasks
        continue

      tasks_to_run = []
      for task in set_tasks:
        task_options = cls.__GetTaskOptions(task)
        # Check if this task requires all previous tasks in the same directory to be
        # successful.
        if task_options[Runner.TASK_OPTIONS['REQUIRE_DIR_SUCCESS']]:
          task_dir = PipelineUtils.TaskDirName(task)
          cur_dir_status = dirs_status.get(task_dir)
          # If any previous tasks have been run in this directory, check to ensure all
          # of them were successful.
          if cur_dir_status and cur_dir_status != Runner.EXITCODE['SUCCESS']:
            failed_run += [task]
            task_display_name = PipelineUtils.TaskDisplayName(task)
            TermColor.Info('Skipped   %s' % task_display_name)
            TermColor.Failure(
              'Skipped Task: %s due to earlier failures in task dir' % task_display_name
            )
            continue

        tasks_to_run.append(task)

      # It is possible for all steps at this priority level to be skipped due to the
      # task options selected.
      if set_tasks and not tasks_to_run:
        continue

      # Run all the tasks at the same priority in parallel.
      args = zip(itertools.repeat(cls), itertools.repeat('_RunSingeTask'),
                 tasks_to_run)
      task_res = ExecUtils.ExecuteParallel(args, Flags.ARGS.pool_size)
      # Serial fallback, kept for debugging:
      # task_res = []
      # for task in tasks_to_run: task_res += [cls._RunSingeTask(task)]
      if not task_res:
        TermColor.Error('Could not process: %s' % tasks_to_run)
        failed_run += tasks_to_run
        continue
      for (res, task) in task_res:
        if res == Runner.EXITCODE['SUCCESS']:
          successful_run += [task]
        elif res == Runner.EXITCODE['FAILURE']:
          failed_run += [task]
        elif res == Runner.EXITCODE['ALLOW_FAIL']:
          failed_run += [task]
        elif res == Runner.EXITCODE['ABORT_FAIL']:
          failed_run += [task]
          aborted_task = task
        else:
          TermColor.Fatal('Invalid return code %d for %s' % (res, task))

        # Update the current status of all tasks in the same directory.
        task_dir = PipelineUtils.TaskDirName(task)
        dirs_status[task_dir] = max(
          dirs_status.get(task_dir, Runner.EXITCODE['_LOWEST']), res,
        )

        # Update the out dir status.
        out_dir = PipelineUtils.GetOutDirForTask(task)
        if out_dir:
          out_dirs_status[out_dir] = max(
            out_dirs_status.get(out_dir, Runner.EXITCODE['_LOWEST']), res,
          )

    # Write the status files to the dirs.
    cls._WriteOutDirsStatus(out_dirs_status)

    # Send the final status mail.
    time_taken = time.time() - start
    cls._SendFinalStatusMail(successful_run, failed_run, aborted_task, time_taken)

    if aborted_task:
      TermColor.Failure('Aborted by task: %s' % aborted_task)

    return (successful_run, failed_run)
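The max(...) status updates above implicitly assume that Runner's exit codes are numerically ordered so any failure outranks success; that ordering is what lets a single failure in a task dir permanently mask earlier successes for the REQUIRE_DIR_SUCCESS check. A minimal sketch of an ordering that satisfies the assumption (the real Runner.EXITCODE values are not shown in these examples):

    # Assumed shape of Runner.EXITCODE; the actual values are not part of
    # these examples. What matters is _LOWEST < SUCCESS < every failure
    # code, so max() makes a directory's status sticky once a task fails.
    EXITCODE = {
        '_LOWEST': 0,
        'SUCCESS': 1,
        'ALLOW_FAIL': 2,
        'FAILURE': 3,
        'ABORT_FAIL': 4,
    }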
Example #16
    def _RunSingeRule(cls, rule):
        """Runs a Single Rule.

    Args:
      rule: string: The rule to run.

    Return:
      (int, string): Returns a tuple of the result status and the rule.
          The status is 1 for success, 0 for ignore, and -1 for failure.
    """
        TermColor.Info('Generating dependencies for %s' %
                       Utils.RuleDisplayName(rule))
        start = time.time()

        gr = digraph.digraph()
        gr.add_node(rule)

        nodes = [rule]
        while nodes:
            node = nodes.pop(0)
            # The rule has already been processed. We assume that if the node
            # has outgoing edges, then we already processed it.
            if gr.node_order(node) > 0: continue

            # Add the dependencies of the rule to the graph.
            if not Rules.LoadRule(node) or not Rules.GetRule(node):
                TermColor.Warning(
                    'Could not load dependency %s for target %s ' %
                    (Utils.RuleDisplayName(node), Utils.RuleDisplayName(rule)))
                return (-1, rule)

            node_data = Rules.GetRule(node)
            for dep in node_data.get('dep', set()):
                nodes += [dep]
                # Add the dep to the graph.
                if not gr.has_node(dep): gr.add_node(dep)
                if not gr.has_edge([node, dep]): gr.add_edge([node, dep])

        # Now that we have the graph, render it.
        try:
            dt = dot.write(gr)
            dt = dt.replace('"%s";' % rule, ('"%s" [style=filled];' % rule), 1)
            dt = dt.replace(FileUtils.GetSrcRoot(), '')
            depgraph_file_name = cls.__GetDepGraphFileNameForRule(rule)
            if Flags.ARGS.mode == 'gv':
                gvv = gv.readstring(dt)
                gv.layout(gvv, 'dot')
                gv.render(gvv, 'pdf', depgraph_file_name)
                if not Flags.ARGS.quiet:
                    subprocess.call('gv %s &' % depgraph_file_name, shell=True)
            elif Flags.ARGS.mode == 'text':
                FileUtils.CreateFileWithData(depgraph_file_name, dt)

            TermColor.Info(
                'Generated dependency graph (%d nodes) for %s at %s \tTook %.2fs'
                % (len(gr.nodes()), Utils.RuleDisplayName(rule),
                   depgraph_file_name, (time.time() - start)))
            return (1, rule)
        except KeyboardInterrupt:
            # KeyboardInterrupt does not inherit from Exception in python 3,
            # so re-raise it explicitly instead of swallowing it below.
            raise
        except Exception as e:
            TermColor.Error('Failed to render %s. Error: %s' %
                            (Utils.RuleDisplayName(rule), e))

        return (-1, rule)