Example #1
0
 def refactoring_error(node):
   # Abort: while refactoring, a command may not pick up an input that the
   # previous build graph did not already know about.
   parts = [
     util.ConsoleRed, 'New input introduced: ',
     util.ConsoleBlue, node.path + '\n',
     util.ConsoleRed, 'Command: ',
     util.ConsoleBlue, cmd_entry.format(),
     util.ConsoleNormal,
   ]
   util.con_err(*parts)
   raise Exception('Refactoring error: new input introduced')
Example #2
0
  def __init__(self, sourcePath, buildPath, originalCwd, options, args):
    """Initialize generator state and resolve the Visual Studio version.

    Raises an Exception if options.vs_version is neither a supported
    internal version number nor a known release year.
    """
    super(Generator, self).__init__(sourcePath, buildPath, originalCwd, options, args)
    self.compiler = None
    self.vs_version = None
    self.files_ = {}
    self.projects_ = set()

    if self.options.vs_version in SupportedVersions:
      self.vs_version = int(self.options.vs_version)
    else:
      if self.options.vs_version not in YearMap:
        util.con_err(
          util.ConsoleRed,
          'Unsupported Visual Studio version: {0}'.format(self.options.vs_version),
          util.ConsoleNormal
        )
        raise Exception('Unsupported Visual Studio version: {0}'.format(self.options.vs_version))
      # Translate a release year (e.g. "2015") into an internal version number.
      self.vs_version = YearMap[self.options.vs_version]

    self.cacheFile = os.path.join(self.buildPath, '.cache')
    try:
      with open(self.cacheFile, 'rb') as fp:
        self.vars_ = util.pickle.load(fp)
    except Exception:
      # Best-effort cache load: a missing or corrupt cache just means we start
      # fresh. Narrowed from a bare except so KeyboardInterrupt/SystemExit
      # still propagate.
      self.vars_ = {}

    if 'uuids' not in self.vars_:
      self.vars_['uuids'] = {}
Example #3
0
    def __init__(self, cm):
        """Initialize VS generator state from the context manager |cm|.

        Raises an Exception if cm.options.vs_version is neither a supported
        internal version number nor a known release year.
        """
        super(Generator, self).__init__(cm)
        self.compiler = None
        self.vs_version = None
        self.files_ = {}
        self.projects_ = set()

        if self.cm.options.vs_version in SupportedVersions:
            self.vs_version = int(self.cm.options.vs_version)
        else:
            if self.cm.options.vs_version not in YearMap:
                util.con_err(
                    util.ConsoleRed,
                    'Unsupported Visual Studio version: {0}'.format(
                        self.cm.options.vs_version), util.ConsoleNormal)
                raise Exception(
                    'Unsupported Visual Studio version: {0}'.format(
                        self.cm.options.vs_version))
            # Translate a release year (e.g. "2015") into a version number.
            self.vs_version = YearMap[self.cm.options.vs_version]

        self.cacheFile = os.path.join(self.cm.buildPath, '.cache')
        try:
            with open(self.cacheFile, 'rb') as fp:
                self.vars_ = util.pickle.load(fp)
        except Exception:
            # Best-effort cache load: missing/corrupt cache means start fresh.
            # Narrowed from a bare except so KeyboardInterrupt still
            # propagates.
            self.vars_ = {}

        if 'uuids' not in self.vars_:
            self.vars_['uuids'] = {}

        self.target_platform = 'windows'
Example #4
0
    def drop_folder(self, entry):
        """Remove |entry|'s folder from disk and, for Mkdir nodes, its record.

        Refuses to proceed if any node still lists this folder as its parent.
        """
        folder_types = (nodetypes.Mkdir, nodetypes.Output,
                        nodetypes.SharedOutput)
        assert entry.type in folder_types
        assert not os.path.isabs(entry.path)

        if os.path.exists(entry.path):
            util.con_out(util.ConsoleHeader, 'Removing old folder: ',
                         util.ConsoleBlue, '{0}'.format(entry.path),
                         util.ConsoleNormal)

        try:
            os.rmdir(entry.path)
        except OSError as error:
            # An already-missing folder is fine; anything else is fatal.
            if error.errno != errno.ENOENT:
                util.con_err(util.ConsoleRed, 'Could not remove folder: ',
                             util.ConsoleBlue, '{0}'.format(entry.path),
                             util.ConsoleNormal, '\n', util.ConsoleRed,
                             '{0}'.format(error), util.ConsoleNormal)
                raise

        query = "select count(*) from nodes where folder = ?"
        amount = self.cn.execute(query, (entry.id, )).fetchone()[0]
        if amount > 0:
            util.con_err(
                util.ConsoleRed, 'Folder id ', util.ConsoleBlue,
                '{0} '.format(entry.id), util.ConsoleRed,
                'is about to be deleted, but is still in use as a folder!',
                util.ConsoleNormal)
            raise Exception('folder still in use!')

        # If the node transitioned to an entry, don't delete its node.
        if entry.type == nodetypes.Mkdir:
            self.drop_entry(entry)
Example #5
0
    def addShellCommand(self,
                        context,
                        inputs,
                        argv,
                        outputs,
                        folder=-1,
                        dep_type=None,
                        weak_inputs=None,
                        shared_outputs=None):
        """Add a shell command node to the build graph.

        |dep_type| of None adds a plain command; 'gcc', 'msvc', or 'sun'
        adds a compiler command whose dependencies are discovered from the
        compiler's dependency spew. Raises on any other dep_type.
        """
        # -1 is a sentinel meaning "use the context's local folder". Compare
        # with ==, not "is": identity comparison against an int literal is
        # implementation-defined (and a SyntaxWarning on modern CPython).
        if folder == -1:
            folder = context.localFolder

        if dep_type is None:
            node_type = nodetypes.Command
            data = argv
        else:
            node_type = nodetypes.Cxx
            if dep_type not in ['gcc', 'msvc', 'sun']:
                util.con_err(util.ConsoleRed, 'Invalid dependency spew type: ',
                             util.ConsoleBlue, dep_type, util.ConsoleNormal)
                raise Exception('Invalid dependency spew type')
            data = {
                'type': dep_type,
                'argv': argv,
            }

        return self.addCommand(context=context,
                               node_type=node_type,
                               folder=folder,
                               data=data,
                               inputs=inputs,
                               outputs=outputs,
                               weak_inputs=weak_inputs,
                               shared_outputs=shared_outputs)
Example #6
0
    def update(self):
        """Apply pending graph changes and run the build's task list.

        Returns True on success (including the trivial no-leafs case),
        False if the task master reports failure.
        """
        # Materialize newly-created nodes first; only Mkdir is expected here.
        for entry in self.graph.create:
            if entry.type == nodetypes.Mkdir:
                util.con_out(util.ConsoleBlue, '[create] ', util.ConsoleGreen,
                             entry.format(), util.ConsoleNormal)
                # The path might already exist because we mkdir -p and don't bother
                # ordering.
                if not os.path.exists(entry.path):
                    os.makedirs(entry.path)
            else:
                raise Exception('Unknown entry type: {0}'.format(entry.type))
        # Nothing runnable: the build is vacuously successful.
        if not len(self.leafs):
            return True

        tm = TaskMasterParent(self.cx, self, self.leafs, self.max_parallel)
        success = tm.run()
        # Persist whatever completed, even on failure.
        self.commit()

        # Sanity check: a "successful" run should have executed every command.
        if success and len(self.commands) != self.num_completed_tasks:
            util.con_err(
                util.ConsoleRed,
                'Build marked as completed, but some commands were not executed?!\n',
                'Commands:', util.ConsoleNormal)
            for task in self.commands:
                # Completed commands are nulled out; only leftovers remain.
                if not task:
                    continue
                util.con_err(util.ConsoleBlue, ' -> ', util.ConsoleRed,
                             '{0}'.format(task.entry.format()),
                             util.ConsoleNormal)

        return success
Example #7
0
 def ensureUnique(self, path):
     """Raise if |path| was already registered with this generator."""
     existing = self.files_.get(path)
     if existing is None:
         return
     message = "Path {0} already exists as: {1}".format(path, existing.kind)
     util.con_err(util.ConsoleRed, message, util.ConsoleNormal)
     raise Exception(message)
Example #8
0
 def refactoring_error(node):
   # Refactoring forbids introducing inputs the old graph never saw;
   # report the offending node and command, then abort.
   message = 'Refactoring error: new input introduced'
   util.con_err(util.ConsoleRed, 'New input introduced: ',
                util.ConsoleBlue, node.path + '\n',
                util.ConsoleRed, 'Command: ',
                util.ConsoleBlue, cmd_entry.format(),
                util.ConsoleNormal)
   raise Exception(message)
Example #9
0
  def parseInput(self, context, source):
    """Resolve |source| (a path string or a node) into a file-like node.

    Strings are normalized relative to the context's source path and either
    looked up or added as new source entries. Raises if the node is a folder
    or otherwise not usable as a file.
    """
    if util.IsString(source):
      if not os.path.isabs(source):
        source = os.path.join(context.currentSourcePath, source)
      source = os.path.normpath(source)

      entry = self.db.query_path(source)
      if not entry:
        return self.db.add_source(source)

      # Otherwise, an entry already exists; validate the node below.
      source = entry

    if source.type == nodetypes.Source or source.type == nodetypes.Output:
      return source

    if source.type == nodetypes.Mkdir:
      # A folder already known to be a bad output falls through to the
      # generic error below instead.
      if source not in self.bad_outputs_:
        util.con_err(util.ConsoleRed, 'Tried to use folder path ',
                     util.ConsoleBlue, source.path,
                     util.ConsoleRed, ' as a file path.',
                     util.ConsoleNormal)
        raise Exception('Tried to use folder path as a file path')

    util.con_err(util.ConsoleRed, 'Tried to use incompatible node "',
                 util.ConsoleBlue, source.format(),
                 util.ConsoleRed, '" as a file path.',
                 util.ConsoleNormal)
    raise Exception('Tried to use non-file node as a file path')
Example #10
0
    def updateGraph(self, task_id, updates, message):
        """Record completion of |task_id| and mark affected entries clean.

        |updates| is an iterable of (path, stamp) pairs to unmark as dirty.
        Returns False on a duplicate completion or a failed dependency
        merge; True otherwise.
        """
        # Completed commands are nulled out; a second update is a protocol
        # error from the worker, not a crash.
        if not self.commands[task_id]:
            util.con_err(
                util.ConsoleRed,
                'Received update for task_id {0} that was already completed!\n'
                .format(task_id), util.ConsoleBlue, 'Message details:\n',
                util.ConsoleNormal, '{0}'.format(message))
            return False

        node = self.commands[task_id]
        self.commands[task_id] = None

        if 'deps' in message:
            if not self.mergeDependencies(node, message['deps']):
                return False

        # Refresh stamps on everything feeding this node before clearing it.
        for incoming in self.cx.db.query_strong_inputs(node.entry):
            self.lazyUpdateEntry(incoming)
        for incoming in self.cx.db.query_dynamic_inputs(node.entry):
            self.lazyUpdateEntry(incoming)

        for path, stamp in updates:
            entry = self.cx.db.query_path(path)
            self.cx.db.unmark_dirty(entry, stamp)
        self.cx.db.unmark_dirty(node.entry)

        self.num_completed_tasks += 1
        return True
Example #11
0
  def updateGraph(self, task_id, updates, message):
    """Record completion of |task_id| and mark affected entries clean.

    |updates| is an iterable of (path, stamp) pairs to unmark as dirty.
    Entries flagged ALWAYS_DIRTY are never unmarked. Returns False on a
    duplicate completion or a failed dependency merge; True otherwise.
    """
    # Completed commands are nulled out; a second update is a protocol
    # error from the worker, not a crash.
    if not self.commands[task_id]:
      util.con_err(
        util.ConsoleRed,
        'Received update for task_id {0} that was already completed!\n'.format(task_id),
        util.ConsoleBlue,
        'Message details:\n',
        util.ConsoleNormal,
        '{0}'.format(message)
      )
      return False

    node = self.commands[task_id]
    self.commands[task_id] = None

    if 'deps' in message:
      if not self.mergeDependencies(node, message['deps']):
        return False

    # ALWAYS_DIRTY nodes stay dirty; skip the unmark bookkeeping entirely.
    if node.entry.dirty != nodetypes.ALWAYS_DIRTY:
      # Refresh stamps on everything feeding this node before clearing it.
      for incoming in self.cx.db.query_strong_inputs(node.entry):
        self.lazyUpdateEntry(incoming)
      for incoming in self.cx.db.query_dynamic_inputs(node.entry):
        self.lazyUpdateEntry(incoming)

      for path, stamp in updates:
        entry = self.cx.db.query_path(path)
        self.cx.db.unmark_dirty(entry, stamp)
      self.cx.db.unmark_dirty(node.entry)

    self.num_completed_tasks += 1
    return True
Example #12
0
    def ensureValidDependency(self, source, target):
        """Check that a discovered dependency |source| -> |target| is ordered.

        Returns True if |source| is (or is reachable from) one of |target|'s
        declared inputs; otherwise reports the error and returns False.
        """
        # Build the set of nodes that are valid connectors for the dependency. For
        # cxx and cpa commands, the exact dependencies are determined by AMB2, so
        # we don't allow the user to attach arbitrary strong dependencies; they
        # are always weak.
        roots = set()

        inputs = self.cx.db.query_weak_inputs(target)
        if not nodetypes.HasAutoDependencies(target.type):
            # Exclude dynamic inputs since they weren't specified in the build.
            inputs |= self.cx.db.query_strong_inputs(target)

        for input in inputs:
            if input == source:
                return True

            if self.findPath(source, input):
                return True

        # There is no explicit ordering defined between these two nodes; we
        # have to abort the build.
        util.con_err(
            util.ConsoleRed,
            'Encountered an error while computing new dependencies: ',
            'A new dependency was discovered that exists as an output from another build step. ',
            'However, there is no explicit dependency between that path and this command. ',
            'The build must abort since the ordering of these two steps is undefined. ',
            util.ConsoleNormal)
        util.con_err(util.ConsoleRed, 'Dependency: ', util.ConsoleBlue,
                     source.path, util.ConsoleNormal)
        return False
Example #13
0
    def parseInput(self, context, source):
        """Resolve |source| (a path string or a node) into a file-like node.

        Strings are normalized relative to the context's source path and
        either looked up or added as new source entries. Raises if the node
        is a folder or otherwise not usable as a file.
        """
        if util.IsString(source):
            if not os.path.isabs(source):
                source = os.path.join(context.currentSourcePath, source)
            source = os.path.normpath(source)

            entry = self.db.query_path(source)
            if not entry:
                # First sighting of this path: register it as a source file.
                return self.db.add_source(source)
            # An entry already exists; validate the node below.
            source = entry

        if source.type in (nodetypes.Source, nodetypes.Output):
            return source

        # Folders not already flagged as bad outputs get a specific error;
        # everything else falls through to the generic one.
        if source.type == nodetypes.Mkdir and source not in self.bad_outputs_:
            util.con_err(util.ConsoleRed, 'Tried to use folder path ',
                         util.ConsoleBlue, source.path, util.ConsoleRed,
                         ' as a file path.', util.ConsoleNormal)
            raise Exception('Tried to use folder path as a file path')

        util.con_err(util.ConsoleRed,
                     'Tried to use incompatible node "', util.ConsoleBlue,
                     source.format(), util.ConsoleRed, '" as a file path.',
                     util.ConsoleNormal)
        raise Exception('Tried to use non-file node as a file path')
Example #14
0
    def __init__(self, sourcePath, buildPath, originalCwd, options, args):
        """Initialize generator state and resolve the Visual Studio version.

        Raises an Exception if options.vs_version is neither a supported
        internal version number nor a known release year.
        """
        super(Generator, self).__init__(sourcePath, buildPath, originalCwd, options, args)
        self.compiler = None
        self.vs_version = None
        self.files_ = {}
        self.projects_ = set()

        if self.options.vs_version in SupportedVersions:
            self.vs_version = int(self.options.vs_version)
        else:
            if self.options.vs_version not in YearMap:
                util.con_err(
                    util.ConsoleRed,
                    "Unsupported Visual Studio version: {0}".format(self.options.vs_version),
                    util.ConsoleNormal,
                )
                raise Exception("Unsupported Visual Studio version: {0}".format(self.options.vs_version))
            # Translate a release year (e.g. "2015") into a version number.
            self.vs_version = YearMap[self.options.vs_version]

        self.cacheFile = os.path.join(self.buildPath, ".cache")
        try:
            with open(self.cacheFile, "rb") as fp:
                self.vars_ = util.pickle.load(fp)
        except Exception:
            # Best-effort cache load: missing/corrupt cache means start fresh.
            # Narrowed from a bare except so KeyboardInterrupt still
            # propagates.
            self.vars_ = {}

        if "uuids" not in self.vars_:
            self.vars_["uuids"] = {}

        self.target_platform = "windows"
Example #15
0
  def drop_folder(self, entry):
    """Remove |entry|'s folder from disk and, for Mkdir nodes, its record.

    Refuses to proceed if any node still lists this folder as its parent.
    """
    assert entry.type in [nodetypes.Mkdir, nodetypes.Output, nodetypes.SharedOutput]
    assert not os.path.isabs(entry.path)

    if os.path.exists(entry.path):
      util.con_out(
        util.ConsoleHeader, 'Removing old folder: ',
        util.ConsoleBlue, '{0}'.format(entry.path),
        util.ConsoleNormal)

    try:
      os.rmdir(entry.path)
    except OSError as exn:
      # An already-missing folder is fine; anything else is fatal.
      if exn.errno != errno.ENOENT:
        util.con_err(util.ConsoleRed, 'Could not remove folder: ',
                     util.ConsoleBlue, '{0}'.format(entry.path),
                     util.ConsoleNormal, '\n',
                     util.ConsoleRed, '{0}'.format(exn),
                     util.ConsoleNormal)
        raise

    # Refuse to delete a folder that other nodes still live inside.
    cursor = self.cn.execute("select count(*) from nodes where folder = ?", (entry.id,))
    amount = cursor.fetchone()[0]
    if amount > 0:
      util.con_err(util.ConsoleRed, 'Folder id ',
                   util.ConsoleBlue, '{0} '.format(entry.id),
                   util.ConsoleRed, 'is about to be deleted, but is still in use as a folder!',
                   util.ConsoleNormal)
      raise Exception('folder still in use!')

    # If the node transitioned to an entry, don't delete its node.
    if entry.type == nodetypes.Mkdir:
      self.drop_entry(entry)
Example #16
0
    def reconfigure(self):
        """Re-run build scripts if any script changed since its stamp.

        Returns True if no reconfigure was needed or it succeeded; False if
        reparsing the build scripts failed.
        """
        # See if we need to reconfigure.
        files = []
        reconfigure_needed = False
        self.db.query_scripts(lambda row, path, stamp: files.append(
            (path, stamp)))
        for path, stamp in files:
            # A deleted script also forces a reconfigure.
            if not os.path.exists(path) or os.path.getmtime(path) > stamp:
                reconfigure_needed = True
                break

        if not reconfigure_needed:
            return True

        util.con_out(util.ConsoleHeader, 'Reparsing build scripts.',
                     util.ConsoleNormal)

        # The database should be upgraded here, so we should always have an
        # API version set.
        api_version = Version(self.db.query_var('api_version'))
        assert api_version is not None

        # NOTE(review): if api_version were somehow below 2.0, ContextManager
        # would be unbound below and raise a NameError — confirm the database
        # upgrade guarantees >= 2.0.
        if api_version >= '2.2':
            from ambuild2.frontend.v2_2.context_manager import ContextManager
        elif api_version >= '2.1':
            from ambuild2.frontend.v2_1.context_manager import ContextManager
        elif api_version >= '2.0':
            from ambuild2.frontend.v2_0.context_manager import ContextManager

        # Backwards compatibility: for an automatic reconfigure on an older build,
        # just assume the source path is the cwd. If the AMBuildScript suddenly
        # has decided to depend on originalCwd, then the user may have to manually
        # run configure.py again, until we remove configure.py entirely.
        if 'originalCwd' in self.vars:
            originalCwd = self.vars['originalCwd']
        else:
            originalCwd = self.vars['sourcePath']

        cm = ContextManager(sourcePath=self.vars['sourcePath'],
                            buildPath=self.vars['buildPath'],
                            originalCwd=originalCwd,
                            options=self.vars['options'],
                            args=self.vars['args'])
        cm.db = self.db
        cm.refactoring = self.options.refactor
        try:
            cm.generate('ambuild2')
        except Exception:
            # Narrowed from a bare except so KeyboardInterrupt still
            # propagates; script errors are reported and the build fails.
            traceback.print_exc()
            util.con_err(util.ConsoleRed, 'Failed to reparse build scripts.',
                         util.ConsoleNormal)
            return False

        # We flush the node cache after this, since database.py expects to get
        # never-before-seen items at the start. We could change this and make
        # nodes individually import, which might be cleaner.
        self.db.flush_caches()

        return True
Example #17
0
 def ensureUnique(self, path):
   """Fail loudly if |path| has already been generated."""
   if path not in self.files_:
     return
   entry = self.files_[path]
   message = 'Path {0} already exists as: {1}'.format(path, entry.kind)
   util.con_err(util.ConsoleRed, message, util.ConsoleNormal)
   raise Exception(message)
Example #18
0
def ResolveFolder(parent, folder):
    """Resolve |folder| against |parent| (a node with .path, or None/falsy).

    Returns (parent_path, normalized_path). Raises if the normalized path
    escapes the build folder.
    """
    parent_path = ''
    if parent:
        parent_path = parent.path
    path = os.path.normpath(os.path.join(parent_path, folder))

    # The path escapes the build root only if it is ".." itself or begins
    # with "../". A plain startswith('..') would also wrongly reject a
    # top-level folder whose name merely starts with two dots (e.g.
    # "..cache").
    if path == '..' or path.startswith('..' + os.sep):
        util.con_err(util.ConsoleRed, 'Output path ', util.ConsoleBlue, path,
                     util.ConsoleRed, ' is outside the build folder!',
                     util.ConsoleNormal)
        raise Exception('Cannot generate folders outside the build folder')

    return parent_path, path
Example #19
0
 def addDiscoveredSource(self, path):
     """Register a newly discovered dependency path as a source node.

     Only absolute paths are acceptable; a relative path means the build
     scripts failed to declare an output. Returns the new entry, or None
     on error.
     """
     if os.path.isabs(path):
         return self.cx.db.add_source(path)

     util.con_err(
         util.ConsoleRed,
         'Encountered an error while computing new dependencies: ',
         'A new dependent file or path was discovered that has no corresponding build entry. ',
         'This probably means a build script did not explicitly mark a generated file as an output. ',
         'The build must abort since the ordering of these two steps is undefined. ',
         util.ConsoleNormal)
     util.con_err(util.ConsoleRed, 'Path: ', util.ConsoleBlue, path,
                  util.ConsoleNormal)
     return None
Example #20
0
    def build_internal(self):
        """Run one build pass, honoring the --show-* inspection options.

        Returns True on success or when an inspection option short-circuits
        the build; False on failure or interruption.
        """
        if self.options.show_graph:
            self.db.printGraph()
            return True

        if self.options.show_changed:
            # Print only changed file nodes, not commands or folders.
            for entry in damage.ComputeDamageGraph(self.db, only_changed=True):
                if entry.isFile():
                    print(entry.format())
            return True

        dmg_graph = damage.ComputeDamageGraph(self.db)
        if not dmg_graph:
            return False

        # If we get here, we have to compute damage.
        if self.options.show_damage:
            dmg_graph.printGraph()
            return True

        dmg_graph.filter_commands()
        if self.options.show_commands:
            dmg_graph.printGraph()
            return True

        builder = Builder(self, dmg_graph)
        if self.options.show_steps:
            builder.printSteps()
            return True

        status = builder.update()
        if status == TaskMaster.BUILD_FAILED:
            util.con_err(util.ConsoleHeader, 'Build failed.',
                         util.ConsoleNormal)
            return False
        if status == TaskMaster.BUILD_INTERRUPTED:
            util.con_err(util.ConsoleHeader, 'Build cancelled.',
                         util.ConsoleNormal)
            return False
        if status == TaskMaster.BUILD_NO_CHANGES:
            util.con_out(util.ConsoleHeader, 'Build succeeded, no changes.',
                         util.ConsoleNormal)
            return True

        # Any remaining status must be a clean success.
        assert status == TaskMaster.BUILD_SUCCEEDED
        util.con_out(util.ConsoleHeader, 'Build succeeded.',
                     util.ConsoleNormal)
        return True
Example #21
0
    def addDiscoveredSource(self, path):
        """Register a newly discovered dependency path as a source node.

        Rejects relative paths, and paths inside the output folder that no
        command claims as an output. Returns the entry, or None on error.
        """
        if not os.path.isabs(path):
            util.con_err(
                util.ConsoleRed, 'Encountered an error while computing new dependencies: ',
                'A new dependent file or path was discovered that has no corresponding build entry. ',
                'This probably means a build script did not explicitly mark a generated file as an output. ',
                'The build must abort since the ordering of these two steps is undefined. ',
                util.ConsoleNormal)
            util.con_err(util.ConsoleRed, 'Path: ', util.ConsoleBlue, path, util.ConsoleNormal)
            return None

        rel_to_objdir = util.RelPathIfCommon(path, self.cx.buildPath)
        if rel_to_objdir:
            entry = self.cx.db.query_path(rel_to_objdir)
            if not entry:
                # Fixed typo in the user-facing message ("followeing").
                util.con_err(
                    util.ConsoleRed, 'Encountered an error while computing new dependencies: ',
                    'A new dependent file was discovered, but it exists in the output folder, and ',
                    'no corresponding command creates this file. One of the following might have ',
                    'occurred: \n',
                    ' (1) The file was created outside of AMBuild, which is not supported.\n',
                    ' (2) The file was created by a custom AMBuild command, but was not specified as an output.\n',
                    util.ConsoleNormal)
                util.con_err(util.ConsoleRed, 'Path: ', util.ConsoleBlue, rel_to_objdir,
                             util.ConsoleNormal)
                return None
            return entry

        return self.cx.db.add_source(path)
Example #22
0
    def try_msvc_install(self, install):
        """Probe an MSVC install for the target arch and build a CLI compiler.

        Returns a compiler.CliCompiler on success, or None if the vcvars
        batch file, a required tool, or either compiler probe fails.
        """
        bat_file = install.vcvars[self.target_.arch]
        try:
            env_cmds = msvc_utils.DeduceEnv(bat_file, [])
            env = util.BuildEnv(env_cmds)
        except:
            util.con_err(util.ConsoleRed,
                         "Could not run or analyze {}".format(bat_file),
                         util.ConsoleNormal)
            return None

        necessary_tools = ['cl.exe', 'rc.exe', 'lib.exe']
        tools, _ = FindToolsInEnv(env, necessary_tools)
        for tool in necessary_tools:
            if tool not in tools:
                # Terminate with ConsoleNormal like every other con_err call,
                # so the console color state is reset.
                util.con_err(util.ConsoleRed,
                             "Could not find {} for {}".format(tool, bat_file),
                             util.ConsoleNormal)
                return None

        cc, _ = self.run_compiler(env,
                                  'CC',
                                  'cl',
                                  'msvc',
                                  abs_path=tools['cl.exe'])
        if not cc:
            return None
        cxx, _ = self.run_compiler(env,
                                   'CXX',
                                   'cl',
                                   'msvc',
                                   abs_path=tools['cl.exe'])
        if not cxx:
            return None

        # We use tuples here so the data is hashable without going through Pickle.
        tool_list = (
            ('cl', tools['cl.exe']),
            ('rc', tools['rc.exe']),
            ('lib', tools['lib.exe']),
        )
        env_data = (
            ('env_cmds', env_cmds),
            ('tools', tool_list),
        )
        return compiler.CliCompiler(cxx.vendor,
                                    cc.argv,
                                    cxx.argv,
                                    options=self.gen_options_,
                                    env_data=env_data)
Example #23
0
    def try_msvc_bat(self, bat_file, pass_arch=False):
        """Probe an MSVC vcvars batch file and build a CLI compiler.

        When |pass_arch| is True, an architecture argument is passed to the
        batch file. Returns the compiler on success, or None if the batch
        file, a required tool, or either compiler probe fails.
        """
        argv = []
        if pass_arch:
            argv.append(msvc_utils.MakeArchParam(self.host_, self.target_))

        try:
            env_cmds = msvc_utils.DeduceEnv(bat_file, argv)
            env = util.BuildEnv(env_cmds)
        except:
            util.con_err(util.ConsoleRed,
                         "Could not run or analyze {}".format(bat_file),
                         util.ConsoleNormal)
            return None

        necessary_tools = ['cl.exe', 'rc.exe', 'lib.exe']
        tools, _ = FindToolsInEnv(env, necessary_tools)
        for tool in necessary_tools:
            if tool not in tools:
                # Terminate with ConsoleNormal like every other con_err call,
                # so the console color state is reset.
                util.con_err(util.ConsoleRed,
                             "Could not find {} for {}".format(tool, bat_file),
                             util.ConsoleNormal)
                return None

        cc, _ = self.run_compiler('CC',
                                  'cl',
                                  'msvc',
                                  env,
                                  abs_path=tools['cl.exe'])
        if not cc:
            return None
        cxx, _ = self.run_compiler('CXX',
                                   'cl',
                                   'msvc',
                                   env,
                                   abs_path=tools['cl.exe'])
        if not cxx:
            return None

        # We use tuples here so the data is hashable without going through Pickle.
        tool_list = (
            ('cl', tools['cl.exe']),
            ('rc', tools['rc.exe']),
            ('lib', tools['lib.exe']),
        )
        env_data = {
            'env_cmds': env_cmds,
            'tools': tool_list,
        }
        return self.create_cli(cc, cxx, env_data)
Example #24
0
def ResolveFolder(parent, folder):
  """Resolve |folder| against |parent| (a node with .path, or None/falsy).

  Returns (parent_path, normalized_path). Raises if the normalized path
  escapes the build folder.
  """
  parent_path = ''
  if parent:
    parent_path = parent.path
  path = os.path.normpath(os.path.join(parent_path, folder))

  # The path escapes the build root only if it is ".." itself or begins with
  # "../". A plain startswith('..') would also wrongly reject a top-level
  # folder whose name merely starts with two dots (e.g. "..cache").
  if path == '..' or path.startswith('..' + os.sep):
    util.con_err(
      util.ConsoleRed, 'Output path ',
      util.ConsoleBlue, path,
      util.ConsoleRed, ' is outside the build folder!',
      util.ConsoleNormal
    )
    raise Exception('Cannot generate folders outside the build folder')

  return parent_path, path
Example #25
0
 def pump(self):
     """Message loop: dispatch worker messages until the build stops."""
     with process_manager.ChannelPoller(self.cx, self.workers_) as poller:
         while self.status_ == TaskMaster.BUILD_IN_PROGRESS:
             try:
                 proc, obj = poller.poll()
                 if obj['id'] not in self.messageMap:
                     raise Exception('Unhandled message type: {}'.format(
                         obj['id']))
                 self.messageMap[obj['id']](proc, obj)
             except EOFError:
                 # The process died. Very sad. Clean up and fail the build.
                 # NOTE(review): if poller.poll() raised EOFError on the very
                 # first iteration, `proc` is unbound here and this handler
                 # would raise UnboundLocalError — confirm poll() cannot do
                 # that before any message was received.
                 # NOTE(review): this con_err call has no trailing
                 # util.ConsoleNormal, unlike every other call site — confirm
                 # the console color is intentionally left unreset.
                 util.con_err(util.ConsoleBlue, '[{0}]'.format(proc.pid),
                              util.ConsoleNormal, ' ', util.ConsoleRed,
                              'Worker unexpectedly exited.')
                 self.terminateBuild(TaskMaster.BUILD_FAILED)
                 break
Example #26
0
    def create_cli(self, cc, cxx):
        """Validate a C/C++ compiler pair and wrap it in a CliCompiler.

        Raises if the two compilers disagree on vendor or architecture.
        """
        if not cxx.vendor.equals(cc.vendor):
            message = 'C and C++ compiler are different: CC={0}, CXX={1}'.format(
                cc.vendor, cxx.vendor)
            util.con_err(util.ConsoleRed, message, util.ConsoleNormal)
            raise Exception(message)

        if cc.arch != cxx.arch:
            message = "C architecture {0} does not match C++ architecture {1}".format(
                cc.arch, cxx.arch)
            util.con_err(util.ConsoleRed, message, util.ConsoleNormal)
            raise Exception(message)

        return compiler.CliCompiler(cxx.vendor, cc.argv, cxx.argv, self.gen_options_)
Example #27
0
  def build_internal(self):
    """Run one build pass, honoring the --show-* inspection options.

    Returns True on success or when an inspection option short-circuits the
    build; False on failure.
    """
    if self.options.show_graph:
      self.db.printGraph()
      return True

    if self.options.show_changed:
      # Print only changed file nodes, not commands or folders.
      for entry in damage.ComputeDamageGraph(self.db, only_changed=True):
        if entry.isFile():
          print(entry.format())
      return True

    dmg_graph = damage.ComputeDamageGraph(self.db)
    if not dmg_graph:
      return False

    # From here on we have real damage to act upon.
    if self.options.show_damage:
      dmg_graph.printGraph()
      return True

    dmg_graph.filter_commands()
    if self.options.show_commands:
      dmg_graph.printGraph()
      return True

    builder = Builder(self, dmg_graph)
    if self.options.show_steps:
      builder.printSteps()
      return True

    if not builder.update():
      util.con_err(util.ConsoleHeader, 'Build failed.', util.ConsoleNormal)
      return False

    util.con_out(util.ConsoleHeader, 'Build succeeded.', util.ConsoleNormal)
    return True
Example #28
0
  def build_internal(self):
    """Run one build pass, honoring the --show-* inspection options.

    Returns True on success or when an inspection option short-circuits the
    build; False on failure.
    """
    if self.options.show_graph:
      self.db.printGraph()
      return True

    if self.options.show_changed:
      # Print only changed file nodes, not commands or folders.
      for entry in damage.ComputeDamageGraph(self.db, only_changed=True):
        if entry.isFile():
          print(entry.format())
      return True

    dmg_graph = damage.ComputeDamageGraph(self.db)
    if not dmg_graph:
      return False

    # From here on we have real damage to act upon.
    if self.options.show_damage:
      dmg_graph.printGraph()
      return True

    dmg_graph.filter_commands()
    if self.options.show_commands:
      dmg_graph.printGraph()
      return True

    builder = Builder(self, dmg_graph)
    if self.options.show_steps:
      builder.printSteps()
      return True

    if not builder.update():
      util.con_err(util.ConsoleHeader, 'Build failed.', util.ConsoleNormal)
      return False

    util.con_out(util.ConsoleHeader, 'Build succeeded.', util.ConsoleNormal)
    return True
Example #29
0
  def reconfigure(self):
    """Re-run build scripts if any script changed since its stamp.

    Returns True if no reconfigure was needed or it succeeded; False if
    reparsing the build scripts failed.
    """
    # See if we need to reconfigure.
    files = []
    reconfigure_needed = False
    self.db.query_scripts(lambda row, path, stamp: files.append((path, stamp)))
    for path, stamp in files:
      # A deleted script also forces a reconfigure.
      if not os.path.exists(path) or os.path.getmtime(path) > stamp:
        reconfigure_needed = True
        break

    if not reconfigure_needed:
      return True

    util.con_out(
      util.ConsoleHeader,
      'Reparsing build scripts.',
      util.ConsoleNormal
    )

    # The database should be upgraded here, so we should always have an
    # API version set.
    api_version = self.db.query_var('api_version')
    assert api_version is not None

    # NOTE(review): any api_version other than '2.0'/'2.1' leaves Generator
    # unbound and raises a NameError below — confirm the database upgrade
    # guarantees one of these two values.
    if api_version == '2.0':
      from ambuild2.frontend.v2_0.amb2.gen import Generator
    elif api_version == '2.1':
      from ambuild2.frontend.v2_1.amb2 import Generator

    gen = Generator.FromVars(self.vars, self.db, self.options.refactor)
    try:
      gen.generate()
    except Exception:
      # Narrowed from a bare except so KeyboardInterrupt still propagates;
      # script errors are reported and the build fails.
      traceback.print_exc()
      util.con_err(
        util.ConsoleRed,
        'Failed to reparse build scripts.',
        util.ConsoleNormal
      )
      return False

    # We flush the node cache after this, since database.py expects to get
    # never-before-seen items at the start. We could change this and make
    # nodes individually import, which might be cleaner.
    self.db.flush_caches()

    return True
Example #30
0
  def update_command(self, entry, type, folder, data, dirty, refactoring):
    """Update a command node's database row to match a regenerated command.

    Returns False when the stored entry already matches (no write is
    performed) and True after the row and the in-memory entry have been
    rewritten.  When |refactoring| is set, any change is a hard error.
    """
    # Pickle the command data for storage; empty data is stored as NULL.
    if not data:
      blob = None
    else:
      blob = util.BlobType(util.CompatPickle(data))

    # NOTE(review): the equality test compares entry.blob against the
    # unpickled |data|, while the write below stores the pickled |blob| —
    # presumably entry.blob holds the unpickled form for loaded entries;
    # confirm against the entry loader.  The dirty comparison only cares
    # whether the ALWAYS_DIRTY flag flips, not the exact dirty value.
    if entry.type == type and \
       entry.folder == folder and \
       entry.blob == data and \
       (dirty == nodetypes.ALWAYS_DIRTY) == (entry.dirty == nodetypes.ALWAYS_DIRTY):
      return False

    if refactoring:
      # Print the old command, then mutate the entry in place purely so
      # format() renders the new command for the message; we raise
      # immediately afterward, so the mutation is never persisted.
      util.con_err(util.ConsoleRed, 'Command changed! \n',
                   util.ConsoleRed, 'Old: ',
                   util.ConsoleBlue, entry.format(),
                   util.ConsoleNormal)
      entry.type = type
      entry.folder = folder
      entry.blob = data
      util.con_err(util.ConsoleRed, 'New: ',
                   util.ConsoleBlue, entry.format(),
                   util.ConsoleNormal)
      raise Exception('Refactoring error: command changed')

    # Folder is stored by row id; the root folder is NULL.
    if not folder:
      folder_id = None
    else:
      folder_id = folder.id

    query = """
      update nodes
      set
        type = ?,
        folder = ?,
        data = ?,
        dirty = ?
      where id = ?
    """
    self.cn.execute(query, (type, folder_id, blob, dirty, entry.id))
    # Keep the in-memory entry consistent with the new row.
    entry.type = type
    entry.folder = folder
    entry.blob = blob
    entry.dirty = dirty
    return True
Example #31
0
    def unmark_dirty(self, entry, stamp=None):
        """Mark |entry| as clean, recording |stamp| as its timestamp.

        When no stamp is supplied, commands get a fixed 0.0 stamp and
        files get their current mtime.  If a file's mtime cannot be
        read, the entry is left dirty so it will be retried.
        """
        query = "update nodes set dirty = 0, stamp = ? where id = ?"
        # Test for None explicitly: the old truthiness check treated an
        # explicit stamp of 0.0 as "not given" and recomputed it.
        if stamp is None:
            if entry.isCommand():
                # Commands have no on-disk file to stamp.
                stamp = 0.0
            else:
                try:
                    stamp = os.path.getmtime(entry.path)
                except OSError:
                    # Narrowed from a bare except: getmtime only raises
                    # OSError. Best-effort — leave the node dirty.
                    traceback.print_exc()
                    util.con_err(
                        util.ConsoleRed,
                        'Could not unmark file as dirty; leaving dirty.',
                        util.ConsoleNormal)
                    return

        self.cn.execute(query, (stamp, entry.id))
        entry.dirty = False
        entry.stamp = stamp
Example #32
0
    def update_command(self, entry, type, folder, data, dirty, refactoring):
        """Update a command node's database row to match a regenerated command.

        Returns False when the stored entry already matches (no write is
        performed) and True after the row and the in-memory entry have
        been rewritten.  When |refactoring| is set, any change is a hard
        error.
        """
        # Pickle the command data for storage; empty data is stored as NULL.
        if not data:
            blob = None
        else:
            blob = util.BlobType(util.CompatPickle(data))

        # NOTE(review): the equality test compares entry.blob against the
        # unpickled |data|, while the write below stores the pickled
        # |blob| — presumably entry.blob holds the unpickled form for
        # loaded entries; confirm against the entry loader.  The dirty
        # comparison only cares whether the ALWAYS_DIRTY flag flips.
        if entry.type == type and \
           entry.folder == folder and \
           entry.blob == data and \
           (dirty == nodetypes.ALWAYS_DIRTY) == (entry.dirty == nodetypes.ALWAYS_DIRTY):
            return False

        if refactoring:
            # Print the old command, then mutate the entry in place purely
            # so format() renders the new command for the message; we
            # raise immediately afterward, so nothing is persisted.
            util.con_err(util.ConsoleRed, 'Command changed! \n',
                         util.ConsoleRed, 'Old: ', util.ConsoleBlue,
                         entry.format(), util.ConsoleNormal)
            entry.type = type
            entry.folder = folder
            entry.blob = data
            util.con_err(util.ConsoleRed, 'New: ', util.ConsoleBlue,
                         entry.format(), util.ConsoleNormal)
            raise Exception('Refactoring error: command changed')

        # Folder is stored by row id; the root folder is NULL.
        if not folder:
            folder_id = None
        else:
            folder_id = folder.id

        query = """
      update nodes
      set
        type = ?,
        folder = ?,
        data = ?,
        dirty = ?
      where id = ?
    """
        self.cn.execute(query, (type, folder_id, blob, dirty, entry.id))
        # Keep the in-memory entry consistent with the new row.
        entry.type = type
        entry.folder = folder
        entry.blob = blob
        entry.dirty = dirty
        return True
Example #33
0
    def discoverEntries(self, discovered_paths):
        """Resolve each discovered path to a database entry.

        Unknown paths are registered as discovered sources.  Returns the
        set of resolved entries, or None if any path fails to resolve or
        is not a file input.
        """
        entries = set()
        for discovered_path in discovered_paths:
            node = self.cx.db.query_path(discovered_path)
            if not node:
                node = self.addDiscoveredSource(discovered_path)
                if not node:
                    return None

            # Only plain sources and command outputs are valid file inputs.
            if node.type not in (nodetypes.Source, nodetypes.Output):
                util.con_err(util.ConsoleRed,
                             'Fatal error in DAG construction! Dependency is not a file input.',
                             util.ConsoleNormal)
                util.con_err(util.ConsoleRed, 'Path: ', util.ConsoleBlue,
                             discovered_path, util.ConsoleNormal)
                return None

            entries.add(node)

        return entries
Example #34
0
    def reconfigure(self):
        """Reparse build scripts if any of them changed since the last run.

        Returns True if no reparse was needed or the reparse succeeded,
        False if reparsing failed or the database API version is unknown.
        """
        # See if we need to reconfigure: a deleted script or a script
        # newer than its recorded stamp both force a reparse.
        files = []
        reconfigure_needed = False
        self.db.query_scripts(lambda row, path, stamp: files.append(
            (path, stamp)))
        for path, stamp in files:
            if not os.path.exists(path) or os.path.getmtime(path) > stamp:
                reconfigure_needed = True
                break

        if not reconfigure_needed:
            return True

        util.con_out(util.ConsoleHeader, 'Reparsing build scripts.',
                     util.ConsoleNormal)

        # The database should be upgraded here, so we should always have an
        # API version set.
        api_version = self.db.query_var('api_version')
        assert api_version is not None

        if api_version == '2.0':
            from ambuild2.frontend.v2_0.amb2.gen import Generator
        elif api_version == '2.1':
            from ambuild2.frontend.v2_1.amb2 import Generator
        else:
            # Previously an unrecognized version fell through and hit an
            # unbound-name error below; report it explicitly instead.
            util.con_err(
                util.ConsoleRed,
                'Unknown API version in database: {0}'.format(api_version),
                util.ConsoleNormal)
            return False

        gen = Generator.FromVars(self.vars, self.db, self.options.refactor)
        try:
            gen.generate()
        except:
            traceback.print_exc()
            util.con_err(util.ConsoleRed, 'Failed to reparse build scripts.',
                         util.ConsoleNormal)
            return False

        # We flush the node cache after this, since database.py expects to get
        # never-before-seen items at the start. We could change this and make
        # nodes individually import, which might be cleaner.
        self.db.flush_caches()

        return True
Example #35
0
  def unmark_dirty(self, entry, stamp=None):
    """Mark |entry| as clean, recording |stamp| as its timestamp.

    When no stamp is supplied, commands get a fixed 0.0 stamp and files
    get their current mtime.  If a file's mtime cannot be read, the
    entry is left dirty so it will be retried.
    """
    query = "update nodes set dirty = 0, stamp = ? where id = ?"
    # Test for None explicitly: the old truthiness check treated an
    # explicit stamp of 0.0 as "not given" and recomputed it.
    if stamp is None:
      if entry.isCommand():
        # Commands have no on-disk file to stamp.
        stamp = 0.0
      else:
        try:
          stamp = os.path.getmtime(entry.path)
        except OSError:
          # Narrowed from a bare except: getmtime only raises OSError.
          # Best-effort — leave the node dirty.
          traceback.print_exc()
          util.con_err(
            util.ConsoleRed,
            'Could not unmark file as dirty; leaving dirty.',
            util.ConsoleNormal
          )
          return

    self.cn.execute(query, (stamp, entry.id))
    entry.dirty = False
    entry.stamp = stamp
Example #36
0
def DetectCxx(env, options):
    """Detect the C and C++ compilers and pair them into a CxxCompiler.

    Raises an Exception (after printing the reason) when CC and CXX do
    not share the same vendor or the same version.
    """
    cc = DetectCxxCompiler(env, 'CC')
    cxx = DetectCxxCompiler(env, 'CXX')

    def fail(message):
        # Report the mismatch on the console, then abort detection.
        util.con_err(util.ConsoleRed, message, util.ConsoleNormal)
        raise Exception(message)

    # The detected vendor is encoded in the compiler's class.
    if type(cc) is not type(cxx):
        fail('C and C++ compiler vendors are not the same: CC={0}, CXX={1}'.format(
            cc.name, cxx.name))

    # Both compilers must also report the same version.
    if cc.version != cxx.version:
        fail('C and C++ compilers have different versions: CC={0}-{1}, CXX={2}-{3}'.format(
            cc.name, cc.version, cxx.name, cxx.version))

    return compilers.CxxCompiler(cc, cxx, options)
Example #37
0
def DetectCxx(target, env):
  """Detect CC/CXX from the environment and wrap them in a CliCompiler.

  Raises an Exception (after printing the reason) on a vendor or
  architecture mismatch between the two compilers.
  """
  cc = DetectCxxCompiler(env, 'CC')
  cxx = DetectCxxCompiler(env, 'CXX')

  def fail(message):
    # Report the mismatch on the console, then abort detection.
    util.con_err(util.ConsoleRed, message, util.ConsoleNormal)
    raise Exception(message)

  # Both compilers must come from the same vendor...
  if not cxx.vendor.equals(cc.vendor):
    fail('C and C++ compiler are different: CC={0}, CXX={1}'.format(
      cc.vendor, cxx.vendor))

  # ... and target the same architecture.
  if cc.arch != cxx.arch:
    fail("C architecture {0} does not match C++ architecture {1}".format(cc.arch, cxx.arch))

  # :TODO: Check that the arch is == to target. We don't do this yet since
  # on Windows we can't use platform.architecture().

  return compiler.CliCompiler(cxx.vendor, cc.argv, cxx.argv)
Example #38
0
  def discoverEntries(self, discovered_paths):
    """Resolve each discovered path to a database entry.

    Unknown paths are registered as discovered sources.  Returns the set
    of resolved entries, or None if any path fails to resolve or is not
    a file input.
    """
    entries = set()
    for discovered_path in discovered_paths:
      node = self.cx.db.query_path(discovered_path)
      if not node:
        node = self.addDiscoveredSource(discovered_path)
        if not node:
          return None

      # Only plain sources and command outputs are valid file inputs.
      if node.type not in (nodetypes.Source, nodetypes.Output):
        util.con_err(
          util.ConsoleRed,
          'Fatal error in DAG construction! Dependency is not a file input.',
          util.ConsoleNormal
        )
        util.con_err(util.ConsoleRed, 'Path: ', util.ConsoleBlue, discovered_path,
                     util.ConsoleNormal)
        return None

      entries.add(node)

    return entries
Example #39
0
def DetectCxx(env, options):
  """Detect the C and C++ compilers and pair them into a CxxCompiler.

  Raises an Exception (after printing the reason) when CC and CXX do not
  share the same vendor or the same version.
  """
  cc = DetectCxxCompiler(env, 'CC')
  cxx = DetectCxxCompiler(env, 'CXX')

  def fail(message):
    # Report the mismatch on the console, then abort detection.
    util.con_err(util.ConsoleRed, message, util.ConsoleNormal)
    raise Exception(message)

  # The detected vendor is encoded in the compiler's class.
  if type(cc) is not type(cxx):
    fail('C and C++ compiler vendors are not the same: CC={0}, CXX={1}'.format(
      cc.name, cxx.name))

  # Both compilers must also report the same version.
  if cc.version != cxx.version:
    fail('C and C++ compilers have different versions: CC={0}-{1}, CXX={2}-{3}'.format(
      cc.name, cc.version, cxx.name, cxx.version))

  return compilers.CxxCompiler(cc, cxx, options)
Example #40
0
  def validateOutputFolder(self, path):
    """Resolve |path| to a valid folder entry for placing outputs.

    Returns None for the root folder (empty path), the folder's entry on
    success, and raises with a diagnostic for anything else.
    """
    # Empty path is the root folder, which is null.
    if not len(path):
      return None

    # The folder must already exist.
    folder_entry = self.db.query_path(path)
    if not folder_entry:
      util.con_err(util.ConsoleRed, 'Path "',
                   util.ConsoleBlue, path,
                   util.ConsoleRed, '" specifies a folder that does not exist.',
                   util.ConsoleNormal)
      raise Exception('path specifies a folder that does not exist')

    if self.isValidFolderEntry(folder_entry):
      return folder_entry

    # If it's a folder or an output, we can give a better error message.
    if folder_entry.type == nodetypes.Output or folder_entry.type == nodetypes.Mkdir:
      util.con_err(util.ConsoleRed, 'Folder "',
                   util.ConsoleBlue, folder_entry.path,
                   util.ConsoleRed, '" was never created.',
                   util.ConsoleNormal)
      # Bug fix: the path was previously passed as a second Exception
      # argument and never substituted into the message.
      raise Exception('path {0} was never created'.format(folder_entry.path))

    util.con_err(util.ConsoleRed, 'Attempted to use node "',
                 util.ConsoleBlue, folder_entry.format(),
                 util.ConsoleRed, '" as a path component.',
                 util.ConsoleNormal)
    raise Exception('illegal path component')
Example #41
0
  def addShellCommand(self,
                      context,
                      inputs,
                      argv,
                      outputs,
                      folder=-1,
                      dep_type=None,
                      weak_inputs=None,
                      shared_outputs=None):
    """Add a generic shell command node to the build graph.

    :param context: Build context supplying the default local folder.
    :param inputs: Strong inputs for the command.
    :param argv: Command argv list; must not be None.
    :param outputs: Files the command produces.
    :param folder: Output folder entry; -1 (default) means the context's
                   local folder, None means the root folder.
    :param dep_type: Optional dependency-spew style ('gcc', 'msvc',
                     'sun', or 'fxc'); selects a Cxx node with argv
                     wrapped in a spew descriptor.
    :param weak_inputs: Optional weak (ordering-only) inputs.
    :param shared_outputs: Optional outputs shared with other commands.
    :return: The (command entry, output nodes) pair from addCommand.
    """
    # Avoid shared mutable default arguments; None is the real default.
    if weak_inputs is None:
      weak_inputs = []
    if shared_outputs is None:
      shared_outputs = []

    # -1 is a sentinel for "use the context's folder". Compare with ==:
    # identity tests against int literals are implementation-defined and
    # warn on modern CPython.
    if folder == -1:
      folder = context.localFolder

    if dep_type is None:
      node_type = nodetypes.Command
      data = argv
    else:
      node_type = nodetypes.Cxx
      if dep_type not in ['gcc', 'msvc', 'sun', 'fxc']:
        util.con_err(util.ConsoleRed, 'Invalid dependency spew type: ',
                     util.ConsoleBlue, dep_type,
                     util.ConsoleNormal)
        raise Exception('Invalid dependency spew type')
      data = {
        'type': dep_type,
        'argv': argv,
      }

    if argv is None:
      raise Exception('argv cannot be None')

    return self.addCommand(
      context = context,
      node_type = node_type,
      folder = folder,
      data = data,
      inputs = inputs,
      outputs = outputs,
      weak_inputs = weak_inputs,
      shared_outputs = shared_outputs
    )
Example #42
0
    def validateOutputFolder(self, path):
        """Resolve |path| to a valid folder entry for placing outputs.

        Returns None for the root folder (empty path), the folder's
        entry on success, and raises with a diagnostic for anything
        else.
        """
        # Empty path is the root folder, which is null.
        if not len(path):
            return None

        # The folder must already exist.
        folder_entry = self.db.query_path(path)
        if not folder_entry:
            util.con_err(util.ConsoleRed, 'Path "', util.ConsoleBlue, path,
                         util.ConsoleRed,
                         '" specifies a folder that does not exist.',
                         util.ConsoleNormal)
            raise Exception('path specifies a folder that does not exist')

        if self.isValidFolderEntry(folder_entry):
            return folder_entry

        # If it's a folder or an output, we can give a better error message.
        if folder_entry.type == nodetypes.Output or folder_entry.type == nodetypes.Mkdir:
            util.con_err(util.ConsoleRed, 'Folder "', util.ConsoleBlue,
                         folder_entry.path, util.ConsoleRed,
                         '" was never created.', util.ConsoleNormal)
            # Bug fix: the path was previously passed as a second
            # Exception argument and never substituted into the message.
            raise Exception('path {0} was never created'.format(folder_entry.path))

        util.con_err(util.ConsoleRed, 'Attempted to use node "',
                     util.ConsoleBlue, folder_entry.format(), util.ConsoleRed,
                     '" as a path component.', util.ConsoleNormal)
        raise Exception('illegal path component')
Example #43
0
    def create_cli(self, cc, cxx, env_data = None):
        """Validate a detected CC/CXX pair and wrap it in a CliCompiler.

        Raises an Exception (after printing the reason) when the vendors
        or architectures disagree, or when the C++ architecture does not
        match an explicitly requested target architecture.
        """
        def report(message):
            # Emit the diagnostic, then abort compiler construction.
            util.con_err(util.ConsoleRed, message, util.ConsoleNormal)
            raise Exception(message)

        # The two compilers must come from the same vendor...
        if not cxx.vendor.equals(cc.vendor):
            report('C and C++ compiler are different: CC={0}, CXX={1}'.format(
                cc.vendor, cxx.vendor))

        # ... and target the same architecture.
        if cc.arch != cxx.arch:
            report("C architecture \"{0}\" does not match C++ architecture \"{1}\"".format(
                cc.arch, cxx.arch))

        # An explicitly overridden target architecture must also match.
        if self.target_override_ and cxx.arch != self.target_.arch:
            report("Compiler architecture \"{0}\" does not match requested architecture \"{1}\"".format(
                cxx.arch, self.target_.arch))

        if env_data is not None:
            # Fold vendor-specific properties into the environment data,
            # then freeze the whole dict into a tuple for pickling.
            if cxx.vendor.extra_props:
                env_data['props'] = util.BuildTupleFromDict(cxx.vendor.extra_props)
            env_data = util.BuildTupleFromDict(env_data)

        target_system = System(self.host_.platform, cxx.arch, cxx.subarch,
                               self.target_.abi)
        return compiler.CliCompiler(cxx.vendor,
                                    target_system,
                                    cc.argv,
                                    cxx.argv,
                                    options = self.gen_options_,
                                    env_data = env_data)
Example #44
0
def DetectCxx(target, env, options):
    """Detect CC/CXX from the environment and wrap them in a CliCompiler.

    Raises an Exception (after printing the reason) on a vendor or
    architecture mismatch between the two compilers.
    """
    cc = DetectCxxCompiler(env, 'CC')
    cxx = DetectCxxCompiler(env, 'CXX')

    def fail(message):
        # Report the mismatch on the console, then abort detection.
        util.con_err(util.ConsoleRed, message, util.ConsoleNormal)
        raise Exception(message)

    # Both compilers must come from the same vendor...
    if not cxx.vendor.equals(cc.vendor):
        fail('C and C++ compiler are different: CC={0}, CXX={1}'.format(
            cc.vendor, cxx.vendor))

    # ... and target the same architecture.
    if cc.arch != cxx.arch:
        fail("C architecture {0} does not match C++ architecture {1}".format(
            cc.arch, cxx.arch))

    # :TODO: Check that the arch is == to target. We don't do this yet since
    # on Windows we can't use platform.architecture().

    return compiler.CliCompiler(cxx.vendor, cc.argv, cxx.argv, options)
Example #45
0
  def addDiscoveredSource(self, path):
    """Register a newly discovered dependency file as a source node.

    Returns the new (or existing) entry, or None when the path is
    invalid: relative paths and untracked files inside the output
    folder are both fatal discovery errors.
    """
    # Discovered dependencies must be absolute; a relative path means a
    # generated file was never declared as an output.
    if not os.path.isabs(path):
      util.con_err(
        util.ConsoleRed,
        'Encountered an error while computing new dependencies: ',
        'A new dependent file or path was discovered that has no corresponding build entry. ',
        'This probably means a build script did not explicitly mark a generated file as an output. ',
        'The build must abort since the ordering of these two steps is undefined. ',
        util.ConsoleNormal
      )
      util.con_err(
        util.ConsoleRed,
        'Path: ',
        util.ConsoleBlue,
        path,
        util.ConsoleNormal
      )
      return None

    # Paths inside the build folder must already be tracked as outputs.
    rel_to_objdir = util.RelPathIfCommon(path, self.cx.buildPath)
    if rel_to_objdir:
      entry = self.cx.db.query_path(rel_to_objdir)
      if not entry:
        # (Typo fix: "followeing" -> "following" in the message below.)
        util.con_err(
          util.ConsoleRed,
          'Encountered an error while computing new dependencies: ',
          'A new dependent file was discovered, but it exists in the output folder, and ',
          'no corresponding command creates this file. One of the following might have ',
          'occurred: \n',
          ' (1) The file was created outside of AMBuild, which is not supported.\n',
          ' (2) The file was created by a custom AMBuild command, but was not specified as an output.\n',
          util.ConsoleNormal
        )
        util.con_err(
          util.ConsoleRed,
          'Path: ',
          util.ConsoleBlue,
          rel_to_objdir,
          util.ConsoleNormal
        )
        return None
      return entry

    # Outside the build folder: record it as an ordinary source file.
    return self.cx.db.add_source(path)
Example #46
0
    def Configure(self):
        """Parse command-line options and run the selected build generator.

        Side effects: may create a build folder inside the source tree,
        rewrites self.buildPath, and exits the process on --list-gen,
        unknown generators, or generation failure.
        """
        # Only add --target-arch if the embedder did not pin one already.
        if self.target_arch is None:
            self.options.add_option(
                "--target-arch",
                type="string",
                dest="target_arch",
                default=None,
                help="Override the target architecture.",
            )

        v_options, args = self.options.parse_args()

        # In order to support pickling, we need to rewrite |options| to not use
        # optparse.Values, since its implementation changes across Python versions.
        options = util.Expando()
        ignore_attrs = set(dir(Values))
        for attr in dir(v_options):
            if attr in ignore_attrs:
                continue
            setattr(options, attr, getattr(v_options, attr))

        # Propagate the overridden architecture.
        if self.target_arch is not None:
            assert getattr(options, "target_arch", None) is None
            options.target_arch = self.target_arch

        # --list-gen prints the generator catalog and exits immediately.
        if options.list_gen:
            print("Available build system generators:")
            print("  {0:24} - AMBuild 2 (default)".format("ambuild2"))
            print("  {0:24} - Visual Studio".format("vs"))
            print("")
            print("Extra options:")
            print("  --vs-version=N        Visual Studio: IDE version (2010 or 10 default)")
            print("  --vs-split            Visual Studio: generate one project file per configuration")
            sys.exit(0)

        if options.no_color:
            util.DisableConsoleColors()

        # Configuring directly in the source tree is redirected into a
        # dedicated objdir subfolder, created on demand.
        source_abspath = os.path.normpath(os.path.abspath(self.sourcePath))
        build_abspath = os.path.normpath(os.path.abspath(self.buildPath))
        if source_abspath == build_abspath:
            # default_build_folder may be a fixed name or a callable.
            if util.IsString(self.default_build_folder):
                objfolder = self.default_build_folder
            else:
                objfolder = self.default_build_folder(self)
            new_buildpath = os.path.join(self.buildPath, objfolder)

            util.con_err(
                util.ConsoleHeader, "Warning: build is being configured in the source tree.", util.ConsoleNormal
            )
            if os.path.exists(os.path.join(new_buildpath)):
                # Reuse an existing folder only if it is empty or already
                # holds an AMBuild 2 state folder.
                has_amb2 = os.path.exists(os.path.join(new_buildpath, ".ambuild2"))
                if not has_amb2 and len(os.listdir(new_buildpath)):
                    util.con_err(
                        util.ConsoleRed,
                        "Tried to use ",
                        util.ConsoleBlue,
                        objfolder,
                        util.ConsoleRed,
                        " as a build folder, but it is not empty!",
                        util.ConsoleNormal,
                    )
                    raise Exception("build folder has unrecognized files")

                util.con_err(
                    util.ConsoleHeader,
                    "Re-using build folder: ",
                    util.ConsoleBlue,
                    "{0}".format(objfolder),
                    util.ConsoleNormal,
                )
            else:
                util.con_err(
                    util.ConsoleHeader,
                    'Creating "',
                    util.ConsoleBlue,
                    "{0}".format(objfolder),
                    util.ConsoleHeader,
                    '" as a build folder.',
                    util.ConsoleNormal,
                )
                os.mkdir(new_buildpath)
            self.buildPath = new_buildpath

        # Instantiate the requested generator backend.
        if options.generator == "ambuild2":
            from ambuild2.frontend.v2_1.amb2 import gen

            builder = gen.Generator(self.sourcePath, self.buildPath, os.getcwd(), options, args)
        elif options.generator == "vs":
            from ambuild2.frontend.v2_1.vs import gen

            builder = gen.Generator(self.sourcePath, self.buildPath, os.getcwd(), options, args)
        else:
            sys.stderr.write("Unrecognized build generator: " + options.generator + "\n")
            sys.exit(1)

        # Generation runs with the build folder as the working directory.
        with util.FolderChanger(self.buildPath):
            if not builder.generate():
                sys.stderr.write("Configure failed.\n")
                sys.exit(1)
Example #47
0
    def cleanup(self):
        """Remove stale files, commands, scripts, sources, and folders.

        Folders are dropped leaf-first so that a folder is only removed
        once everything inside it is gone.  In refactoring mode, any
        removal is a hard error instead.
        """
        for path in self.rm_list_:
            util.rm_path(path)

        for cmd_entry in self.old_commands_:
            if self.refactoring:
                util.con_err(util.ConsoleRed,
                             'Command removed during refactoring: \n',
                             util.ConsoleBlue, cmd_entry.format(),
                             util.ConsoleNormal)
                raise Exception('Refactoring error: command removed')
            self.db.drop_command(cmd_entry)

        for path in self.old_scripts_:
            self.db.drop_script(path)

        self.db.query_dead_sources(lambda e: self.db.drop_source(e))
        self.db.query_dead_shared_outputs(lambda e: self.db.drop_output(e))
        self.db.drop_unused_environments()

        # Lightweight adjacency record for the dead-folder tree below.
        class Node:
            def __init__(self):
                self.incoming = set()
                self.outgoing = set()

        # Build a tree of dead folders.
        tracker = {}
        for entry in self.old_folders_:
            if entry not in tracker:
                tracker[entry] = Node()

            if entry.folder is None:
                continue

            # If our parent is not a dead folder, don't create an edge. It should be
            # impossible for a/b to be dead, a/b/c to be alive, and a/b/c/d to be
            # dead, since a/b/c will implicitly keep a/b alive.
            if entry.folder not in self.old_folders_:
                continue

            if entry.folder not in tracker:
                tracker[entry.folder] = Node()

            parent = tracker[entry.folder]
            child = tracker[entry]
            parent.incoming.add(entry)
            child.outgoing.add(entry.folder)

        # Find the leaves. Sets start out >= 1 items. Remove them as they they
        # are empty.
        dead_folders = [
            entry for entry in self.old_folders_
            if len(tracker[entry].incoming) == 0
        ]
        while len(dead_folders):
            child_entry = dead_folders.pop()
            child_node = tracker[child_entry]

            if self.refactoring:
                util.con_err(util.ConsoleRed,
                             'Folder removed during refactoring: \n',
                             util.ConsoleBlue, child_entry.format(),
                             util.ConsoleNormal)
                # Bug fix: this previously raised 'command removed' even
                # though a folder was removed.
                raise Exception('Refactoring error: folder removed')

            self.db.drop_folder(child_entry)
            # Dropping a leaf may turn its parent into a new leaf.
            for parent_entry in child_node.outgoing:
                parent_node = tracker[parent_entry]
                parent_node.incoming.remove(child_entry)
                if not len(parent_node.incoming):
                    dead_folders.append(parent_entry)
Example #48
0
    def addCommand(self,
                   context,
                   node_type,
                   folder,
                   data,
                   inputs,
                   outputs,
                   weak_inputs=None,
                   shared_outputs=None,
                   env_data=None):
        """Add or update a command node and wire its graph edges.

        Reuses an existing command when one of |outputs| already has a
        producing command slated for deletion; otherwise inserts a new
        one.  Returns (command entry, list of output entry nodes).  In
        refactoring mode, any newly introduced input or output raises.
        """
        assert not folder or isinstance(folder, nodetypes.Entry)

        weak_inputs = weak_inputs or []
        shared_outputs = shared_outputs or []

        # ALWAYS_DIRTY is a sentinel passed in place of an input list;
        # it is incompatible with weak inputs and non-Command nodes.
        # NOTE(review): this checks self.cm.ALWAYS_DIRTY here but
        # context.cm.ALWAYS_DIRTY below — presumably the same object;
        # confirm.
        if inputs is self.cm.ALWAYS_DIRTY:
            if len(weak_inputs) != 0:
                message = "Always-dirty commands cannot have weak inputs"
                util.con_err(util.ConsoleRed, "{0}.".format(message),
                             util.ConsoleNormal)
                raise Exception(message)
            if node_type != nodetypes.Command:
                message = "Node type {0} cannot be always-dirty".format(
                    node_type)
                util.con_err(util.ConsoleRed, "{0}.".format(message),
                             util.ConsoleNormal)
                raise Exception(message)

        # Build the set of weak links.
        weak_links = set()
        for weak_input in weak_inputs:
            assert type(weak_input) is nodetypes.Entry
            assert weak_input.type != nodetypes.Source
            weak_links.add(weak_input)

        # Build the set of strong links.
        strong_links = set()
        if inputs is not context.cm.ALWAYS_DIRTY:
            for strong_input in inputs:
                strong_input = self.parseInput(context, strong_input)
                strong_links.add(strong_input)

        # Build the list of outputs.  If an output already has a producing
        # command that is pending deletion, adopt that command for reuse.
        cmd_entry = None
        output_nodes = []
        for output in outputs:
            output_node = self.parseOutput(folder, output, nodetypes.Output)
            output_nodes.append(output_node)

            input_entry = self.db.query_command_of(output_node)
            if not input_entry:
                continue

            # Make sure this output won't be duplicated.
            if input_entry not in self.old_commands_:
                util.con_err(util.ConsoleRed, 'Command: ',
                             input_entry.format(), util.ConsoleNormal)
                raise Exception('Output has been duplicated: {0}'.format(
                    output_node.path))

            if not cmd_entry:
                cmd_entry = input_entry
        # end for

        # Build the list of shared outputs.
        shared_output_nodes = []
        for shared_output in shared_outputs:
            shared_output_node = self.parseOutput(folder, shared_output,
                                                  nodetypes.SharedOutput)
            shared_output_nodes.append(shared_output_node)

        output_links = set(output_nodes)
        shared_links = set(shared_output_nodes)

        # There should be no duplicates in either output list. These error messages
        # could be better.
        if len(output_nodes) > len(output_links):
            util.con_err(util.ConsoleRed,
                         'The output list contains duplicate files.',
                         util.ConsoleNormal)
            raise Exception('Shared output list contains duplicate files.')
        if len(shared_output_nodes) > len(shared_links):
            util.con_err(util.ConsoleRed,
                         'The output list contains duplicate files.',
                         util.ConsoleNormal)
            raise Exception('Shared output list contains duplicate files.')

        # The intersection of output_links and shared_links should be the empty set.
        duplicates = output_links.intersection(shared_links)
        if len(duplicates):
            bad_entry = duplicates.pop()
            util.con_err(util.ConsoleRed,
                         'An output has been duplicated as a shared output: ',
                         util.ConsoleBlue, bad_entry.path, util.ConsoleNormal)
            raise Exception(
                'An output has been duplicated as a shared output.')

        dirty = nodetypes.DIRTY
        if inputs == context.cm.ALWAYS_DIRTY:
            dirty = nodetypes.ALWAYS_DIRTY

        if cmd_entry:
            # Update the entry in the database.
            self.db.update_command(cmd_entry, node_type, folder, data, dirty,
                                   self.refactoring, env_data)

            # Disconnect any outputs that are no longer connected to this output.
            # It's okay to use output_links since there should never be duplicate
            # outputs.
            for outgoing in self.db.query_strong_outgoing(cmd_entry):
                if outgoing not in output_links:
                    self.db.drop_strong_edge(cmd_entry, outgoing)
                    self.db.drop_output(outgoing)
                else:
                    output_links.remove(outgoing)

            # Do the same for shared outputs. Since there is a many:1 relationship,
            # we can't drop shared outputs here. We save that for a cleanup step.
            for outgoing in self.db.query_shared_outputs(cmd_entry):
                if outgoing not in shared_links:
                    self.db.drop_shared_output_edge(cmd_entry, outgoing)
                else:
                    shared_links.remove(outgoing)

            # Remove us from the list of commands to delete.
            self.old_commands_.remove(cmd_entry)
        else:
            # Note that if there are no outputs, we will always add a new command,
            # and the old (identical) command will be deleted.
            cmd_entry = self.db.add_command(node_type, folder, data, dirty,
                                            env_data)

        # Local helper function to warn about refactoring problems.
        def refactoring_error(node):
            util.con_err(util.ConsoleRed, 'New input introduced: ',
                         util.ConsoleBlue, node.path + '\n',
                         util.ConsoleRed, 'Command: ', util.ConsoleBlue,
                         cmd_entry.format(), util.ConsoleNormal)
            raise Exception('Refactoring error: new input introduced')

        # After the update loops above, output_links/shared_links hold
        # only the edges that still need to be created.
        if len(output_links) and self.refactoring:
            refactoring_error(output_links.pop())
        if len(shared_links) and self.refactoring:
            refactoring_error(shared_links.pop())

        # Connect each output.
        for output_node in output_links:
            self.db.add_strong_edge(cmd_entry, output_node)
        for shared_output_node in shared_links:
            self.db.add_shared_output_edge(cmd_entry, shared_output_node)

        # Connect/disconnect strong inputs.
        strong_inputs = self.db.query_strong_inputs(cmd_entry)
        strong_added = strong_links - strong_inputs
        strong_removed = strong_inputs - strong_links

        if len(strong_added) and self.refactoring:
            refactoring_error(strong_added.pop())

        for strong_input in strong_added:
            self.db.add_strong_edge(strong_input, cmd_entry)
        for strong_input in strong_removed:
            self.db.drop_strong_edge(strong_input, cmd_entry)

        # Connect/disconnect weak inputs.
        weak_inputs = self.db.query_weak_inputs(cmd_entry)
        weak_added = weak_links - weak_inputs
        weak_removed = weak_inputs - weak_links

        if len(weak_added) and self.refactoring:
            refactoring_error(weak_added.pop())

        for weak_input in weak_added:
            self.db.add_weak_edge(weak_input, cmd_entry)
        for weak_input in weak_removed:
            self.db.drop_weak_edge(weak_input, cmd_entry)

        # If we got new outputs or inputs, we need to re-run the command.
        changed = len(output_links) + len(strong_added) + len(weak_added)
        if changed and cmd_entry.dirty == nodetypes.NOT_DIRTY:
            self.db.mark_dirty(cmd_entry)

        return cmd_entry, output_nodes
Example #49
0
    def parseOutput(self, cwd_entry, path, kind):
        if path[-1] == os.sep or path[
                -1] == os.altsep or path == '.' or path == '':
            util.con_err(util.ConsoleRed, 'Path "', util.ConsoleBlue, path,
                         util.ConsoleRed,
                         '" looks like a folder; a folder was not expected.',
                         util.ConsoleNormal)
            raise Exception('Expected folder, but path has a trailing slash')

        path = os.path.normpath(path)

        path, name = os.path.split(path)
        path = nodetypes.combine(cwd_entry, path)

        # We should have caught a case like 'x/' earlier.
        assert len(name)

        # If we resolved that there is no prefix path, then take this to mean the
        # root folder.
        if path:
            folder_entry = self.validateOutputFolder(path)
            output_path = os.path.join(path, name)
        else:
            folder_entry = None
            output_path = name

        entry = self.db.query_path(output_path)
        if not entry:
            if self.refactoring:
                util.con_err(util.ConsoleRed, 'New output file introduced: ',
                             util.ConsoleBlue, output_path, util.ConsoleNormal)
                raise Exception('Refactoring error')
            return self.db.add_output(folder_entry, output_path, kind)

        if entry.type == kind:
            return entry

        if entry.type == nodetypes.Mkdir:
            if entry not in self.old_folders_:
                util.con_err(util.ConsoleRed,
                             'A folder is being re-used as an output file: "',
                             util.ConsoleBlue, entry.path, util.ConsoleRed,
                             '"', util.ConsoleNormal)
                raise Exception(
                    'Attempted to re-use a folder as generated file')

            if self.refactoring:
                util.con_err(
                    util.ConsoleRed,
                    'A generated folder has changed to a generated file: ',
                    util.ConsoleBlue, entry.path, util.ConsoleNormal)
                raise Exception('Refactoring error')

            # We keep the node in old_folders_. This should be okay, since we've
            # changed the type to Output now. This way we can stick to one folder
            # deletion routine, since it's fairly complicated.
        elif entry.type == nodetypes.Output:
            # If we're asking for a shared output, make sure we can reuse this one.
            input_cmd = self.db.query_command_of(entry)
            if input_cmd and input_cmd not in self.old_commands_:
                util.con_err(util.ConsoleRed, 'First defined with command: ',
                             input_cmd.format(), util.ConsoleNormal)
                raise Exception(
                    'Existing output cannot be a shared output: {0}'.format(
                        entry.path))

            if self.refactoring:
                util.con_err(util.ConsoleRed,
                             'An output has changed to a shared output: ',
                             util.ConsoleBlue, entry.path, util.ConsoleNormal)
                raise Exception('Refactoring error')
        elif entry.type == nodetypes.SharedOutput:
            input_cmds = self.db.query_shared_commands_of(entry)
            for input_cmd in input_cmds:
                if input_cmd not in self.old_commands_:
                    util.con_err(
                        util.ConsoleRed,
                        'A shared output cannot be specified as an normal output.',
                        util.ConsoleNormal)
                    raise Exception(
                        'Existing shared output cannot be a normal output: {0}'
                        .format(entry.path))

            if self.refactoring:
                util.con_err(
                    util.ConsoleRed,
                    'A shared output has changed to a normal output: ',
                    util.ConsoleBlue, entry.path, util.ConsoleNormal)
                raise Exception('Refactoring error')
        else:
            util.con_err(
                util.ConsoleRed,
                'An existing node has been specified as an output file: "',
                util.ConsoleBlue, entry.format(), util.ConsoleRed, '"',
                util.ConsoleNormal)
            raise Exception(
                'Attempted to re-use an incompatible node as an output')

        self.db.change_to_output(entry, kind)
        return entry
Example #50
0
  def cleanup(self):
    """Remove filesystem paths and database nodes that were not re-declared
    by the current configure pass.

    Drops dead commands, scripts, groups, sources, and shared outputs, then
    deletes dead folders leaf-first so a folder is only removed after all of
    its dead children. In refactoring mode, any removal raises instead.
    """
    for path in self.rm_list_:
      util.rm_path(path)

    for cmd_entry in self.old_commands_:
      if self.refactoring:
        util.con_err(util.ConsoleRed, 'Command removed during refactoring: \n',
                     util.ConsoleBlue, cmd_entry.format(),
                     util.ConsoleNormal)
        raise Exception('Refactoring error: command removed')
      self.db.drop_command(cmd_entry)

    for path in self.old_scripts_:
      self.db.drop_script(path)

    for group in self.old_groups_:
      self.db.drop_group(group)

    self.db.query_dead_sources(lambda e: self.db.drop_source(e))
    self.db.query_dead_shared_outputs(lambda e: self.db.drop_output(e))

    # Tiny adjacency record for the dead-folder tree below.
    class Node:
      def __init__(self):
        self.incoming = set()  # dead child folders
        self.outgoing = set()  # dead parent folders

    # Build a tree of dead folders.
    tracker = {}
    for entry in self.old_folders_:
      if entry not in tracker:
        tracker[entry] = Node()

      if entry.folder is None:
        continue

      # If our parent is not a dead folder, don't create an edge. It should be
      # impossible for a/b to be dead, a/b/c to be alive, and a/b/c/d to be
      # dead, since a/b/c will implicitly keep a/b alive.
      if entry.folder not in self.old_folders_:
        continue

      if entry.folder not in tracker:
        tracker[entry.folder] = Node()

      parent = tracker[entry.folder]
      child = tracker[entry]
      parent.incoming.add(entry)
      child.outgoing.add(entry.folder)

    # Find the leaves (no dead children), then peel inward: dropping a leaf
    # may empty its parent's incoming set, making the parent droppable too.
    dead_folders = [entry for entry in self.old_folders_ if len(tracker[entry].incoming) == 0]
    while len(dead_folders):
      child_entry = dead_folders.pop()
      child_node = tracker[child_entry]

      if self.refactoring:
        util.con_err(util.ConsoleRed, 'Folder removed during refactoring: \n',
                     util.ConsoleBlue, child_entry.format(),
                     util.ConsoleNormal)
        # Bug fix: this branch reports a folder removal, not a command removal.
        raise Exception('Refactoring error: folder removed')

      self.db.drop_folder(child_entry)
      for parent_entry in child_node.outgoing:
        parent_node = tracker[parent_entry]
        parent_node.incoming.remove(child_entry)
        if not len(parent_node.incoming):
          dead_folders.append(parent_entry)
Example #51
0
  def addCommand(self, context, node_type, folder, data, inputs, outputs,
                 weak_inputs=None, shared_outputs=None):
    """Create or update the command node described by |data| and wire up
    its dependency edges.

    Strong inputs are parsed via parseInput; outputs and shared outputs via
    parseOutput. If an output already belongs to a dead command, that
    command entry is reused and updated; otherwise a new command is added.
    Returns a (cmd_entry, output_nodes) tuple. In refactoring mode any new
    input or output edge raises instead of mutating the graph.
    """
    assert not folder or isinstance(folder, nodetypes.Entry)

    # Avoid mutable default arguments; treat None as "no entries".
    if weak_inputs is None:
      weak_inputs = []
    if shared_outputs is None:
      shared_outputs = []

    # Build the set of weak links.
    weak_links = set()
    for weak_input in weak_inputs:
      assert type(weak_input) is nodetypes.Entry
      assert weak_input.type != nodetypes.Source
      weak_links.add(weak_input)

    # Build the set of strong links.
    strong_links = set()
    for strong_input in inputs:
      strong_input = self.parseInput(context, strong_input)
      strong_links.add(strong_input)

    # Build the list of outputs.
    cmd_entry = None
    output_nodes = []
    for output in outputs:
      output_node = self.parseOutput(folder, output, nodetypes.Output)
      output_nodes.append(output_node)

      input_entry = self.db.query_command_of(output_node)
      if not input_entry:
        continue

      # Make sure this output won't be duplicated.
      if input_entry not in self.old_commands_:
        util.con_err(util.ConsoleRed, 'Command: ', input_entry.format(), util.ConsoleNormal)
        raise Exception('Output has been duplicated: {0}'.format(output_node.path))

      # Reuse the first dead command that owns one of our outputs.
      if not cmd_entry:
        cmd_entry = input_entry
    # end for

    # Build the list of shared outputs.
    shared_output_nodes = []
    for shared_output in shared_outputs:
      shared_output_node = self.parseOutput(folder, shared_output, nodetypes.SharedOutput)
      shared_output_nodes.append(shared_output_node)

    output_links = set(output_nodes)
    shared_links = set(shared_output_nodes)

    # There should be no duplicates in either output list. These error messages
    # could be better.
    if len(output_nodes) > len(output_links):
      util.con_err(util.ConsoleRed, 'The output list contains duplicate files.',
                   util.ConsoleNormal)
      # Bug fix: this check is for the normal output list, not the shared one.
      raise Exception('Output list contains duplicate files.')
    if len(shared_output_nodes) > len(shared_links):
      util.con_err(util.ConsoleRed, 'The output list contains duplicate files.',
                   util.ConsoleNormal)
      raise Exception('Shared output list contains duplicate files.')

    # The intersection of output_links and shared_links should be the empty set.
    duplicates = output_links.intersection(shared_links)
    if len(duplicates):
      bad_entry = duplicates.pop()
      util.con_err(util.ConsoleRed, 'An output has been duplicated as a shared output: ',
                   util.ConsoleBlue, bad_entry.path,
                   util.ConsoleNormal)
      raise Exception('An output has been duplicated as a shared output.')

    if cmd_entry:
      # Update the entry in the database.
      self.db.update_command(cmd_entry, node_type, folder, data, self.refactoring)

      # Disconnect any outputs that are no longer connected to this output.
      # It's okay to use output_links since there should never be duplicate
      # outputs.
      for outgoing in self.db.query_strong_outgoing(cmd_entry):
        if outgoing not in output_links:
          self.db.drop_strong_edge(cmd_entry, outgoing)
          self.db.drop_output(outgoing)
        else:
          output_links.remove(outgoing)

      # Do the same for shared outputs. Since there is a many:1 relationship,
      # we can't drop shared outputs here. We save that for a cleanup step.
      for outgoing in self.db.query_shared_outputs(cmd_entry):
        if outgoing not in shared_links:
          self.db.drop_shared_output_edge(cmd_entry, outgoing)
        else:
          shared_links.remove(outgoing)

      # Remove us from the list of commands to delete.
      self.old_commands_.remove(cmd_entry)
    else:
      # Note that if there are no outputs, we will always add a new command,
      # and the old (identical) command will be deleted.
      cmd_entry = self.db.add_command(node_type, folder, data)

    # Local helper function to warn about refactoring problems.
    def refactoring_error(node):
      util.con_err(util.ConsoleRed, 'New input introduced: ',
                   util.ConsoleBlue, node.path + '\n',
                   util.ConsoleRed, 'Command: ',
                   util.ConsoleBlue, cmd_entry.format(),
                   util.ConsoleNormal)
      raise Exception('Refactoring error: new input introduced')

    # After the loops above, output_links/shared_links hold only the
    # not-yet-connected nodes; any remainder is new in refactoring mode.
    if len(output_links) and self.refactoring:
      refactoring_error(output_links.pop())
    if len(shared_links) and self.refactoring:
      refactoring_error(shared_links.pop())

    # Connect each output.
    for output_node in output_links:
      self.db.add_strong_edge(cmd_entry, output_node)
    for shared_output_node in shared_links:
      self.db.add_shared_output_edge(cmd_entry, shared_output_node)

    # Connect/disconnect strong inputs.
    strong_inputs = self.db.query_strong_inputs(cmd_entry)
    strong_added = strong_links - strong_inputs
    strong_removed = strong_inputs - strong_links

    if len(strong_added) and self.refactoring:
      refactoring_error(strong_added.pop())

    for strong_input in strong_added:
      self.db.add_strong_edge(strong_input, cmd_entry)
    for strong_input in strong_removed:
      self.db.drop_strong_edge(strong_input, cmd_entry)

    # Connect/disconnect weak inputs.
    weak_inputs = self.db.query_weak_inputs(cmd_entry)
    weak_added = weak_links - weak_inputs
    weak_removed = weak_inputs - weak_links

    if len(weak_added) and self.refactoring:
      refactoring_error(weak_added.pop())

    for weak_input in weak_added:
      self.db.add_weak_edge(weak_input, cmd_entry)
    for weak_input in weak_removed:
      self.db.drop_weak_edge(weak_input, cmd_entry)

    # If we got new outputs or inputs, we need to re-run the command.
    changed = len(output_links) + len(strong_added) + len(weak_added)
    if changed and not cmd_entry.dirty:
      self.db.mark_dirty(cmd_entry)

    return cmd_entry, output_nodes
Example #52
0
  def parseOutput(self, cwd_entry, path, kind):
    """Resolve |path| relative to |cwd_entry| into an output node of type
    |kind|, creating a new database node or converting a compatible dead
    one. Raises for folder-like paths, incompatible nodes, and any change
    while refactoring.
    """
    # Anything that syntactically denotes a folder is rejected up front.
    if path[-1] in (os.sep, os.altsep) or path in ('.', ''):
      util.con_err(util.ConsoleRed, 'Path "',
                   util.ConsoleBlue, path,
                   util.ConsoleRed, '" looks like a folder; a folder was not expected.',
                   util.ConsoleNormal)
      raise Exception('Expected folder, but path has a trailing slash')

    prefix, name = os.path.split(os.path.normpath(path))
    prefix = nodetypes.combine(cwd_entry, prefix)

    # Trailing-slash forms were rejected above, so the basename is non-empty.
    assert len(name)

    # An empty prefix means the output lives in the root folder.
    if prefix:
      folder_entry = self.validateOutputFolder(prefix)
      output_path = os.path.join(prefix, name)
    else:
      folder_entry = None
      output_path = name

    entry = self.db.query_path(output_path)
    if not entry:
      # Brand-new path: an error while refactoring, a fresh node otherwise.
      if self.refactoring:
        util.con_err(util.ConsoleRed, 'New output file introduced: ',
                     util.ConsoleBlue, output_path,
                     util.ConsoleNormal)
        raise Exception('Refactoring error')
      return self.db.add_output(folder_entry, output_path, kind)

    # Already the requested kind - reuse it directly.
    if entry.type == kind:
      return entry

    if entry.type == nodetypes.Mkdir:
      # Only a dead folder may be turned into a generated file.
      if entry not in self.old_folders_:
        util.con_err(util.ConsoleRed, 'A folder is being re-used as an output file: "',
                     util.ConsoleBlue, entry.path,
                     util.ConsoleRed, '"',
                     util.ConsoleNormal)
        raise Exception('Attempted to re-use a folder as generated file')

      if self.refactoring:
        util.con_err(util.ConsoleRed, 'A generated folder has changed to a generated file: ',
                     util.ConsoleBlue, entry.path,
                     util.ConsoleNormal)
        raise Exception('Refactoring error')

      # The entry deliberately stays in old_folders_ even though its type
      # becomes Output; folder deletion stays centralized in one routine.
    elif entry.type == nodetypes.Output:
      # Converting a normal output to a shared one requires its owning
      # command to be dead.
      owner = self.db.query_command_of(entry)
      if owner and owner not in self.old_commands_:
        util.con_err(util.ConsoleRed, 'First defined with command: ', owner.format(), util.ConsoleNormal)
        raise Exception('Existing output cannot be a shared output: {0}'.format(entry.path))

      if self.refactoring:
        util.con_err(util.ConsoleRed, 'An output has changed to a shared output: ',
                     util.ConsoleBlue, entry.path,
                     util.ConsoleNormal)
        raise Exception('Refactoring error')
    elif entry.type == nodetypes.SharedOutput:
      # All commands sharing this output must be dead before conversion.
      for owner in self.db.query_shared_commands_of(entry):
        if owner not in self.old_commands_:
          util.con_err(util.ConsoleRed, 'A shared output cannot be specified as an normal output.',
                       util.ConsoleNormal)
          raise Exception('Existing shared output cannot be a normal output: {0}'.format(entry.path))

      if self.refactoring:
        util.con_err(util.ConsoleRed, 'A shared output has changed to a normal output: ',
                     util.ConsoleBlue, entry.path,
                     util.ConsoleNormal)
        raise Exception('Refactoring error')
    else:
      # Any other node type (source, command, ...) is incompatible.
      util.con_err(util.ConsoleRed, 'An existing node has been specified as an output file: "',
                   util.ConsoleBlue, entry.format(),
                   util.ConsoleRed, '"',
                   util.ConsoleNormal)
      raise Exception('Attempted to re-use an incompatible node as an output')

    self.db.change_to_output(entry, kind)
    return entry
Example #53
0
  def Configure(self):
    """Parse command-line options and run the selected build generator.

    Converts optparse Values into a picklable Expando, handles --list-gen
    and --no-color, redirects an in-source-tree configure into a dedicated
    object folder, then dispatches to the chosen generator. Calls
    sys.exit on --list-gen, unknown generators, and generation failure.
    """
    v_options, args = self.options.parse_args()

    # In order to support pickling, we need to rewrite |options| to not use
    # optparse.Values, since its implementation changes across Python versions.
    options = util.Expando()
    ignore_attrs = set(dir(Values))
    for attr in dir(v_options):
      if attr in ignore_attrs:
        continue
      setattr(options, attr, getattr(v_options, attr))

    if options.list_gen:
      print('Available build system generators:')
      print('  {0:24} - AMBuild 2 (default)'.format('ambuild2'))
      print('  {0:24} - Visual Studio'.format('vs'))
      print('')
      print('Extra options:')
      print('  --vs-version=N        Visual Studio: IDE version (2010 or 10 default)')
      print('  --vs-split            Visual Studio: generate one project file per configuration')
      sys.exit(0)

    if options.no_color:
      util.DisableConsoleColors()

    source_abspath = os.path.normpath(os.path.abspath(self.sourcePath))
    build_abspath = os.path.normpath(os.path.abspath(self.buildPath))
    if source_abspath == build_abspath:
      # Configuring inside the source tree: use (or compute) a dedicated
      # object folder instead.
      if util.IsString(self.default_build_folder):
        objfolder = self.default_build_folder
      else:
        objfolder = self.default_build_folder(self)
      new_buildpath = os.path.join(self.buildPath, objfolder)

      util.con_err(
        util.ConsoleHeader,
        'Warning: build is being configured in the source tree.',
        util.ConsoleNormal
      )
      # Fix: os.path.join() with a single argument was a redundant no-op.
      if os.path.exists(new_buildpath):
        # Reuse only a folder that is empty or already an AMBuild 2 build.
        has_amb2 = os.path.exists(os.path.join(new_buildpath, '.ambuild2'))
        if not has_amb2 and len(os.listdir(new_buildpath)):
          util.con_err(util.ConsoleRed, 'Tried to use ',
                       util.ConsoleBlue, objfolder,
                       util.ConsoleRed, ' as a build folder, but it is not empty!',
                       util.ConsoleNormal)
          raise Exception('build folder has unrecognized files')

        util.con_err(util.ConsoleHeader, 'Re-using build folder: ',
                     util.ConsoleBlue, '{0}'.format(objfolder),
                     util.ConsoleNormal)
      else:
        util.con_err(util.ConsoleHeader, 'Creating "',
                     util.ConsoleBlue, '{0}'.format(objfolder),
                     util.ConsoleHeader, '" as a build folder.',
                     util.ConsoleNormal)
        os.mkdir(new_buildpath)
      self.buildPath = new_buildpath

    if options.generator == 'ambuild2':
      from ambuild2.frontend.amb2 import gen
      builder = gen.Generator(self.sourcePath, self.buildPath, os.getcwd(), options, args)
    elif options.generator == 'vs':
      from ambuild2.frontend.vs import gen
      builder = gen.Generator(self.sourcePath, self.buildPath, os.getcwd(), options, args)
    else:
      sys.stderr.write('Unrecognized build generator: ' + options.generator + '\n')
      sys.exit(1)

    # Generators expect to run from inside the build folder.
    with util.FolderChanger(self.buildPath):
      if not builder.generate():
        sys.stderr.write('Configure failed.\n')
        sys.exit(1)
Example #54
0
  def generateFolder(self, parent, folder):
    """Ensure a folder entry exists in the database for |folder| under
    |parent|, creating or converting intermediate path components as
    needed, and return the entry for the deepest component.

    NOTE(review): if |folder| is empty/None the component loop never runs
    and |entry| would be unbound at the return - presumably callers always
    pass a non-empty folder; confirm at call sites.
    """
    parent_path, path = paths.ResolveFolder(parent, folder)

    # Quick check. If this folder is not in our old folder list, and it's in
    # the DB, then we already have an entry for it that has already negotiated
    # its parent paths.
    old_entry = self.db.query_path(path)
    if old_entry and self.isValidFolderEntry(old_entry):
      return old_entry

    # Split |folder| into components (deepest first; popped shallowest-first
    # below).
    components = []
    while folder:
      folder, name = os.path.split(folder)
      if not name:
        break
      components.append(name)

    # Walk from parent_path downward, creating/validating each component.
    path = parent_path
    while len(components):
      name = components.pop()
      path = os.path.join(path, name)
      entry = self.db.query_path(path)
      if not entry:
        # Unknown component: an error in refactoring mode, otherwise add it.
        if self.refactoring:
          util.con_err(util.ConsoleRed, 'New folder introduced: ',
                       util.ConsoleBlue, path,
                       util.ConsoleNormal)
          raise Exception('Refactoring error: new folder')
        entry = self.db.add_folder(parent, path)
      elif entry.type == nodetypes.Output or entry.type == nodetypes.SharedOutput:
        # A generated file occupies this path; it may only become a folder
        # if every command that produced it is dead.
        if entry.type == nodetypes.Output:
          cmd_entries = [self.db.query_command_of(entry)]
        elif entry.type == nodetypes.SharedOutput:
          cmd_entries = self.db.query_shared_commands_of(entry)

        for cmd_entry in cmd_entries:
          if cmd_entry not in self.old_commands_:
            util.con_err(util.ConsoleRed, 'Folder has the same path as an output file generated by:\n',
                         util.ConsoleBlue, cmd_entry.format(),
                         util.ConsoleNormal)
            raise Exception('Output has been duplicated: {0}'.format(entry.path))

        if self.refactoring:
          util.con_err(util.ConsoleRed, 'Path "',
                       util.ConsoleBlue, entry.path,
                       util.ConsoleRed, '" has changed from a file to a folder.',
                       util.ConsoleNormal)
          raise Exception('Refactoring error: path changed from file to folder')

        # Schedule the stale file for deletion and retype the node.
        self.rm_list_.append(entry.path)
        self.db.change_to_folder(entry)
      elif entry.type == nodetypes.Mkdir:
        # We let the same folder be generated twice, so use discard, not remove.
        self.old_folders_.discard(entry)
      else:
        util.con_err(util.ConsoleRed, 'Folder has the same node signature as: ',
                     util.ConsoleBlue, entry.format(),
                     util.ConsoleNormal)
        raise Exception('Output has been duplicated: {0}'.format(entry.path))

      parent = entry

    return entry