Example #1
def Main():
  parser = BuildOptions()
  (options, args) = parser.parse_args()
  if not ProcessOptions(options):
    parser.print_help()
    return 1

  ch = logging.StreamHandler(sys.stdout)
  logger.addHandler(ch)
  logger.setLevel(logging.INFO)
  if options.logfile:
    fh = logging.FileHandler(options.logfile, mode='wb')
    logger.addHandler(fh)

  workspace = abspath(join(dirname(sys.argv[0]), '..'))
  suites = GetSuites(join(workspace, 'test'))
  repositories = [TestRepository(join(workspace, 'test', name)) for name in suites]
  repositories += [TestRepository(a) for a in options.suite]

  root = LiteralTestSuite(repositories)
  if len(args) == 0:
    paths = [SplitPath(t) for t in BUILT_IN_TESTS]
  else:
    paths = [ ]
    for arg in args:
      path = SplitPath(arg)
      paths.append(path)

  # Check for --valgrind option. If enabled, we overwrite the special
  # command flag with a command that uses the run-valgrind.py script.
  if options.valgrind:
    run_valgrind = join(workspace, "tools", "run-valgrind.py")
    options.special_command = "python -u " + run_valgrind + " @"

  shell = abspath(options.shell)
  buildspace = dirname(shell)

  processor = GetSpecialCommandProcessor(options.special_command)
  context = Context(workspace,
                    buildspace,
                    VERBOSE,
                    shell,
                    options.timeout,
                    processor,
                    options.suppress_dialogs,
                    options.store_unexpected_output)
  # First build the required targets
  if not options.no_build:
    reqs = [ ]
    for path in paths:
      reqs += root.GetBuildRequirements(path, context)
    reqs = list(set(reqs))
    if len(reqs) > 0:
      if options.j != 1:
        options.scons_flags += ['-j', str(options.j)]
      if not BuildRequirements(context, reqs, options.mode, options.scons_flags):
        return 1

  # Just return if we are only building the targets for running the tests.
  if options.build_only:
    return 0

  # Get status for tests
  sections = [ ]
  defs = { }
  root.GetTestStatus(context, sections, defs)
  config = Configuration(sections, defs)

  # List the tests
  all_cases = [ ]
  all_unused = [ ]
  unclassified_tests = [ ]
  globally_unused_rules = None
  for path in paths:
    for arch in options.arch:
      for mode in options.mode:
        vm = context.GetVm(arch, mode)
        if not exists(vm):
          print "Can't find shell executable: '%s'" % vm
          continue
        env = {
          'mode': mode,
          'system': utils.GuessOS(),
          'arch': arch,
        }
        test_list = root.ListTests([], path, context, arch, mode)
        unclassified_tests += test_list
        (cases, unused_rules, all_outcomes) = (
            config.ClassifyTests(test_list, env))
        if globally_unused_rules is None:
          globally_unused_rules = set(unused_rules)
        else:
          globally_unused_rules = (
              globally_unused_rules.intersection(unused_rules))
        all_cases += cases
        all_unused.append(unused_rules)

  if options.cat:
    visited = set()
    for test in unclassified_tests:
      key = tuple(test.path)
      if key in visited:
        continue
      visited.add(key)
      print "--- begin source: %s ---" % test.GetLabel()
      source = test.GetSource().strip()
      print source
      print "--- end source: %s ---" % test.GetLabel()
    return 0

  if options.warn_unused:
    for rule in globally_unused_rules:
      print "Rule for '%s' was not used." % '/'.join([str(s) for s in rule.path])

  tempdir = os.environ.get('NODE_TEST_DIR') or options.temp_dir
  if tempdir:
    try:
      os.makedirs(tempdir)
      os.environ['NODE_TEST_DIR'] = tempdir
    except OSError as exception:
      if exception.errno != errno.EEXIST:
        print "Could not create the temporary directory", options.temp_dir
        sys.exit(1)

  if options.report:
    PrintReport(all_cases)

  result = None
  def DoSkip(case):
    if SKIP in case.outcomes or SLOW in case.outcomes:
      return True
    return FLAKY in case.outcomes and options.flaky_tests == SKIP
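Example #1 breaks off right after DoSkip; in the fuller runners below (Examples #7 and #16) the same helper filters the case list before anything is run, along the lines of:

    cases_to_run = [c for c in all_cases if not DoSkip(c)]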
Example #2
def main():
    parser = optparse.OptionParser()
    parser.add_option('--parallel',
                      dest='parallel',
                      action='store_true',
                      default=False,
                      help='Use fremontcut in parallel mode.')
    parser.add_option(
        '--systems',
        dest='systems',
        action='store',
        type='string',
        default='htmldart2js,htmldartium,_blink',
        help='Systems to generate (htmldart2js, htmldartium, _blink)')
    parser.add_option('--output-dir',
                      dest='output_dir',
                      action='store',
                      type='string',
                      default=None,
                      help='Directory to put the generated files')
    parser.add_option('--use-database-cache',
                      dest='use_database_cache',
                      action='store_true',
                      default=False,
                      help='''Use the cached database from the previous run to
                    improve startup performance''')
    parser.add_option('--update-dom-metadata',
                      dest='update_dom_metadata',
                      action='store_true',
                      default=False,
                      help='''Update the metadata list of DOM APIs''')
    parser.add_option('--verbose',
                      dest='logging_level',
                      action='store_false',
                      default=logging.WARNING,
                      help='Output all informational messages')
    parser.add_option('--examine',
                      dest='examine_idls',
                      action='store_true',
                      default=None,
                      help='Analyze IDL files')
    parser.add_option(
        '--logging',
        dest='logging',
        type='int',
        action='store',
        default=logging.NOTSET,
        help='Level of logging 20 is Info, 30 is Warnings, 40 is Errors')
    parser.add_option(
        '--gen-interop',
        dest='dart_js_interop',
        action='store_true',
        default=False,
        help='Use Javascript objects (dart:js) accessing the DOM in _blink')
    parser.add_option(
        '--no-cached-patches',
        dest='no_cached_patches',
        action='store_true',
        default=False,
        help='Do not generate the sdk/lib/js/cached_patches.dart file')

    (options, args) = parser.parse_args()

    current_dir = os.path.dirname(__file__)
    database_dir = os.path.join(current_dir, '..', 'database')
    logging.config.fileConfig(os.path.join(current_dir, 'logging.conf'))
    systems = options.systems.split(',')

    output_dir = options.output_dir or os.path.join(
        current_dir, '..', '..', '..', utils.GetBuildDir(utils.GuessOS()),
        'generated')

    dart2js_output_dir = None
    if 'htmldart2js' in systems:
        dart2js_output_dir = os.path.join(output_dir, 'dart2js')

    logging_level = options.logging_level \
      if options.logging == logging.NOTSET else options.logging

    start_time = time.time()

    UpdateCssProperties()

    # Parse the IDL and create the database.
    database = fremontcutbuilder.main(options.parallel,
                                      logging_level=logging_level,
                                      examine_idls=options.examine_idls)

    GenerateFromDatabase(database, dart2js_output_dir,
                         options.update_dom_metadata, logging_level,
                         options.dart_js_interop)

    file_generation_start_time = time.time()

    if 'htmldart2js' in systems:
        _logger.info('Generating dart2js single files.')

        for library_name in HTML_LIBRARY_NAMES:
            source = os.path.join(dart2js_output_dir,
                                  '%s_dart2js.dart' % library_name)
            GenerateSingleFile(
                source,
                os.path.join('..', '..', '..', 'sdk', 'lib', library_name,
                             'dart2js'))

    print '\nGenerating single file %s seconds' % round(
        time.time() - file_generation_start_time, 2)

    end_time = time.time()

    print '\nDone (dartdomgenerator) %s seconds' % round(
        end_time - start_time, 2)
Example #3
DRT_DIR = os.path.join('client', 'tests', 'drt')
DRT_VERSION = os.path.join(DRT_DIR, 'LAST_VERSION')
DRT_LATEST_PATTERN = (
    'gs://dartium-archive/latest/drt-%(osname)s-%(bot)s-*.zip')
DRT_PERMANENT_PATTERN = ('gs://dartium-archive/drt-%(osname)s-%(bot)s/drt-'
                         '%(osname)s-%(bot)s-%(num1)s.%(num2)s.zip')

DARTIUM_DIR = os.path.join('client', 'tests', 'dartium')
DARTIUM_VERSION = os.path.join(DARTIUM_DIR, 'LAST_VERSION')
DARTIUM_LATEST_PATTERN = (
    'gs://dartium-archive/latest/dartium-%(osname)s-%(bot)s-*.zip')
DARTIUM_PERMANENT_PATTERN = (
    'gs://dartium-archive/dartium-%(osname)s-%(bot)s/'
    'dartium-%(osname)s-%(bot)s-%(num1)s.%(num2)s.zip')

SDK_DIR = os.path.join(utils.GetBuildRoot(utils.GuessOS(), 'release', 'ia32'),
                       'dart-sdk')
SDK_VERSION = os.path.join(SDK_DIR, 'LAST_VERSION')
SDK_LATEST_PATTERN = 'gs://dart-archive/channels/dev/raw/latest/VERSION'
# TODO(efortuna): Once the x64 VM is also optimized, select the version
# based on whether we are running on a 32-bit or 64-bit system.
SDK_PERMANENT = ('gs://dart-archive/channels/dev/raw/%(version_num)s/sdk/' +
                 'dartsdk-%(osname)s-ia32-release.zip')

# Dictionary mapping each download to the earliest revision we have stored.
LAST_VALID = {'dartium': 4285, 'chromedriver': 7823, 'sdk': 9761, 'drt': 5342}

sys.path.append(os.path.join(GSUTIL_DIR, 'third_party', 'boto'))
import boto
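
The GS path constants above rely on Python's named %-interpolation; filling one in with a dict of fields (the values here are hypothetical, only to show the shape of the resulting path) looks like:

    print(DRT_PERMANENT_PATTERN % {
        'osname': 'linux', 'bot': 'dartium', 'num1': '1234', 'num2': '0'})
    # -> gs://dartium-archive/drt-linux-dartium/drt-linux-dartium-1234.0.zip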

Example #4
# This script builds a Chrome App file (.crx) for Swarm
import os
import platform
import subprocess
import sys

DART_PATH = os.path.normpath(os.path.dirname(__file__) + '/../../..')
CLIENT_PATH = os.path.normpath(DART_PATH + '/client')

# Add the tools directory so we can find utils.py.
sys.path.append(os.path.abspath(DART_PATH + '/tools'))
import utils

buildRoot = CLIENT_PATH + '/' + utils.GetBuildRoot(
    utils.GuessOS(), 'debug', 'dartc')

def execute(*command):
  '''
  Executes the given command in a new process. If the command fails (returns
  non-zero), halts the script and exits with that exit code.
  '''
  exitcode = subprocess.call(command)
  if exitcode != 0:
    sys.exit(exitcode)

def createChromeApp(buildRoot, antTarget, resultFile):
  buildDir = os.path.join(buildRoot, 'war')

  # Use ant to create the 'war' directory
  # TODO(jmesserly): we should factor out as much as possible from the ant file
Example #5
# Copyright (c) 2011, the Dart project authors.  Please see the AUTHORS file
# for details. All rights reserved. Use of this source code is governed by a
# BSD-style license that can be found in the LICENSE file.
#

import os
import platform
import re
import shutil
import subprocess
import sys
import tempfile

import utils

OS_GUESS = utils.GuessOS()

HTML_CONTENTS = """
<html>
<head>
  <title> Test %(title)s </title>
  <style>
     .unittest-table { font-family:monospace; border:1px; }
     .unittest-pass { background: #6b3;}
     .unittest-fail { background: #d55;}
     .unittest-error { background: #a11;}
  </style>
</head>
<body>
  <h1> Running %(title)s </h1>
  <script type="text/javascript" src="%(controller_script)s"></script>
Example #6
# BSD-style license that can be found in the LICENSE file.
#

# A script to kill hanging processes. The tool will return non-zero if any
# process was actually found.
#

import optparse
import os
import signal
import subprocess
import sys

import utils

os_name = utils.GuessOS()

POSIX_INFO = 'ps -p %s -o args'

EXECUTABLE_NAMES = {
    'win32': {
        'chrome': 'chrome.exe',
        'content_shell': 'content_shell.exe',
        'dart': 'dart.exe',
        'iexplore': 'iexplore.exe',
        'firefox': 'firefox.exe',
        'git': 'git.exe',
        'svn': 'svn.exe',
        'fletch': 'fletch.exe',
        'fletch-vm': 'fletch-vm.exe',
    },
Example #7
def Main():
    """Main loop."""
    utils.ConfigureJava()
    parser = BuildOptions()
    (options, args) = parser.parse_args()
    if not ProcessOptions(options):
        parser.print_help()
        return 1

    client = os.path.abspath(os.path.join(os.path.dirname(sys.argv[0]), '..'))
    repositories = []
    for component in os.listdir(client) + ['.']:
        test_path = os.path.join(client, component, 'tests')
        if os.path.exists(test_path) and os.path.isdir(test_path):
            suites = GetSuites(test_path)
            repositories += [
                TestRepository(os.path.join(test_path, name))
                for name in suites
            ]
    repositories += [TestRepository(a) for a in options.suite]

    root = LiteralTestSuite(repositories)
    if args:
        paths = []
        for arg in args:
            path = _SplitPath(arg)
            paths.append(path)
    else:
        paths = [_SplitPath(t) for t in BUILT_IN_TESTS]

    # Check for --valgrind option. If enabled, we overwrite the special
    # command flag with a command that uses the tools/valgrind.py script.
    if options.valgrind:
        run_valgrind = os.path.join(client, 'runtime', 'tools', 'valgrind.py')
        options.special_command = 'python -u ' + run_valgrind + ' @'

    context = Context(client, options.verbose, options.os, options.timeout,
                      GetSpecialCommandProcessor(options.special_command),
                      options.suppress_dialogs, options.executable,
                      options.flags, options.keep_temporary_files,
                      options.batch)

    # Get status for tests
    sections = []
    defs = {}
    root.GetTestStatus(context, sections, defs)
    config = Configuration(sections, defs)

    # List the tests
    all_cases = []
    all_unused = []
    globally_unused_rules = None
    for path in paths:
        for mode in options.mode:
            for arch in options.arch:
                env = {
                    'mode': mode,
                    'system': utils.GuessOS(),
                    'arch': arch,
                    'checked': options.checked
                }
                test_list = root.ListTests([], path, context, mode, arch)
                (cases, unused_rules,
                 unused_outcomes) = config.ClassifyTests(test_list, env)
                if globally_unused_rules is None:
                    globally_unused_rules = set(unused_rules)
                else:
                    globally_unused_rules = (
                        globally_unused_rules.intersection(unused_rules))
                all_cases += cases
                all_unused.append(unused_rules)

    if options.report:
        PrintReport(all_cases)

    if options.list:
        PrintTests(all_cases)
        return 0

    result = None

    def DoSkip(case):
        return testing.SKIP in case.outcomes or testing.SLOW in case.outcomes

    cases_to_run = [c for c in all_cases if not DoSkip(c)]
    # Creating test cases may generate temporary files. Make sure
    # skipped tests clean up these files.
    for c in all_cases:
        if DoSkip(c): c.case.Cleanup()

    if cases_to_run:
        try:
            start = time.time()
            if RunTestCases(cases_to_run, options.progress, options.tasks,
                            context):
                result = 0
            else:
                result = 1
            duration = time.time() - start
        except KeyboardInterrupt:
            print 'Exiting on KeyboardInterrupt'
            return 1
    else:
        print 'No tests to run.'
        return 0

    if options.time:
        print
        print '--- Total time: %s ---' % FormatTime(duration)
        timed_tests = [
            t.case for t in cases_to_run if t.case.duration is not None
        ]
        timed_tests.sort(lambda a, b: a.CompareTime(b))
        index = 1
        for entry in timed_tests[:20]:
            t = FormatTime(entry.duration)
            print '%4i (%s) %s' % (index, t, entry.GetLabel())
            index += 1

    return result
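
The timed-test listing above sorts with a Python 2 comparison function (timed_tests.sort(lambda a, b: a.CompareTime(b))). A sketch of an equivalent call that also works on Python 3 wraps the same comparison with functools.cmp_to_key:

    import functools

    timed_tests.sort(key=functools.cmp_to_key(lambda a, b: a.CompareTime(b)))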
Example #8
  Google Cloud Storage for the documentation viewer.
"""

import optparse
import os
from os.path import join, dirname, abspath, exists
import platform
import subprocess
import sys
sys.path.append(abspath(join(dirname(__file__), '../../../tools')))
import utils

DART = abspath(
    join(
        dirname(__file__), '../../../%s/%s/dart-sdk/bin/dart' %
        (utils.BUILD_ROOT[utils.GuessOS()],
         utils.GetBuildConf('release', utils.GuessArchitecture()))))
PACKAGE_ROOT = abspath(
    join(
        dirname(__file__), '../../../%s/%s/packages/' %
        (utils.BUILD_ROOT[utils.GuessOS()],
         utils.GetBuildConf('release', utils.GuessArchitecture()))))
GSUTIL = utils.GetBuildbotGSUtilPath()
GS_SITE = 'gs://dartlang-docgen'
DESCRIPTION = ('Runs docgen.dart on the SDK libraries, and uploads them to '
               'Google Cloud Storage for the dartdoc-viewer.')

# Allow us to override checking SVN's revision number. Useful for development
# so we can upload docs from a branch using an SDK that was built on a
# revision newer than when the branch was forked.
trustSVN = None
Example #9
#

# This script builds a Chrome App file (.crx) for Swarm
import os
import platform
import subprocess
import sys

DART_PATH = os.path.normpath(os.path.dirname(__file__) + '/../../..')
CLIENT_PATH = os.path.normpath(DART_PATH + '/client')

# Add the tools directory so we can find utils.py.
sys.path.append(os.path.abspath(DART_PATH + '/tools'))
import utils

buildRoot = CLIENT_PATH + '/' + utils.GetBuildRoot(utils.GuessOS(), 'debug',
                                                   'dartc')


def execute(*command):
    '''
    Executes the given command in a new process. If the command fails (returns
    non-zero), halts the script and exits with that exit code.
    '''
    exitcode = subprocess.call(command)
    if exitcode != 0:
        sys.exit(exitcode)


def createChromeApp(buildRoot, antTarget, resultFile):
    buildDir = os.path.join(buildRoot, 'war')
Example #10
# Copyright (c) 2013, the Dart project authors.  Please see the AUTHORS file
# for details. All rights reserved. Use of this source code is governed by a
# BSD-style license that can be found in the LICENSE file.

# A script which makes it easy to execute common DOM-related tasks

import os
import subprocess
import sys
from sys import argv

sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
import utils

dart_out_dir = utils.GetBuildRoot(utils.GuessOS(), 'release', 'ia32')
if utils.IsWindows():
    dart_bin = os.path.join(dart_out_dir, 'dart.exe')
else:
    dart_bin = os.path.join(dart_out_dir, 'dart')

dart_dir = os.path.abspath(
    os.path.join(os.path.dirname(os.path.realpath(__file__)), os.path.pardir,
                 os.path.pardir))


def help():
    print(
        'Helper script to make it easy to perform common tasks encountered '
        'during the life of a Dart DOM developer.\n'
        '\n'
Example #11
def Main():
  global OUTPUT
  global BUILD

  parser = BuildOptions()
  (options, args) = parser.parse_args()

  if args:
    parser.print_help()
    return 1

  osName = utils.GuessOS()
  mode = 'debug'
  arch = utils.GuessArchitecture()

  if not options.build:
    print >> sys.stderr, 'Error: no --build option specified'
    exit(1)
  else:
    BUILD = options.build

  if not options.out:
    print >> sys.stderr, 'Error: no --out option specified'
    exit(1)
  else:
    # TODO(devoncarew): Currently we scrape the output path to determine the
    # mode and arch. This is fragile and should be moved into one location
    # (utils.py?) or made more explicit.
    OUTPUT = options.out
    mode = ('release', 'debug')['Debug' in OUTPUT]
    arch = ('ia32', 'x64')['X64' in OUTPUT]
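    # ('release', 'debug')['Debug' in OUTPUT] indexes a 2-tuple with a bool
    # (False -> 0, True -> 1), so a hypothetical OUTPUT of 'out/DebugX64'
    # yields mode 'debug' and arch 'x64'.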

  # Use explicit mode and arch information.
  if options.mode:
    mode = options.mode
  if options.arch:
    arch = options.arch

  OUTPUT = os.path.abspath(OUTPUT)
  BUILD = os.path.abspath(BUILD)

  print "\nBuilding the editor"
  print "  config : %s, %s, %s" % (osName, arch, mode)
  print "  output : %s" % OUTPUT

  # Clean the editor output directory.
  print '\ncleaning %s' % OUTPUT
  shutil.rmtree(OUTPUT, True)

  # These are the valid eclipse build configurations that we can produce.
  # We synthesize these from the OS_CONFIG and ARCH_CONFIG information.
  # macosx, cocoa, x86 & macosx, cocoa, x86_64
  # win32, win32, x86 & win32, win32, x86_64
  # linux, gtk, x86 & linux, gtk, x86_64

  buildConfig = OS_CONFIG[osName] + ', ' + ARCH_CONFIG[arch]

  print "\ninvoking build_rcp.xml with buildConfig = [%s]\n" % buildConfig

  sys.stdout.flush()
  sys.stderr.flush()

  buildScript = join('editor', 'tools', 'features',
                     'com.google.dart.tools.deploy.feature_releng',
                     'build_rcp.xml')
  build_cmd = [AntPath(),
      '-lib',
      join('third_party', 'bzip2', 'bzip2.jar'),
      '-Dbuild.out=' + OUTPUT,
      '-Dbuild.configs=' + buildConfig,
      '-Dbuild.root=' + GetEclipseBuildRoot(),
      '-Dbuild.downloads=' + GetDownloadCache(),
      '-Dbuild.source=' + os.path.abspath('editor'),
      '-Dbuild.dart.sdk=' + GetSdkPath(),
      '-Dbuild.no.properties=true',
      '-Dbuild.channel=' + utils.GetChannel(),
      '-Dbuild.revision=' + utils.GetSVNRevision(),
      '-Dbuild.version.qualifier=' + utils.GetEclipseVersionQualifier(),
      '-Ddart.version.full=' + utils.GetVersion(),
      '-buildfile',
      buildScript]
  print build_cmd
  buildRcpStatus = subprocess.call(build_cmd, shell=utils.IsWindows())

  if buildRcpStatus != 0:
    sys.exit(buildRcpStatus)

  # build_rcp.xml will put the built editor archive in the OUTPUT directory
  # (dart-editor-macosx.cocoa.x86.zip). It contains the editor application in a
  # dart/ subdirectory. We unzip the contents of the archive into OUTPUT. It
  # will use the ../dart-sdk directory as its SDK.
  archives = glob.glob(join(OUTPUT, 'd*.zip'))

  if archives:
    ProcessEditorArchive(arch, archives[0], OUTPUT)

  if os.path.exists(GetEditorTemp()):
    shutil.rmtree(GetEditorTemp())

  print('\nEditor build successful')
Example #12
def ShouldCopyAnalyzer():
    os = utils.GuessOS()
    return os == 'linux' or os == 'macos'
Example #13
def Main(argv):
    # Pull in all of the gypi files which will be munged into the sdk.
    io_runtime_sources = \
      (eval(open("runtime/bin/io_sources.gypi").read()))['sources']

    HOME = dirname(dirname(realpath(__file__)))

    SDK_tmp = tempfile.mkdtemp()
    SDK = argv[1]

    # TODO(dgrove) - deal with architectures that are not ia32.

    if exists(SDK):
        rmtree(SDK)

    # Create and populate sdk/bin.
    BIN = join(SDK_tmp, 'bin')
    os.makedirs(BIN)

    # Copy the Dart VM binary and the Windows Dart VM link library
    # into sdk/bin.
    #
    # TODO(dgrove) - deal with architectures that are not ia32.
    build_dir = os.path.dirname(argv[1])
    dart_file_extension = ''
    analyzer_file_extension = ''
    if utils.GuessOS() == 'win32':
        dart_file_extension = '.exe'
        analyzer_file_extension = '.bat'  # TODO(zundel): test on Windows
        dart_import_lib_src = join(HOME, build_dir, 'dart.lib')
        dart_import_lib_dest = join(BIN, 'dart.lib')
        copyfile(dart_import_lib_src, dart_import_lib_dest)
    dart_src_binary = join(HOME, build_dir, 'dart' + dart_file_extension)
    dart_dest_binary = join(BIN, 'dart' + dart_file_extension)
    copyfile(dart_src_binary, dart_dest_binary)
    copymode(dart_src_binary, dart_dest_binary)
    if utils.GuessOS() != 'win32':
        subprocess.call(['strip', dart_dest_binary])

    if ShouldCopyAnalyzer():
        # Copy analyzer into sdk/bin
        ANALYZER_HOME = join(HOME, build_dir, 'analyzer')
        dart_analyzer_src_binary = join(ANALYZER_HOME, 'bin', 'dart_analyzer')
        dart_analyzer_dest_binary = join(
            BIN, 'dart_analyzer' + analyzer_file_extension)
        copyfile(dart_analyzer_src_binary, dart_analyzer_dest_binary)
        copymode(dart_analyzer_src_binary, dart_analyzer_dest_binary)

    # Create pub shell script.
    pub_src_script = join(HOME, 'utils', 'pub', 'sdk', 'pub')
    CopyShellScript(pub_src_script, BIN)

    #
    # Create and populate sdk/include.
    #
    INCLUDE = join(SDK_tmp, 'include')
    os.makedirs(INCLUDE)
    copyfile(join(HOME, 'runtime', 'include', 'dart_api.h'),
             join(INCLUDE, 'dart_api.h'))
    copyfile(join(HOME, 'runtime', 'include', 'dart_debugger_api.h'),
             join(INCLUDE, 'dart_debugger_api.h'))

    #
    # Create and populate sdk/lib.
    #

    LIB = join(SDK_tmp, 'lib')
    os.makedirs(LIB)

    #
    # Create and populate lib/io.
    #
    io_dest_dir = join(LIB, 'io')
    os.makedirs(io_dest_dir)
    os.makedirs(join(io_dest_dir, 'runtime'))
    for filename in io_runtime_sources:
        assert filename.endswith('.dart')
        if filename == 'io.dart':
            copyfile(join(HOME, 'runtime', 'bin', filename),
                     join(io_dest_dir, 'io_runtime.dart'))
        else:
            copyfile(join(HOME, 'runtime', 'bin', filename),
                     join(io_dest_dir, 'runtime', filename))

    # Construct lib/io/io_runtime.dart from whole cloth.
    dest_file = open(join(io_dest_dir, 'io_runtime.dart'), 'a')
    for filename in io_runtime_sources:
        assert filename.endswith('.dart')
        if filename == 'io.dart':
            continue
        dest_file.write('#source("runtime/' + filename + '");\n')
    dest_file.close()

    #
    # Create and populate lib/{core, crypto, isolate, json, uri, utf, ...}.
    #

    os.makedirs(join(LIB, 'html'))
    for library in [
            '_internal', 'core', 'coreimpl', 'crypto', 'isolate',
            join('html', 'dart2js'),
            join('html', 'dartium'), 'json', 'math', 'mirrors', 'scalarlist',
            'uri', 'utf'
    ]:
        copytree(join(HOME, 'lib', library),
                 join(LIB, library),
                 ignore=ignore_patterns('*.svn', 'doc', '*.py', '*.gypi',
                                        '*.sh'))

    # Create and copy pkg.
    PKG = join(SDK_tmp, 'pkg')
    os.makedirs(PKG)

    #
    # Create and populate pkg/{args, intl, logging, meta, unittest}
    #

    for library in [
            'args', 'htmlescape', 'dartdoc', 'intl', 'logging', 'meta',
            'unittest'
    ]:
        copytree(join(HOME, 'pkg', library),
                 join(PKG, library),
                 ignore=ignore_patterns('*.svn', 'doc', 'docs', '*.py',
                                        '*.gypi', '*.sh'))

    # TODO(dgrove): Remove this once issue 4788 is addressed.
    copytree(join(HOME, 'lib', 'compiler'),
             join(PKG, 'compiler'),
             ignore=ignore_patterns('*.svn', 'doc', '*.py', '*.gypi', '*.sh'))

    ReplaceInFiles([join(LIB, '_internal', 'libraries.dart')],
                   [('"compiler/', '"../pkg/compiler/')])

    ReplaceInFiles([join(PKG, 'compiler', 'implementation', 'lib', 'io.dart')],
                   [('../../runtime/bin', '../../lib/io/runtime')])

    # Fixup dartdoc
    # TODO(dgrove): Remove this once issue 4788 is addressed.
    ReplaceInFiles([
        join(PKG, 'dartdoc', 'lib', 'src', 'mirrors', 'dart2js_mirror.dart'),
        join(PKG, 'dartdoc', 'lib', 'mirrors_util.dart'),
        join(PKG, 'dartdoc', 'lib', 'classify.dart'),
        join(PKG, 'dartdoc', 'lib', 'src', 'client', 'client-live-nav.dart'),
        join(PKG, 'dartdoc', 'lib', 'src', 'client', 'client-static.dart'),
        join(PKG, 'dartdoc', 'lib', 'dartdoc.dart'),
    ], [
        ("../../lib/compiler", "../../pkg/compiler"),
    ])

    # Create and copy tools.
    UTIL = join(SDK_tmp, 'util')
    os.makedirs(UTIL)

    if ShouldCopyAnalyzer():
        # Create and copy Analyzer library into 'util'
        ANALYZER_DEST = join(UTIL, 'analyzer')
        os.makedirs(ANALYZER_DEST)

        analyzer_src_jar = join(ANALYZER_HOME, 'util', 'analyzer',
                                'dart_analyzer.jar')
        analyzer_dest_jar = join(ANALYZER_DEST, 'dart_analyzer.jar')
        copyfile(analyzer_src_jar, analyzer_dest_jar)

        jarsToCopy = [
            join("args4j", "2.0.12", "args4j-2.0.12.jar"),
            join("guava", "r09", "guava-r09.jar"),
            join("json", "r2_20080312", "json.jar")
        ]
        for jarToCopy in jarsToCopy:
            dest_dir = join(ANALYZER_DEST, os.path.dirname(jarToCopy))
            os.makedirs(dest_dir)
            dest_file = join(ANALYZER_DEST, jarToCopy)
            src_file = join(ANALYZER_HOME, 'util', 'analyzer', jarToCopy)
            copyfile(src_file, dest_file)

    # Create and populate util/pub.
    copytree(join(HOME, 'utils', 'pub'),
             join(UTIL, 'pub'),
             ignore=ignore_patterns('.svn', 'sdk'))

    # Copy in 7zip for Windows.
    if utils.GuessOS() == 'win32':
        copytree(join(HOME, 'third_party', '7zip'),
                 join(join(UTIL, 'pub'), '7zip'),
                 ignore=ignore_patterns('.svn'))

        ReplaceInFiles([
            join(UTIL, 'pub', 'io.dart'),
        ], [
            ("var pathTo7zip = '../../third_party/7zip/7za.exe';",
             "var pathTo7zip = '7zip/7za.exe';"),
        ])

    revision = utils.GetSVNRevision()

    # Copy dart2js.
    CopyDart2Js(build_dir, SDK_tmp, revision)

    # Write the 'revision' file
    if revision is not None:
        with open(os.path.join(SDK_tmp, 'revision'), 'w') as f:
            f.write(revision + '\n')
            f.close()

    Copy(join(HOME, 'README.dart-sdk'), join(SDK_tmp, 'README'))

    move(SDK_tmp, SDK)
Example #14
#!/usr/bin/env python
# Copyright 2016 The Dart project authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

import argparse
import multiprocessing
import os
import subprocess
import sys
import time
import utils

HOST_OS = utils.GuessOS()
HOST_ARCH = utils.GuessArchitecture()
SCRIPT_DIR = os.path.dirname(sys.argv[0])
DART_ROOT = os.path.realpath(os.path.join(SCRIPT_DIR, '..'))


def get_out_dir(mode, arch, target_os):
    return utils.GetBuildRoot(HOST_OS, mode, arch, target_os)


def to_command_line(gn_args):
    def merge(key, value):
        if type(value) is bool:
            return '%s=%s' % (key, 'true' if value else 'false')
        return '%s="%s"' % (key, value)

    return [merge(x, y) for x, y in gn_args.iteritems()]
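
to_command_line flattens a dict of GN arguments into key=value strings, quoting string values but emitting booleans bare as true/false (iteritems() makes this Python 2 only). A quick sketch with made-up arguments:

    gn_args = {'is_debug': True, 'target_os': 'linux'}
    print(to_command_line(gn_args))
    # e.g. ['is_debug=true', 'target_os="linux"'] (order follows dict iteration)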
Example #15
def main():
    parser = optparse.OptionParser()
    parser.add_option('--parallel',
                      dest='parallel',
                      action='store_true',
                      default=False,
                      help='Use fremontcut in parallel mode.')
    parser.add_option('--rebuild',
                      dest='rebuild',
                      action='store_true',
                      default=False,
                      help='Rebuild the database from IDL using fremontcut.')
    parser.add_option('--systems',
                      dest='systems',
                      action='store',
                      type='string',
                      default='htmldart2js,htmldartium',
                      help='Systems to generate (htmldart2js, htmldartium)')
    parser.add_option('--output-dir',
                      dest='output_dir',
                      action='store',
                      type='string',
                      default=None,
                      help='Directory to put the generated files')
    parser.add_option('--use-blink',
                      dest='dart_use_blink',
                      action='store_true',
                      default=False,
                      help='''Delegate all native calls to dart:blink''')
    parser.add_option('--use-database-cache',
                      dest='use_database_cache',
                      action='store_true',
                      default=False,
                      help='''Use the cached database from the previous run to
                    improve startup performance''')
    parser.add_option('--update-dom-metadata',
                      dest='update_dom_metadata',
                      action='store_true',
                      default=False,
                      help='''Update the metadata list of DOM APIs''')
    (options, args) = parser.parse_args()

    current_dir = os.path.dirname(__file__)
    database_dir = os.path.join(current_dir, '..', 'database')
    logging.config.fileConfig(os.path.join(current_dir, 'logging.conf'))
    systems = options.systems.split(',')

    output_dir = options.output_dir or os.path.join(
        current_dir, '..', '..', utils.GetBuildDir(utils.GuessOS(), None),
        'generated')

    dart2js_output_dir = None
    if 'htmldart2js' in systems:
        dart2js_output_dir = os.path.join(output_dir, 'dart2js')
    dartium_output_dir = None
    if 'htmldartium' in systems:
        dartium_output_dir = os.path.join(output_dir, 'dartium')

    if options.rebuild:
        # Parse the IDL and create the database.
        database = fremontcutbuilder.main(options.parallel)
    else:
        # Load the previously generated database.
        database = LoadDatabase(database_dir, options.use_database_cache)
    GenerateFromDatabase(database, dart2js_output_dir, dartium_output_dir,
                         options.update_dom_metadata, options.dart_use_blink)

    if 'htmldart2js' in systems:
        _logger.info('Generating dart2js single files.')
        for library_name in HTML_LIBRARY_NAMES:
            GenerateSingleFile(
                os.path.join(dart2js_output_dir,
                             '%s_dart2js.dart' % library_name),
                os.path.join('..', '..', '..', 'sdk', 'lib', library_name,
                             'dart2js'))
    if 'htmldartium' in systems:
        _logger.info('Generating dartium single files.')
        for library_name in HTML_LIBRARY_NAMES:
            GenerateSingleFile(
                os.path.join(dartium_output_dir,
                             '%s_dartium.dart' % library_name),
                os.path.join('..', '..', '..', 'sdk', 'lib', library_name,
                             'dartium'))
        GenerateSingleFile(
            os.path.join(dartium_output_dir, '_blink_dartium.dart'),
            os.path.join('..', '..', '..', 'sdk', 'lib', '_blink', 'dartium'))
Example #16
def Main():
    parser = BuildOptions()
    (options, args) = parser.parse_args()
    if not ProcessOptions(options):
        parser.print_help()
        return 1

    workspace = abspath(join(dirname(sys.argv[0]), '..'))
    suites = GetSuites(join(workspace, 'test'))
    repositories = [
        TestRepository(join(workspace, 'test', name)) for name in suites
    ]
    repositories += [TestRepository(a) for a in options.suite]

    root = LiteralTestSuite(repositories)
    if len(args) == 0:
        paths = [SplitPath(t) for t in BUILT_IN_TESTS]
    else:
        paths = []
        for arg in args:
            path = SplitPath(arg)
            paths.append(path)

    # Check for --valgrind option. If enabled, we overwrite the special
    # command flag with a command that uses the run-valgrind.py script.
    if options.valgrind:
        run_valgrind = join(workspace, "tools", "run-valgrind.py")
        options.special_command = "python -u " + run_valgrind + " @"

    shell = abspath(options.shell)
    buildspace = dirname(shell)

    processor = GetSpecialCommandProcessor(options.special_command)
    if options.use_http1:

        def wrap(processor):
            return lambda args: processor(
                args[:1] + ['--use-http1'] + args[1:])
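        # Note: assigning the lambda directly would make it call the rebound
        # name 'processor' (i.e. itself); wrap() closes over the original
        # processor so the extra '--use-http1' flag wraps the real command.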

        processor = wrap(processor)

    context = Context(workspace, buildspace, VERBOSE, shell, options.timeout,
                      processor, options.suppress_dialogs,
                      options.store_unexpected_output, options)
    # First build the required targets
    if not options.no_build:
        reqs = []
        for path in paths:
            reqs += root.GetBuildRequirements(path, context)
        reqs = list(set(reqs))
        if len(reqs) > 0:
            if options.j != 1:
                options.scons_flags += ['-j', str(options.j)]
            if not BuildRequirements(context, reqs, options.mode,
                                     options.scons_flags):
                return 1

    # Just return if we are only building the targets for running the tests.
    if options.build_only:
        return 0

    # Get status for tests
    sections = []
    defs = {}
    root.GetTestStatus(context, sections, defs)
    config = Configuration(sections, defs)

    # List the tests
    all_cases = []
    all_unused = []
    unclassified_tests = []
    globally_unused_rules = None
    for path in paths:
        for mode in options.mode:
            if not exists(context.GetVm(mode)):
                print "Can't find shell executable: '%s'" % context.GetVm(mode)
                continue
            env = {
                'mode': mode,
                'system': utils.GuessOS(),
                'arch': options.arch,
                'simulator': options.simulator
            }
            test_list = root.ListTests([], path, context, mode)
            unclassified_tests += test_list
            (cases, unused_rules,
             all_outcomes) = config.ClassifyTests(test_list, env)
            if globally_unused_rules is None:
                globally_unused_rules = set(unused_rules)
            else:
                globally_unused_rules = globally_unused_rules.intersection(
                    unused_rules)
            all_cases += cases
            all_unused.append(unused_rules)

    if options.cat:
        visited = set()
        for test in unclassified_tests:
            key = tuple(test.path)
            if key in visited:
                continue
            visited.add(key)
            print "--- begin source: %s ---" % test.GetLabel()
            source = test.GetSource().strip()
            print source
            print "--- end source: %s ---" % test.GetLabel()
        return 0

    if options.warn_unused:
        for rule in globally_unused_rules:
            print "Rule for '%s' was not used." % '/'.join(
                [str(s) for s in rule.path])

    if options.report:
        PrintReport(all_cases)

    result = None

    def DoSkip(case):
        return SKIP in case.outcomes or SLOW in case.outcomes

    cases_to_run = [c for c in all_cases if not DoSkip(c)]
    if len(cases_to_run) == 0:
        print "No tests to run."
        return 0
    else:
        try:
            start = time.time()
            if RunTestCases(cases_to_run, options.progress, options.j):
                result = 0
            else:
                result = 1
            duration = time.time() - start
        except KeyboardInterrupt:
            print "Interrupted"
            return 1

    if options.time:
        # Write the times to stderr to make it easy to separate from the
        # test output.
        print
        sys.stderr.write("--- Total time: %s ---\n" % FormatTime(duration))
        timed_tests = [
            t.case for t in cases_to_run if t.case.duration is not None
        ]
        timed_tests.sort(lambda a, b: a.CompareTime(b))
        index = 1
        for entry in timed_tests[:20]:
            t = FormatTime(entry.duration)
            sys.stderr.write("%4i (%s) %s\n" % (index, t, entry.GetLabel()))
            index += 1

    return result
Example #17
def Main():
    parser = BuildOptions()
    (options, args) = parser.parse_args()
    if not ProcessOptions(options):
        parser.print_help()
        return 1

    workspace = abspath(join(dirname(sys.argv[0]), '..'))
    suites = GetSuites(join(workspace, 'test'))
    repositories = [
        TestRepository(join(workspace, 'test', name)) for name in suites
    ]
    repositories += [TestRepository(a) for a in options.suite]

    root = LiteralTestSuite(repositories)
    if len(args) == 0:
        paths = [SplitPath(t) for t in BUILT_IN_TESTS]
    else:
        paths = []
        for arg in args:
            path = SplitPath(arg)
            paths.append(path)

    # First build the required targets
    buildspace = abspath('.')
    context = Context(workspace, buildspace, VERBOSE,
                      join(buildspace, 'shell'), options.timeout,
                      GetSpecialCommandProcessor(options.special_command),
                      options.suppress_dialogs)
    if options.j != 1:
        options.scons_flags += ['-j', str(options.j)]
    if not options.no_build:
        reqs = []
        for path in paths:
            reqs += root.GetBuildRequirements(path, context)
        reqs = list(set(reqs))
        if len(reqs) > 0:
            if not BuildRequirements(context, reqs, options.mode,
                                     options.scons_flags):
                return 1

    # Get status for tests
    sections = []
    defs = {}
    root.GetTestStatus(context, sections, defs)
    config = Configuration(sections, defs)

    # List the tests
    all_cases = []
    all_unused = []
    unclassified_tests = []
    globally_unused_rules = None
    for path in paths:
        for mode in options.mode:
            env = {
                'mode': mode,
                'system': utils.GuessOS(),
                'arch': options.arch
            }
            test_list = root.ListTests([], path, context, mode)
            unclassified_tests += test_list
            (cases, unused_rules,
             all_outcomes) = config.ClassifyTests(test_list, env)
            if globally_unused_rules is None:
                globally_unused_rules = set(unused_rules)
            else:
                globally_unused_rules = globally_unused_rules.intersection(
                    unused_rules)
            all_cases += cases
            all_unused.append(unused_rules)

    if options.cat:
        visited = set()
        for test in unclassified_tests:
            key = tuple(test.path)
            if key in visited:
                continue
            visited.add(key)
            print "--- begin source: %s ---" % test.GetLabel()
            source = test.GetSource().strip()
            print source
            print "--- end source: %s ---" % test.GetLabel()
        return 0

    if options.warn_unused:
        for rule in globally_unused_rules:
            print "Rule for '%s' was not used." % '/'.join(
                [str(s) for s in rule.path])

    if options.report:
        PrintReport(all_cases)

    result = None
    if len(all_cases) == 0:
        print "No tests to run."
        return 0
    else:
        try:
            start = time.time()
            if RunTestCases(all_cases, options.progress, options.j):
                result = 0
            else:
                result = 1
            duration = time.time() - start
        except KeyboardInterrupt:
            print "Interrupted"
            return 1

    if options.time:
        # Write the times to stderr to make it easy to separate from the
        # test output.
        print
        sys.stderr.write("--- Total time: %s ---\n" % FormatTime(duration))
        timed_tests = [
            t.case for t in all_cases if t.case.duration is not None
        ]
        timed_tests.sort(lambda a, b: a.CompareTime(b))
        index = 1
        for entry in timed_tests[:20]:
            t = FormatTime(entry.duration)
            sys.stderr.write("%4i (%s) %s\n" % (index, t, entry.GetLabel()))
            index += 1

    return result