Example 1
 def test_multiple_snapshots_from_outside_buildroot(self):
     with temporary_dir() as temp_dir:
         with open(os.path.join(temp_dir, "roland"), "w") as f:
             f.write("European Burmese")
         with open(os.path.join(temp_dir, "susannah"), "w") as f:
             f.write("I don't know")
         scheduler = self.mk_scheduler(rules=create_fs_rules())
         snapshots = scheduler.capture_snapshots((
             PathGlobsAndRoot(PathGlobs(("roland", ), ()),
                              text_type(temp_dir)),
             PathGlobsAndRoot(PathGlobs(("susannah", ), ()),
                              text_type(temp_dir)),
             PathGlobsAndRoot(PathGlobs(("doesnotexist", ), ()),
                              text_type(temp_dir)),
         ))
         self.assertEqual(3, len(snapshots))
         self.assert_snapshot_equals(
             snapshots[0], ["roland"],
             DirectoryDigest(
                 text_type(
                     "63949aa823baf765eff07b946050d76ec0033144c785a94d3ebd82baa931cd16"
                 ), 80))
         self.assert_snapshot_equals(
             snapshots[1], ["susannah"],
             DirectoryDigest(
                 text_type(
                     "d3539cfc21eb4bab328ca9173144a8e932c515b1b9e26695454eeedbc5a95f6f"
                 ), 82))
         self.assert_snapshot_equals(snapshots[2], [],
                                     EMPTY_DIRECTORY_DIGEST)
Example 2
def javac_compile_process_result(javac_compile_req):
    java_files = javac_compile_req.javac_sources.java_files
    for java_file in java_files:
        if not java_file.endswith(".java"):
            raise ValueError(
                "Can only compile .java files but got {}".format(java_file))
    sources_snapshot = yield Get(Snapshot, PathGlobs,
                                 PathGlobs(java_files, ()))
    output_dirs = tuple(
        {os.path.dirname(java_file)
         for java_file in java_files})
    process_request = ExecuteProcessRequest(
        argv=javac_compile_req.argv_from_source_snapshot(sources_snapshot),
        input_files=sources_snapshot.directory_digest,
        output_directories=output_dirs,
        description='javac compilation')
    javac_proc_result = yield Get(ExecuteProcessResult, ExecuteProcessRequest,
                                  process_request)

    stdout = javac_proc_result.stdout
    stderr = javac_proc_result.stderr

    yield JavacCompileResult(
        text_type(stdout),
        text_type(stderr),
        javac_proc_result.output_directory_digest,
    )
Example 3
def gen_changes(input, branch):
    while True:
        line = input.readline()
        if not line:
            break

        logging.debug("Change: %s", line)

        m = re.match(r"^([0-9a-f]+) (.*)$", line.strip())
        c = {'revision': m.group(1),
             'branch': text_type(branch, encoding=encoding),
             }

        if category:
            c['category'] = text_type(category, encoding=encoding)

        if repository:
            c['repository'] = text_type(repository, encoding=encoding)

        if project:
            c['project'] = text_type(project, encoding=encoding)

        if codebase:
            c['codebase'] = text_type(codebase, encoding=encoding)

        grab_commit_info(c, m.group(1))
        changes.append(c)
Example 4
  def _execute_hermetic_compile(self, cmd, ctx):
    # For now, executing a compile remotely only works for targets that
    # do not have any dependencies or inner classes

    input_snapshot = ctx.target.sources_snapshot(scheduler=self.context._scheduler)
    output_files = tuple(
      # Assume no extra .class files to grab. We'll fix up that case soon.
      # Drop the source_root from the file path.
      # Assumes `-d .` has been put in the command.
      os.path.relpath(f.replace('.java', '.class'), ctx.target.target_base)
      for f in input_snapshot.files if f.endswith('.java')
    )

    # TODO(#6071): Our ExecuteProcessRequest expects a specific string type for arguments,
    # which py2 doesn't default to. This can be removed when we drop python 2.
    argv = [text_type(arg) for arg in cmd]

    exec_process_request = ExecuteProcessRequest(
      argv=tuple(argv),
      input_files=input_snapshot.directory_digest,
      output_files=output_files,
      description='Compiling {} with javac'.format(ctx.target.address.spec),
    )
    exec_result = self.context.execute_process_synchronously_without_raising(
      exec_process_request,
      'javac',
      (WorkUnitLabel.TASK, WorkUnitLabel.JVM),
    )

    # Dump the output to the .pants.d directory where it's expected by downstream tasks.
    classes_directory = ctx.classes_dir.path
    self.context._scheduler.materialize_directories((
      DirectoryToMaterialize(text_type(classes_directory), exec_result.output_directory_digest),
    ))
Example 5
def grab_commit_info(c, rev):
    # Extract information about committer and files using git show
    f = os.popen("git show --raw --pretty=full %s" % rev, 'r')

    files = []
    comments = []

    while True:
        line = f.readline()
        if not line:
            break

        if line.startswith(4 * ' '):
            comments.append(line[4:])

        m = re.match(r"^:.*[MAD]\s+(.+)$", line)
        if m:
            logging.debug("Got file: %s", m.group(1))
            files.append(text_type(m.group(1), encoding=encoding))
            continue

        m = re.match(r"^Author:\s+(.+)$", line)
        if m:
            logging.debug("Got author: %s", m.group(1))
            c['who'] = text_type(m.group(1), encoding=encoding)

        if re.match(r"^Merge: .*$", line):
            files.append('merge')

    c['comments'] = ''.join(comments)
    c['files'] = files
    status = f.close()
    if status:
        logging.warning("git show exited with status %d", status)
Example 6
def grab_commit_info(c, rev):
    # Extract information about committer and files using git show
    f = subprocess.Popen(shlex.split("git show --raw --pretty=full %s" % rev),
                         stdout=subprocess.PIPE)

    files = []
    comments = []

    while True:
        line = f.stdout.readline().decode(encoding)
        if not line:
            break

        if line.startswith(4 * ' '):
            comments.append(line[4:])

        m = re.match(r"^:.*[MAD]\s+(.+)$", line)
        if m:
            logging.debug("Got file: %s", m.group(1))
            files.append(text_type(m.group(1)))
            continue

        m = re.match(r"^Author:\s+(.+)$", line)
        if m:
            logging.debug("Got author: %s", m.group(1))
            c['who'] = text_type(m.group(1))

        if re.match(r"^Merge: .*$", line):
            files.append('merge')

    c['comments'] = ''.join(comments)
    c['files'] = files
    status = f.wait()  # Popen.terminate() returns None; wait() gives the exit code.
    if status:
        logging.warning("git show exited with status %d", status)
Example 7
def gen_changes(input, branch):
    while True:
        line = input.stdout.readline().decode(encoding)
        if not line:
            break

        logging.debug("Change: %s", line)

        m = re.match(r"^([0-9a-f]+) (.*)$", line.strip())
        c = {
            'revision': m.group(1),
            'branch': text_type(branch),
        }

        if category:
            c['category'] = text_type(category)

        if repository:
            c['repository'] = text_type(repository)

        if project:
            c['project'] = text_type(project)

        if codebase:
            c['codebase'] = text_type(codebase)

        grab_commit_info(c, m.group(1))
        changes.append(c)
Example 8
    def _make_range_domain(self, domain, column_name):
        width = (domain.max - domain.min) / domain.interval
        digits = Math.floor(Math.log10(width - 1))
        if digits == 0:
            value = "a.value"
        else:
            value = "+".join("1" + ("0" * j) + "*" +
                             text_type(chr(ord(b'a') + j)) + ".value"
                             for j in range(digits + 1))

        if domain.interval == 1:
            if domain.min == 0:
                domain = "SELECT " + value + " " + column_name + \
                         "\nFROM __digits__ a"
            else:
                domain = "SELECT (" + value + ") + " + quote_value(domain.min) + " " + column_name + \
                         "\nFROM __digits__ a"
        else:
            if domain.min == 0:
                domain = "SELECT " + value + " * " + quote_value(domain.interval) + " " + column_name + \
                         "\nFROM __digits__ a"
            else:
                domain = "SELECT (" + value + " * " + quote_value(domain.interval) + ") + " + quote_value(
                    domain.min) + " " + column_name + \
                         "\nFROM __digits__ a"

        for j in range(digits):
            domain += "\nJOIN __digits__ " + text_type(
                chr(ord(b'a') + j + 1)) + " ON 1=1"
        domain += "\nWHERE " + value + " < " + quote_value(width)
        return domain
Example 9
def forceScheduler2Data(sched):
    ret = dict(all_fields=[],
               name=text_type(sched.name),
               button_name=text_type(sched.buttonName),
               label=text_type(sched.label),
               builder_names=map(text_type, sched.builderNames))
    ret["all_fields"] = [field.getSpec() for field in sched.all_fields]
    return ret
Example 10
def forceScheduler2Data(sched):
    ret = dict(all_fields=[],
               name=text_type(sched.name),
               button_name=text_type(sched.buttonName),
               label=text_type(sched.label),
               builder_names=map(text_type, sched.builderNames))
    ret["all_fields"] = [field.getSpec() for field in sched.all_fields]
    return ret
Example 11
  def _compile_hermetic(self, jvm_options, ctx, classes_dir, zinc_args,
                        compiler_bridge_classpath_entry, dependency_classpath,
                        scalac_classpath_entries):
    zinc_relpath = fast_relpath(self._zinc.zinc, get_buildroot())

    snapshots = [
      self._zinc.snapshot(self.context._scheduler),
      ctx.target.sources_snapshot(self.context._scheduler),
    ]

    relevant_classpath_entries = dependency_classpath + [compiler_bridge_classpath_entry]
    directory_digests = tuple(
      entry.directory_digest for entry in relevant_classpath_entries if entry.directory_digest
    )
    if len(directory_digests) != len(relevant_classpath_entries):
      for dep in relevant_classpath_entries:
        if dep.directory_digest is None:
          logger.warning(
            "ClasspathEntry {} didn't have a Digest, so won't be present for hermetic "
            "execution".format(dep)
          )

    snapshots.extend(
      classpath_entry.directory_digest for classpath_entry in scalac_classpath_entries
    )

    # TODO: Extract something common from Executor._create_command to make the command line
    # TODO: Lean on distribution for the bin/java appending here
    merged_input_digest = self.context._scheduler.merge_directories(
      tuple(s.directory_digest for s in snapshots) + directory_digests
    )
    argv = ['.jdk/bin/java'] + jvm_options + [
      '-cp', zinc_relpath,
      Zinc.ZINC_COMPILE_MAIN
    ] + zinc_args
    # TODO(#6071): Our ExecuteProcessRequest expects a specific string type for arguments,
    # which py2 doesn't default to. This can be removed when we drop python 2.
    argv = [text_type(arg) for arg in argv]

    req = ExecuteProcessRequest(
      argv=tuple(argv),
      input_files=merged_input_digest,
      output_directories=(classes_dir,),
      description="zinc compile for {}".format(ctx.target.address.spec),
      # TODO: These should always be unicodes
      # Since this is always hermetic, we need to use `underlying_dist`
      jdk_home=text_type(self._zinc.underlying_dist.home),
    )
    res = self.context.execute_process_synchronously_or_raise(
      req, self.name(), [WorkUnitLabel.COMPILER])

    # TODO: Materialize as a batch in do_compile or somewhere
    self.context._scheduler.materialize_directories((
      DirectoryToMaterialize(get_buildroot(), res.output_directory_digest),
    ))

    # TODO: This should probably return a ClasspathEntry rather than a Digest
    return res.output_directory_digest
Example 12
  def _runtool_hermetic(self, main, tool_name, args, distribution, tgt=None, input_files=tuple(), input_digest=None, output_dir=None):
    tool_classpath_abs = self.tool_classpath(tool_name)
    tool_classpath = fast_relpath_collection(tool_classpath_abs)

    classpath_for_cmd = os.pathsep.join(tool_classpath)
    cmd = [
      distribution.java,
    ]
    cmd.extend(self.get_options().jvm_options)
    cmd.extend(['-cp', classpath_for_cmd])
    cmd.extend([main])
    cmd.extend(args)

    pathglobs = list(tool_classpath)
    pathglobs.extend(f if os.path.isfile(f) else '{}/**'.format(f) for f in input_files)

    # Don't capture a snapshot if pathglobs is empty.
    path_globs_input_digest = None
    if pathglobs:
      root = PathGlobsAndRoot(
        PathGlobs(tuple(pathglobs)),
        text_type(get_buildroot()))
      path_globs_input_digest = self.context._scheduler.capture_snapshots((root,))[0].directory_digest

    if path_globs_input_digest and input_digest:
      epr_input_files = self.context._scheduler.merge_directories(
          (path_globs_input_digest, input_digest))
    else:
      epr_input_files = path_globs_input_digest or input_digest

    epr = ExecuteProcessRequest(
      argv=tuple(cmd),
      input_files=epr_input_files,
      output_files=tuple(),
      output_directories=(output_dir,),
      timeout_seconds=15*60,
      description='run {} for {}'.format(tool_name, tgt),
      # TODO: These should always be unicodes
      # Since this is always hermetic, we need to use `underlying_dist`
      jdk_home=text_type(self._zinc.underlying_dist.home),
    )
    res = self.context.execute_process_synchronously_without_raising(
      epr,
      self.name(),
      [WorkUnitLabel.TOOL])

    if res.exit_code != 0:
      raise TaskError(res.stderr)

    if output_dir:
      dump_digest(output_dir, res.output_directory_digest)
      self.context._scheduler.materialize_directories((
        DirectoryToMaterialize(
          # NB the first element here is the root to materialize into, not the dir to snapshot
          text_type(get_buildroot()),
          res.output_directory_digest),
      ))
      # TODO drop a file containing the digest, named maybe output_dir.digest
    return res
Example 13
  def _runtool_hermetic(self, main, tool_name, args, distribution, tgt=None, input_files=tuple(), input_digest=None, output_dir=None):
    tool_classpath_abs = self.tool_classpath(tool_name)
    tool_classpath = fast_relpath_collection(tool_classpath_abs)

    # TODO(#6071): Our ExecuteProcessRequest expects a specific string type for arguments,
    # which py2 doesn't default to. This can be removed when we drop python 2.
    str_jvm_options = [text_type(opt) for opt in self.get_options().jvm_options]
    cmd = [
            distribution.java,
          ] + str_jvm_options + [
            '-cp', os.pathsep.join(tool_classpath),
            main,
          ] + args

    pathglobs = list(tool_classpath)
    pathglobs.extend(f if os.path.isfile(f) else '{}/**'.format(f) for f in input_files)

    # Don't capture a snapshot if pathglobs is empty.
    path_globs_input_digest = None
    if pathglobs:
      root = PathGlobsAndRoot(
        PathGlobs(tuple(pathglobs)),
        text_type(get_buildroot()))
      path_globs_input_digest = self.context._scheduler.capture_snapshots((root,))[0].directory_digest

    epr_input_files = self.context._scheduler.merge_directories(
      ((path_globs_input_digest,) if path_globs_input_digest else ())
      + ((input_digest,) if input_digest else ()))

    epr = ExecuteProcessRequest(
      argv=tuple(cmd),
      input_files=epr_input_files,
      output_files=tuple(),
      output_directories=(output_dir,),
      timeout_seconds=15*60,
      description='run {} for {}'.format(tool_name, tgt),
      # TODO: These should always be unicodes
      # Since this is always hermetic, we need to use `underlying.home` because
      # ExecuteProcessRequest requires an existing, local jdk location.
      jdk_home=text_type(distribution.underlying_home),
    )
    res = self.context.execute_process_synchronously_without_raising(
      epr,
      self.name(),
      [WorkUnitLabel.TOOL])

    if res.exit_code != 0:
      raise TaskError(res.stderr, exit_code=res.exit_code)

    if output_dir:
      res.output_directory_digest.dump(output_dir)
      self.context._scheduler.materialize_directories((
        DirectoryToMaterialize(
          # NB the first element here is the root to materialize into, not the dir to snapshot
          text_type(get_buildroot()),
          res.output_directory_digest),
      ))
      # TODO drop a file containing the digest, named maybe output_dir.digest
    return res
Example 14
  def test_type_check_errors(self):
    self.maxDiff = None

    def format_string_type_check_message(format_string):
      return format_string.format(
        str_type='unicode' if PY2 else 'str',
        u='u' if PY2 else '')

    # single type checking failure
    expected_msg = (
      """type check error in class SomeTypedDatatype: error(s) type checking constructor arguments:
field 'val' was invalid: value [] (with type 'list') must satisfy this type constraint: Exactly(int).""")
    with self.assertRaisesWithMessage(TypeCheckError, expected_msg):
      SomeTypedDatatype([])

    # type checking failure with multiple arguments (one is correct)
    expected_msg = format_string_type_check_message(
      """type check error in class AnotherTypedDatatype: error(s) type checking constructor arguments:
field 'elements' was invalid: value {u}'should be list' (with type '{str_type}') must satisfy this type constraint: Exactly(list).""")
    with self.assertRaisesWithMessage(TypeCheckError, expected_msg):
      AnotherTypedDatatype(text_type('correct'), text_type('should be list'))

    # type checking failure on both arguments
    expected_msg = format_string_type_check_message(
        """type check error in class AnotherTypedDatatype: error(s) type checking constructor arguments:
field 'string' was invalid: value 3 (with type 'int') must satisfy this type constraint: Exactly({str_type}).
field 'elements' was invalid: value {u}'should be list' (with type '{str_type}') must satisfy this type constraint: Exactly(list).""")
    with self.assertRaisesWithMessage(TypeCheckError, expected_msg):
      AnotherTypedDatatype(3, text_type('should be list'))

    expected_msg = format_string_type_check_message(
        """type check error in class NonNegativeInt: error(s) type checking constructor arguments:
field 'an_int' was invalid: value {u}'asdf' (with type '{str_type}') must satisfy this type constraint: Exactly(int).""")
    with self.assertRaisesWithMessage(TypeCheckError, expected_msg):
      NonNegativeInt(text_type('asdf'))

    expected_msg = "type check error in class NonNegativeInt: value is negative: -3."
    with self.assertRaisesWithMessage(TypeCheckError, expected_msg):
      NonNegativeInt(-3)

    expected_msg = (
      """type check error in class WithSubclassTypeConstraint: error(s) type checking constructor arguments:
field 'some_value' was invalid: value 3 (with type 'int') must satisfy this type constraint: SubclassesOf(SomeBaseClass).""")
    with self.assertRaisesWithMessage(TypeCheckError, expected_msg):
      WithSubclassTypeConstraint(3)

    expected_msg = """\
type check error in class WithCollectionTypeConstraint: error(s) type checking constructor arguments:
field 'dependencies' was invalid: in wrapped constraint TypedCollection(Exactly(int)): value 3 (with type 'int') must satisfy this type constraint: SubclassesOf(Iterable).
Note that objects matching {} are not considered iterable.""".format(_string_type_constraint)
    with self.assertRaisesWithMessage(TypeCheckError, expected_msg):
      WithCollectionTypeConstraint(3)

    expected_msg = format_string_type_check_message("""\
type check error in class WithCollectionTypeConstraint: error(s) type checking constructor arguments:
field 'dependencies' was invalid: in wrapped constraint TypedCollection(Exactly(int)) matching iterable object [3, {u}'asdf']: value {u}'asdf' (with type '{str_type}') must satisfy this type constraint: Exactly(int).""")
    with self.assertRaisesWithMessage(TypeCheckError, expected_msg):
      WithCollectionTypeConstraint([3, "asdf"])
Example 15
 def register_all_scoped_names(parser):
   scope = parser.scope
   known_args = parser.known_args
   for arg in known_args:
     scoped_flag = self._ScopedFlagNameForFuzzyMatching(
       scope=text_type(scope),
       arg=text_type(arg),
     )
     all_scoped_flag_names.append(scoped_flag)
Example 16
 def _generate_args_for_targets(self, targets):
   """
   Generate a dict mapping target -> _GoTestTargetInfo so that the import path and gopath can be
   reconstructed for spawning test commands regardless of how the targets are partitioned.
   """
   return {
     t: self._GoTestTargetInfo(import_path=text_type(t.import_path),
                               gopath=text_type(self.get_gopath(t)))
     for t in targets
   }
Example 17
 def allEndpoints(self):
     """return the full spec of the connector as a list of dicts
     """
     paths = []
     for k, v in sorted(self.matcher.iterPatterns()):
         paths.append(dict(path=u"/".join(k),
                           plural=text_type(v.rtype.plural),
                           type=text_type(v.rtype.entityType.name),
                           type_spec=v.rtype.entityType.getSpec()))
     return paths
Example 18
 def allEndpoints(self):
     """return the full spec of the connector as a list of dicts
     """
     paths = []
     for k, v in sorted(self.matcher.iterPatterns()):
         paths.append(dict(path="/".join(k),
                           plural=text_type(v.rtype.plural),
                           type=text_type(v.rtype.entityType.name),
                           type_spec=v.rtype.entityType.getSpec()))
     return paths
Example 19
  def prime_store_with_roland_digest(self):
    """This method primes the store with a directory of a file named 'roland' and contents 'European Burmese'."""
    with temporary_dir() as temp_dir:
      with open(os.path.join(temp_dir, "roland"), "w") as f:
        f.write("European Burmese")
      globs = PathGlobs(("*",), ())
      snapshot = self.scheduler.capture_snapshots((PathGlobsAndRoot(globs, text_type(temp_dir)),))[0]

      expected_digest = Digest(text_type("63949aa823baf765eff07b946050d76ec0033144c785a94d3ebd82baa931cd16"), 80)
      self.assert_snapshot_equals(snapshot, ["roland"], expected_digest)
    return expected_digest
Example 20
 def test_snapshot_from_outside_buildroot(self):
   with temporary_dir() as temp_dir:
     with open(os.path.join(temp_dir, "roland"), "w") as f:
       f.write("European Burmese")
     scheduler = self.mk_scheduler(rules=create_fs_rules())
     globs = PathGlobs(("*",), ())
     snapshot = scheduler.capture_snapshots((PathGlobsAndRoot(globs, text_type(temp_dir)),))[0]
     self.assert_snapshot_equals(snapshot, ["roland"], Digest(
       text_type("63949aa823baf765eff07b946050d76ec0033144c785a94d3ebd82baa931cd16"),
       80
     ))
Example 21
 def test_snapshot_from_outside_buildroot(self):
   with temporary_dir() as temp_dir:
     with open(os.path.join(temp_dir, "roland"), "w") as f:
       f.write("European Burmese")
     scheduler = self.mk_scheduler(rules=create_fs_rules())
     globs = PathGlobs(("*",), ())
     snapshot = scheduler.capture_snapshots((PathGlobsAndRoot(globs, text_type(temp_dir)),))[0]
     self.assert_snapshot_equals(snapshot, ["roland"], DirectoryDigest(
       text_type("63949aa823baf765eff07b946050d76ec0033144c785a94d3ebd82baa931cd16"),
       80
     ))
Example 22
def sort_table(result):
    """
    SORT ROWS IN TABLE, EVEN IF ELEMENTS ARE JSON
    """
    data = wrap([{text_type(i): v
                  for i, v in enumerate(row)} for row in result.data])
    sort_columns = jx.sort(set(jx.get_columns(data, leaves=True).name))
    data = jx.sort(data, sort_columns)
    result.data = [
        tuple(row[text_type(i)] for i in range(len(result.header)))
        for row in data
    ]
Example 23
 def prime_store_with_roland_digest(self):
   """This method primes the store with a directory of a file named 'roland' and contents 'European Burmese'."""
   with temporary_dir() as temp_dir:
     with open(os.path.join(temp_dir, "roland"), "w") as f:
       f.write("European Burmese")
     scheduler = self.mk_scheduler(rules=create_fs_rules())
     globs = PathGlobs(("*",), ())
     snapshot = scheduler.capture_snapshots((PathGlobsAndRoot(globs, text_type(temp_dir)),))[0]
     self.assert_snapshot_equals(snapshot, ["roland"], DirectoryDigest(
       text_type("63949aa823baf765eff07b946050d76ec0033144c785a94d3ebd82baa931cd16"),
       80
     ))
Example 24
    def get_auth_headers(self):
        headers = {
            'X-Api-Timestamp': text_type(int(time.time())),
            'X-Api-Nonce': text_type(random.random()),
            'X-Api-Access-Key': text_type(self.access_key),
            'X-Api-Encrypt-Type': text_type(self.encrypt_type)
        }

        # Check whether a signature of the response is required.
        if self.require_response_sign:
            headers['X-Api-Require-Response-Signature'] = 'true'

        return headers
Example 25
    def __new__(cls, module_name, version_spec=None):
        if not cls._allowed_module_name_pattern.match(module_name):
            raise cls.make_type_error(
                "invalid Zef module name '{}': must match '{}'.".format(
                    module_name, cls._allowed_module_name_pattern.pattern))
        else:
            module_name = text_type(module_name)

        if version_spec is not None:
            version_spec = text_type(version_spec)

        return super(ZefRequirement, cls).__new__(cls, module_name,
                                                  version_spec)
Example 26
    def get_auth_headers(self):
        headers = {
            'X-Api-Timestamp': text_type(int(time.time())),
            'X-Api-Nonce': text_type(random.random()),
            'X-Api-Access-Key': text_type(self.access_key),
            'X-Api-Encrypt-Type': text_type(self.encrypt_type)
        }

        # Check whether a signature of the response is required.
        if self.require_response_sign:
            headers['X-Api-Require-Response-Signature'] = 'true'

        return headers
Example 27
def table2csv(table_data):
    """
    :param table_data: expecting a list of tuples
    :return: text in nicely formatted CSV
    """
    text_data = [
        tuple(value2json(value, pretty=True) for value in row)
        for row in table_data
    ]

    col_widths = [max(len(text) for text in col) for col in zip(*text_data)]
    template = ", ".join("{{" + text_type(i) + "|left_align(" + text_type(w) +
                         ")}}" for i, w in enumerate(col_widths))
    text = "\n".join(expand_template(template, d) for d in text_data)
    return text
Example 28
  def test_write_file(self):
    request = ExecuteProcessRequest(
      argv=("/bin/bash", "-c", "echo -n 'European Burmese' > roland"),
      description="echo roland",
      output_files=("roland",),
      input_files=EMPTY_DIRECTORY_DIGEST,
    )

    execute_process_result = self.scheduler.product_request(
      ExecuteProcessResult,
      [request],
    )[0]

    self.assertEqual(
      execute_process_result.output_directory_digest,
      Digest(
        fingerprint=text_type("63949aa823baf765eff07b946050d76ec0033144c785a94d3ebd82baa931cd16"),
        serialized_bytes_length=80,
      )
    )

    files_content_result = self.scheduler.product_request(
      FilesContent,
      [execute_process_result.output_directory_digest],
    )[0]

    self.assertEqual(
      files_content_result.dependencies,
      (FileContent("roland", b"European Burmese"),)
    )
Example 29
def ensure_text(value, encoding=sys.getdefaultencoding(), errors='strict',
                coerce=False):
    """Return the text representation of the given string.

    :param value bytes/str/unicode: string value
    :param encoding str: name of encoding used if `value` is not text
    :param errors str: decode option used if `value` is not text
    :param bool coerce: whether to attempt to coerce `value` to text
    :returns: text representation of `value`
    :rtype: `unicode` if Python 2; otherwise, `str`
    :raises TypeError: if `value` is not a str, unicode, nor bytes
    :raises UnicodeDecodeError: if `value` cannot be decoded

    The primary use case for this function is as a shortcut for a
    library providing support for Python 2 and 3 to ensure that a
    provided string value can be interpreted as text.

    """
    if isinstance(value, future.native_bytes):
        value = value.decode(encoding, errors)
    elif not isinstance(value, future.text_type):
        if not coerce:
            raise TypeError("{!r} is not a string type.".format(type(value)))
        value = future.text_type(value)
    return future.native(value)
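A minimal usage sketch, assuming Python 3 (where the `future` aliases behave like the builtin `bytes` and `str`):

# Usage sketch for ensure_text; assumes Python 3 semantics for the aliases.
assert ensure_text(b"caf\xc3\xa9", encoding="utf-8") == "café"
assert ensure_text("already text") == "already text"
assert ensure_text(42, coerce=True) == "42"  # non-strings require coerce=True
try:
    ensure_text(42)  # coerce defaults to False, so this raises
except TypeError:
    pass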
Example 30
    def format(self, template="{basename}{range}{padding}{extension}"):
        """Return the file sequence as a formatted string according to
        the given template.

        Utilizes the python string format syntax.  Available keys include:
            * basename - the basename of the sequence.
            * range - the range of the sequence.
            * padding - the detected amount of padding.
            * extension - the file extension of the sequence.
            * start - the start frame.
            * end - the end frame.
            * length - the length of the frame range.
            * inverted - the inverted frame range. (returns "" if none)
            * dirname - the directory name.

        If asking for the inverted range value, and the new inverted range
        exceeds ``fileseq.constants.MAX_FRAME_SIZE``, a ``MaxSizeException``
        will be raised.

        Args:
            template (str):

        Returns:
            str:

        Raises:
            :class:`fileseq.exceptions.MaxSizeException`: If frame size exceeds
            ``fileseq.constants.MAX_FRAME_SIZE``
        """
        try:
            return self._format(template)
        except UnicodeEncodeError:
            return self._format(futils.text_type(template))
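A usage sketch, assuming this method belongs to fileseq's `FileSequence` (the sequence string below is invented for illustration):

import fileseq

seq = fileseq.FileSequence("/shots/comp.1-100#.exr")  # '#' denotes 4-digit padding
print(seq.format())  # default template, e.g. "comp.1-100#.exr"
print(seq.format("{dirname}{basename}{range}{padding}{extension}"))
print(seq.format("{length} frames, {start}..{end}"))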
Example 31
def _normalize_utf8_keys(kwargs):
    """When kwargs are passed literally in a source file, their keys are ascii: normalize."""
    if any(type(key) is binary_type for key in kwargs.keys()):
        # This is to preserve the original dict type for kwargs.
        dict_type = type(kwargs)
        return dict_type([(text_type(k), v) for k, v in kwargs.items()])
    return kwargs
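A sketch of the intended behavior. On Python 2 the module-level aliases are `binary_type = bytes` and `text_type = unicode`, so ASCII byte keys decode implicitly; the shim below emulates that so the sketch also runs on Python 3 (it must live in the same module as the helper for the aliases to take effect):

binary_type = bytes
# Shim emulating Python 2's implicit ascii decode in unicode(bytes).
text_type = lambda v: v.decode("ascii") if isinstance(v, bytes) else str(v)

kwargs = {b"name": "roland", "kind": "cat"}
assert _normalize_utf8_keys(kwargs) == {"name": "roland", "kind": "cat"}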
Example 32
    def test_write_file(self):
        scheduler = self.mk_scheduler_in_example_fs(())

        request = ExecuteProcessRequest.create_with_empty_snapshot(
            argv=("/bin/bash", "-c", "echo -n 'European Burmese' > roland"),
            description="echo roland",
            output_files=("roland", ))

        execute_process_result = self.execute_expecting_one_result(
            scheduler, ExecuteProcessResult, request).value

        self.assertEquals(
            execute_process_result.output_directory_digest,
            DirectoryDigest(
                fingerprint=text_type(
                    "63949aa823baf765eff07b946050d76ec0033144c785a94d3ebd82baa931cd16"
                ),
                serialized_bytes_length=80,
            ))

        files_content_result = self.execute_expecting_one_result(
            scheduler, FilesContent,
            execute_process_result.output_directory_digest).value

        self.assertEquals((files_content_result.dependencies),
                          (FileContent("roland", "European Burmese"), ))
Example 33
 def extern_val_to_str(self, context_handle, val):
   """Given a Handle for `obj`, write str(obj) and return it."""
   c = self._ffi.from_handle(context_handle)
   v = c.from_value(val[0])
   # Consistently use the empty string to indicate None.
   v_str = '' if v is None else text_type(v)
   return c.utf8_buf(v_str)
Example 34
  def _capture_sources(self, targets_and_dirs):
    to_capture = []
    results_dirs = []
    filespecs = []

    for target, synthetic_target_dir in targets_and_dirs:
      if self.sources_globs is None:
        files = list(self.find_sources(target, synthetic_target_dir))
      else:
        files = self.sources_globs

      results_dir_relpath = os.path.relpath(synthetic_target_dir, get_buildroot())
      buildroot_relative_globs = tuple(os.path.join(results_dir_relpath, file) for file in files)
      buildroot_relative_excludes = tuple(
        os.path.join(results_dir_relpath, file)
          for file in self.sources_exclude_globs
      )
      to_capture.append(
        PathGlobsAndRoot(
          PathGlobs(buildroot_relative_globs, buildroot_relative_excludes),
          text_type(get_buildroot()),
        )
      )
      results_dirs.append(results_dir_relpath)
      filespecs.append(FilesetRelPathWrapper.to_filespec(buildroot_relative_globs))

    snapshots = self.context._scheduler.capture_snapshots(tuple(to_capture))

    return tuple(EagerFilesetWithSpec(
      results_dir_relpath,
      filespec,
      snapshot,
    ) for (results_dir_relpath, filespec, snapshot) in zip(results_dirs, filespecs, snapshots))
Example 35
 def do_run(self, sequence, map_fn, arg, scope):
     try:
         result = list(map(map_fn, sequence))
     except KeyError as k:
         message = "Missing field '%s'" % text_type(k)
         self.raise_rql_runtime_error(message)
     return result
Example 36
    def _find_revision(self, revision):
        please_stop = False
        locker = Lock()
        output = []
        queue = Queue("branches", max=2000)
        queue.extend(b for b in self.branches if b.locale == DEFAULT_LOCALE and b.name in ["try", "mozilla-inbound", "autoland"])
        queue.add(THREAD_STOP)

        problems = []
        def _find(please_stop):
            for b in queue:
                if please_stop:
                    return
                try:
                    url = b.url + "json-info?node=" + revision
                    rev = self.get_revision(Revision(branch=b, changeset={"id": revision}))
                    with locker:
                        output.append(rev)
                    Log.note("Revision found at {{url}}", url=url)
                except Exception as f:
                    problems.append(f)

        threads = []
        for i in range(3):
            threads.append(Thread.run("find changeset " + text_type(i), _find, please_stop=please_stop))

        for t in threads:
            with assert_no_exception:
                t.join()

        return output
Example 37
  def _capture_sources(self, targets_and_dirs):
    to_capture = []
    results_dirs = []
    filespecs = []

    for target, synthetic_target_dir in targets_and_dirs:
      files = self.sources_globs

      results_dir_relpath = os.path.relpath(synthetic_target_dir, get_buildroot())
      buildroot_relative_globs = tuple(os.path.join(results_dir_relpath, file) for file in files)
      buildroot_relative_excludes = tuple(
        os.path.join(results_dir_relpath, file)
          for file in self.sources_exclude_globs
      )
      to_capture.append(
        PathGlobsAndRoot(
          PathGlobs(buildroot_relative_globs, buildroot_relative_excludes),
          text_type(get_buildroot()),
        )
      )
      results_dirs.append(results_dir_relpath)
      filespecs.append(FilesetRelPathWrapper.to_filespec(buildroot_relative_globs))

    snapshots = self.context._scheduler.capture_snapshots(tuple(to_capture))

    return tuple(EagerFilesetWithSpec(
      results_dir_relpath,
      filespec,
      snapshot,
    ) for (results_dir_relpath, filespec, snapshot) in zip(results_dirs, filespecs, snapshots))
Example 38
 def extern_val_to_str(context_handle, val):
     """Given a Handle for `obj`, write str(obj) and return it."""
     c = ffi.from_handle(context_handle)
     v = c.from_value(val[0])
     # Consistently use the empty string to indicate None.
     v_str = '' if v is None else text_type(v)
     return c.utf8_buf(v_str)
Example 39
def _normalize_utf8_keys(kwargs):
  """When kwargs are passed literally in a source file, their keys are ascii: normalize."""
  if any(type(key) is binary_type for key in kwargs.keys()):
    # This is to preserve the original dict type for kwargs.
    dict_type = type(kwargs)
    return dict_type([(text_type(k), v) for k, v in kwargs.items()])
  return kwargs
Example 40
 def find_libs_path_globs(self, names):
   libs_abs = self._underlying.find_libs(names)
   libs_unrooted = [self._unroot_lib_path(l) for l in libs_abs]
   path_globs = PathGlobsAndRoot(
     PathGlobs(tuple(libs_unrooted)),
     text_type(self._underlying.home))
   return (libs_unrooted, path_globs)
Example 41
  def _execute_hermetic_compile(self, cmd, ctx):
    # For now, executing a compile remotely only works for targets that
    # do not have any dependencies or inner classes

    input_snapshot = ctx.target.sources_snapshot(scheduler=self.context._scheduler)
    output_files = tuple(
      # Assume no extra .class files to grab. We'll fix up that case soon.
      # Drop the source_root from the file path.
      # Assumes `-d .` has been put in the command.
      os.path.relpath(f.path.replace('.java', '.class'), ctx.target.target_base)
      for f in input_snapshot.files if f.path.endswith('.java')
    )
    exec_process_request = ExecuteProcessRequest.create_from_snapshot(
      argv=tuple(cmd),
      snapshot=input_snapshot,
      output_files=output_files,
      description='Compiling {} with javac'.format(ctx.target.address.spec),
    )
    exec_result = self.context.execute_process_synchronously(
      exec_process_request,
      'javac',
      (WorkUnitLabel.TASK, WorkUnitLabel.JVM),
    )

    # Dump the output to the .pants.d directory where it's expected by downstream tasks.
    classes_directory = ctx.classes_dir
    self.context._scheduler.materialize_directories((
      DirectoryToMaterialize(text_type(classes_directory), exec_result.output_directory_digest),
    ))
Example 42
    def test_write_file(self):
        request = ExecuteProcessRequest(
            argv=("/bin/bash", "-c", "echo -n 'European Burmese' > roland"),
            description="echo roland",
            output_files=("roland", ),
            input_files=EMPTY_DIRECTORY_DIGEST,
        )

        execute_process_result = self.scheduler.product_request(
            ExecuteProcessResult,
            [request],
        )[0]

        self.assertEqual(
            execute_process_result.output_directory_digest,
            Digest(
                fingerprint=text_type(
                    "63949aa823baf765eff07b946050d76ec0033144c785a94d3ebd82baa931cd16"
                ),
                serialized_bytes_length=80,
            ))

        files_content_result = self.scheduler.product_request(
            FilesContent,
            [execute_process_result.output_directory_digest],
        )[0]

        self.assertEqual(files_content_result.dependencies,
                         (FileContent("roland", b"European Burmese"), ))
Example 43
    def __setitem__(self, key, value):
        # Enforce unicode compatibility.
        if PY2 and isinstance(value, native_str):
            # Allow Python 2's implicit string decoding, but fail now instead of when entry fields are used.
            # If the encoding is anything but ascii, the value should be decoded to text before setting an entry field.
            try:
                value = value.decode('ascii')
            except UnicodeDecodeError:
                raise EntryUnicodeError(key, value)
        elif isinstance(value, bytes):
            raise EntryUnicodeError(key, value)
        # Coerce any enriched strings (such as those returned by BeautifulSoup) to plain strings to avoid serialization
        # troubles.
        elif isinstance(value, text_type) and type(value) != text_type:  # pylint: disable=unidiomatic-typecheck
            value = text_type(value)

        # url and original_url handling
        if key == 'url':
            if not isinstance(value, (str, LazyLookup)):
                raise PluginError('Tried to set %r url to %r' % (self.get('title'), value))
            self.setdefault('original_url', value)

        # title handling
        if key == 'title':
            if not isinstance(value, (str, LazyLookup)):
                raise PluginError('Tried to set title to %r' % value)

        try:
            log.trace('ENTRY SET: %s = %r' % (key, value))
        except Exception as e:
            log.debug('trying to debug key `%s` value threw exception: %s' % (key, e))

        super(Entry, self).__setitem__(key, value)
Example 44
 def _run_bootstrapper(self, bridge_jar, context):
     bootstrapper = self._relative_to_buildroot(
         self._zinc_factory._compiler_bootstrapper(self._products), )
     bootstrapper_args = [
         '--out',
         self._relative_to_buildroot(bridge_jar),
         '--compiler-interface',
         self._relative_to_buildroot(self.compiler_interface),
         '--compiler-bridge-src',
         self._relative_to_buildroot(self.compiler_bridge),
         '--scala-compiler',
         self._relative_to_buildroot(self.scala_compiler),
         '--scala-library',
         self._relative_to_buildroot(self.scala_library),
         '--scala-reflect',
         self._relative_to_buildroot(self.scala_reflect),
     ]
     input_jar_snapshots = context._scheduler.capture_snapshots(
         (PathGlobsAndRoot(
             PathGlobs(tuple([bootstrapper] + bootstrapper_args[1::2])),
             text_type(get_buildroot()),
         ), ))
     argv = tuple(['.jdk/bin/java'] +
                  ['-cp', bootstrapper, Zinc.ZINC_BOOTSTRAPER_MAIN] +
                  bootstrapper_args)
     req = ExecuteProcessRequest(
         argv=argv,
         input_files=input_jar_snapshots[0].directory_digest,
         output_files=(self._relative_to_buildroot(bridge_jar), ),
         description='bootstrap compiler bridge.',
         # Since this is always hermetic, we need to use `underlying_dist`
         jdk_home=self.underlying_dist.home,
     )
     return context.execute_process_synchronously_or_raise(
         req, 'zinc-subsystem', [WorkUnitLabel.COMPILER])
Example 45
 def do_run(self, sequence, map_fn, arg, scope):
     try:
         result = list(map(map_fn, sequence))
     except KeyError as k:
         message = "Missing field '%s'" % text_type(k)
         self.raise_rql_runtime_error(message)
     return result
Example 46
def CMDpending(parser, args):
    """Lists pending jobs."""
    parser.add_option(
        '-b', '--builder', dest='builders', action='append', default=[],
        help='Builders to filter on')
    options, args, buildbot = parser.parse_args(args)
    if args:
        parser.error('Unrecognized parameters: %s' % ' '.join(args))
    if not options.builders:
        options.builders = buildbot.builders.keys
    for builder in options.builders:
        builder = buildbot.builders[builder]
        pending_builds = builder.data.get('pendingBuilds', 0)
        if not pending_builds:
            continue
        print('Builder %s: %d' % (builder.name, pending_builds))
        if not options.quiet:
            for pending in builder.pending_builds.data:
                if 'revision' in pending['source']:
                    print('  revision: %s' % pending['source']['revision'])
                for change in pending['source']['changes']:
                    print('  change:')
                    print('    comment: %r' %
                          text_type(change['comments'][:50]))
                    print('    who:     %s' % change['who'])
    return 0
Example 47
 def quote_value(self, value):
     """
     convert values to mysql code for the same
     mostly delegate directly to the mysql lib, but some exceptions exist
     """
     try:
         if value == None:
             return SQL("NULL")
         elif isinstance(value, SQL):
             if not value.param:
                 # value.template CAN BE MORE THAN A TEMPLATE STRING
                 return self.quote_sql(value.template)
             param = {k: self.quote_sql(v) for k, v in value.param.items()}
             return SQL(expand_template(value.template, param))
         elif isinstance(value, basestring):
             return SQL(self.db.literal(value))
         elif isinstance(value, Mapping):
             return SQL(self.db.literal(json_encode(value)))
         elif Math.is_number(value):
             return SQL(text_type(value))
         elif isinstance(value, datetime):
             return SQL("str_to_date('" +
                        value.strftime("%Y%m%d%H%M%S.%f") +
                        "', '%Y%m%d%H%i%s.%f')")
         elif isinstance(value, Date):
             return SQL("str_to_date('" + value.format("%Y%m%d%H%M%S.%f") +
                        "', '%Y%m%d%H%i%s.%f')")
         elif hasattr(value, '__iter__'):
             return SQL(self.db.literal(json_encode(value)))
         else:
             return self.db.literal(value)
     except Exception as e:
         Log.error("problem quoting SQL", e)
Example 48
  def _execute_hermetic_compile(self, cmd, ctx):
    # For now, executing a compile remotely only works for targets that
    # do not have any dependencies or inner classes

    input_snapshot = ctx.target.sources_snapshot(scheduler=self.context._scheduler)
    output_files = tuple(
      # Assume no extra .class files to grab. We'll fix up that case soon.
      # Drop the source_root from the file path.
      # Assumes `-d .` has been put in the command.
      os.path.relpath(f.path.replace('.java', '.class'), ctx.target.target_base)
      for f in input_snapshot.files if f.path.endswith('.java')
    )
    exec_process_request = ExecuteProcessRequest(
      argv=tuple(cmd),
      input_files=input_snapshot.directory_digest,
      output_files=output_files,
      description='Compiling {} with javac'.format(ctx.target.address.spec),
    )
    exec_result = self.context.execute_process_synchronously(
      exec_process_request,
      'javac',
      (WorkUnitLabel.TASK, WorkUnitLabel.JVM),
    )

    # Dump the output to the .pants.d directory where it's expected by downstream tasks.
    classes_directory = ctx.classes_dir
    self.context._scheduler.materialize_directories((
      DirectoryToMaterialize(text_type(classes_directory), exec_result.output_directory_digest),
    ))
Example 49
def latin12unicode(value):
    if isinstance(value, text_type):
        Log.error("can not convert unicode from latin1")
    try:
        return text_type(value.decode('iso-8859-1'))
    except Exception as e:
        Log.error("Can not convert {{value|quote}} to unicode", value=value)
Example 50
 def test_snapshot_from_outside_buildroot_failure(self):
   with temporary_dir() as temp_dir:
     scheduler = self.mk_scheduler(rules=create_fs_rules())
     globs = PathGlobs(("*",), ())
     with self.assertRaises(Exception) as cm:
       scheduler.capture_snapshots((PathGlobsAndRoot(globs, text_type(os.path.join(temp_dir, "doesnotexist"))),))
     self.assertIn("doesnotexist", str(cm.exception))
Example 51
 def _run_bootstrapper(self, bridge_jar, context):
   bootstrapper = self._relative_to_buildroot(
     self._zinc_factory._compiler_bootstrapper(self._products),
   )
   bootstrapper_args = [
     '--out', self._relative_to_buildroot(bridge_jar),
     '--compiler-interface', self._relative_to_buildroot(self.compiler_interface),
     '--compiler-bridge-src', self._relative_to_buildroot(self.compiler_bridge),
     '--scala-compiler', self._relative_to_buildroot(self.scala_compiler),
     '--scala-library', self._relative_to_buildroot(self.scala_library),
     '--scala-reflect', self._relative_to_buildroot(self.scala_reflect),
   ]
   input_jar_snapshots = context._scheduler.capture_snapshots((PathGlobsAndRoot(
     PathGlobs(tuple([bootstrapper] + bootstrapper_args[1::2])),
     text_type(get_buildroot()),
   ),))
   argv = tuple(['.jdk/bin/java'] +
                ['-cp', bootstrapper, Zinc.ZINC_BOOTSTRAPER_MAIN] +
                bootstrapper_args
   )
   req = ExecuteProcessRequest(
     argv=argv,
     input_files=input_jar_snapshots[0].directory_digest,
     output_files=(self._relative_to_buildroot(bridge_jar),),
     description='bootstrap compiler bridge.',
     # Since this is always hermetic, we need to use `underlying_dist`
     jdk_home=self.underlying_dist.home,
   )
   return context.execute_process_synchronously_or_raise(req, 'zinc-subsystem', [WorkUnitLabel.COMPILER])
Example 52
  def test_materialize_directories(self):
    # I tried passing in the digest of a file, but it didn't make it to the
    # rust code due to all of the checks we have in place (which is probably a good thing).
    self.prime_store_with_roland_digest()

    with temporary_dir() as temp_dir:
      dir_path = os.path.join(temp_dir, "containing_roland")
      digest = Digest(
        text_type("63949aa823baf765eff07b946050d76ec0033144c785a94d3ebd82baa931cd16"),
        80
      )
      self.scheduler.materialize_directories((DirectoryToMaterialize(text_type(dir_path), digest),))

      created_file = os.path.join(dir_path, "roland")
      with open(created_file, 'r') as f:
        content = f.read()
        self.assertEqual(content, "European Burmese")
Example 53
 def new(cls, master, name, type, logid, logEncoding):
     type = text_type(type)
     try:
         subcls = cls._byType[type]
     except KeyError:
         raise RuntimeError("Invalid log type %r" % (type,))
     decoder = Log._decoderFromString(logEncoding)
     return subcls(master, name, type, logid, decoder)
Example 54
 def test_empty(self):
   """Test that parsing an empty BUILD file results in an empty AddressFamily."""
   address_mapper = AddressMapper(JsonParser(TestTable()))
   af = run_rule(parse_address_family, address_mapper, Dir('/dev/null'), {
       (Snapshot, PathGlobs): lambda _: Snapshot(DirectoryDigest(text_type("abc"), 10), (File('/dev/null/BUILD'),)),
       (FilesContent, DirectoryDigest): lambda _: FilesContent([FileContent('/dev/null/BUILD', '')]),
     })
   self.assertEquals(len(af.objects_by_name), 0)
Example 55
 def add_change(chtuple):
     src, chdict = None, None
     if chtuple:
         src, chdict = chtuple
     if chdict:
         return self.master.data.updates.addChange(src=text_type(src), **chdict)
     else:
         log.msg("no change found in maildir file '%s'" % filename)
Example 56
 def test_download(self):
   with self.isolated_local_store():
     with http_server(StubHandler) as port:
       url = UrlToFetch("http://localhost:{}/CNAME".format(port), self.pantsbuild_digest)
       snapshot, = self.scheduler.product_request(Snapshot, subjects=[url])
       self.assert_snapshot_equals(snapshot, ["CNAME"], Digest(
         text_type("16ba2118adbe5b53270008790e245bbf7088033389461b08640a4092f7f647cf"),
         81
       ))
Example 57
 def __new__(cls, pkg_spec, include_relpath=None, lib_relpath=None, lib_names=None):
   """
   :param str pkg_spec: A string specifying a conan package at a specific version, as per
                        https://docs.conan.io/en/latest/using_packages/conanfile_txt.html#requires
   :param str include_relpath: The relative path from the package root directory to where C/C++
                               headers are located.
   :param str lib_relpath: The relative path from the package root directory to where native
                           libraries are located.
   :param list lib_names: Strings containing the libraries to add to the linker command
                          line. Collected into the `native_lib_names` field of a
                          `packaged_native_library()` target.
   """
   return super(ConanRequirement, cls).__new__(
     cls,
     text_type(pkg_spec),
     include_relpath=text_type(include_relpath or 'include'),
     lib_relpath=text_type(lib_relpath or 'lib'),
     lib_names=tuple(lib_names or ()))
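A hypothetical usage sketch (the conan package spec and library name are invented; field access by name assumes pants' `datatype` semantics):

req = ConanRequirement(
    'lzo/2.10@twitter/stable',  # hypothetical conan package spec
    lib_names=['lzo'],          # collected into native_lib_names on the target
)
assert req.include_relpath == 'include'  # defaults applied in __new__
assert req.lib_relpath == 'lib'
assert req.lib_names == ('lzo',)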
Example 58
    def addLog(self, name, type='s', logEncoding=None):
        d = self.master.data.updates.addLog(self.stepid,
                                            util.ascii2unicode(name),
                                            text_type(type))

        @d.addCallback
        def newLog(logid):
            return self._newLog(name, type, logid, logEncoding)
        return d