def get_patch():
     return patch.PatchSet([
         patch.FilePatchDelete('foo2', True),
         patch.FilePatchDiff('foo', GIT_NEW, []),
         patch.FilePatchBinary('foo1', 'data', [], True),
         patch.FilePatchDiff('foo3', GIT_PATCH, []),
     ])
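A minimal usage sketch for the set built above; it assumes the depot_tools patch module, where a PatchSet is iterable, exposes a filenames property (used by the tests below), and where each FilePatch* object carries filename, is_delete and is_binary attributes:

ps = get_patch()
print(ps.filenames)              # sorted: renames/copies first, deletes last
for p in ps:                     # iterate over the FilePatch* objects
  print(p.filename, p.is_delete, p.is_binary)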
Example #2
 def testPatchsetOrder(self):
   # Deletes must be last.
   # File renames/moves/copies must be first.
   patches = [
       patch.FilePatchDiff('chrome/file.cc', RAW.PATCH, []),
       patch.FilePatchDiff(
           'tools\\clang_check/README.chromium', GIT.DELETE, []),
       patch.FilePatchDiff('tools/run_local_server.sh', GIT.RENAME, []),
       patch.FilePatchDiff(
           'chromeos\\views/webui_menu_widget.h', GIT.RENAME_PARTIAL, []),
       patch.FilePatchDiff('pp', GIT.COPY, []),
       patch.FilePatchDiff('foo', GIT.NEW, []),
       patch.FilePatchDelete('other/place/foo', True),
       patch.FilePatchBinary('bar', 'data', [], is_new=False),
   ]
   expected = [
       'pp',
       'chromeos/views/webui_menu_widget.h',
       'tools/run_local_server.sh',
       'bar',
       'chrome/file.cc',
       'foo',
       'other/place/foo',
       'tools/clang_check/README.chromium',
   ]
   patchset = patch.PatchSet(patches)
   self.assertEquals(expected, patchset.filenames)
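The ordering asserted here can be approximated with a plain sort key. The sketch below only illustrates the rule spelled out in the comments (renames/copies first, deletes last); it is not the actual PatchSet implementation, and it assumes renames/copies are the patches carrying a source_filename:

def order_hint(p):
  # Bucket 0: renames/copies (they reference a source path and must go first).
  if getattr(p, 'source_filename', None):
    bucket = 0
  # Bucket 2: deletions (including diffs detected as deletions) go last.
  elif getattr(p, 'is_delete', False):
    bucket = 2
  # Bucket 1: everything else (edits, additions, binaries).
  else:
    bucket = 1
  return (bucket, p.filename)

# sorted(patches, key=order_hint) yields copies/renames, then edits, then deletes;
# the real tie-breaking inside each bucket may differ from a plain name sort.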
Example #3
 def get_patch(self, _issue, _patchset):
   self.patchsets.append(patch.PatchSet([
       patch.FilePatchDiff('chrome/file.cc', SVN_PATCH, []),
       patch.FilePatchDelete('other/place/foo', True),
       patch.FilePatchBinary('foo', 'data', [], True),
   ]))
   return self.patchsets[-1]
Example #4
 def _check_move(self, co):
   """Makes sure file moves are handled correctly."""
   co.prepare(None)
   patchset = patch.PatchSet([
       patch.FilePatchDelete('chromeos/views/DOMui_menu_widget.h', False),
       patch.FilePatchDiff(
         'chromeos/views/webui_menu_widget.h', GIT.RENAME_PARTIAL, []),
   ])
   co.apply_patch(patchset)
   # Make sure chromeos/views/DOMui_menu_widget.h is deleted and
   # chromeos/views/webui_menu_widget.h is correctly created.
   root = os.path.join(self.root_dir, self.name)
   tree = self.get_trunk(False)
   del tree['chromeos/views/DOMui_menu_widget.h']
   tree['chromeos/views/webui_menu_widget.h'] = (
       '// Copyright (c) 2011\n'
       '// Use of this source code\n'
       '// found in the LICENSE file.\n'
       '\n'
       '#ifndef WEB\n'
       '#define WEB\n'
       '#pragma once\n'
       '\n'
       '#include <string>\n'
       '#endif\n')
   #print patchset[0].get()
   #print fake_repos.read_tree(root)
   self.assertTree(tree, root)
Example #5
 def get_patches(self):
   return patch.PatchSet([
       patch.FilePatchDiff('new_dir/subdir/new_file', GIT.NEW_SUBDIR, []),
       patch.FilePatchDiff('chrome/file.cc', GIT.PATCH, []),
       # TODO(maruel): Test with is_new == False.
       patch.FilePatchBinary('bin_file', '\x00', [], is_new=True),
       patch.FilePatchDelete('extra', False),
   ])
Example #6
 def testBackSlash(self):
     mangled_patch = RAW.PATCH.replace('chrome/', 'chrome\\')
     patches = patch.PatchSet([
         patch.FilePatchDiff('chrome\\file.cc', mangled_patch, []),
         patch.FilePatchDelete('other\\place\\foo', True),
     ])
     expected = ['chrome/file.cc', 'other/place/foo']
     self.assertEquals(expected, patches.filenames)
     self.assertEquals(RAW.PATCH, patches.patches[0].get())
Example #7
 def get_patches(self):
   return patch.PatchSet([
       patch.FilePatchDiff(
           'svn_utils_test.txt', GIT_PATCH, []),
       # TODO(maruel): Test with is_new == False.
       patch.FilePatchBinary('bin_file', '\x00', [], is_new=True),
       patch.FilePatchDelete('extra', False),
       patch.FilePatchDiff('new_dir/subdir/new_file', PATCH_ADD, []),
   ])
Example #8
 def testRelPathEmpty(self):
     patches = patch.PatchSet([
         patch.FilePatchDiff('chrome\\file.cc', RAW.PATCH, []),
         patch.FilePatchDelete('other\\place\\foo', True),
     ])
     patches.set_relpath('')
     self.assertEquals(['chrome/file.cc', 'other/place/foo'],
                       [f.filename for f in patches])
     self.assertEquals([None, None], [f.source_filename for f in patches])
Example #9
 def testRelPathBad(self):
   patches = patch.PatchSet([
       patch.FilePatchDiff('chrome\\file.cc', RAW.PATCH, []),
       patch.FilePatchDelete('other\\place\\foo', True),
   ])
   try:
     patches.set_relpath('..')
     self.fail()
   except patch.UnsupportedPatchFormat:
     pass
Example #10
  def testRelPath(self):
    patches = patch.PatchSet([
        patch.FilePatchDiff('pp', GIT.COPY, []),
        patch.FilePatchDiff(
            'chromeos\\views/webui_menu_widget.h', GIT.RENAME_PARTIAL, []),
        patch.FilePatchDiff('tools/run_local_server.sh', GIT.RENAME, []),
        patch.FilePatchBinary('bar', 'data', [], is_new=False),
        patch.FilePatchDiff('chrome/file.cc', RAW.PATCH, []),
        patch.FilePatchDiff('foo', GIT.NEW, []),
        patch.FilePatchDelete('other/place/foo', True),
        patch.FilePatchDiff(
            'tools\\clang_check/README.chromium', GIT.DELETE, []),
    ])
    expected = [
        'pp',
        'chromeos/views/webui_menu_widget.h',
        'tools/run_local_server.sh',
        'bar',
        'chrome/file.cc',
        'foo',
        'other/place/foo',
        'tools/clang_check/README.chromium',
    ]
    self.assertEquals(expected, patches.filenames)

    # Test patch #4.
    orig_name = patches.patches[4].filename
    orig_source_name = patches.patches[4].source_filename or orig_name
    patches.set_relpath(os.path.join('a', 'bb'))
    # Expect posixpath all the time.
    expected = [posixpath.join('a', 'bb', x) for x in expected]
    self.assertEquals(expected, patches.filenames)
    # Make sure each header is updated accordingly.
    header = []
    new_name = posixpath.join('a', 'bb', orig_name)
    new_source_name = posixpath.join('a', 'bb', orig_source_name)
    for line in RAW.PATCH.splitlines(True):
      if line.startswith('@@'):
        break
      if line[:3] == '---':
        line = line.replace(orig_source_name, new_source_name)
      if line[:3] == '+++':
        line = line.replace(orig_name, new_name)
      header.append(line)
    header = ''.join(header)
    self.assertEquals(header, patches.patches[4].diff_header)
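Independently of depot_tools, the header rewriting verified above can be mimicked on a raw unified diff. A minimal sketch, assuming plain '--- old' / '+++ new' header lines without timestamps (FilePatchDiff.set_relpath itself handles more cases, such as git a/ b/ prefixes and /dev/null):

import posixpath

def prepend_relpath(diff_text, relpath):
  out = []
  for line in diff_text.splitlines(True):
    if line.startswith('--- ') or line.startswith('+++ '):
      marker, path = line.rstrip('\n').split(' ', 1)
      if path != '/dev/null':
        path = posixpath.join(relpath, path)
      line = '%s %s\n' % (marker, path)
    out.append(line)
  return ''.join(out)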
Example #11
 def test_fail_missing_hunk_line(self):
     fp = open(join(TESTS, "data/failing/missing-hunk-line.diff"), 'rb')
     pto = patch.PatchSet()
     self.assertNotEqual(pto.parse(fp), True)
     fp.close()
Example #12
 def test_fail_not_a_patch(self):
     fp = open(join(TESTS, "data/failing/not-a-patch.log"), 'rb')
     res = patch.PatchSet().parse(fp)
     self.assertFalse(res)
     fp.close()
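The two failure tests above exercise the standalone python-patch library, where PatchSet().parse() returns False on malformed input, rather than the depot_tools module used in the earlier examples. A success-path sketch with a placeholder file name:

import patch  # python-patch

def apply_diff(diff_path, root='.', strip=0):
    ps = patch.PatchSet()
    with open(diff_path, 'rb') as fp:
        if not ps.parse(fp):        # False: the diff could not be parsed
            raise ValueError('could not parse %s' % diff_path)
    if not ps.apply(strip=strip, root=root):
        raise ValueError('could not apply %s' % diff_path)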
Example #13
    def get_patch(self, issue, patchset):
        """Returns a PatchSet object containing the details to apply this patch."""
        props = self.get_patchset_properties(issue, patchset) or {}
        out = []
        for filename, state in props.get('files', {}).iteritems():
            logging.debug('%s' % filename)
            # If not status, just assume it's a 'M'. Rietveld often gets it wrong and
            # just has status: null. Oh well.
            status = state.get('status') or 'M'
            if status[0] not in ('A', 'D', 'M', 'R'):
                raise patch.UnsupportedPatchFormat(
                    filename,
                    'Change with status \'%s\' is not supported.' % status)

            svn_props = self.parse_svn_properties(
                state.get('property_changes', ''), filename)

            if state.get('is_binary'):
                if status[0] == 'D':
                    if status[0] != status.strip():
                        raise patch.UnsupportedPatchFormat(
                            filename,
                            'Deleted file shouldn\'t have property change.')
                    out.append(
                        patch.FilePatchDelete(filename, state['is_binary']))
                else:
                    content = self.get_file_content(issue, patchset,
                                                    state['id'])
                    if not content or content == 'None':
                        # As a precaution due to a bug in upload.py for git checkout, refuse
                        # empty files. If it's empty, it's not a binary file.
                        raise patch.UnsupportedPatchFormat(
                            filename,
                            'Binary file is empty. Maybe the file wasn\'t uploaded in the '
                            'first place?')
                    out.append(
                        patch.FilePatchBinary(filename,
                                              content,
                                              svn_props,
                                              is_new=(status[0] == 'A')))
                continue

            try:
                diff = self.get_file_diff(issue, patchset, state['id'])
            except urllib2.HTTPError as e:
                if e.code == 404:
                    raise patch.UnsupportedPatchFormat(
                        filename, 'File doesn\'t have a diff.')
                raise

            # FilePatchDiff() will detect file deletion automatically.
            p = patch.FilePatchDiff(filename, diff, svn_props)
            out.append(p)
            if status[0] == 'A':
                # It won't be set for empty file.
                p.is_new = True
            if (len(status) > 1 and status[1] == '+'
                    and not (p.source_filename or p.svn_properties)):
                raise patch.UnsupportedPatchFormat(
                    filename, 'Failed to process the svn properties')

        return patch.PatchSet(out)
Example #14
class Rietveld(object):
  """Accesses rietveld."""
  def __init__(self, url, email, password, extra_headers=None):
    self.url = url.rstrip('/')
    # Email and password are accessed by commit queue, keep them.
    self.email = email
    self.password = password
    # TODO(maruel): It's not awesome but maybe necessary to retrieve the value.
    # It happens when the presubmit check is run out of process; the cookie
    # needs to be recreated from the credentials. Instead, it should pass the
    # email and the cookie.
    if email and password:
      get_creds = lambda: (email, password)
      self.rpc_server = upload.HttpRpcServer(
            self.url,
            get_creds,
            extra_headers=extra_headers or {})
    else:
      if email == '':
        # If email is given as an empty string, then assume we want to make
        # requests that do not need authentication.  Bypass authentication by
        # setting the auth_function to None.
        self.rpc_server = upload.HttpRpcServer(url, None)
      else:
        self.rpc_server = upload.GetRpcServer(url, email)

    self._xsrf_token = None
    self._xsrf_token_time = None

  def xsrf_token(self):
    if (not self._xsrf_token_time or
        (time.time() - self._xsrf_token_time) > 30*60):
      self._xsrf_token_time = time.time()
      self._xsrf_token = self.get(
          '/xsrf_token',
          extra_headers={'X-Requesting-XSRF-Token': '1'})
    return self._xsrf_token

  def get_pending_issues(self):
    """Returns an array of dict of all the pending issues on the server."""
    # TODO: Convert this to use Rietveld::search(), defined below.
    return json.loads(
        self.get('/search?format=json&commit=2&closed=3&'
                 'keys_only=True&limit=1000&order=__key__'))['results']

  def close_issue(self, issue):
    """Closes the Rietveld issue for this changelist."""
    logging.info('closing issue %d' % issue)
    self.post("/%d/close" % issue, [('xsrf_token', self.xsrf_token())])

  def get_description(self, issue):
    """Returns the issue's description.

    Converts any CRLF into LF and strips extraneous whitespace.
    """
    return '\n'.join(self.get('/%d/description' % issue).strip().splitlines())

  def get_issue_properties(self, issue, messages):
    """Returns all the issue's metadata as a dictionary."""
    url = '/api/%d' % issue
    if messages:
      url += '?messages=true'
    data = json.loads(self.get(url))
    data['description'] = '\n'.join(data['description'].strip().splitlines())
    return data

  def get_patchset_properties(self, issue, patchset):
    """Returns the patchset properties."""
    url = '/api/%d/%d' % (issue, patchset)
    return json.loads(self.get(url))

  def get_file_content(self, issue, patchset, item):
    """Returns the content of a new file.

    Throws HTTP 302 exception if the file doesn't exist or is not a binary file.
    """
    # content = 0 is the old file, 1 is the new file.
    content = 1
    url = '/%d/binary/%d/%d/%d' % (issue, patchset, item, content)
    return self.get(url)

  def get_file_diff(self, issue, patchset, item):
    """Returns the diff of the file.

    Returns a useless diff for binary files.
    """
    url = '/download/issue%d_%d_%d.diff' % (issue, patchset, item)
    return self.get(url)

  def get_patch(self, issue, patchset):
    """Returns a PatchSet object containing the details to apply this patch."""
    props = self.get_patchset_properties(issue, patchset) or {}
    out = []
    for filename, state in props.get('files', {}).iteritems():
      logging.debug('%s' % filename)
      # If not status, just assume it's a 'M'. Rietveld often gets it wrong and
      # just has status: null. Oh well.
      status = state.get('status') or 'M'
      if status[0] not in ('A', 'D', 'M', 'R'):
        raise patch.UnsupportedPatchFormat(
            filename, 'Change with status \'%s\' is not supported.' % status)

      svn_props = self.parse_svn_properties(
          state.get('property_changes', ''), filename)

      if state.get('is_binary'):
        if status[0] == 'D':
          if status[0] != status.strip():
            raise patch.UnsupportedPatchFormat(
                filename, 'Deleted file shouldn\'t have property change.')
          out.append(patch.FilePatchDelete(filename, state['is_binary']))
        else:
          content = self.get_file_content(issue, patchset, state['id'])
          if not content:
            # As a precaution due to a bug in upload.py for git checkout, refuse
            # empty files. If it's empty, it's not a binary file.
            raise patch.UnsupportedPatchFormat(
                filename,
                'Binary file is empty. Maybe the file wasn\'t uploaded in the '
                'first place?')
          out.append(patch.FilePatchBinary(
              filename,
              content,
              svn_props,
              is_new=(status[0] == 'A')))
        continue

      try:
        diff = self.get_file_diff(issue, patchset, state['id'])
      except urllib2.HTTPError as e:
        if e.code == 404:
          raise patch.UnsupportedPatchFormat(
              filename, 'File doesn\'t have a diff.')
        raise

      # FilePatchDiff() will detect file deletion automatically.
      p = patch.FilePatchDiff(filename, diff, svn_props)
      out.append(p)
      if status[0] == 'A':
        # It won't be set for empty file.
        p.is_new = True
      if (len(status) > 1 and
          status[1] == '+' and
          not (p.source_filename or p.svn_properties)):
        raise patch.UnsupportedPatchFormat(
            filename, 'Failed to process the svn properties')

    return patch.PatchSet(out)
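For context, a rough call sequence against the wrapper above (Python 2 era code) could look like the following. The server URL, credentials and issue number are placeholders, the 'patchsets' field is assumed to be present in Rietveld's /api/<issue> JSON, and the excerpt above omits the low-level get()/post() helpers it relies on:

rietveld = Rietveld('https://codereview.example.com', 'user@example.com', None)
props = rietveld.get_issue_properties(12345, messages=False)
patchset = rietveld.get_patch(12345, props['patchsets'][-1])
for p in patchset:
  print(p.filename)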
Example #15
class Rietveld(object):
    """Accesses rietveld."""
    def __init__(self,
                 url,
                 auth_config,
                 email=None,
                 extra_headers=None,
                 maxtries=None):
        self.url = url.rstrip('/')
        self.rpc_server = upload.GetRpcServer(self.url, auth_config, email)

        self._xsrf_token = None
        self._xsrf_token_time = None

        self._maxtries = maxtries or 40

    def xsrf_token(self):
        if (not self._xsrf_token_time
                or (time.time() - self._xsrf_token_time) > 30 * 60):
            self._xsrf_token_time = time.time()
            self._xsrf_token = self.get(
                '/xsrf_token', extra_headers={'X-Requesting-XSRF-Token': '1'})
        return self._xsrf_token

    def get_pending_issues(self):
        """Returns an array of dict of all the pending issues on the server."""
        # TODO: Convert this to use Rietveld::search(), defined below.
        return json.loads(
            self.get('/search?format=json&commit=2&closed=3&'
                     'keys_only=True&limit=1000&order=__key__'))['results']

    def close_issue(self, issue):
        """Closes the Rietveld issue for this changelist."""
        logging.info('closing issue %d' % issue)
        self.post("/%d/close" % issue, [('xsrf_token', self.xsrf_token())])

    def get_description(self, issue):
        """Returns the issue's description.

    Converts any CRLF into LF and strips extraneous whitespace.
    """
        return '\n'.join(
            self.get('/%d/description' % issue).strip().splitlines())

    def get_issue_properties(self, issue, messages):
        """Returns all the issue's metadata as a dictionary."""
        url = '/api/%d' % issue
        if messages:
            url += '?messages=true'
        data = json.loads(self.get(url, retry_on_404=True))
        data['description'] = '\n'.join(
            data['description'].strip().splitlines())
        return data

    def get_depends_on_patchset(self, issue, patchset):
        """Returns the patchset this patchset depends on if it exists."""
        url = '/%d/patchset/%d/get_depends_on_patchset' % (issue, patchset)
        resp = None
        try:
            resp = json.loads(self.post(url, []))
        except (urllib2.HTTPError, ValueError):
            # The get_depends_on_patchset endpoint does not exist on this Rietveld
            # instance yet. Ignore the error and proceed.
            # TODO(rmistry): Make this an error when all Rietveld instances have
            # this endpoint.
            pass
        return resp

    def get_patchset_properties(self, issue, patchset):
        """Returns the patchset properties."""
        url = '/api/%d/%d' % (issue, patchset)
        return json.loads(self.get(url))

    def get_file_content(self, issue, patchset, item):
        """Returns the content of a new file.

    Throws HTTP 302 exception if the file doesn't exist or is not a binary file.
    """
        # content = 0 is the old file, 1 is the new file.
        content = 1
        url = '/%d/binary/%d/%d/%d' % (issue, patchset, item, content)
        return self.get(url)

    def get_file_diff(self, issue, patchset, item):
        """Returns the diff of the file.

    Returns a useless diff for binary files.
    """
        url = '/download/issue%d_%d_%d.diff' % (issue, patchset, item)
        return self.get(url)

    def get_patch(self, issue, patchset):
        """Returns a PatchSet object containing the details to apply this patch."""
        props = self.get_patchset_properties(issue, patchset) or {}
        out = []
        for filename, state in props.get('files', {}).iteritems():
            logging.debug('%s' % filename)
            # If not status, just assume it's a 'M'. Rietveld often gets it wrong and
            # just has status: null. Oh well.
            status = state.get('status') or 'M'
            if status[0] not in ('A', 'D', 'M', 'R'):
                raise patch.UnsupportedPatchFormat(
                    filename,
                    'Change with status \'%s\' is not supported.' % status)

            svn_props = self.parse_svn_properties(
                state.get('property_changes', ''), filename)

            if state.get('is_binary'):
                if status[0] == 'D':
                    if status[0] != status.strip():
                        raise patch.UnsupportedPatchFormat(
                            filename,
                            'Deleted file shouldn\'t have property change.')
                    out.append(
                        patch.FilePatchDelete(filename, state['is_binary']))
                else:
                    content = self.get_file_content(issue, patchset,
                                                    state['id'])
                    if not content or content == 'None':
                        # As a precaution due to a bug in upload.py for git checkout, refuse
                        # empty files. If it's empty, it's not a binary file.
                        raise patch.UnsupportedPatchFormat(
                            filename,
                            'Binary file is empty. Maybe the file wasn\'t uploaded in the '
                            'first place?')
                    out.append(
                        patch.FilePatchBinary(filename,
                                              content,
                                              svn_props,
                                              is_new=(status[0] == 'A')))
                continue

            try:
                diff = self.get_file_diff(issue, patchset, state['id'])
            except urllib2.HTTPError as e:
                if e.code == 404:
                    raise patch.UnsupportedPatchFormat(
                        filename, 'File doesn\'t have a diff.')
                raise

            # FilePatchDiff() will detect file deletion automatically.
            p = patch.FilePatchDiff(filename, diff, svn_props)
            out.append(p)
            if status[0] == 'A':
                # It won't be set for empty file.
                p.is_new = True
            if (len(status) > 1 and status[1] == '+'
                    and not (p.source_filename or p.svn_properties)):
                raise patch.UnsupportedPatchFormat(
                    filename, 'Failed to process the svn properties')

        return patch.PatchSet(out)
Example #16
class Rietveld(object):
  """Accesses rietveld."""
  def __init__(self, url, email, password, extra_headers=None):
    self.url = url
    # TODO(maruel): It's not awesome but maybe necessary to retrieve the value.
    # It happens when the presubmit check is run out of process; the cookie
    # needs to be recreated from the credentials. Instead, it should pass the
    # email and the cookie.
    self.email = email
    self.password = password
    if email and password:
      get_creds = lambda: (email, password)
      self.rpc_server = upload.HttpRpcServer(
            self.url,
            get_creds,
            extra_headers=extra_headers or {})
    else:
      self.rpc_server = upload.GetRpcServer(url, email)
    self._xsrf_token = None
    self._xsrf_token_time = None

  def xsrf_token(self):
    if (not self._xsrf_token_time or
        (time.time() - self._xsrf_token_time) > 30*60):
      self._xsrf_token_time = time.time()
      self._xsrf_token = self.get(
          '/xsrf_token',
          extra_headers={'X-Requesting-XSRF-Token': '1'})
    return self._xsrf_token

  def get_pending_issues(self):
    """Returns an array of dict of all the pending issues on the server."""
    return json.loads(self.get(
        '/search?format=json&commit=2&closed=3&keys_only=True&limit=1000')
        )['results']

  def close_issue(self, issue):
    """Closes the Rietveld issue for this changelist."""
    logging.info('closing issue %s' % issue)
    self.post("/%d/close" % issue, [('xsrf_token', self.xsrf_token())])

  def get_description(self, issue):
    """Returns the issue's description."""
    return self.get('/%d/description' % issue)

  def get_issue_properties(self, issue, messages):
    """Returns all the issue's metadata as a dictionary."""
    url = '/api/%s' % issue
    if messages:
      url += '?messages=true'
    return json.loads(self.get(url))

  def get_patchset_properties(self, issue, patchset):
    """Returns the patchset properties."""
    url = '/api/%s/%s' % (issue, patchset)
    return json.loads(self.get(url))

  def get_file_content(self, issue, patchset, item):
    """Returns the content of a new file.

    Throws HTTP 302 exception if the file doesn't exist or is not a binary file.
    """
    # content = 0 is the old file, 1 is the new file.
    content = 1
    url = '/%s/image/%s/%s/%s' % (issue, patchset, item, content)
    return self.get(url)

  def get_file_diff(self, issue, patchset, item):
    """Returns the diff of the file.

    Returns a useless diff for binary files.
    """
    url = '/download/issue%s_%s_%s.diff' % (issue, patchset, item)
    return self.get(url)

  def get_patch(self, issue, patchset):
    """Returns a PatchSet object containing the details to apply this patch."""
    props = self.get_patchset_properties(issue, patchset) or {}
    out = []
    for filename, state in props.get('files', {}).iteritems():
      logging.debug('%s' % filename)
      status = state.get('status')
      if not status:
        raise patch.UnsupportedPatchFormat(
            filename, 'File\'s status is None, patchset upload is incomplete.')
      if status[0] not in ('A', 'D', 'M'):
        raise patch.UnsupportedPatchFormat(
            filename, 'Change with status \'%s\' is not supported.' % status)

      svn_props = self.parse_svn_properties(
          state.get('property_changes', ''), filename)

      if state.get('is_binary'):
        if status[0] == 'D':
          if status[0] != status.strip():
            raise patch.UnsupportedPatchFormat(
                filename, 'Deleted file shouldn\'t have property change.')
          out.append(patch.FilePatchDelete(filename, state['is_binary']))
        else:
          out.append(patch.FilePatchBinary(
              filename,
              self.get_file_content(issue, patchset, state['id']),
              svn_props,
              is_new=(status[0] == 'A')))
        continue

      try:
        diff = self.get_file_diff(issue, patchset, state['id'])
      except urllib2.HTTPError as e:
        if e.code == 404:
          raise patch.UnsupportedPatchFormat(
              filename, 'File doesn\'t have a diff.')
        raise

      # FilePatchDiff() will detect file deletion automatically.
      p = patch.FilePatchDiff(filename, diff, svn_props)
      out.append(p)
      if status[0] == 'A':
        # It won't be set for empty file.
        p.is_new = True
      if (len(status) > 1 and
          status[1] == '+' and
          not (p.source_filename or p.svn_properties)):
        raise patch.UnsupportedPatchFormat(
            filename, 'Failed to process the svn properties')

    return patch.PatchSet(out)
Example #17
 def test_fail_context_format(self):
     fp = open(join(TESTS, "data/failing/context-format.diff"), 'rb')
     res = patch.PatchSet().parse(fp)
     self.assertFalse(res)
     fp.close()
Example #18
    def _clean_and_download(self, downloads: List[Download], cache: str,
                            srcdir: str) -> List[str]:

        libdir = join(self.root, "lib")
        incdir = join(self.root, "include")

        add_libdir = False
        add_incdir = False

        # Remove downloaded/generated artifacts first
        shutil.rmtree(libdir, ignore_errors=True)
        shutil.rmtree(incdir, ignore_errors=True)
        shutil.rmtree(srcdir, ignore_errors=True)

        dlopen_libnames = self.get_dlopen_library_names()
        libnames_full = []

        for dl in downloads:
            # Extract the whole archive into srcdir when building from source
            # (dl.use_sources); sources and patches only apply in that mode.
            if dl.use_sources:
                download_and_extract_zip(dl.url, srcdir, cache)
                sources = [join(srcdir, normpath(s)) for s in dl.sources]
                self.extension.sources.extend(sources)
                if dl.patches:
                    import patch

                    for p in dl.patches:
                        patch_path = join(self.setup_root, normpath(p.patch))
                        ps = patch.PatchSet()
                        with open(patch_path, "rb") as fp:
                            if not ps.parse(fp):
                                raise ValueError(
                                    f"Error parsing patch '{patch_path}'")

                        if not ps.apply(strip=p.strip, root=srcdir):
                            raise ValueError(
                                f"Error applying patch '{patch_path}'")
            elif dl.sources is not None:
                raise ValueError(
                    "sources must be None if use_sources is False!")
            elif dl.patches is not None:
                raise ValueError(
                    "patches must be None if use_sources is False!")

            if dl.libs or dl.dlopenlibs:
                add_libdir = True
                extract_names = []
                os.makedirs(libdir, exist_ok=True)

                libext = dl.libexts.get(self.platform.libext,
                                        self.platform.libext)
                linkext = dl.linkexts.get(self.platform.linkext,
                                          self.platform.linkext)
                if dl.libs:
                    for lib in dl.libs:
                        if lib not in dlopen_libnames:
                            name = f"{self.platform.libprefix}{lib}{libext}"
                            libnames_full.append(name)
                            extract_names.append(name)
                            if libext != linkext:
                                extract_names.append(
                                    f"{self.platform.libprefix}{lib}{linkext}")

                if dl.dlopenlibs:
                    libnames_full += [
                        f"{self.platform.libprefix}{lib}{libext}"
                        for lib in dl.dlopenlibs
                    ]
                    if libext != linkext:
                        extract_names += [
                            f"{self.platform.libprefix}{lib}{linkext}"
                            for lib in dl.dlopenlibs
                        ]

                to = {
                    posixpath.join(dl.libdir, libname): join(libdir, libname)
                    for libname in extract_names
                }
            else:
                to = {}

            if dl.incdir is not None:
                to[dl.incdir] = incdir
                add_incdir = True

            download_and_extract_zip(dl.url, to, cache)

        if add_incdir:
            for f in glob.glob(join(glob.escape(incdir), "**"),
                               recursive=True):
                self._add_generated_file(f)

        if add_libdir:
            for f in glob.glob(join(glob.escape(libdir), "**"),
                               recursive=True):
                self._add_generated_file(f)

        return libnames_full
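For readability, here is the shape of the Download objects that _clean_and_download appears to consume, inferred purely from how the fields are used above; the real robotpy-build configuration classes may differ in names and defaults:

from dataclasses import dataclass, field
from typing import Dict, List, Optional

@dataclass
class PatchInfo:
    patch: str                 # path to a .diff file, relative to setup_root
    strip: int = 0             # strip level handed to python-patch's apply()

@dataclass
class Download:
    url: str
    use_sources: bool = False  # when True, the archive is extracted into srcdir
    sources: Optional[List[str]] = None
    patches: Optional[List[PatchInfo]] = None
    libs: Optional[List[str]] = None
    dlopenlibs: Optional[List[str]] = None
    libdir: str = ""
    incdir: Optional[str] = None
    libexts: Dict[str, str] = field(default_factory=dict)
    linkexts: Dict[str, str] = field(default_factory=dict)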