def check_patch_url(patch_url):
    try:
        url = urlparse.urlparse(patch_url)
    except ValueError:
        logging.error('invalid patch_url "%s": malformed url' % patch_url)
        return False

    # http(s) is only supported in dev/test, and only for loopback and private IPs
    if config.testing() and url.scheme in ("http", "https"):
        try:
            ip = ipaddress.ip_address(
                unicode(socket.gethostbyname(url.hostname)))
            if ip.is_loopback or ip.is_private:
                return True
        except (socket.gaierror, ValueError) as e:
            # Failed to resolve the hostname (or parse the address); reject.
            logging.error('invalid patch_url "%s": %s' % (patch_url, e))
            return False
        logging.error('invalid patch_url "%s": public http url' % patch_url)
        return False

    # Deployed environments must use the s3 scheme.  s3://bucket/path/to/file
    if url.scheme != "s3":
        logging.error('invalid patch_url "%s": not a s3:// url' % patch_url)
        return False

    # Allow patches only from buckets configured in config.json.
    if url.hostname not in config.get("patch_url_buckets", {}):
        logging.error('invalid patch_url "%s": not whitelisted by config' %
                      patch_url)
        return False

    return True
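A minimal behaviour sketch, assuming config.json whitelists a single hypothetical bucket named "autoland-patches" and config.testing() returns False:

# Illustrative calls only; the bucket name and paths are made up.
check_patch_url("s3://autoland-patches/patches/123.diff")    # True
check_patch_url("s3://some-other-bucket/patches/123.diff")   # False - bucket not whitelisted
check_patch_url("https://example.com/patches/123.diff")      # False - only s3:// outside dev/test
check_patch_url("http://127.0.0.1/patches/123.diff")         # False here; True under config.testing()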
Example #2
def tree_is_open(tree):
    # treestatus running in dev/CI is an older version, with slightly
    # different request and response structures.
    is_test_env = config.testing()

    r = None
    try:
        if is_test_env:
            r = requests.get('http://treestatus/%s?format=json' % tree)
        else:
            r = requests.get(TREESTATUS_URL + tree)

        if r.status_code == 200:
            if is_test_env:
                return r.json()['status'] == 'open'
            else:
                return r.json()['result']['status'] == 'open'

        elif r.status_code == 404:
            # We assume unrecognized trees are open
            return True

        else:
            logger.error('Unexpected response from treestatus API '
                         'for tree "%s": %s' % (tree, r.status_code))
    except KeyError:
        logger.error('Malformed response from treestatus API '
                     'for tree "%s"' % tree)
        if r is not None:
            logger.debug(r.json())
    except Exception as e:
        logger.error('Failed to determine treestatus for %s: %s' % (tree, e))

    return False
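A short sketch of how a caller is expected to gate work on this check; the tree name is illustrative:

# Any failure above already returned False, so a closed or unreachable
# treestatus simply delays the landing attempt.
if tree_is_open('mozilla-central'):
    logger.info('tree is open - proceeding with the landing attempt')
else:
    logger.info('tree is closed or treestatus is unavailable - retrying later')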
Example #3
def autoland_status(request_id):
    if not config.testing():
        abort(404)

    dbconn = get_dbconn()
    cursor = dbconn.cursor()

    query = """
        select destination, request, landed, result
        from Transplant
        where id = %(request_id)s
    """

    try:
        cursor.execute(query, {"request_id": int(request_id)})
    except ValueError:
        abort(404)

    row = cursor.fetchone()
    if row:
        destination, req, landed, result = row

        status = req.copy()
        del status["pingback_url"]
        status["destination"] = destination
        status["landed"] = landed
        status["result"] = result if landed else ""
        status["error_msg"] = result if not landed else ""

        return jsonify(status)

    abort(404)
Example #4
    def apply_changes(self, remote_tip):
        dirty_files = self.dirty_files()
        if dirty_files:
            logger.error('repo is not clean: %s' % ' '.join(dirty_files))
            raise Exception("We're sorry - something has gone wrong while "
                            "landing your commits. The repository contains "
                            "unexpected changes. "
                            "Please file a bug.")

        self.run_hg(['update', remote_tip])

        if config.testing() and self.patch:
            # Dev/Testing permits passing in a patch within the request.
            self._apply_patch_from_io_buff(io.BytesIO(self.patch))

        else:
            for patch_url in self.patch_urls:
                if patch_url.startswith('s3://'):
                    # Download patch from s3 to a temp file.
                    io_buf = self._download_from_s3(patch_url)

                else:
                    # Download patch directly from url.  Using a temp file here
                    # instead of passing the url to 'hg import' to make
                    # testing's code path closer to production's.
                    io_buf = self._download_from_url(patch_url)

                self._apply_patch_from_io_buff(io_buf)

        return self.run_hg(['log', '-r', '.', '-T', '{node}'])
Example #5
    def _apply_patch_from_io_buff(self, io_buf):
        patch = PatchHelper(io_buf)

        # In production we require each patch to include a `Diff Start Line` header.
        # In test this is tricky because mercurial doesn't generate this header.
        if not config.testing() and not patch.diff_start_line:
            raise Exception("invalid patch: missing `Diff Start Line` header")

        # Import then commit to ensure correct parsing of the
        # commit description.
        desc_temp = tempfile.NamedTemporaryFile()
        diff_temp = tempfile.NamedTemporaryFile()
        with desc_temp, diff_temp:
            patch.write_commit_description(desc_temp)
            desc_temp.flush()
            patch.write_diff(diff_temp)
            diff_temp.flush()

            # Apply the patch, with file rename detection (similarity).
            # Using 95 as the similarity to match automv's default.
            #
            # XXX Using `hg import` here is less than ideal because it isn't
            # using a 3-way merge. It would be better to use
            # `hg import --exact` then `hg rebase`, however we aren't
            # guaranteed to have the changeset's parent in the local repo.
            self.run_hg(['import', '-s', '95', '--no-commit', diff_temp.name])

            # Commit using the extracted date, user, and commit desc.
            # --landing_system is provided by the set_landing_system hgext.
            self.run_hg([
                'commit', '--date',
                patch.header('Date'), '--user',
                patch.header('User'), '--landing_system',
                self.landing_system_id, '--logfile', desc_temp.name
            ])
Example #6
def tree_is_open(logger, tree):
    treestatus_url = TREESTATUS_URL
    if config.testing():
        treestatus_url = 'http://treestatus/'

    # Map integration branches to their short form name
    m = re.match(r'ssh://hg\.mozilla\.org/integration/([^/]+)', tree)
    if m and m.groups():
        tree = m.groups()[0]

    r = None
    try:
        r = requests.get(treestatus_url + tree, verify=False)
        if r.status_code == 200:
            return r.json()['result']['status'] == 'open'
        elif r.status_code == 404:
            # We assume unrecognized trees are open
            return True
        else:
            logger.error('Unexpected response from treestatus API '
                         'for tree "%s": %s' % (tree, r.status_code))
    except KeyError:
        logger.error('Malformed response from treestatus API '
                     'for tree "%s"' % tree)
        if r is not None:
            logger.debug(r.json())
    except Exception as e:
        logger.error('Failed to determine treestatus for %s: %s' % (tree, e))

    return False
Example #7
    def push_try(self, trysyntax):
        # Don't let unicode leak into command arguments.
        assert isinstance(trysyntax, str), "trysyntax arg is not str"

        target_cset = self.update_repo()

        self.apply_changes(target_cset)

        if not trysyntax.startswith("try: "):
            trysyntax = "try: %s" % trysyntax

        commit_cmd = [
            "--encoding=utf-8",
            "--config",
            "ui.allowemptycommit=true",
            "commit",
            "-m",
            trysyntax,
        ]
        if config.testing() and os.getenv("IS_TESTING", None):
            # When running integration testing we need to pin the date
            # for this commit.  Mercurial's command server uses HGPLAIN
            # so we can't set this with [defaults].
            commit_cmd.extend(["--date", "0 0"])

        rev = self.run_hg_cmds([
            commit_cmd,
            ["push", "-r", ".", "-f", "try"],
            ["log", "-r", "tip", "-T", "{node}"],
        ])

        return rev
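A hedged usage sketch, assuming tree, destination, and rev are defined as in the handle_pending_transplants loop further down (the try syntax string is purely illustrative):

# update_repo() and apply_changes() are invoked inside push_try itself.
with Transplant(tree, destination, rev) as tp:
    node = tp.push_try(str("try: -b o -p linux64 -u none -t none"))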
Example #8
def tree_is_open(tree):
    treestatus_url = os.getenv(
        "TREESTATUS_URL",
        TREESTATUS_TEST_URL if config.testing() else TREESTATUS_PROD_URL,
    )

    r = None
    try:
        r = requests.get(treestatus_url % tree)

        if r.status_code == 200:
            res = r.json()
            if "result" in res:
                res = res["result"]
            return res["status"] == "open"

        elif r.status_code == 404:
            # We assume unrecognized trees are open
            return True

        else:
            logger.error("Unexpected response from treestatus API "
                         'for tree "%s": %s' % (tree, r.status_code))
    except KeyError:
        logger.error("Malformed response from treestatus API "
                     'for tree "%s"' % tree)
        if r is not None:
            logger.debug(r.json())
    except Exception as e:
        logger.error("Failed to determine treestatus for %s: %s" % (tree, e))

    return False
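Note that this variant interpolates the tree name into the URL, so TREESTATUS_URL (and the TREESTATUS_TEST_URL / TREESTATUS_PROD_URL defaults, defined elsewhere) is expected to contain a %s placeholder. A hedged example of an override with a made-up host:

# Illustrative override only; the real default constants live alongside this code.
os.environ["TREESTATUS_URL"] = "https://treestatus.example.com/trees/%s"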
Example #9
    def push_try(self, trysyntax):
        # Don't let unicode leak into command arguments.
        assert isinstance(trysyntax, str), "trysyntax arg is not str"

        remote_tip = self.update_repo()

        self.apply_changes(remote_tip)

        if not trysyntax.startswith("try: "):
            trysyntax = "try: %s" % trysyntax

        commit_cmd = [
            '--encoding=utf-8',
            '--config',
            'ui.allowemptycommit=true',
            'commit',
            '-m',
            trysyntax,
        ]
        if config.testing() and os.getenv('IS_TESTING', None):
            # When running integration testing we need to pin the date
            # for this commit.  Mercurial's command server uses HGPLAIN
            # so we can't set this with [defaults].
            commit_cmd.extend(['--date', '0 0'])

        rev = self.run_hg_cmds([
            commit_cmd,
            ['push', '-r', '.', '-f', 'try'],
            ['log', '-r', 'tip', '-T', '{node}'],
        ])

        return rev
Example #10
def hi_there():
    env = "Test" if config.testing() else "Production"
    result = "Welcome to Autoland %s\n" % env

    headers = [("Content-Type", "text/plain"),
               ("Content-Length", str(len(result)))]

    return Response(result, status=200, headers=headers)
Example #11
def tree_is_open(tree):
    if config.testing():
        return True

    try:
        r = requests.get(TREESTATUS_URL + tree + '?format=json', verify=False)
        if r.status_code == 200:
            return r.json()['status'] == 'open'
        elif r.status_code == 404:
            # We assume unrecognized trees are open
            return True
    except (KeyError, requests.exceptions.ConnectionError):
        return False

    # Treat any other response code as a closed tree.
    return False
Example #12
def tree_is_open(tree):
    treestatus_url = TREESTATUS_URL
    if config.testing():
        treestatus_url = 'http://treestatus/'

    # Map integration branches to their short form name
    m = re.match(r'ssh://hg\.mozilla\.org/integration/([^/]+)', tree)
    if m and m.groups():
        tree = m.groups()[0]

    try:
        r = requests.get(treestatus_url + tree + '?format=json', verify=False)
        if r.status_code == 200:
            return r.json()['status'] == 'open'
        elif r.status_code == 404:
            # We assume unrecognized trees are open
            return True
    except (KeyError, requests.exceptions.ConnectionError):
        return False

    # Treat any other response code as a closed tree.
    return False
Example #13
def validate_request(req):
    if req.json is None:
        raise ValueError("missing json")
    request_json = req.json

    required = {"ldap_username", "tree", "rev", "pingback_url", "destination"}
    optional = set()

    is_try = "trysyntax" in request_json
    is_patch = "patch_urls" in request_json
    if config.testing() and not is_patch:
        is_patch = "patch" in request_json

    if (not is_patch) and not ("trysyntax" in request_json
                               or "commit_descriptions" in request_json):
        raise ValueError("one of trysyntax or commit_descriptions must be "
                         "specified")

    if not is_try and not is_patch:
        # Repo transplant.
        required.add("commit_descriptions")
        optional.add("push_bookmark")

    elif not is_try and is_patch:
        # Patch transplant.
        if config.testing():
            optional.add("patch_urls")
            optional.add("patch")
        else:
            required.add("patch_urls")
        optional.add("push_bookmark")

    elif is_try and not is_patch:
        # Repo try.
        required.add("trysyntax")

    elif is_try and is_patch:
        # Patch try.
        raise ValueError("trysyntax is not supported with patch_urls")

    request_fields = set(request_json.keys())

    missing = required - request_fields
    if missing:
        raise ValueError(
            "missing required field%s: %s" %
            ("" if len(missing) == 1 else "s", ", ".join(sorted(missing))))

    extra = request_fields - (required | optional)
    if extra:
        raise ValueError(
            "unexpected field%s: %s" %
            ("" if len(extra) == 1 else "s", ", ".join(sorted(extra))))

    if not check_pingback_url(request_json["pingback_url"]):
        raise ValueError("bad pingback_url")

    if is_patch:
        if config.testing() and ("patch_urls" in request_json
                                 and "patch" in request_json):
            raise ValueError("cannot specify both patch_urls and patch")

        if "patch_urls" in request_json:
            for patch_url in request_json["patch_urls"]:
                if not check_patch_url(patch_url):
                    raise ValueError("bad patch_url")

        if "patch" in request_json:
            try:
                base64.b64decode(request_json["patch"])
            except TypeError:
                raise ValueError("malformed base64 in patch")
Example #14
def handle_pending_transplants(dbconn):
    cursor = dbconn.cursor()
    now = datetime.datetime.now()
    query = """
        SELECT id, destination, request
        FROM Transplant
        WHERE landed IS NULL
              AND (last_updated IS NULL OR last_updated<=%(time)s)
        ORDER BY created
    """
    transplant_retry_delay = TRANSPLANT_RETRY_DELAY
    if config.testing():
        transplant_retry_delay = datetime.timedelta(seconds=1)

    cursor.execute(query, {'time': now - transplant_retry_delay})

    current_treestatus = {}
    finished_revisions = []
    mozreview_updates = []
    retry_revisions = []

    def handle_tree_retry(reason, transplant_id, tree, rev, destination,
                          trysyntax):
        retry_revisions.append((now, transplant_id))
        data = {
            'request_id': transplant_id,
            'tree': tree,
            'rev': rev,
            'destination': destination,
            'trysyntax': trysyntax,
            'landed': False,
            'error_msg': '',
            'result': reason,
        }
        mozreview_updates.append([transplant_id, json.dumps(data)])

    # This code is a bit messy because we have to deal with the fact that
    # the tree could close between the call to tree_is_open and when we
    # actually attempt the revision.
    #
    # We keep a list of revisions to retry called retry_revisions which we
    # append to whenever we detect a closed tree. These revisions have their
    # last_updated field updated so we will retry them after a suitable delay.
    #
    # The other list we keep is for transplant attempts that either succeeded
    # or failed due to a reason other than a closed tree, which is called
    # finished_revisions. Successful or not, we're finished with them, they
    # will not be retried.
    for row in cursor.fetchall():
        transplant_id, destination, request = row

        # Many of these values are used as command arguments. So convert
        # to binary because command arguments aren't unicode.
        destination = destination.encode('ascii')
        requester = request['ldap_username']
        tree = request['tree'].encode('ascii')
        rev = request['rev'].encode('ascii')
        trysyntax = request.get('trysyntax', '')
        push_bookmark = request.get('push_bookmark', '').encode('ascii')
        commit_descriptions = request.get('commit_descriptions')
        repo_config = config.get_repo(tree)
        if not repo_config['tree']:
            # Trees not present on treestatus cannot be closed.
            tree_open = True
        else:
            # When pushing to try we need to check if try is open, not the
            # tree for the source repo.
            tree_name = 'try' if trysyntax else repo_config['tree']
            tree_open = current_treestatus.setdefault(
                destination, treestatus.tree_is_open(tree_name))

        if not tree_open:
            handle_tree_retry('Tree %s is closed - retrying later.' % tree,
                              transplant_id, tree, rev, destination, trysyntax)
            continue

        attempts = 0
        started = datetime.datetime.now()
        landed = False
        while attempts < MAX_TRANSPLANT_ATTEMPTS:
            logger.info('initiating transplant from tree: %s rev: %s '
                        'to destination: %s, attempt %s' %
                        (tree, rev, destination, attempts + 1))

            # TODO: We should break the transplant call into two steps, one
            #       to pull down the commits to transplant, and another
            #       one to rebase it and attempt to push so we don't
            #       duplicate work unnecessarily if we have to rebase more
            #       than once.
            os.environ['AUTOLAND_REQUEST_USER'] = requester
            try:
                with Transplant(tree, destination, rev) as tp:
                    if trysyntax:
                        result = tp.push_try(str(trysyntax))
                    elif push_bookmark:
                        result = tp.push_bookmark(commit_descriptions,
                                                  push_bookmark)
                    else:
                        result = tp.push(commit_descriptions)
                landed = True
            except Exception as e:
                result = str(e)
                landed = False
            finally:
                del os.environ['AUTOLAND_REQUEST_USER']

            logger.info('transplant from tree: %s rev: %s attempt: %s: %s' %
                        (tree, rev, attempts + 1, result))

            if landed or 'abort: push creates new remote head' not in result:
                break

            attempts += 1

        if landed:
            logger.info('transplant successful - new revision: %s' % result)

        else:
            if 'is CLOSED!' in result:
                reason = 'Tree %s is closed - retrying later.' % tree
                logger.info('transplant failed: %s' % reason)
                current_treestatus[destination] = False
                handle_tree_retry(reason, transplant_id, tree, rev,
                                  destination, trysyntax)
                continue

            elif 'APPROVAL REQUIRED' in result:
                reason = 'Tree %s is set to "approval required" - retrying ' \
                         'later.' % tree
                logger.info('transplant failed: %s' % reason)
                current_treestatus[destination] = False
                handle_tree_retry(reason, transplant_id, tree, rev,
                                  destination, trysyntax)
                continue

            elif 'abort: push creates new remote head' in result:
                logger.info('transplant failed: we lost a push race')
                retry_revisions.append((now, transplant_id))
                continue

            elif 'unresolved conflicts (see hg resolve' in result:
                logger.info('transplant failed - manual rebase required: '
                            'tree: %s rev: %s destination: %s error: %s' %
                            (tree, rev, destination, result))
                # This is the only autoland error for which we expect the
                # user to take action. We should make things nicer than the
                # raw mercurial error.
                header = ("We're sorry, Autoland could not rebase your "
                          "commits for you automatically. Please manually "
                          "rebase your commits and try again.\n\n")
                result = header + result

            else:
                logger.info('transplant failed: tree: %s rev: %s '
                            'destination: %s error: %s' %
                            (tree, rev, destination, result))

        completed = datetime.datetime.now()
        logger.info('elapsed transplant time: %s' % (completed - started))

        # set up data to be posted back to mozreview
        data = {
            'request_id': transplant_id,
            'tree': tree,
            'rev': rev,
            'destination': destination,
            'trysyntax': trysyntax,
            'landed': landed,
            'error_msg': '',
            'result': ''
        }

        if landed:
            data['result'] = result
        else:
            data['error_msg'] = result

        mozreview_updates.append([transplant_id, json.dumps(data)])

        finished_revisions.append([landed, result, transplant_id])

    if retry_revisions:
        query = """
            update Transplant set last_updated=%s
            where id=%s
        """
        cursor.executemany(query, retry_revisions)
        dbconn.commit()

    if finished_revisions:
        query = """
            update Transplant set landed=%s,result=%s
            where id=%s
        """
        cursor.executemany(query, finished_revisions)
        dbconn.commit()

    if mozreview_updates:
        query = """
            insert into MozreviewUpdate(transplant_id,data)
            values(%s,%s)
        """
        cursor.executemany(query, mozreview_updates)
        dbconn.commit()
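A minimal sketch of the worker loop that would drive this function; the poll interval is an arbitrary illustrative value and get_dbconn is the helper used by autoland_status above:

import time

def run_worker():
    # Poll the Transplant table for pending requests until the process is stopped.
    dbconn = get_dbconn()
    while True:
        handle_pending_transplants(dbconn)
        time.sleep(10)  # illustrative delay, not taken from config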
Example #15
def handle_pending_transplants(dbconn):
    cursor = dbconn.cursor()
    now = datetime.datetime.now()
    query = """
        SELECT id, destination, request
        FROM Transplant
        WHERE landed IS NULL
              AND (last_updated IS NULL OR last_updated<=%(time)s)
        ORDER BY created
    """
    transplant_retry_delay = TRANSPLANT_RETRY_DELAY
    if config.testing():
        transplant_retry_delay = datetime.timedelta(seconds=1)

    cursor.execute(query, {"time": now - transplant_retry_delay})

    current_treestatus = {}
    finished_revisions = []
    mozreview_updates = []
    retry_revisions = []

    def handle_tree_retry(reason, transplant_id, tree, rev, destination,
                          trysyntax):
        retry_revisions.append((now, transplant_id))
        data = {
            "request_id": transplant_id,
            "tree": tree,
            "rev": rev,
            "destination": destination,
            "trysyntax": trysyntax,
            "landed": False,
            "error_msg": "",
            "result": reason,
        }
        mozreview_updates.append([transplant_id, json.dumps(data)])

    # This code is a bit messy because we have to deal with the fact that
    # the tree could close between the call to tree_is_open and when we
    # actually attempt the revision.
    #
    # We keep a list of revisions to retry called retry_revisions which we
    # append to whenever we detect a closed tree. These revisions have their
    # last_updated field updated so we will retry them after a suitable delay.
    #
    # The other list we keep is for transplant attempts that either succeeded
    # or failed due to a reason other than a closed tree, which is called
    # finished_revisions. Successful or not, we're finished with them, they
    # will not be retried.
    for row in cursor.fetchall():
        transplant_id, destination, request = row

        # Many of these values are used as command arguments. So convert
        # to binary because command arguments aren't unicode.
        destination = destination.encode("ascii")
        requester = request["ldap_username"]
        tree = request["tree"].encode("ascii")
        rev = request["rev"].encode("ascii")
        trysyntax = request.get("trysyntax", "")
        push_bookmark = request.get("push_bookmark", "").encode("ascii")
        commit_descriptions = request.get("commit_descriptions")
        patch_urls = [u.encode("ascii") for u in request.get("patch_urls", [])]

        repo_config = config.get_repo(tree)

        if trysyntax:
            # When pushing to try we need to check if try is open, not the
            # tree for the source repo.
            tree_name = "try"
        else:
            tree_name = repo_config.get("tree")

        if not tree_name:
            # Trees not present on treestatus cannot be closed.
            tree_open = True
        else:
            tree_open = current_treestatus.setdefault(
                destination, treestatus.tree_is_open(tree_name))

        if not tree_open:
            handle_tree_retry(
                "Tree %s is closed - retrying later." % tree,
                transplant_id,
                tree,
                rev,
                destination,
                trysyntax,
            )
            continue

        attempts = 0
        started = datetime.datetime.now()
        landed = False
        while attempts < MAX_TRANSPLANT_ATTEMPTS:
            logger.info("initiating transplant from tree: %s rev: %s "
                        "to destination: %s, attempt %s" %
                        (tree, rev, destination, attempts + 1))

            os.environ["AUTOLAND_REQUEST_USER"] = requester
            try:
                if config.testing() and request.get("patch"):
                    tp = PatchTransplant(
                        tree,
                        destination,
                        rev,
                        None,
                        base64.b64decode(request.get("patch")),
                    )

                elif patch_urls:
                    tp = PatchTransplant(tree, destination, rev, patch_urls)

                else:
                    tp = RepoTransplant(tree, destination, rev,
                                        commit_descriptions)

                with tp:
                    if trysyntax:
                        result = tp.push_try(str(trysyntax))
                    elif push_bookmark:
                        result = tp.push_bookmark(push_bookmark)
                    else:
                        result = tp.push()
                landed = True
            except Exception as e:
                logger.exception(e)
                result = str(e)
                landed = False
            finally:
                del os.environ["AUTOLAND_REQUEST_USER"]

            logger.info("transplant from tree: %s rev: %s attempt: %s: %s" %
                        (tree, rev, attempts + 1, result))

            if landed or "abort: push creates new remote head" not in result:
                break

            attempts += 1

        if landed:
            logger.info("transplant successful - new revision: %s" % result)

        else:
            if "is CLOSED!" in result:
                reason = "Tree %s is closed - retrying later." % tree
                logger.info("transplant failed: %s" % reason)
                current_treestatus[destination] = False
                handle_tree_retry(reason, transplant_id, tree, rev,
                                  destination, trysyntax)
                continue

            elif "APPROVAL REQUIRED" in result:
                reason = ('Tree %s is set to "approval required" - retrying '
                          "later." % tree)
                logger.info("transplant failed: %s" % reason)
                current_treestatus[destination] = False
                handle_tree_retry(reason, transplant_id, tree, rev,
                                  destination, trysyntax)
                continue

            elif ("abort: push creates new remote head" in result
                  or "repository changed while pushing" in result):
                logger.info("transplant failed: we lost a push race")
                logger.info(result)
                retry_revisions.append((now, transplant_id))
                continue

            elif ("unresolved conflicts (see hg resolve" in result
                  or "hunk FAILED -- saving rejects to file" in result
                  or "hunks FAILED -- saving rejects to file" in result):
                logger.info("transplant failed - manual rebase required: "
                            "tree: %s rev: %s destination: %s error: %s" %
                            (tree, rev, destination, result))
                # This is the only autoland error for which we expect the
                # user to take action. We should make things nicer than the
                # raw mercurial error.
                header = ("We're sorry, Autoland could not rebase your "
                          "commits for you automatically. Please manually "
                          "rebase your commits and try again.\n\n")
                result = header + result

            else:
                logger.info("transplant failed: tree: %s rev: %s "
                            "destination: %s error: %s" %
                            (tree, rev, destination, result))

        completed = datetime.datetime.now()
        logger.info("elapsed transplant time: %s" % (completed - started))

        # set up data to be posted back to mozreview
        data = {
            "request_id": transplant_id,
            "tree": tree,
            "rev": rev,
            "destination": destination,
            "trysyntax": trysyntax,
            "landed": landed,
            "error_msg": "",
            "result": "",
        }

        if landed:
            data["result"] = result
        else:
            data["error_msg"] = result

        mozreview_updates.append([transplant_id, json.dumps(data)])

        finished_revisions.append([landed, result, transplant_id])

    if retry_revisions:
        query = """
            update Transplant set last_updated=%s
            where id=%s
        """
        cursor.executemany(query, retry_revisions)
        dbconn.commit()

    if finished_revisions:
        query = """
            update Transplant set landed=%s,result=%s
            where id=%s
        """
        cursor.executemany(query, finished_revisions)
        dbconn.commit()

    if mozreview_updates:
        query = """
            insert into MozreviewUpdate(transplant_id,data)
            values(%s,%s)
        """
        cursor.executemany(query, mozreview_updates)
        dbconn.commit()
Example #16
def handle_pending_mozreview_pullrequests(logger, dbconn):
    gh = github.connect()
    if not gh:
        return

    bzurl = config.get('bugzilla')['url']

    cursor = dbconn.cursor()

    query = """
        select id,ghuser,repo,pullrequest,destination,bzuserid,bzcookie,bugid,
               pingback_url
        from MozreviewPullRequest
        where landed is null
    """
    cursor.execute(query)

    finished_revisions = []
    mozreview_updates = []
    for row in cursor.fetchall():
        (transplant_id, ghuser, repo, pullrequest, destination, bzuserid,
         bzcookie, bugid, pingback_url) = row

        logger.info('attempting to import pullrequest: %s' % transplant_id)

        # see if we can extract the bug from the commit message
        if bugid is None:
            title, body = github.retrieve_issue(gh, ghuser, repo, pullrequest)
            bugs = parse_bugs(title)
            if bugs:
                bugid = bugs[0]
                logger.info('using bug %s from issue title' % bugid)
                finished_revisions.append([bugid, None, None, transplant_id])

        # still no luck, attempt to autofile a bug on the user's behalf
        if bugid is None:
            logger.info('attempting to autofile bug for: %s' % transplant_id)

            b = bugsy.Bugsy(userid=bzuserid, cookie=bzcookie,
                            bugzilla_url=bzurl)
            if not b:
                logger.info('could not connect to bugzilla instance at %s for '
                            'pullrequest id %s' % (bzurl, transplant_id))
                error = 'could not connect to bugzilla. bad credentials?'
            else:
                bug = bugsy.Bug()

                # Summary is required, the rest have defaults or are optional
                bug.summary = title

                if config.testing():
                    bug.product = 'TestProduct'
                    bug.component = 'TestComponent'
                else:
                    # TODO: determine a better product & component than the
                    # defaults provided by Bugsy
                    pass

                pr_url = github.url_for_pullrequest(ghuser, repo, pullrequest)
                bug.add_comment('%s\n\nImported from: %s' % (body, pr_url))

                try:
                    b.put(bug)
                    bugid = bug.id
                    logger.info('created bug: %s ' % bugid)
                    finished_revisions.append([bugid, None, None, transplant_id])
                except bugsy.BugsyException as e:
                    logger.info('transplant failed: could not create new bug: %s '
                                % e.msg)
                    finished_revisions.append([None, False, e.msg, transplant_id])

                    # set up data to be posted back to mozreview
                    data = {
                        'request_id': transplant_id,
                        'bugid': None,
                        'landed': False,
                        'error_msg': 'could not create new bug: ' + e.msg,
                        'result': ''
                    }

                    mozreview_updates.append([transplant_id, pingback_url,
                                              json.dumps(data)])

        landed, result = transplant.transplant_to_mozreview(gh, destination,
                                                            ghuser, repo,
                                                            pullrequest,
                                                            bzuserid, bzcookie,
                                                            bugid)

        if landed:
            logger.info(('transplanted from'
                         ' https://github.com/%s/%s/pull/%s'
                         ' to destination: %s new revision: %s') %
                        (ghuser, repo, pullrequest, destination, result))
        else:
            logger.info(('transplant failed'
                         ' https://github.com/%s/%s/pull/%s'
                         ' destination: %s error: %s') %
                        (ghuser, repo, pullrequest, destination, result))

        finished_revisions.append([bugid, landed, result, transplant_id])

        # set up data to be posted back to mozreview
        data = {
            'request_id': transplant_id,
            'bugid': bugid,
            'landed': landed,
            'error_msg': '',
            'result': ''
        }

        if landed:
            data['result'] = result
        else:
            data['error_msg'] = result

        mozreview_updates.append([transplant_id, pingback_url, json.dumps(data)])

    if finished_revisions:
        query = """
            update MozreviewPullRequest set bugid=%s,landed=%s,result=%s
            where id=%s
        """
        cursor.executemany(query, finished_revisions)
        dbconn.commit()

    if mozreview_updates:
        query = """
            insert into MozreviewUpdate(request_id,pingback_url,data)
            values(%s,%s,%s)
        """
        cursor.executemany(query, mozreview_updates)
        dbconn.commit()
Example #17
def handle_pending_transplants(logger, dbconn):
    cursor = dbconn.cursor()
    now = datetime.datetime.now()
    query = """
        select id, destination, request
        from Transplant
        where landed is null and (last_updated is null
            or last_updated<=%(time)s)
    """
    transplant_retry_delay = TRANSPLANT_RETRY_DELAY
    if config.testing():
        transplant_retry_delay = datetime.timedelta(seconds=1)

    cursor.execute(query, {'time': now - transplant_retry_delay})

    current_treestatus = {}
    finished_revisions = []
    mozreview_updates = []
    retry_revisions = []

    def handle_treeclosed(transplant_id, tree, rev, destination, trysyntax,
                          pingback_url):
        retry_revisions.append((now, transplant_id))

        data = {
            'request_id': transplant_id,
            'tree': tree,
            'rev': rev,
            'destination': destination,
            'trysyntax': trysyntax,
            'landed': False,
            'error_msg': '',
            'result': 'Tree %s is closed - retrying later.' % tree
        }
        mozreview_updates.append([transplant_id, json.dumps(data)])

    # This code is a bit messy because we have to deal with the fact that
    # the tree could close between the call to tree_is_open and when we
    # actually attempt the revision.
    #
    # We keep a list of revisions to retry called retry_revisions which we
    # append to whenever we detect a closed tree. These revisions have their
    # last_updated field updated so we will retry them after a suitable delay.
    #
    # The other list we keep is for transplant attempts that either succeeded
    # or failed due to a reason other than a closed tree, which is called
    # finished_revisions. Successful or not, we're finished with them, they
    # will not be retried.
    for row in cursor.fetchall():
        transplant_id, destination, request = row

        requester = request['ldap_username']
        tree = request['tree']
        rev = request['rev']
        trysyntax = request.get('trysyntax', '')
        push_bookmark = request.get('push_bookmark', '')
        pingback_url = request.get('pingback_url', '')
        commit_descriptions = request.get('commit_descriptions')
        tree_open = current_treestatus.setdefault(destination,
                                                  treestatus.tree_is_open(destination))

        if not tree_open:
            handle_treeclosed(transplant_id, tree, rev, destination,
                              trysyntax, pingback_url)
            continue

        attempts = 0
        logger.info('initiating transplant from tree: %s rev: %s '
                    'to destination: %s' % (tree, rev, destination))
        started = datetime.datetime.now()
        while attempts < MAX_TRANSPLANT_ATTEMPTS:
            # TODO: We should break the transplant call into two steps, one
            #       to pull down the commits to transplant, and another
            #       one to rebase it and attempt to push so we don't
            #       duplicate work unnecessarily if we have to rebase more
            #       than once.
            os.environ['AUTOLAND_REQUEST_USER'] = requester
            landed, result = transplant.transplant(logger, tree,
                                                   destination, rev,
                                                   trysyntax, push_bookmark,
                                                   commit_descriptions)
            del os.environ['AUTOLAND_REQUEST_USER']

            if landed or 'abort: push creates new remote head' not in result:
                break

            attempts += 1

        if landed:
            logger.info('transplant successful - new revision: %s' % result)
        else:
            if 'is CLOSED!' in result:
                logger.info('transplant failed: tree: %s is closed - '
                            'retrying later.' % tree)
                current_treestatus[destination] = False
                handle_treeclosed(transplant_id, tree, rev, destination,
                                  trysyntax, pingback_url)
                continue
            elif 'abort: push creates new remote head' in result:
                logger.info('transplant failed: we lost a push race')
                retry_revisions.append((now, transplant_id))
                continue
            elif 'unresolved conflicts (see hg resolve' in result:
                logger.info('transplant failed - manual rebase required: '
                            'tree: %s rev: %s destination: %s error: %s' %
                            (tree, rev, destination, result))
                # This is the only autoland error for which we expect the
                # user to take action. We should make things nicer than the
                # raw mercurial error.
                # TODO: sad trombone sound
                header = ('We\'re sorry, Autoland could not rebase your '
                          'commits for you automatically. Please manually '
                          'rebase your commits and try again.\n\n')
                result = header + result
            else:
                logger.info('transplant failed: tree: %s rev: %s '
                            'destination: %s error: %s' %
                            (tree, rev, destination, result))

        completed = datetime.datetime.now()
        logger.info('elapsed transplant time: %s' % (completed - started))

        # set up data to be posted back to mozreview
        data = {
            'request_id': transplant_id,
            'tree': tree,
            'rev': rev,
            'destination': destination,
            'trysyntax': trysyntax,
            'landed': landed,
            'error_msg': '',
            'result': ''
        }

        if landed:
            data['result'] = result
        else:
            data['error_msg'] = result

        mozreview_updates.append([transplant_id, json.dumps(data)])

        finished_revisions.append([landed, result, transplant_id])

    if retry_revisions:
        query = """
            update Transplant set last_updated=%s
            where id=%s
        """
        cursor.executemany(query, retry_revisions)
        dbconn.commit()

    if finished_revisions:
        query = """
            update Transplant set landed=%s,result=%s
            where id=%s
        """
        cursor.executemany(query, finished_revisions)
        dbconn.commit()

    if mozreview_updates:
        query = """
            insert into MozreviewUpdate(transplant_id,data)
            values(%s,%s)
        """
        cursor.executemany(query, mozreview_updates)
        dbconn.commit()
Example #18
    def _apply_patch_from_io_buff(self, io_buf):
        patch = PatchHelper(io_buf)

        # In production we require each patch to include a `Diff Start Line` header.
        # In test this is tricky because mercurial doesn't generate this header.
        if not config.testing() and not patch.diff_start_line:
            raise Exception("invalid patch: missing `Diff Start Line` header")

        # Import then commit to ensure correct parsing of the
        # commit description.
        desc_temp = tempfile.NamedTemporaryFile()
        diff_temp = tempfile.NamedTemporaryFile()
        with desc_temp, diff_temp:
            patch.write_commit_description(desc_temp)
            desc_temp.flush()
            patch.write_diff(diff_temp)
            diff_temp.flush()

            # XXX Using `hg import` here is less than ideal because it isn't
            # using a 3-way merge. It would be better to use
            # `hg import --exact` then `hg rebase`, however we aren't
            # guaranteed to have the changeset's parent in the local repo.

            try:
                # Fall back to 'patch' if hg's internal code fails (to work around
                # hg bugs/limitations).

                # In tests if the patch contains a 'Fail HG Import' header we simulate
                # a failure from hg's internal code.
                if config.testing() and patch.header("Fail HG Import"):
                    logger.info("testing: forcing patch fallback")

                    # Create junk to ensure it gets cleaned up.
                    import glob

                    filename = [
                        f for f in glob.glob("%s/*" % self.path)
                        if os.path.isfile(f)
                    ][0]
                    with open(filename, "w") as f:
                        f.write("junk\n")

                    raise Exception(
                        "1 out of 1 hunk FAILED -- saving rejects to file")

                # Apply the patch, with file rename detection (similarity).
                # Using 95 as the similarity to match automv's default.
                self.run_hg(
                    ["import", "-s", "95", "--no-commit", diff_temp.name])

            except Exception as e:
                msg = str(e)
                if ("hunk FAILED -- saving rejects to file" in msg
                        or "hunks FAILED -- saving rejects to file" in msg):
                    # Try again using 'patch' instead of hg's internal patch utility.

                    # But first reset to a clean repo as hg's attempt might have
                    # been partially applied.
                    self.clean_repo(strip_non_public_commits=False)

                    logger.info("import failed, trying with 'patch': %s" % e)
                    try:
                        self.run_hg(["import"] + ["-s", "95"] +
                                    ["--no-commit"] +
                                    ["--config", "ui.patch=patch"] +
                                    [diff_temp.name])
                        # When using an external patch util mercurial won't
                        # automatically handle add/remove/renames.
                        self.run_hg(["addremove", "-s", "95"])
                    except hglib.error.CommandError as hg_error:
                        raise Exception(hg_error.out)
                else:
                    raise

            # Commit using the extracted date, user, and commit desc.
            # --landing_system is provided by the set_landing_system hgext.
            self.run_hg(["commit"] + ["--date", patch.header("Date")] +
                        ["--user", patch.header("User")] +
                        ["--landing_system", self.landing_system_id] +
                        ["--logfile", desc_temp.name])
Example #19
def validate_request(request):
    if request.json is None:
        raise ValueError('missing json')
    request_json = request.json

    required = {'ldap_username', 'tree', 'rev', 'pingback_url', 'destination'}
    optional = set()

    is_try = 'trysyntax' in request_json
    is_patch = 'patch_urls' in request_json
    if config.testing() and not is_patch:
        is_patch = 'patch' in request_json

    if (not is_patch) and not ('trysyntax' in request_json
                               or 'commit_descriptions' in request_json):
        raise ValueError('one of trysyntax or commit_descriptions must be '
                         'specified')

    if not is_try and not is_patch:
        # Repo transplant.
        required.add('commit_descriptions')
        optional.add('push_bookmark')

    elif not is_try and is_patch:
        # Patch transplant.
        if config.testing():
            optional.add('patch_urls')
            optional.add('patch')
        else:
            required.add('patch_urls')
        optional.add('push_bookmark')

    elif is_try and not is_patch:
        # Repo try.
        required.add('trysyntax')

    elif is_try and is_patch:
        # Patch try.
        raise ValueError('trysyntax is not supported with patch_urls')

    request_fields = set(request_json.keys())

    missing = required - request_fields
    if missing:
        raise ValueError(
            'missing required field%s: %s' %
            ('' if len(missing) == 1 else 's', ', '.join(sorted(missing))))

    extra = request_fields - (required | optional)
    if extra:
        raise ValueError(
            'unexpected field%s: %s' %
            ('' if len(extra) == 1 else 's', ', '.join(sorted(extra))))

    if not check_pingback_url(request_json['pingback_url']):
        raise ValueError('bad pingback_url')

    if is_patch:
        if config.testing() and ('patch_urls' in request_json
                                 and 'patch' in request_json):
            raise ValueError('cannot specify both patch_urls and patch')

        if 'patch_urls' in request_json:
            for patch_url in request_json['patch_urls']:
                if not check_patch_url(patch_url):
                    raise ValueError('bad patch_url')

        if 'patch' in request_json:
            try:
                base64.b64decode(request_json['patch'])
            except TypeError:
                raise ValueError('malformed base64 in patch')
Example #20
def connect():
    if config.testing():
        return MockGithub3()

    credentials = config.get('github')
    return github3.login(credentials['user'], password=credentials['passwd'])
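Callers treat a falsy return value as a failed login, as in handle_pending_mozreview_pullrequests above; a short hedged sketch with made-up repository details:

gh = connect()
if gh:
    # Under config.testing() this is a MockGithub3 instance; otherwise it is a
    # github3 session authenticated with the configured credentials.
    title, body = github.retrieve_issue(gh, 'someuser', 'somerepo', 42)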