Example #1
0
 def scrapeNightlies(self, connection, product_name, date):
     """Scrape one day's nightly builds for product_name and record
     each one via buildutil.insert_build (duplicates ignored)."""
     base_url = urljoin(self.config.base_url, product_name, 'nightly',
                        date.strftime('%Y'), date.strftime('%m'), '')
     day_prefix = date.strftime('%Y-%m-%d')
     cursor = connection.cursor()
     for link in getLinks(base_url, startswith=day_prefix):
         for platform, repository, version, kvpairs, bad_lines in \
                 getNightly(link, base_url):
             for line in bad_lines:
                 self.config.logger.warning("Bad line for %s (%r)", link,
                                            line)
             # Versions ending in 'a2' are Aurora-channel builds.
             build_type = 'Aurora' if version.endswith('a2') else 'Nightly'
             build_id = kvpairs.get('buildID')
             if build_id:
                 buildutil.insert_build(cursor,
                                        product_name,
                                        version,
                                        platform,
                                        build_id,
                                        build_type,
                                        None,
                                        repository,
                                        ignore_duplicates=True)
Example #2
0
def scrapeB2G(config, cursor, product_name, urllib=urllib2, date=None):
    """Scrape one day of B2G nightly manifests and insert every build.

    Network failures (URLError) are logged and swallowed; any other
    exception propagates.
    """
    month = date.strftime('%m')
    b2g_url = '%s/%s/%s/' % (config.base_url, product_name,
                             'manifests')

    try:
        dir_prefix = '%s-%s-%s' % (date.year, month, date.strftime('%d'))
        # First directory level is a version number (e.g. 1.0.0);
        # its exact meaning is unknown -- TODO: investigate and refine.
        for version_dir in getLinks(b2g_url, startswith='1.',
                                    urllib=urllib):
            # /1.0.0/2013/01/2013-01-27-07/*.json
            prod_url = '%s/%s/%s/%s/' % (b2g_url, version_dir,
                                         date.year, month)
            for nightly in getLinks(prod_url, startswith=dir_prefix,
                                    urllib=urllib):
                for platform, repository, version, kvpairs in \
                        getB2G(nightly, prod_url):
                    buildutil.insert_build(
                        cursor, product_name, version, platform,
                        kvpairs['buildid'], kvpairs['build_type'],
                        kvpairs.get('beta_number', None), repository,
                        ignore_duplicates=True)

    except urllib.URLError:
        util.reportExceptionAndContinue(logger)
Example #3
0
def scrapeReleases(config, cursor, product_name, urllib=urllib2):
    """Insert release and beta builds found for product_name.

    Releases are sometimes published under the 'nightly' directory,
    sometimes under 'candidates'; both are checked.
    """
    prod_url = '%s/%s/' % (config.base_url, product_name)

    for directory in ('nightly', 'candidates'):
        if not getLinks(prod_url, startswith=directory, urllib=urllib):
            logger.debug('Dir %s not found for %s' % (directory, product_name))
            continue

        url = '%s/%s/%s/' % (config.base_url, product_name, directory)

        try:
            for release in getLinks(url, endswith='-candidates/',
                                    urllib=urllib):
                for platform, version, build_number, kvpairs in \
                        getRelease(release, url):
                    if 'b' in version:
                        # e.g. "9.0b3" -> version "9.0", beta_number "3"
                        build_type = 'Beta'
                        version, beta_number = version.split('b')
                        repository = 'mozilla-beta'
                    else:
                        build_type = 'Release'
                        beta_number = None
                        repository = 'mozilla-release'
                    buildutil.insert_build(cursor, product_name, version,
                                           platform, kvpairs['buildID'],
                                           build_type, beta_number,
                                           repository)
        except urllib.URLError:
            util.reportExceptionAndContinue(logger)
Example #4
0
 def _insert_build(self, cursor, *args, **kwargs):
     """Proxy for buildutil.insert_build that honors dry-run mode.

     When config.dry_run is set, the positional and keyword arguments
     are printed (Python 2 print statements) and nothing is written to
     the database; otherwise the call is forwarded unchanged.
     """
     if self.config.dry_run:
         print "INSERT BUILD"
         print args
         print kwargs
     else:
         buildutil.insert_build(cursor, *args, **kwargs)
Example #5
0
    def create(self, **kwargs):
        """
        Create a new build for a product.

        See http://socorro.readthedocs.org/en/latest/middleware.html#builds

        Required keyword arguments:
        product - Concerned product, e.g. firefox
        version - Concerned version, e.g. 9.0a1
        platform - Platform for this build, e.g. win32
        build_id - Build ID for this build (yyyymmdd######)
        build_type - Type of build, e.g. Nightly, Beta, Aurora, Release

        Required if build_type is Beta:
        beta_number - The beta number, e.g. 9.0b#

        Optional keyword arguments:
        repository - Repository this build came from

        Return: (product_name, version)

        Raises: psycopg2.Error (re-raised after rollback) on DB failure.
        """

        # Parse arguments
        filters = [
            ("product", None, "str"),
            ("version", None, "str"),
            ("platform", None, "str"),
            ("build_id", None, "str"),
            ("build_type", None, "str"),
            ("beta_number", None, "str"),
            ("repository", "", "str")
        ]
        params = external_common.parse_arguments(filters, kwargs)

        self._require_parameters(params, "product", "version", "platform",
                                 "build_id", "build_type")

        if params["build_type"].lower() == "beta":
            self._require_parameters(params, "beta_number")

        # BUG FIX: initialize before the try block.  Previously, if
        # self.database.connection() itself raised psycopg2.Error, the
        # except/finally clauses referenced an unbound `connection` and
        # raised NameError, masking the real database failure.
        connection = None
        try:
            connection = self.database.connection()
            cursor = connection.cursor()

            buildutil.insert_build(cursor, params["product"],
                                   params["version"], params["platform"],
                                   params["build_id"], params["build_type"],
                                   params["beta_number"],
                                   params["repository"])
        except psycopg2.Error:
            logger.error("Failed inserting build data into PostgresSQL",
                         exc_info=True)
            if connection is not None:
                connection.rollback()
            raise
        else:
            connection.commit()
        finally:
            if connection is not None:
                connection.close()

        return (params["product"], params["version"])
Example #6
0
def scrapeNightlies(config, cursor, product_name, urllib=urllib2, date=None):
    """Scrape the nightly directory for product_name on `date` and insert
    every build found; URL errors are logged and suppressed."""
    month = date.strftime('%m')
    nightly_url = '%s/%s/%s/%s/%s/' % (config.base_url, product_name,
                                       'nightly', date.year, month)

    try:
        dir_prefix = '%s-%s-%s' % (date.year, month, date.strftime('%d'))
        for nightly in getLinks(nightly_url, startswith=dir_prefix,
                                urllib=urllib):
            for platform, repository, version, kvpairs in \
                    getNightly(nightly, nightly_url):
                # 'a2' suffix marks Aurora-channel builds.
                build_type = 'Aurora' if version.endswith('a2') else 'Nightly'
                buildutil.insert_build(cursor,
                                       product_name,
                                       version,
                                       platform,
                                       kvpairs['buildID'],
                                       build_type,
                                       None,
                                       repository,
                                       ignore_duplicates=True)

    except urllib.URLError:
        util.reportExceptionAndContinue(logger)
Example #7
0
    def create(self, **kwargs):
        """
        Create a new build for a product.

        See http://socorro.readthedocs.org/en/latest/middleware.html#builds

        Required keyword arguments:
        product - Concerned product, e.g. firefox
        version - Concerned version, e.g. 9.0a1
        platform - Platform for this build, e.g. win32
        build_id - Build ID for this build (yyyymmdd######)
        build_type - Type of build, e.g. Nightly, Beta, Aurora, Release

        Required if build_type is Beta:
        beta_number - The beta number, e.g. 9.0b#

        Optional keyword arguments:
        repository - Repository this build came from

        Return: (product_name, version)

        Raises: InsertionError when the database insert fails.
        """

        # Parse arguments
        filters = [
            ("product", None, "str"),
            ("version", None, "str"),
            ("platform", None, "str"),
            ("build_id", None, "int"),
            ("build_type", None, "str"),
            ("beta_number", None, "int"),
            ("repository", "", "str")
        ]
        params = external_common.parse_arguments(filters, kwargs)

        self._require_parameters(params, "product", "version", "platform",
                                 "build_id", "build_type")

        if params["build_type"].lower() == "beta":
            self._require_parameters(params, "beta_number")

        connection = None
        try:
            connection = self.database.connection()
            cursor = connection.cursor()

            buildutil.insert_build(cursor, params["product"],
                                   params["version"], params["platform"],
                                   params["build_id"], params["build_type"],
                                   params["beta_number"],
                                   params["repository"])
        except psycopg2.Error as e:
            error = str(e)
            logger.error("Failed inserting build data into PostgresSQL, "
                         "reason: %s" % error,
                         exc_info=True)
            # BUG FIX: guard the rollback -- `connection` is still None
            # if opening the connection was what failed.
            if connection is not None:
                connection.rollback()

            # Strip verbose PL/pgSQL CONTEXT details from the message.
            if "CONTEXT" in error:
                error = error[0:error.index("CONTEXT")]
            raise InsertionError(error)
        else:
            # BUG FIX: the successful insert was never committed.
            connection.commit()
        finally:
            # BUG FIX: the connection was never closed.
            if connection is not None:
                connection.close()

        # BUG FIX: return the tuple the docstring promises.
        return (params["product"], params["version"])
Example #8
0
    def scrapeB2G(self, connection, product_name, date):
        """Scrape B2G manifests for `date` and record each build found.

        Does nothing for products other than 'b2g'.
        """
        if product_name != 'b2g':
            return
        cursor = connection.cursor()
        b2g_manifests = urljoin(self.config.base_url, product_name,
                                'manifests')

        dir_prefix = date.strftime('%Y-%m-%d')
        for version_dir in getLinks(b2g_manifests, startswith='1.'):
            prod_url = urljoin(b2g_manifests, version_dir,
                               date.strftime('%Y'), date.strftime('%m'))
            for nightly in getLinks(prod_url, startswith=dir_prefix):
                infos = getB2G(nightly, prod_url, backfill_date=None,
                               logger=self.config.logger)
                for platform, repository, version, kvpairs in infos:
                    buildutil.insert_build(
                        cursor,
                        product_name,
                        version,
                        platform,
                        kvpairs['buildid'],
                        kvpairs['build_type'],
                        kvpairs.get('beta_number', None),
                        repository,
                        ignore_duplicates=True
                    )
Example #9
0
def scrapeReleases(config, cursor, product_name, urllib=urllib2):
    """Look for release builds of product_name and insert each one found.

    Releases are sometimes in the nightly dir, sometimes in candidates;
    both are scanned.
    """
    prod_url = "%s/%s/" % (config.base_url, product_name)

    for directory in ("nightly", "candidates"):
        if not getLinks(prod_url, startswith=directory, urllib=urllib):
            logger.debug("Dir %s not found for %s" % (directory, product_name))
            continue

        url = "%s/%s/%s/" % (config.base_url, product_name, directory)

        try:
            for release in getLinks(url, endswith="-candidates/",
                                    urllib=urllib):
                for platform, version, build_number, kvpairs in \
                        getRelease(release, url):
                    if "b" in version:
                        # "9.0b3" -> version "9.0", beta_number "3"
                        build_type = "Beta"
                        version, beta_number = version.split("b")
                        repository = "mozilla-beta"
                    else:
                        build_type = "Release"
                        beta_number = None
                        repository = "mozilla-release"
                    buildutil.insert_build(cursor, product_name, version,
                                           platform, kvpairs["buildID"],
                                           build_type, beta_number,
                                           repository)
        except urllib.URLError:
            util.reportExceptionAndContinue(logger)
Example #10
0
    def scrapeReleases(self, connection, product_name):
        """Record every release/beta build published for product_name.

        Releases live sometimes under 'nightly', sometimes under
        'candidates'; both directories are checked.
        """
        prod_url = urljoin(self.config.base_url, product_name, '')
        logger = self.config.logger
        cursor = connection.cursor()
        for directory in ('nightly', 'candidates'):
            if not getLinks(prod_url, startswith=directory):
                logger.debug('Dir %s not found for %s',
                             directory, product_name)
                continue

            url = urljoin(self.config.base_url, product_name, directory, '')
            for release in getLinks(url, endswith='-candidates/'):
                for info in getRelease(release, url):
                    platform, version, build_number, kvpairs = info
                    if 'b' in version:
                        # "9.0b3" -> version "9.0", beta_number "3"
                        build_type = 'Beta'
                        version, beta_number = version.split('b')
                        repository = 'mozilla-beta'
                    else:
                        build_type = 'Release'
                        beta_number = None
                        repository = 'mozilla-release'
                    buildutil.insert_build(cursor, product_name, version,
                                           platform, kvpairs['buildID'],
                                           build_type, beta_number,
                                           repository,
                                           ignore_duplicates=True)
Example #11
0
 def _insert_build(self, cursor, *args, **kwargs):
     """Insert a build, or just echo the arguments in dry-run mode.

     With config.dry_run set, the args and kwargs are printed
     (Python 2 print statements) and the database is left untouched;
     otherwise the call is passed straight to buildutil.insert_build.
     """
     if self.config.dry_run:
         print "INSERT BUILD"
         print args
         print kwargs
     else:
         buildutil.insert_build(cursor, *args, **kwargs)
Example #12
0
    def scrapeB2G(self, connection, product_name, date):
        """Scrape B2G nightly manifests for the given date.

        No-op for any product other than 'b2g'.
        """
        if product_name != 'b2g':
            return
        cursor = connection.cursor()
        b2g_manifests = urljoin(self.config.base_url, product_name,
                                'manifests', 'nightly')

        dir_prefix = date.strftime('%Y-%m-%d')
        for version_dir in getLinks(b2g_manifests, startswith='1.'):
            prod_url = urljoin(b2g_manifests, version_dir,
                               date.strftime('%Y'), date.strftime('%m'))
            for nightly in getLinks(prod_url, startswith=dir_prefix):
                for info in getB2G(nightly,
                                   prod_url,
                                   backfill_date=None,
                                   logger=self.config.logger):
                    platform, repository, version, kvpairs = info
                    buildutil.insert_build(cursor,
                                           product_name,
                                           version,
                                           platform,
                                           kvpairs['buildid'],
                                           kvpairs['build_type'],
                                           kvpairs.get('beta_number', None),
                                           repository,
                                           ignore_duplicates=True)
Example #13
0
 def scrapeNightlies(self, connection, product_name, date):
     """Insert one day's nightly builds for product_name, logging any
     malformed info lines encountered along the way."""
     url = urljoin(self.config.base_url, product_name, 'nightly',
                   date.strftime('%Y'),
                   date.strftime('%m'),
                   '')
     cursor = connection.cursor()
     prefix = date.strftime('%Y-%m-%d')
     for nightly in getLinks(url, startswith=prefix):
         for platform, repository, version, kvpairs, bad_lines in \
                 getNightly(nightly, url):
             for bad_line in bad_lines:
                 self.config.logger.warning(
                     "Bad line for %s (%r)",
                     nightly, bad_line
                 )
             # Versions ending in 'a2' are Aurora-channel builds.
             build_type = 'Aurora' if version.endswith('a2') else 'Nightly'
             build_id = kvpairs.get('buildID')
             if build_id:
                 buildutil.insert_build(
                     cursor,
                     product_name,
                     version,
                     platform,
                     build_id,
                     build_type,
                     None,
                     repository,
                     ignore_duplicates=True
                 )
Example #14
0
 def _insert_build(self, cursor, *args, **kwargs):
     """Forward to buildutil.insert_build unless config.dry_run is set.

     In dry-run mode each positional argument and each keyword pair is
     printed on its own tab-indented line (Python 2 print statements)
     instead of touching the database.
     """
     if self.config.dry_run:
         print "INSERT BUILD"
         for arg in args:
             print "\t", repr(arg)
         for key in kwargs:
             print "\t%s=" % key, repr(kwargs[key])
     else:
         buildutil.insert_build(cursor, *args, **kwargs)
Example #15
0
 def _insert_build(self, cursor, *args, **kwargs):
     """Dry-run-aware wrapper for buildutil.insert_build.

     When config.dry_run is set, the arguments are echoed one per
     tab-indented line (Python 2 print statements) and no database
     write happens; otherwise the call is forwarded unchanged.
     """
     if self.config.dry_run:
         print "INSERT BUILD"
         for arg in args:
             print "\t", repr(arg)
         for key in kwargs:
             print "\t%s=" % key, repr(kwargs[key])
     else:
         buildutil.insert_build(cursor, *args, **kwargs)
Example #16
0
    def test_insert_build(self):
        cursor = self.connection.cursor()
        build = ('VERSIONAME5', 'PLATFORMNAME5', '20110101', 'Release',
                 '5', 'REPO5')

        # Test 1: successfully insert a build
        buildutil.insert_build(cursor, 'Firefox', *build)
        self.assertTrue(self.build_exists(cursor, 'Firefox', *build))

        # Test 2: fail at inserting a build
        buildutil.insert_build(cursor, 'Unknown', *build)
        self.assertFalse(self.build_exists(cursor, 'Unknown', *build))
Example #17
0
    def test_insert_build(self):
        """Insert a build and verify build_exists reports exactly one
        matching row (Python 2 style; aborts the run on any error)."""
        me.cur = me.conn.cursor(cursor_factory=psy.LoggingCursor)
        me.cur.setLogger(me.fileLogger)

        # Start from a clean slate for this test product.
        sql = """DELETE FROM releases_raw
                 WHERE product_name = 'PRODUCTNAME5'"""
        me.cur.execute(sql)
        me.cur.connection.commit()

        try:
            buildutil.insert_build(me.cur, 'PRODUCTNAME5', 'VERSIONAME5',
                  'PLATFORMNAME5', '5', 'BUILDTYPE5', '5', 'REPO5')
            actual = buildutil.build_exists(me.cur, 'PRODUCTNAME5',
                  'VERSIONAME5', 'PLATFORMNAME5', '5', 'BUILDTYPE5',
                  '5', 'REPO5')
            assert actual == 1, "expected 1, got %s" % (actual)
        except Exception, x:
            # Any failure (including DB errors) aborts via the logger.
            print "Exception in do_insert_build() ... Error: ", type(x), x
            socorro.lib.util.reportExceptionAndAbort(me.fileLogger)
Example #18
0
    def test_insert_build(self):
        cursor = self.connection.cursor()
        row = ('VERSIONAME5', 'PLATFORMNAME5', '20110101', 'Release',
               '5', 'REPO5')

        # Test 1: successfully insert a build
        buildutil.insert_build(cursor, 'Firefox', *row)
        exists = self.build_exists(cursor, 'Firefox', *row)
        self.assertTrue(exists)

        # Test 2: fail at inserting a build
        buildutil.insert_build(cursor, 'Unknown', *row)
        exists = self.build_exists(cursor, 'Unknown', *row)
        self.assertFalse(exists)
Example #19
0
def scrapeNightlies(config, cursor, product_name, urllib=urllib2, date=None):
    """Insert the nightly builds published for product_name on `date`;
    URL errors are logged and suppressed."""
    month = date.strftime("%m")
    nightly_url = "%s/%s/%s/%s/%s/" % (config.base_url, product_name,
                                       "nightly", date.year, month)

    try:
        dir_prefix = "%s-%s-%s" % (date.year, month, date.strftime("%d"))
        for nightly in getLinks(nightly_url, startswith=dir_prefix,
                                urllib=urllib):
            for platform, repository, version, kvpairs in \
                    getNightly(nightly, nightly_url):
                # 'a2' suffix marks Aurora-channel builds.
                build_type = "Aurora" if version.endswith("a2") else "Nightly"
                buildutil.insert_build(cursor, product_name, version,
                                       platform, kvpairs["buildID"],
                                       build_type, None, repository)

    except urllib.URLError:
        util.reportExceptionAndContinue(logger)
Example #20
0
    def test_insert_build(self):
        cursor = self.connection.cursor()
        common = ("VERSIONAME5", "PLATFORMNAME5", "20110101", "Release",
                  "5", "REPO5", "build1")

        # Test 1: successfully insert a build
        buildutil.insert_build(cursor, "Firefox", *common)
        ok_(self.build_exists(cursor, "Firefox", *common))

        # Test 2: fail at inserting a build
        buildutil.insert_build(cursor, "Unknown", *common)
        ok_(not self.build_exists(cursor, "Unknown", *common))
Example #21
0
def scrapeNightlies(config, cursor, product_name, urllib=urllib2, date=None):
    """Scrape one day's nightly builds for product_name, logging and
    continuing on URL errors."""
    month = date.strftime('%m')
    nightly_url = '%s/%s/%s/%s/%s/' % (config.base_url, product_name,
                                       'nightly', date.year, month)

    try:
        day = date.strftime('%d')
        dir_prefix = '%s-%s-%s' % (date.year, month, day)
        links = getLinks(nightly_url, startswith=dir_prefix, urllib=urllib)
        for nightly in links:
            for platform, repository, version, kvpairs in \
                    getNightly(nightly, nightly_url):
                if version.endswith('a2'):
                    # 'a2' suffix marks Aurora-channel builds.
                    build_type = 'Aurora'
                else:
                    build_type = 'Nightly'
                buildutil.insert_build(cursor, product_name, version,
                                       platform, kvpairs['buildID'],
                                       build_type, None, repository)

    except urllib.URLError:
        util.reportExceptionAndContinue(logger)
Example #22
0
 def scrapeNightlies(self, connection, product_name, date):
     """Insert all nightly builds found for product_name on `date`."""
     url = urljoin(self.config.base_url, product_name, 'nightly',
                   date.strftime('%Y'),
                   date.strftime('%m'),
                   '')
     cursor = connection.cursor()
     for nightly in getLinks(url, startswith=date.strftime('%Y-%m-%d')):
         for platform, repository, version, kvpairs in \
                 getNightly(nightly, url):
             # Versions ending in 'a2' are Aurora-channel builds.
             build_type = 'Aurora' if version.endswith('a2') else 'Nightly'
             buildutil.insert_build(cursor,
                                    product_name,
                                    version,
                                    platform,
                                    kvpairs['buildID'],
                                    build_type,
                                    None,
                                    repository)
Example #23
0
def scrapeReleases(config, cursor, product_name, urllib=urllib2):
    """Insert release and beta builds found for product_name, skipping
    duplicate rows."""
    prod_url = '%s/%s/' % (config.base_url, product_name)

    # releases are sometimes in nightly, sometimes in candidates dir.
    for directory in ('nightly', 'candidates'):
        if not getLinks(prod_url, startswith=directory, urllib=urllib):
            logger.debug('Dir %s not found for %s' % (directory, product_name))
            continue

        url = '%s/%s/%s/' % (config.base_url, product_name, directory)

        try:
            for release in getLinks(url, endswith='-candidates/',
                                    urllib=urllib):
                for platform, version, build_number, kvpairs in \
                        getRelease(release, url):
                    if 'b' in version:
                        # "9.0b3" -> version "9.0", beta_number "3"
                        build_type = 'Beta'
                        version, beta_number = version.split('b')
                        repository = 'mozilla-beta'
                    else:
                        build_type = 'Release'
                        beta_number = None
                        repository = 'mozilla-release'
                    buildutil.insert_build(cursor, product_name, version,
                                           platform, kvpairs['buildID'],
                                           build_type, beta_number,
                                           repository,
                                           ignore_duplicates=True)
        except urllib.URLError:
            util.reportExceptionAndContinue(logger)