def directory_names_from_datetimestamp(self, datetimestamp):
    """Yield (repo, directory-name) pairs for a build timestamp.

    One pair is produced for every combination of build platform, date
    format, and repository configured on this object.  The directory
    name has the form '<date>-<repo>-<platform>'.
    """
    # The short DIRECTORY_DATE form is only really needed for non
    # mobile builds; the DIRECTORY_DATETIME form is always generated.
    date_strings = [
        convert_datetime_to_string(datetimestamp, DIRECTORY_DATE),
        convert_datetime_to_string(datetimestamp, DIRECTORY_DATETIME),
    ]
    for build_platform in self.build_platforms:
        for date_string in date_strings:
            for repository in self.repos:
                directory = '%s-%s-%s' % (date_string, repository,
                                          build_platform)
                yield repository, directory
def publish_results(self, starttime=0, tstrt=0, tstop=0, testname='',
                    cache_enabled=True, rejected=False):
    """Upload one test measurement to the result server.

    :param starttime: test start time, passed through verbatim into the
        result payload (units depend on the caller).
    :param tstrt: throbber start time.
    :param tstop: throbber stop time.
    :param testname: name of the test being reported.
    :param cache_enabled: True if the browser cache was enabled.
    :param rejected: True if the measurement was rejected.

    The result is POSTed to self._resulturl + 'add/', JWT-signed when a
    signer is configured.  The upload is retried up to 10 times; on
    final failure the worker's mailer is notified and this test is
    marked as having raised an exception.
    """
    # Create JSON to send to webserver.
    author = None
    if self.build.tree == 'try':
        # For try builds look up the push author via the json-rev API.
        rev_json_url = self.build.changeset.replace('/rev/', '/json-rev/')
        rev_json = utils.get_remote_json(rev_json_url)
        if rev_json:
            author = rev_json['pushuser']
    blddate = float(convert_datetime_to_string(self.build.date, TIMESTAMP))
    self.loggerdeco.debug(
        'publish_results: build.id: %s, build.date: %s, blddate: %s' % (
            self.build.id, self.build.date, blddate))
    resultdata = {
        'phoneid': self.phone.id,
        'testname': testname,
        'starttime': starttime,
        'throbberstart': tstrt,
        'throbberstop': tstop,
        'blddate': blddate,
        'cached': cache_enabled,
        'rejected': rejected,
        'revision': self.build.changeset,
        'author': author,
        'productname': self.build.app_name,
        'productversion': self.build.version,
        'osver': self.phone.osver,
        'bldtype': self.build.type,
        'machineid': self.phone.machinetype
    }
    result = {'data': resultdata}
    # Upload.
    if self._signer:
        encoded_result = jwt.encode(result, signer=self._signer)
        content_type = 'application/jwt'
    else:
        encoded_result = json.dumps(result)
        content_type = 'application/json; charset=utf-8'
    req = urllib2.Request(self._resulturl + 'add/', encoded_result,
                          {'Content-Type': content_type})
    max_attempts = 10
    wait_time = 10
    for attempt in range(1, max_attempts + 1):
        try:
            f = urllib2.urlopen(req)
            try:
                # Drain the response; close even if the read fails so
                # the connection is not leaked across retries.
                f.read()
            finally:
                f.close()
            return
        except Exception as e:
            # Retry submission if the exception is due to a
            # timeout and if we haven't exceeded the maximum
            # number of attempts.
            # NOTE(review): the handler actually retries on *any*
            # exception, not only timeouts -- confirm whether
            # narrowing to urllib2.URLError/socket.timeout is wanted.
            if attempt < max_attempts:
                self.loggerdeco.warning('PerfTest.publish_results: '
                                        'Attempt %d/%d error %s sending '
                                        'results to server' % (
                                            attempt, max_attempts, e))
                time.sleep(wait_time)
                continue
            # Final attempt failed: log, mail the operator, and mark
            # the test as errored.
            self.loggerdeco.exception('Error sending results to server')
            self.worker_subprocess.mailer.send(
                '%s attempt %s/%s Error sending %s results for phone %s, '
                'build %s' % (utils.host(), attempt, max_attempts,
                              self.name, self.phone.id, self.build.id),
                'There was an error attempting to send test results '
                'to the result server %s.\n'
                '\n'
                'Host %s\n'
                'Job %s\n'
                'Test %s\n'
                'Phone %s\n'
                'Repository %s\n'
                'Build %s\n'
                'Revision %s\n'
                'Exception %s\n'
                'Result %s\n' % (
                    self.result_server, utils.host(), self.job_url,
                    self.name, self.phone.id, self.build.tree,
                    self.build.id, self.build.changeset, e,
                    json.dumps(resultdata, sort_keys=True, indent=2)))
            message = 'Error sending results to server'
            self.status = PhoneTest.EXCEPTION
            self.message = message
            self.update_status(message=message)
def directory_names_from_datetimestamp(self, datetimestamp):
    """Yield a single (repo, directory-name) pair for *datetimestamp*.

    The repo element is always None; the directory name is the
    TIMESTAMP-formatted rendering of the datetime.
    """
    directory = convert_datetime_to_string(datetimestamp, TIMESTAMP)
    yield None, directory
def get_build_data(build_url, builder_type='taskcluster'):
    """Return a dict containing information parsed from a build's .txt file.

    :param build_url: string containing url to the firefox build.
    :param builder_type: either 'buildbot' or 'taskcluster'

    Returns None if the file does not exist or does not contain build data,
    otherwise returns a dict with keys:
    'url'            : url to build
    'id'             : CCYYMMDDHHMMSS string in UTC
    'date'           : build id as UTC datetime
    'changeset'      : full url to changeset,
    'changeset_dirs' : set of directories where changes occurred.
    'revision'       : revision,
    'builder_type'   : either 'buildbot' or 'taskcluster'
    'repo'           : repository name
    'abi'            : 'arm' or 'x86'
    'sdk'            : 'api-<minimum sdk>' if known or None
    'build_type'     : 'opt' or 'debug'
    'nightly'        : True if this is a nightly build.
    'platform'       : android, android-x86, android-<sdk>
    """
    build_id = None
    changeset = None
    revision = None
    repo = None
    abi = None
    sdk = None
    build_type = None
    platform = None
    nightly = None
    # Taskcluster buildids are UTC; buildbot buildids are Pacific time.
    if builder_type == 'taskcluster':
        build_id_tz = build_dates.UTC
    else:
        build_id_tz = build_dates.PACIFIC
    LOGGER.debug('get_build_data(%s, builder_type=%s)', build_url, builder_type)
    # Parse the url for meta data if possible.
    re_tinderbox = re.compile(
        r'https?://ftp.mozilla.org/pub/mobile/tinderbox-builds/(.*)-(android[^/]*)/\d+/fennec.*\.apk$')
    re_nightly = re.compile(
        r'https?://ftp.mozilla.org/pub/mobile/nightly/\d{4}/\d{2}/\d{4}-\d{2}-\d{2}-\d{2}-\d{2}-\d{2}-(.*)-(android[^/]*)/fennec.*\.apk$')
    re_platform = re.compile(r'(android)-?(x86)?-?(api-\d+)?-?(debug)?')
    re_mozconfig_sdk = re.compile(r'(api-\d+)')
    ftp_build = False
    match_tinderbox = re_tinderbox.match(build_url)
    if match_tinderbox:
        ftp_build = True
        nightly = False
        (repo, platform_api_build_type) = match_tinderbox.groups()
        LOGGER.debug(
            'get_build_data: match_tinderbox: repo: %s, platform_api_build_type: %s',
            repo, platform_api_build_type)
    else:
        match_nightly = re_nightly.match(build_url)
        if match_nightly:
            ftp_build = True
            nightly = True
            (repo, platform_api_build_type) = match_nightly.groups()
            LOGGER.debug(
                'get_build_data: match_nightly: repo: %s, platform_api_build_type: %s',
                repo, platform_api_build_type)
    if ftp_build:
        if builder_type == 'taskcluster':
            # ftp builds use buildbot timestamps; force Pacific time.
            LOGGER.error(
                'get_build_data(%s, builder_type=%s) for ftp build. '
                'Setting timezone to Pacific.', build_url, builder_type)
            build_id_tz = build_dates.PACIFIC
        match_platform = re_platform.match(platform_api_build_type)
        if match_platform:
            (platform, abi, sdk, debug) = match_platform.groups()
            build_type = 'debug' if debug else 'opt'
            if not abi:
                abi = 'arm'
            elif abi == 'i386' or abi == 'i686':
                abi = 'x86'
            LOGGER.debug(
                'get_build_data: platform: %s, abi: %s, sdk: %s, debug: %s',
                platform, abi, sdk, debug)
    # Prefer the metadata published next to the apk: <build>.json first.
    build_prefix, build_ext = os.path.splitext(build_url)
    build_json_url = build_prefix + '.json'
    build_json = get_remote_json(build_json_url)
    if build_json:
        build_id = build_json['buildid']
        formatstr, build_date = build_dates.parse_datetime(build_id, tz=build_id_tz)
        # convert buildid to UTC to match Taskcluster
        build_id = build_dates.convert_datetime_to_string(build_date,
                                                          build_dates.BUILDID,
                                                          tz=build_dates.UTC)
        if not abi:
            abi = build_json['target_cpu']
            if abi == 'i386' or abi == 'i686':
                abi = 'x86'
        moz_source_repo = build_json['moz_source_repo'].replace(
            'MOZ_SOURCE_REPO=', '')
        repo = os.path.basename(moz_source_repo)
        revision = build_json['moz_source_stamp']
        changeset = moz_source_repo + '/rev/' + revision
        if not sdk and 'mozconfig' in build_json:
            search = re_mozconfig_sdk.search(build_json['mozconfig'])
            if search:
                sdk = search.group(1)
        LOGGER.debug(
            'get_build_data: build_json: build_id: %s, platform: %s, abi: %s, '
            'sdk: %s, repo: %s, revision: %s, changeset: %s',
            build_id, platform, abi, sdk, repo, revision, changeset)
    # Fill any remaining gaps from <build>.mozinfo.json.
    if build_type is None or sdk is None or nightly is None or platform is None:
        build_mozinfo_json_url = build_prefix + '.mozinfo.json'
        build_mozinfo_json = get_remote_json(build_mozinfo_json_url)
        if build_mozinfo_json:
            if not build_type and 'debug' in build_mozinfo_json:
                build_type = 'debug' if build_mozinfo_json['debug'] else 'opt'
            if not sdk:
                if 'android_min_sdk' in build_mozinfo_json:
                    sdk = 'api-%s' % build_mozinfo_json['android_min_sdk']
                else:
                    if 'mozconfig' in build_mozinfo_json:
                        search = re_mozconfig_sdk.search(
                            build_mozinfo_json['mozconfig'])
                        if search:
                            sdk = search.group(1)
            if not platform:
                platform = build_mozinfo_json['os']
                if sdk:
                    platform += sdk
            if not nightly and 'nightly_build' in build_mozinfo_json:
                nightly = build_mozinfo_json['nightly_build']
            LOGGER.debug(
                'get_build_data: mozinfo build_type: %s, sdk: %s, nightly: %s',
                build_type, sdk, nightly)
    # Last resort: the legacy <build>.txt file (buildid + changeset url).
    if not build_id or not changeset or not repo or not revision:
        build_id_tz = build_dates.PACIFIC
        build_txt = build_prefix + '.txt'
        content = get_remote_text(build_txt)
        if not content:
            return None
        lines = content.splitlines()
        if len(lines) < 1:
            return None
        buildid_regex = re.compile(r'([\d]{14})$')
        changeset_regex = re.compile(r'.*/([^/]*)/rev/(.*)')
        buildid_match = buildid_regex.match(lines[0])
        if len(lines) >= 2:
            changeset_match = changeset_regex.match(lines[1])
        else:
            LOGGER.warning(
                "Unable to find revision in %s, results cannot be "
                " uploaded to treeherder", build_url)
            changeset_match = changeset_regex.match("file://local/rev/local")
            lines.append("file://local/rev/local")
        if not buildid_match or not changeset_match:
            return None
        txt_build_id = lines[0]
        txt_changeset = lines[1]
        txt_repo = changeset_match.group(1)
        txt_revision = changeset_match.group(2)
        LOGGER.debug(
            'get_build_data: txt build_id: %s, changeset: %s, repo: %s, revision: %s',
            txt_build_id, txt_changeset, txt_repo, txt_revision)
        formatstr, build_date = build_dates.parse_datetime(txt_build_id,
                                                           tz=build_id_tz)
        # convert buildid to UTC to match Taskcluster
        txt_build_id = build_dates.convert_datetime_to_string(
            build_date, build_dates.BUILDID, tz=build_dates.UTC)
        # Warn only on genuine disagreements between the json and txt
        # metadata (previously repo/revision warned even when equal).
        if not build_id:
            build_id = txt_build_id
        elif build_id != txt_build_id:
            LOGGER.warning('get_build_data: build_id %s != txt_build_id %s',
                           build_id, txt_build_id)
        if not changeset:
            changeset = txt_changeset
        elif txt_changeset not in changeset:
            LOGGER.warning(
                'get_build_data: txt_changeset %s not in changeset %s',
                txt_changeset, changeset)
        if not repo:
            repo = txt_repo
        elif repo != txt_repo:
            LOGGER.warning('get_build_data: repo %s != txt_repo %s',
                           repo, txt_repo)
        if not revision:
            revision = txt_revision
        elif revision != txt_revision:
            LOGGER.warning('get_build_data: revision %s != txt_revision %s',
                           revision, txt_revision)
    # Final platform is derived solely from abi/sdk.
    # NOTE(review): this discards any platform parsed earlier from the
    # url or mozinfo -- confirm the override is intended.
    platform = 'android'
    if abi == 'x86':
        platform += '-x86'
    if sdk:
        platform += '-' + sdk
    build_data = {
        'url': build_url,
        'id': build_id,
        'date': build_date.astimezone(build_dates.UTC),
        'changeset': changeset,
        'changeset_dirs': get_changeset_dirs(changeset),
        'revision': revision,
        'builder_type': builder_type,
        'repo': repo,
        'abi': abi,
        'sdk': sdk,
        'build_type': build_type,
        'nightly': nightly,
        'platform': platform,
    }
    LOGGER.debug('get_build_data: %s', build_data)
    return build_data
def get_build_data(build_url, builder_type='taskcluster'):
    """Return a dict containing information about a build.

    :param build_url: string containing url to the firefox build.
    :param builder_type: either 'buildbot' or 'taskcluster'

    Returns None if the file does not exist or does not contain build data,
    otherwise returns a dict with keys:
    'url'            : url to build
    'id'             : CCYYMMDDHHMMSS string in UTC
    'date'           : build id as UTC datetime
    'changeset'      : full url to changeset,
    'changeset_dirs' : set of directories where changes occurred.
    'revision'       : revision,
    'builder_type'   : either 'buildbot' or 'taskcluster'
    'repo'           : repository name
    'abi'            : 'arm' or 'x86'
    'sdk'            : 'api-<minimum sdk>' if known or None
    'build_type'     : 'opt' or 'debug'
    'nightly'        : True if this is a nightly build.
    'platform'       : android, android-x86, android-<sdk>
    """
    logger = getLogger()
    logger.debug('get_build_data(%s, builder_type=%s)', build_url, builder_type)
    # Taskcluster artifact urls carry the taskId; prefer the task
    # definition over scraping build metadata files.
    re_taskcluster_build_url = re.compile(
        r'https://queue.taskcluster.net/v1/task/([^/]+)/runs/\d/artifacts/public/build/')
    match = re_taskcluster_build_url.match(build_url)
    if match:
        task_id = match.group(1)
        logger.debug("get_build_data: taskId %s", task_id)
        task_definition = get_taskcluster_task_definition(task_id)
        build_data = get_build_data_from_taskcluster_task_definition(task_definition)
        if build_data:
            build_data['url'] = build_url
        return build_data
    # Taskcluster buildids are UTC; buildbot buildids are Pacific time.
    if builder_type == 'taskcluster':
        build_id_tz = build_dates.UTC
    else:
        build_id_tz = build_dates.PACIFIC
    build_id = None
    changeset = None
    revision = None
    repo = None
    abi = None
    sdk = None
    build_type = None
    platform = None
    nightly = None
    # Parse the url for meta data if possible.
    re_platform = re.compile(r'(android)-?(x86)?-?(api-\d+)?-?(debug)?')
    re_mozconfig_sdk = re.compile(r'(api-\d+)')
    ftp_build = False
    re_tinderbox = re.compile(
        r'https?://ftp.mozilla.org/pub/mobile/tinderbox-builds/(.*)-(android[^/]*)/\d+/fennec.*\.apk$')
    match_tinderbox = re_tinderbox.match(build_url)
    if match_tinderbox:
        ftp_build = True
        nightly = False
        (repo, platform_api_build_type) = match_tinderbox.groups()
        logger.debug(
            'get_build_data: match_tinderbox: repo: %s, platform_api_build_type: %s',
            repo, platform_api_build_type)
    else:
        re_nightly = re.compile(
            r'https?://ftp.mozilla.org/pub/mobile/nightly/\d{4}/\d{2}/\d{4}-\d{2}-\d{2}-\d{2}-\d{2}-\d{2}-(.*)-(android[^/]*)/fennec.*\.apk$')
        match_nightly = re_nightly.match(build_url)
        if match_nightly:
            ftp_build = True
            nightly = True
            (repo, platform_api_build_type) = match_nightly.groups()
            logger.debug(
                'get_build_data: match_nightly: repo: %s, platform_api_build_type: %s',
                repo, platform_api_build_type)
    if ftp_build:
        if builder_type == 'taskcluster':
            # ftp builds use buildbot timestamps; force Pacific time.
            logger.error(
                'get_build_data(%s, builder_type=%s) for ftp build. '
                'Setting timezone to Pacific.', build_url, builder_type)
            build_id_tz = build_dates.PACIFIC
        match_platform = re_platform.match(platform_api_build_type)
        if match_platform:
            (platform, abi, sdk, debug) = match_platform.groups()
            build_type = 'debug' if debug else 'opt'
            if not abi:
                abi = 'arm'
            elif abi == 'i386' or abi == 'i686':
                abi = 'x86'
            logger.debug(
                'get_build_data: platform: %s, abi: %s, sdk: %s, debug: %s',
                platform, abi, sdk, debug)
    # Prefer the metadata published next to the apk: <build>.json first.
    build_prefix, build_ext = os.path.splitext(build_url)
    build_json_url = build_prefix + '.json'
    build_json = get_remote_json(build_json_url)
    if build_json:
        build_id = build_json['buildid']
        formatstr, build_date = build_dates.parse_datetime(build_id, tz=build_id_tz)
        # convert buildid to UTC to match Taskcluster
        build_id = build_dates.convert_datetime_to_string(build_date,
                                                          build_dates.BUILDID,
                                                          tz=build_dates.UTC)
        if not abi:
            abi = build_json['target_cpu']
            if abi == 'i386' or abi == 'i686':
                abi = 'x86'
        moz_source_repo = build_json['moz_source_repo'].replace(
            'MOZ_SOURCE_REPO=', '')
        repo = os.path.basename(moz_source_repo)
        revision = build_json['moz_source_stamp']
        changeset = moz_source_repo + '/rev/' + revision
        if not sdk and 'mozconfig' in build_json:
            search = re_mozconfig_sdk.search(build_json['mozconfig'])
            if search:
                sdk = search.group(1)
        logger.debug(
            'get_build_data: build_json: build_id: %s, platform: %s, abi: %s, '
            'sdk: %s, repo: %s, revision: %s, changeset: %s',
            build_id, platform, abi, sdk, repo, revision, changeset)
    # Fill any remaining gaps from <build>.mozinfo.json.
    if build_type is None or sdk is None or nightly is None or platform is None:
        build_mozinfo_json_url = build_prefix + '.mozinfo.json'
        build_mozinfo_json = get_remote_json(build_mozinfo_json_url)
        if build_mozinfo_json:
            if not build_type and 'debug' in build_mozinfo_json:
                build_type = 'debug' if build_mozinfo_json['debug'] else 'opt'
            if not sdk:
                if 'android_min_sdk' in build_mozinfo_json:
                    sdk = 'api-%s' % build_mozinfo_json['android_min_sdk']
                else:
                    if 'mozconfig' in build_mozinfo_json:
                        search = re_mozconfig_sdk.search(
                            build_mozinfo_json['mozconfig'])
                        if search:
                            sdk = search.group(1)
            if not platform:
                platform = build_mozinfo_json['os']
                if sdk:
                    platform += sdk
            if not nightly and 'nightly_build' in build_mozinfo_json:
                nightly = build_mozinfo_json['nightly_build']
            logger.debug(
                'get_build_data: mozinfo build_type: %s, sdk: %s, nightly: %s',
                build_type, sdk, nightly)
    # Last resort: the legacy <build>.txt file (buildid + changeset url).
    if not build_id or not changeset or not repo or not revision:
        build_id_tz = build_dates.PACIFIC
        build_txt = build_prefix + '.txt'
        content = get_remote_text(build_txt)
        if not content:
            return None
        lines = content.splitlines()
        if len(lines) < 1:
            return None
        buildid_regex = re.compile(r'([\d]{14})$')
        changeset_regex = re.compile(r'.*/([^/]*)/rev/(.*)')
        buildid_match = buildid_regex.match(lines[0])
        if len(lines) >= 2:
            changeset_match = changeset_regex.match(lines[1])
        else:
            logger.warning("Unable to find revision in %s, results cannot be "
                           " uploaded to treeherder", build_url)
            changeset_match = changeset_regex.match("file://local/rev/local")
            lines.append("file://local/rev/local")
        if not buildid_match or not changeset_match:
            return None
        txt_build_id = lines[0]
        txt_changeset = lines[1]
        txt_repo = changeset_match.group(1)
        txt_revision = changeset_match.group(2)
        logger.debug(
            'get_build_data: txt build_id: %s, changeset: %s, repo: %s, revision: %s',
            txt_build_id, txt_changeset, txt_repo, txt_revision)
        formatstr, build_date = build_dates.parse_datetime(txt_build_id,
                                                           tz=build_id_tz)
        # convert buildid to UTC to match Taskcluster
        txt_build_id = build_dates.convert_datetime_to_string(
            build_date, build_dates.BUILDID, tz=build_dates.UTC)
        # Warn only on genuine disagreements between the json and txt
        # metadata (previously repo/revision warned even when equal).
        if not build_id:
            build_id = txt_build_id
        elif build_id != txt_build_id:
            logger.warning('get_build_data: build_id %s != txt_build_id %s',
                           build_id, txt_build_id)
        if not changeset:
            changeset = txt_changeset
        elif txt_changeset not in changeset:
            logger.warning('get_build_data: txt_changeset %s not in changeset %s',
                           txt_changeset, changeset)
        if not repo:
            repo = txt_repo
        elif repo != txt_repo:
            logger.warning('get_build_data: repo %s != txt_repo %s',
                           repo, txt_repo)
        if not revision:
            revision = txt_revision
        elif revision != txt_revision:
            logger.warning('get_build_data: revision %s != txt_revision %s',
                           revision, txt_revision)
    # Final platform is derived solely from abi/sdk.
    # NOTE(review): this discards any platform parsed earlier from the
    # url or mozinfo -- confirm the override is intended.
    platform = 'android'
    if abi == 'x86':
        platform += '-x86'
    if sdk:
        platform += '-' + sdk
    build_data = {
        'url': build_url,
        'id': build_id,
        'date': build_date.astimezone(build_dates.UTC),
        'changeset': changeset,
        'changeset_dirs': get_changeset_dirs(changeset),
        'revision': revision,
        'builder_type': builder_type,
        'repo': repo,
        'abi': abi,
        'sdk': sdk,
        'build_type': build_type,
        'nightly': nightly,
        'platform': platform,
    }
    logger.debug('get_build_data: %s', build_data)
    return build_data
def publish_results(self, starttime=0, tstrt=0, tstop=0, testname='',
                    cache_enabled=True, rejected=False):
    """Upload one test measurement to the phonedash result server.

    :param starttime: test start time, passed through verbatim into the
        result payload (units depend on the caller).
    :param tstrt: throbber start time.
    :param tstop: throbber stop time.
    :param testname: name of the test being reported.
    :param cache_enabled: True if the browser cache was enabled.
    :param rejected: True if the measurement was rejected.

    The result is POSTed to self._resulturl + 'add/', JWT-signed when a
    signer is configured.  The upload is retried up to 10 times; on
    final failure the worker's mailer is notified and a test failure is
    recorded via add_failure().
    """
    # Create JSON to send to webserver.
    author = None
    if self.build.tree == 'try':
        # For try builds look up the push author via the json-rev API.
        rev_json_url = self.build.changeset.replace('/rev/', '/json-rev/')
        rev_json = utils.get_remote_json(rev_json_url)
        if rev_json:
            author = rev_json['pushuser']
    blddate = float(convert_datetime_to_string(self.build.date, TIMESTAMP))
    self.loggerdeco.debug(
        'publish_results: build.id: %s, build.date: %s, blddate: %s' % (
            self.build.id, self.build.date, blddate))
    resultdata = {
        'phoneid': self.phone.id,
        'testname': testname,
        'starttime': starttime,
        'throbberstart': tstrt,
        'throbberstop': tstop,
        'blddate': blddate,
        'cached': cache_enabled,
        'rejected': rejected,
        'revision': self.build.changeset,
        'author': author,
        'productname': self.build.app_name,
        'productversion': self.build.version,
        'osver': self.phone.osver,
        'bldtype': self.build.type,
        'machineid': self.phone.machinetype
    }
    result = {'data': resultdata}
    # Upload.
    if self._signer:
        encoded_result = jwt.encode(result, signer=self._signer)
        content_type = 'application/jwt'
    else:
        encoded_result = json.dumps(result)
        content_type = 'application/json; charset=utf-8'
    req = urllib2.Request(self._resulturl + 'add/', encoded_result,
                          {'Content-Type': content_type})
    max_attempts = 10
    wait_time = 10
    for attempt in range(1, max_attempts + 1):
        try:
            f = urllib2.urlopen(req)
            try:
                # Drain the response; close even if the read fails so
                # the connection is not leaked across retries.
                f.read()
            finally:
                f.close()
            return
        except Exception as e:
            # Retry submission if the exception is due to a
            # timeout and if we haven't exceeded the maximum
            # number of attempts.
            # NOTE(review): the handler actually retries on *any*
            # exception, not only timeouts -- confirm whether
            # narrowing to urllib2.URLError/socket.timeout is wanted.
            if attempt < max_attempts:
                self.loggerdeco.warning('PerfTest.publish_results: '
                                        'Attempt %d/%d error %s sending '
                                        'results to server' % (
                                            attempt, max_attempts, e))
                time.sleep(wait_time)
                continue
            # Final attempt failed: log, mail the operator, and record
            # the failure against this test.
            self.loggerdeco.exception('Error sending results to server')
            self.worker_subprocess.mailer.send(
                '%s attempt %s/%s Error sending %s results for phone %s, '
                'build %s' % (utils.host(), attempt, max_attempts,
                              self.name, self.phone.id, self.build.id),
                'There was an error attempting to send test results '
                'to the result server %s.\n'
                '\n'
                'Host %s\n'
                'Job %s\n'
                'Test %s\n'
                'Phone %s\n'
                'Repository %s\n'
                'Build %s\n'
                'Revision %s\n'
                'Exception %s\n'
                'Result %s\n' % (
                    self.result_server, utils.host(), self.job_url,
                    self.name, self.phone.id, self.build.tree,
                    self.build.id, self.build.changeset, e,
                    json.dumps(resultdata, sort_keys=True, indent=2)))
            message = 'Error sending results to phonedash server'
            self.add_failure(self.name, TestStatus.TEST_UNEXPECTED_FAIL,
                             message, TreeherderStatus.EXCEPTION)