def build_time_from_directory_name(self, directory_name):
    logger.debug('Nightly:build_time_from_directory_name(%s)' %
                 directory_name)
    build_time = None
    dirnamematch = None
    for r in self.nightly_dirname_regexs:
        dirnamematch = r.match(directory_name)
        if dirnamematch:
            break
    if dirnamematch:
        format, build_time = parse_datetime(directory_name)
    logger.debug('Nightly:build_time_from_directory_name: (%s, %s)' %
                 (directory_name, build_time))
    return build_time
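
# A minimal, self-contained sketch of what build_time_from_directory_name
# relies on: nightly directory names embed a timestamp that parse_datetime()
# extracts. The regex and directory name below are hypothetical examples,
# not taken from this module.
import datetime
import re

EXAMPLE_NIGHTLY_DIRNAME_REGEX = re.compile(
    r'(\d{4})-(\d{2})-(\d{2})-(\d{2})-(\d{2})-(\d{2})-.*')

def example_build_time(directory_name):
    # Return the datetime encoded in a nightly directory name, or None.
    match = EXAMPLE_NIGHTLY_DIRNAME_REGEX.match(directory_name)
    if not match:
        return None
    return datetime.datetime(*(int(part) for part in match.groups()))

# example_build_time('2015-01-15-03-02-04-mozilla-central')
# -> datetime.datetime(2015, 1, 15, 3, 2, 4)
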
def find_builds_by_revision(self, first_revision, last_revision):
    logger.debug('Finding builds between revisions %s and %s' %
                 (first_revision, last_revision))
    search_range = datetime.timedelta(hours=12)
    buildid_regex = re.compile(r'([\d]{14})$')
    builds = []
    for repo in self.repos:
        first_timestamp, last_timestamp = get_revision_timestamps(
            repo, first_revision, last_revision)
        first_datetime = convert_timestamp_to_date(first_timestamp)
        last_datetime = convert_timestamp_to_date(last_timestamp)
        logger.debug('find_builds_by_revision: repo %s, '
                     'first_revision: %s, first_datetime: %s, '
                     'last_revision: %s, last_datetime: %s' % (
                         repo, first_revision, first_datetime,
                         last_revision, last_datetime))
        if not first_datetime or not last_datetime:
            continue
        for search_directory_repo, search_directory in \
                self.get_search_directories_by_time(first_datetime,
                                                    last_datetime):
            # search_directory_repo is not None for Tinderbox builds and
            # can be used to filter the search directories.
            logger.debug('find_builds_by_revision: checking repo: %s, '
                         'search_directory_repo: %s, '
                         'search_directory: %s...' %
                         (repo, search_directory_repo, search_directory))
            if search_directory_repo and search_directory_repo != repo:
                logger.info('find_builds_by_revision: skipping repo %s, '
                            'search_directory_repo: %s, '
                            'search_directory: %s' %
                            (repo, search_directory_repo, search_directory))
                continue
            format = None
            datetimestamps = []
            for link in url_links(search_directory):
                try:
                    datetimestring = link.get('href').strip('/')
                    if self.does_build_directory_contain_repo_name() and \
                            repo not in datetimestring:
                        logger.info('find_builds_by_revision: '
                                    'skipping datetimestring: repo: %s, '
                                    'datetimestring: %s' %
                                    (repo, datetimestring))
                        continue
                    logger.debug('find_builds_by_revision: '
                                 'datetimestring: %s' % datetimestring)
                    link_format, link_datetime = parse_datetime(
                        datetimestring)
                    if not format:
                        format = link_format
                    logger.debug('find_builds_by_revision: link_format: %s, '
                                 'link_datetime: %s' %
                                 (link_format, link_datetime))
                    if first_datetime - search_range < link_datetime < \
                            last_datetime + search_range:
                        datetimestamps.append(set_time_zone(link_datetime))
                except ValueError:
                    pass
            total_datetimestamps = len(datetimestamps)
            datetimestamps = sorted(set(datetimestamps))
            unique_datetimestamps = len(datetimestamps)
            logger.debug('find_builds_by_revision: '
                         'total_datetimestamps=%d, '
                         'unique_datetimestamps=%d' %
                         (total_datetimestamps, unique_datetimestamps))
            logger.debug('find_builds_by_revision: datetimestamps: %s' %
                         datetimestamps)
            start_time = None
            end_time = None
            for datetimestamp in datetimestamps:
                for directory_repo, directory_name in \
                        self.directory_names_from_datetimestamp(
                            datetimestamp):
                    # Since Autophone requires returning builds for each
                    # of its supported platforms, arm, armv6 or x86, we
                    # need to search each to get all of the builds. That
                    # is why we don't terminate this loop when we find the
                    # first build which matches the ending revision.
                    logger.debug('find_builds_by_revision: '
                                 'datetimestamp: %s, repo: %s, '
                                 'search_directory_repo: %s, '
                                 'search_directory: %s, '
                                 'directory_repo: %s, '
                                 'directory_name: %s' %
                                 (datetimestamp, repo,
                                  search_directory_repo, search_directory,
                                  directory_repo, directory_name))
                    try:
                        links = url_links("%s%s/" % (search_directory,
                                                     directory_name))
                    except urllib2.HTTPError:
                        continue
                    for link in links:
                        href = link.get('href')
                        match = self.buildtxt_regex.match(href)
                        if match:
                            txturl = "%s%s/%s" % (search_directory,
                                                  directory_name, href)
                            build_url = "%s%s/%s%s" % (search_directory,
                                                       directory_name,
                                                       match.group(1),
                                                       self.buildfile_ext)
                            logger.debug('find_builds_by_revision: '
                                         'found build: datetimestamp: %s, '
                                         'repo: %s, '
                                         'search_directory_repo: %s, '
                                         'search_directory: %s, '
                                         'directory_repo: %s, '
                                         'directory_name: %s, '
                                         'found build: %s' %
                                         (datetimestamp, repo,
                                          search_directory_repo,
                                          search_directory, directory_repo,
                                          directory_name, build_url))
                            contents = urllib2.urlopen(txturl).read()
                            lines = contents.splitlines()
                            if len(lines) > 1 and \
                                    buildid_regex.match(lines[0]):
                                buildid = lines[0]
                                parts = lines[1].split('rev/')
                                if len(parts) == 2:
                                    if repo != urls_repos[parts[0]]:
                                        logger.info(
                                            'find_builds_by_revision: '
                                            'skipping build: %s != %s' %
                                            (repo, urls_repos[parts[0]]))
                                        continue
                                    revision = parts[1]
                                    if revision.startswith(first_revision):
                                        start_time = \
                                            convert_buildid_to_date(buildid)
                                    elif revision.startswith(last_revision):
                                        end_time = \
                                            convert_buildid_to_date(buildid)
                                    if start_time:
                                        builds.append(build_url)
                            break
                if end_time:
                    break
    return builds
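
# A standalone sketch of the two datetime conventions find_builds_by_revision
# juggles: 14-digit CCYYMMDDHHMMSS buildids and the +/- 12 hour search window
# around the revisions' push times. The helper names here are illustrative
# only; the real code uses convert_buildid_to_date and friends.
import datetime
import re

EXAMPLE_BUILDID_REGEX = re.compile(r'(\d{14})$')
EXAMPLE_SEARCH_WINDOW = datetime.timedelta(hours=12)

def example_buildid_to_date(buildid):
    # Parse a CCYYMMDDHHMMSS buildid into a naive datetime, or return None.
    if not EXAMPLE_BUILDID_REGEX.match(buildid):
        return None
    return datetime.datetime.strptime(buildid, '%Y%m%d%H%M%S')

def example_in_window(candidate, first, last):
    # A build directory timestamp qualifies if it falls within the padded
    # window around the first and last revision times.
    return first - EXAMPLE_SEARCH_WINDOW < candidate < \
        last + EXAMPLE_SEARCH_WINDOW

# first = example_buildid_to_date('20150115030204')
# last = example_buildid_to_date('20150116030204')
# example_in_window(example_buildid_to_date('20150115120000'), first, last)
# -> True
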
def get_build_data(build_url, builder_type='taskcluster'):
    """Return a dict containing information parsed from a build's .txt file.

    :param build_url: string containing url to the firefox build.
    :param builder_type: either 'buildbot' or 'taskcluster'.

    Returns None if the file does not exist or does not contain build
    data, otherwise returns a dict with keys:

       'url'            : url to build
       'id'             : CCYYMMDDHHMMSS string in UTC
       'date'           : build id as UTC datetime
       'changeset'      : full url to changeset
       'changeset_dirs' : set of directories where changes occurred
       'revision'       : revision
       'builder_type'   : either 'buildbot' or 'taskcluster'
       'repo'           : repository name
       'abi'            : 'arm' or 'x86'
       'sdk'            : 'api-<minimum sdk>' if known or None
       'build_type'     : 'opt' or 'debug'
       'nightly'        : True if this is a nightly build
       'platform'       : android, android-x86, android-<sdk>
    """
    build_id = None
    changeset = None
    revision = None
    repo = None
    abi = None
    sdk = None
    build_type = None
    platform = None
    nightly = None
    if builder_type == 'taskcluster':
        build_id_tz = build_dates.UTC
    else:
        build_id_tz = build_dates.PACIFIC
    LOGGER.debug('get_build_data(%s, builder_type=%s)',
                 build_url, builder_type)

    # Parse the url for meta data if possible.
    re_tinderbox = re.compile(
        r'https?://ftp.mozilla.org/pub/mobile/tinderbox-builds/'
        r'(.*)-(android[^/]*)/\d+/fennec.*\.apk$')
    re_nightly = re.compile(
        r'https?://ftp.mozilla.org/pub/mobile/nightly/\d{4}/\d{2}/'
        r'\d{4}-\d{2}-\d{2}-\d{2}-\d{2}-\d{2}-(.*)-(android[^/]*)/'
        r'fennec.*\.apk$')
    re_platform = re.compile(r'(android)-?(x86)?-?(api-\d+)?-?(debug)?')
    re_mozconfig_sdk = re.compile(r'(api-\d+)')

    ftp_build = False
    match_tinderbox = re_tinderbox.match(build_url)
    if match_tinderbox:
        ftp_build = True
        nightly = False
        (repo, platform_api_build_type) = match_tinderbox.groups()
        LOGGER.debug('get_build_data: match_tinderbox: repo: %s, '
                     'platform_api_build_type: %s',
                     repo, platform_api_build_type)
    else:
        match_nightly = re_nightly.match(build_url)
        if match_nightly:
            ftp_build = True
            nightly = True
            (repo, platform_api_build_type) = match_nightly.groups()
            LOGGER.debug('get_build_data: match_nightly: repo: %s, '
                         'platform_api_build_type: %s',
                         repo, platform_api_build_type)

    if ftp_build:
        if builder_type == 'taskcluster':
            LOGGER.error('get_build_data(%s, builder_type=%s) for ftp '
                         'build. Setting timezone to Pacific.',
                         build_url, builder_type)
            build_id_tz = build_dates.PACIFIC
        match_platform = re_platform.match(platform_api_build_type)
        if match_platform:
            (platform, abi, sdk, debug) = match_platform.groups()
            build_type = 'debug' if debug else 'opt'
            if not abi:
                abi = 'arm'
            elif abi == 'i386' or abi == 'i686':
                abi = 'x86'
            LOGGER.debug('get_build_data: platform: %s, abi: %s, sdk: %s, '
                         'debug: %s', platform, abi, sdk, debug)

    build_prefix, build_ext = os.path.splitext(build_url)

    build_json_url = build_prefix + '.json'
    build_json = get_remote_json(build_json_url)
    if build_json:
        build_id = build_json['buildid']
        formatstr, build_date = build_dates.parse_datetime(build_id,
                                                           tz=build_id_tz)
        # Convert the buildid to UTC to match Taskcluster.
        build_id = build_dates.convert_datetime_to_string(
            build_date, build_dates.BUILDID, tz=build_dates.UTC)
        if not abi:
            abi = build_json['target_cpu']
            if abi == 'i386' or abi == 'i686':
                abi = 'x86'
        moz_source_repo = build_json['moz_source_repo'].replace(
            'MOZ_SOURCE_REPO=', '')
        repo = os.path.basename(moz_source_repo)
        revision = build_json['moz_source_stamp']
        changeset = moz_source_repo + '/rev/' + revision
        if not sdk and 'mozconfig' in build_json:
            search = re_mozconfig_sdk.search(build_json['mozconfig'])
            if search:
                sdk = search.group(1)
        LOGGER.debug('get_build_data: build_json: build_id: %s, '
                     'platform: %s, abi: %s, sdk: %s, repo: %s, '
                     'revision: %s, changeset: %s',
                     build_id, platform, abi, sdk, repo, revision,
                     changeset)

    if build_type is None or sdk is None or nightly is None or \
            platform is None:
        build_mozinfo_json_url = build_prefix + '.mozinfo.json'
        build_mozinfo_json = get_remote_json(build_mozinfo_json_url)
        if build_mozinfo_json:
            if not build_type and 'debug' in build_mozinfo_json:
                build_type = 'debug' if build_mozinfo_json['debug'] else 'opt'
            if not sdk:
                if 'android_min_sdk' in build_mozinfo_json:
                    sdk = 'api-%s' % build_mozinfo_json['android_min_sdk']
                else:
                    if 'mozconfig' in build_mozinfo_json:
                        search = re_mozconfig_sdk.search(
                            build_mozinfo_json['mozconfig'])
                        if search:
                            sdk = search.group(1)
            if not platform:
                platform = build_mozinfo_json['os']
                if sdk:
                    platform += sdk
            if not nightly and 'nightly_build' in build_mozinfo_json:
                nightly = build_mozinfo_json['nightly_build']
            LOGGER.debug('get_build_data: mozinfo build_type: %s, sdk: %s, '
                         'nightly: %s', build_type, sdk, nightly)

    if not build_id or not changeset or not repo or not revision:
        build_id_tz = build_dates.PACIFIC
        build_txt = build_prefix + '.txt'
        content = get_remote_text(build_txt)
        if not content:
            return None

        lines = content.splitlines()
        if len(lines) < 1:
            return None

        buildid_regex = re.compile(r'([\d]{14})$')
        changeset_regex = re.compile(r'.*/([^/]*)/rev/(.*)')

        buildid_match = buildid_regex.match(lines[0])
        if len(lines) >= 2:
            changeset_match = changeset_regex.match(lines[1])
        else:
            LOGGER.warning('Unable to find revision in %s, results cannot '
                           'be uploaded to treeherder', build_url)
            changeset_match = changeset_regex.match('file://local/rev/local')
            lines.append('file://local/rev/local')
        if not buildid_match or not changeset_match:
            return None

        txt_build_id = lines[0]
        txt_changeset = lines[1]
        txt_repo = changeset_match.group(1)
        txt_revision = changeset_match.group(2)
        LOGGER.debug('get_build_data: txt build_id: %s, changeset: %s, '
                     'repo: %s, revision: %s',
                     txt_build_id, txt_changeset, txt_repo, txt_revision)

        formatstr, build_date = build_dates.parse_datetime(txt_build_id,
                                                           tz=build_id_tz)
        # Convert the buildid to UTC to match Taskcluster.
        txt_build_id = build_dates.convert_datetime_to_string(
            build_date, build_dates.BUILDID, tz=build_dates.UTC)

        if not build_id:
            build_id = txt_build_id
        elif build_id != txt_build_id:
            LOGGER.warning('get_build_data: build_id %s != txt_build_id %s',
                           build_id, txt_build_id)

        if not changeset:
            changeset = txt_changeset
        elif txt_changeset not in changeset:
            LOGGER.warning('get_build_data: txt_changeset %s not in '
                           'changeset %s', txt_changeset, changeset)

        if not repo:
            repo = txt_repo
        elif repo != txt_repo:
            LOGGER.warning('get_build_data: repo %s != txt_repo %s',
                           repo, txt_repo)

        if not revision:
            revision = txt_revision
        elif revision != txt_revision:
            LOGGER.warning('get_build_data: revision %s != txt_revision %s',
                           revision, txt_revision)

    platform = 'android'
    if abi == 'x86':
        platform += '-x86'
    if sdk:
        platform += '-' + sdk

    build_data = {
        'url': build_url,
        'id': build_id,
        'date': build_date.astimezone(build_dates.UTC),
        'changeset': changeset,
        'changeset_dirs': get_changeset_dirs(changeset),
        'revision': revision,
        'builder_type': builder_type,
        'repo': repo,
        'abi': abi,
        'sdk': sdk,
        'build_type': build_type,
        'nightly': nightly,
        'platform': platform,
    }
    LOGGER.debug('get_build_data: %s', build_data)
    return build_data
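
# Standalone illustration of how the re_platform regex above decomposes the
# platform segment of an ftp build url into (platform, abi, sdk, debug).
# The sample strings are hypothetical.
import re

EXAMPLE_RE_PLATFORM = re.compile(r'(android)-?(x86)?-?(api-\d+)?-?(debug)?')

for sample in ('android-api-15', 'android-x86', 'android-api-15-debug'):
    platform, abi, sdk, debug = EXAMPLE_RE_PLATFORM.match(sample).groups()
    # A missing abi group means an arm build; a missing debug group means opt.
    build_type = 'debug' if debug else 'opt'
    print('%s -> platform=%s abi=%s sdk=%s build_type=%s' %
          (sample, platform, abi or 'arm', sdk, build_type))
# android-api-15 -> platform=android abi=arm sdk=api-15 build_type=opt
# android-x86 -> platform=android abi=x86 sdk=None build_type=opt
# android-api-15-debug -> platform=android abi=arm sdk=api-15 build_type=debug
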
def get_build_data(build_url, builder_type='taskcluster'):
    """Return a dict containing information about a build.

    :param build_url: string containing url to the firefox build.
    :param builder_type: either 'buildbot' or 'taskcluster'.

    Returns None if the file does not exist or does not contain build
    data, otherwise returns a dict with keys:

       'url'            : url to build
       'id'             : CCYYMMDDHHMMSS string in UTC
       'date'           : build id as UTC datetime
       'changeset'      : full url to changeset
       'changeset_dirs' : set of directories where changes occurred
       'revision'       : revision
       'builder_type'   : either 'buildbot' or 'taskcluster'
       'repo'           : repository name
       'abi'            : 'arm' or 'x86'
       'sdk'            : 'api-<minimum sdk>' if known or None
       'build_type'     : 'opt' or 'debug'
       'nightly'        : True if this is a nightly build
       'platform'       : android, android-x86, android-<sdk>
    """
    logger = getLogger()
    logger.debug('get_build_data(%s, builder_type=%s)',
                 build_url, builder_type)

    re_taskcluster_build_url = re.compile(
        r'https://queue.taskcluster.net/v1/task/([^/]+)/runs/\d/'
        r'artifacts/public/build/')
    match = re_taskcluster_build_url.match(build_url)
    if match:
        task_id = match.group(1)
        logger.debug('get_build_data: taskId %s', task_id)
        task_definition = get_taskcluster_task_definition(task_id)
        build_data = get_build_data_from_taskcluster_task_definition(
            task_definition)
        if build_data:
            build_data['url'] = build_url
        return build_data

    if builder_type == 'taskcluster':
        build_id_tz = build_dates.UTC
    else:
        build_id_tz = build_dates.PACIFIC

    build_id = None
    changeset = None
    revision = None
    repo = None
    abi = None
    sdk = None
    build_type = None
    platform = None
    nightly = None

    # Parse the url for meta data if possible.
    re_platform = re.compile(r'(android)-?(x86)?-?(api-\d+)?-?(debug)?')
    re_mozconfig_sdk = re.compile(r'(api-\d+)')

    ftp_build = False
    re_tinderbox = re.compile(
        r'https?://ftp.mozilla.org/pub/mobile/tinderbox-builds/'
        r'(.*)-(android[^/]*)/\d+/fennec.*\.apk$')
    match_tinderbox = re_tinderbox.match(build_url)
    if match_tinderbox:
        ftp_build = True
        nightly = False
        (repo, platform_api_build_type) = match_tinderbox.groups()
        logger.debug('get_build_data: match_tinderbox: repo: %s, '
                     'platform_api_build_type: %s',
                     repo, platform_api_build_type)
    else:
        re_nightly = re.compile(
            r'https?://ftp.mozilla.org/pub/mobile/nightly/\d{4}/\d{2}/'
            r'\d{4}-\d{2}-\d{2}-\d{2}-\d{2}-\d{2}-(.*)-(android[^/]*)/'
            r'fennec.*\.apk$')
        match_nightly = re_nightly.match(build_url)
        if match_nightly:
            ftp_build = True
            nightly = True
            (repo, platform_api_build_type) = match_nightly.groups()
            logger.debug('get_build_data: match_nightly: repo: %s, '
                         'platform_api_build_type: %s',
                         repo, platform_api_build_type)

    if ftp_build:
        if builder_type == 'taskcluster':
            logger.error('get_build_data(%s, builder_type=%s) for ftp '
                         'build. Setting timezone to Pacific.',
                         build_url, builder_type)
            build_id_tz = build_dates.PACIFIC
        match_platform = re_platform.match(platform_api_build_type)
        if match_platform:
            (platform, abi, sdk, debug) = match_platform.groups()
            build_type = 'debug' if debug else 'opt'
            if not abi:
                abi = 'arm'
            elif abi == 'i386' or abi == 'i686':
                abi = 'x86'
            logger.debug('get_build_data: platform: %s, abi: %s, sdk: %s, '
                         'debug: %s', platform, abi, sdk, debug)

    build_prefix, build_ext = os.path.splitext(build_url)

    build_json_url = build_prefix + '.json'
    build_json = get_remote_json(build_json_url)
    if build_json:
        build_id = build_json['buildid']
        formatstr, build_date = build_dates.parse_datetime(build_id,
                                                           tz=build_id_tz)
        # Convert the buildid to UTC to match Taskcluster.
        build_id = build_dates.convert_datetime_to_string(
            build_date, build_dates.BUILDID, tz=build_dates.UTC)
        if not abi:
            abi = build_json['target_cpu']
            if abi == 'i386' or abi == 'i686':
                abi = 'x86'
        moz_source_repo = build_json['moz_source_repo'].replace(
            'MOZ_SOURCE_REPO=', '')
        repo = os.path.basename(moz_source_repo)
        revision = build_json['moz_source_stamp']
        changeset = moz_source_repo + '/rev/' + revision
        if not sdk and 'mozconfig' in build_json:
            search = re_mozconfig_sdk.search(build_json['mozconfig'])
            if search:
                sdk = search.group(1)
        logger.debug('get_build_data: build_json: build_id: %s, '
                     'platform: %s, abi: %s, sdk: %s, repo: %s, '
                     'revision: %s, changeset: %s',
                     build_id, platform, abi, sdk, repo, revision,
                     changeset)

    if build_type is None or sdk is None or nightly is None or \
            platform is None:
        build_mozinfo_json_url = build_prefix + '.mozinfo.json'
        build_mozinfo_json = get_remote_json(build_mozinfo_json_url)
        if build_mozinfo_json:
            if not build_type and 'debug' in build_mozinfo_json:
                build_type = 'debug' if build_mozinfo_json['debug'] else 'opt'
            if not sdk:
                if 'android_min_sdk' in build_mozinfo_json:
                    sdk = 'api-%s' % build_mozinfo_json['android_min_sdk']
                else:
                    if 'mozconfig' in build_mozinfo_json:
                        search = re_mozconfig_sdk.search(
                            build_mozinfo_json['mozconfig'])
                        if search:
                            sdk = search.group(1)
            if not platform:
                platform = build_mozinfo_json['os']
                if sdk:
                    platform += sdk
            if not nightly and 'nightly_build' in build_mozinfo_json:
                nightly = build_mozinfo_json['nightly_build']
            logger.debug('get_build_data: mozinfo build_type: %s, sdk: %s, '
                         'nightly: %s', build_type, sdk, nightly)

    if not build_id or not changeset or not repo or not revision:
        build_id_tz = build_dates.PACIFIC
        build_txt = build_prefix + '.txt'
        content = get_remote_text(build_txt)
        if not content:
            return None

        lines = content.splitlines()
        if len(lines) < 1:
            return None

        buildid_regex = re.compile(r'([\d]{14})$')
        changeset_regex = re.compile(r'.*/([^/]*)/rev/(.*)')

        buildid_match = buildid_regex.match(lines[0])
        if len(lines) >= 2:
            changeset_match = changeset_regex.match(lines[1])
        else:
            logger.warning('Unable to find revision in %s, results cannot '
                           'be uploaded to treeherder', build_url)
            changeset_match = changeset_regex.match('file://local/rev/local')
            lines.append('file://local/rev/local')
        if not buildid_match or not changeset_match:
            return None

        txt_build_id = lines[0]
        txt_changeset = lines[1]
        txt_repo = changeset_match.group(1)
        txt_revision = changeset_match.group(2)
        logger.debug('get_build_data: txt build_id: %s, changeset: %s, '
                     'repo: %s, revision: %s',
                     txt_build_id, txt_changeset, txt_repo, txt_revision)

        formatstr, build_date = build_dates.parse_datetime(txt_build_id,
                                                           tz=build_id_tz)
        # Convert the buildid to UTC to match Taskcluster.
        txt_build_id = build_dates.convert_datetime_to_string(
            build_date, build_dates.BUILDID, tz=build_dates.UTC)

        if not build_id:
            build_id = txt_build_id
        elif build_id != txt_build_id:
            logger.warning('get_build_data: build_id %s != txt_build_id %s',
                           build_id, txt_build_id)

        if not changeset:
            changeset = txt_changeset
        elif txt_changeset not in changeset:
            logger.warning('get_build_data: txt_changeset %s not in '
                           'changeset %s', txt_changeset, changeset)

        if not repo:
            repo = txt_repo
        elif repo != txt_repo:
            logger.warning('get_build_data: repo %s != txt_repo %s',
                           repo, txt_repo)

        if not revision:
            revision = txt_revision
        elif revision != txt_revision:
            logger.warning('get_build_data: revision %s != txt_revision %s',
                           revision, txt_revision)

    platform = 'android'
    if abi == 'x86':
        platform += '-x86'
    if sdk:
        platform += '-' + sdk

    build_data = {
        'url': build_url,
        'id': build_id,
        'date': build_date.astimezone(build_dates.UTC),
        'changeset': changeset,
        'changeset_dirs': get_changeset_dirs(changeset),
        'revision': revision,
        'builder_type': builder_type,
        'repo': repo,
        'abi': abi,
        'sdk': sdk,
        'build_type': build_type,
        'nightly': nightly,
        'platform': platform,
    }
    logger.debug('get_build_data: %s', build_data)
    return build_data
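
# Standalone illustration of the taskcluster url shortcut above: when the
# build url matches the queue.taskcluster.net pattern, the taskId is pulled
# straight from the url instead of fetching the *.json/*.txt metadata files.
# The url below is a made-up example.
import re

EXAMPLE_RE_TC_URL = re.compile(
    r'https://queue.taskcluster.net/v1/task/([^/]+)/runs/\d/'
    r'artifacts/public/build/')

example_url = ('https://queue.taskcluster.net/v1/task/abc123XYZ/runs/0/'
               'artifacts/public/build/fennec.apk')
example_match = EXAMPLE_RE_TC_URL.match(example_url)
if example_match:
    print('taskId: %s' % example_match.group(1))  # taskId: abc123XYZ
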
def get_build_data_from_taskcluster_task_definition(task_definition):
    logger = getLogger()
    # 1 = project/repo, 2 = pushdate CCYYMMDDHHMMSS,
    # 3 = platform, 4 = build_type
    re_route_pushdate = re.compile(
        r'index\.gecko\.v2\.([^.]+)\.pushdate\.\d{4}\.\d{2}\.\d{2}'
        r'\.(\d{14})\.mobile\.(android.*)-(opt|debug)')
    # 1 = project/repo, 2 = revision, 3 = platform, 4 = build_type
    re_route_revision = re.compile(
        r'index\.gecko\.v2\.([^.]+)\.revision\.([^.]+)'
        r'\.mobile\.(android.*)-(opt|debug)')
    # 1 = api, 2 = custom
    re_platform = re.compile(r'android-(x86|api-\d+)-?(\w+)?')

    repo = None
    pushdate = None
    revision = None
    platform = None
    build_type = None
    success = False
    for route in task_definition['routes']:
        match = re_route_pushdate.match(route)
        if match:
            (repo, pushdate, platform, build_type) = match.groups()
        else:
            match = re_route_revision.match(route)
            if match:
                (repo, revision, platform, build_type) = match.groups()
        if repo and pushdate and revision and platform and build_type:
            success = True
            break

    logger.debug('get_build_data_from_taskcluster_task_definition: '
                 '%s, %s, %s, %s, %s',
                 repo, pushdate, revision, platform, build_type)

    if not success:
        return None

    formatstr, build_date = build_dates.parse_datetime(pushdate,
                                                       tz=build_dates.UTC)

    match = re_platform.match(platform)
    if not match:
        logger.debug('get_build_data_from_taskcluster_task_definition: '
                     'failed to match platform %s', platform)
        return None
    (api, extra) = match.groups()
    if api == 'x86':
        api = 'api-16'
        abi = 'x86'
    else:
        abi = 'arm'

    changeset = builds.REPO_URLS[repo] + 'rev/' + revision

    build_data = {
        'url': None,
        'id': pushdate,
        'date': build_date,
        'changeset': changeset,
        'changeset_dirs': get_changeset_dirs(changeset),
        'revision': revision,
        'builder_type': 'taskcluster',
        'repo': repo,
        'abi': abi,
        'sdk': api,
        'build_type': build_type,
        'nightly': False,
        'platform': platform,
    }
    logger.debug('get_build_data_from_taskcluster_task_definition: %s',
                 build_data)
    return build_data
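
# Standalone sketch of the route parsing above: taskcluster index routes
# encode repo, pushdate, revision, platform and build type, and the function
# needs one pushdate route plus one revision route before it succeeds.
# The routes below are fabricated examples in that format.
import re

EXAMPLE_RE_PUSHDATE = re.compile(
    r'index\.gecko\.v2\.([^.]+)\.pushdate\.\d{4}\.\d{2}\.\d{2}'
    r'\.(\d{14})\.mobile\.(android.*)-(opt|debug)')
EXAMPLE_RE_REVISION = re.compile(
    r'index\.gecko\.v2\.([^.]+)\.revision\.([^.]+)'
    r'\.mobile\.(android.*)-(opt|debug)')

example_routes = [
    'index.gecko.v2.mozilla-central.pushdate.2017.01.15.20170115030204'
    '.mobile.android-api-15-opt',
    'index.gecko.v2.mozilla-central.revision.0123456789ab'
    '.mobile.android-api-15-opt',
]
for route in example_routes:
    for regex in (EXAMPLE_RE_PUSHDATE, EXAMPLE_RE_REVISION):
        match = regex.match(route)
        if match:
            print('%s -> %s' % (route, match.groups()))
# The pushdate route yields (repo, pushdate, platform, build_type); the
# revision route yields (repo, revision, platform, build_type).
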
matching_builds = []
if options.build_url:
    logger.debug('cache.find_builds_by_directory(%s)', options.build_url)
    matching_builds = cache.find_builds_by_directory(options.build_url)
elif not args:
    logger.debug('cache.find_builds_by_revision(%s, %s, %s)',
                 options.first_revision, options.last_revision,
                 options.build_location)
    matching_builds = cache.find_builds_by_revision(
        options.first_revision, options.last_revision,
        options.build_location)
elif args[0] == 'latest':
    logger.debug('cache.find_latest_builds(%s)', options.build_location)
    matching_builds = cache.find_latest_builds(options.build_location)
else:
    date_format, start_time = build_dates.parse_datetime(args[0])
    if date_format == build_dates.BUILDID:
        end_time = start_time
    else:
        if len(args) > 1:
            date_format, end_time = build_dates.parse_datetime(args[1])
        else:
            end_time = datetime.datetime.now(tz=PACIFIC)
    logger.debug('cache.find_builds_by_time(%s, %s, %s)',
                 start_time, end_time, options.build_location)
    matching_builds = cache.find_builds_by_time(
        start_time, end_time, options.build_location)
if not matching_builds:
    return 1
commands = [command_str(b, options.test_names, options.devices)