def build_time_from_directory_name(self, directory_name):
    logger.debug('Tinderbox:build_time_from_directory_name(%s)' %
                 directory_name)
    # Tinderbox build directories are named by their POSIX build
    # timestamps; any directory whose name is not an integer is not a
    # build directory.
    try:
        build_time = convert_timestamp_to_date(int(directory_name))
    except ValueError:
        build_time = None
    return build_time
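# Illustrative example (not from the source): assuming
# convert_timestamp_to_date wraps datetime.datetime.fromtimestamp, a
# Tinderbox directory named "1415850747" maps to a datetime in
# November 2014, while a non-numeric name such as "latest" yields None:
#
#     self.build_time_from_directory_name('1415850747')  # -> datetime(2014, 11, ...)
#     self.build_time_from_directory_name('latest')      # -> None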
def find_builds_by_revision(self, first_revision, last_revision):
    logger.debug('Finding builds between revisions %s and %s' %
                 (first_revision, last_revision))
    # Pad the search window, since a build can be uploaded some time
    # after its revision was pushed. (Renamed from `range` to avoid
    # shadowing the builtin.)
    search_range = datetime.timedelta(hours=12)
    buildid_regex = re.compile(r'([\d]{14})$')
    builds = []
    for repo in self.repos:
        first_timestamp, last_timestamp = get_revision_timestamps(
            repo, first_revision, last_revision)
        first_datetime = convert_timestamp_to_date(first_timestamp)
        last_datetime = convert_timestamp_to_date(last_timestamp)
        logger.debug('find_builds_by_revision: repo: %s, '
                     'first_revision: %s, first_datetime: %s, '
                     'last_revision: %s, last_datetime: %s' % (
                         repo, first_revision, first_datetime,
                         last_revision, last_datetime))
        if not first_datetime or not last_datetime:
            continue
        for search_directory_repo, search_directory in \
                self.get_search_directories_by_time(first_datetime,
                                                    last_datetime):
            # search_directory_repo is not None for Tinderbox builds and
            # can be used to filter the search directories.
            logger.debug('find_builds_by_revision: checking repo: %s, '
                         'search_directory_repo: %s, '
                         'search_directory: %s...' %
                         (repo, search_directory_repo, search_directory))
            if search_directory_repo and search_directory_repo != repo:
                logger.info('find_builds_by_revision: skipping repo %s, '
                            'search_directory_repo: %s, '
                            'search_directory: %s' %
                            (repo, search_directory_repo, search_directory))
                continue
            first_link_format = None
            datetimestamps = []
            for link in url_links(search_directory):
                try:
                    datetimestring = link.get('href').strip('/')
                    if (self.does_build_directory_contain_repo_name() and
                            repo not in datetimestring):
                        logger.info('find_builds_by_revision: '
                                    'skipping datetimestring: repo: %s, '
                                    'datetimestring: %s' %
                                    (repo, datetimestring))
                        continue
                    logger.debug('find_builds_by_revision: '
                                 'datetimestring: %s' % datetimestring)
                    link_format, link_datetime = parse_datetime(datetimestring)
                    if not first_link_format:
                        # Remember the datetime format of the first
                        # parseable link.
                        first_link_format = link_format
                    logger.debug('find_builds_by_revision: link_format: %s, '
                                 'link_datetime: %s' %
                                 (link_format, link_datetime))
                    if (first_datetime - search_range < link_datetime <
                            last_datetime + search_range):
                        datetimestamps.append(set_time_zone(link_datetime))
                except ValueError:
                    # The link was not a datetime-named build directory.
                    pass
            total_datetimestamps = len(datetimestamps)
            datetimestamps = sorted(set(datetimestamps))
            unique_datetimestamps = len(datetimestamps)
            logger.debug('find_builds_by_revision: '
                         'total_datetimestamps=%d, '
                         'unique_datetimestamps=%d' %
                         (total_datetimestamps, unique_datetimestamps))
            logger.debug('find_builds_by_revision: datetimestamps: %s' %
                         datetimestamps)
            start_time = None
            end_time = None
            for datetimestamp in datetimestamps:
                for directory_repo, directory_name in \
                        self.directory_names_from_datetimestamp(datetimestamp):
                    # Since Autophone requires returning builds for each
                    # of its supported platforms, arm, armv6 or x86, we
                    # need to search each to get all of the builds. That
                    # is why we don't terminate this loop when we find
                    # the first build which matches the ending revision.
                    logger.debug('find_builds_by_revision: '
                                 'datetimestamp: %s, repo: %s, '
                                 'search_directory_repo: %s, '
                                 'search_directory: %s, '
                                 'directory_repo: %s, '
                                 'directory_name: %s' % (
                                     datetimestamp, repo,
                                     search_directory_repo,
                                     search_directory, directory_repo,
                                     directory_name))
                    try:
                        links = url_links("%s%s/" % (search_directory,
                                                     directory_name))
                    except urllib2.HTTPError:
                        continue
                    for link in links:
                        href = link.get('href')
                        match = self.buildtxt_regex.match(href)
                        if match:
                            txturl = "%s%s/%s" % (search_directory,
                                                  directory_name, href)
                            build_url = "%s%s/%s%s" % (search_directory,
                                                       directory_name,
                                                       match.group(1),
                                                       self.buildfile_ext)
                            logger.debug('find_builds_by_revision: '
                                         'found build: datetimestamp: %s, '
                                         'repo: %s, '
                                         'search_directory_repo: %s, '
                                         'search_directory: %s, '
                                         'directory_repo: %s, '
                                         'directory_name: %s, build: %s' %
                                         (datetimestamp, repo,
                                          search_directory_repo,
                                          search_directory, directory_repo,
                                          directory_name, build_url))
                            # The build .txt file names the buildid on
                            # its first line and the repo url/rev/changeset
                            # on its second.
                            contents = urllib2.urlopen(txturl).read()
                            lines = contents.splitlines()
                            if len(lines) > 1 and buildid_regex.match(lines[0]):
                                buildid = lines[0]
                                parts = lines[1].split('rev/')
                                if len(parts) == 2:
                                    if repo != urls_repos[parts[0]]:
                                        logger.info(
                                            'find_builds_by_revision: '
                                            'skipping build: %s != %s' %
                                            (repo, urls_repos[parts[0]]))
                                        continue
                                    revision = parts[1]
                                    if revision.startswith(first_revision):
                                        start_time = convert_buildid_to_date(
                                            buildid)
                                    elif revision.startswith(last_revision):
                                        end_time = convert_buildid_to_date(
                                            buildid)
                                    if start_time:
                                        builds.append(build_url)
                            # Found this directory's build .txt link;
                            # stop scanning its remaining links.
                            break
                if end_time:
                    # The build matching the ending revision has been
                    # seen; later datetimestamps cannot contain it.
                    break
    return builds
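# Usage sketch (hypothetical names; assumes a concrete build-location
# subclass that supplies repos, get_search_directories_by_time,
# directory_names_from_datetimestamp, buildtxt_regex and buildfile_ext):
#
#     locator = TinderboxBuilds(repos=['mozilla-central'])  # hypothetical ctor
#     for build_url in locator.find_builds_by_revision('abc123def456',
#                                                      '456fed321cba'):
#         print build_url  # one build per supported platform (arm, armv6, x86)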