Example #1
def main(args):
    compare_to_client = TreeherderClient(server_url=HOSTS[args.host])
    production_client = TreeherderClient(server_url=HOSTS["production"])

    # Support comma separated projects
    projects = args.projects.split(',')
    for _project in projects:
        logger.info("Comparing {} against production.".format(_project))
        # Remove properties that are irrelevant for the comparison
        pushes = compare_to_client.get_pushes(_project, count=50)
        for _push in sorted(pushes, key=lambda push: push["revision"]):
            del _push["id"]
            for _rev in _push["revisions"]:
                del _rev["result_set_id"]

        production_pushes = production_client.get_pushes(_project, count=50)
        for _push in sorted(production_pushes, key=lambda push: push["revision"]):
            del _push["id"]
            for _rev in _push["revisions"]:
                del _rev["result_set_id"]

        for index in range(0, len(pushes)):
            assert pushes[index]["revision"] == production_pushes[index]["revision"]
            difference = DeepDiff(pushes[index], production_pushes[index])
            if difference:
                logger.info(difference.to_json())
                logger.info("{}/#/jobs?repo={}&revision={}".format(
                            compare_to_client.server_url,
                            _project,
                            pushes[index]["revision"]))
                logger.info("{}/#/jobs?repo={}&revision={}".format(
                            production_client.server_url,
                            _project,
                            production_pushes[index]["revision"]))
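The example assumes a module-level HOSTS mapping, a logger, and the thclient/deepdiff imports. A minimal sketch of that wiring, with the HOSTS keys and the localhost URL as assumptions rather than values from the original, could look like:

import argparse
import logging

from deepdiff import DeepDiff
from thclient import TreeherderClient

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)

# Hypothetical host mapping; only "production" is a known public instance.
HOSTS = {
    "localhost": "http://localhost:8000",
    "production": "https://treeherder.mozilla.org",
}

if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    parser.add_argument("--host", default="localhost")
    parser.add_argument("--projects", default="mozilla-central")
    main(parser.parse_args())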
Example #2
        "Compare a push from a Treeherder instance to the production instance."
    )
    parser.add_argument(
        "--host", default="localhost", help="Host to compare. It defaults to localhost"
    )
    parser.add_argument("--revision", required=True, help="Revision to compare")
    parser.add_argument(
        "--project",
        default="mozilla-central",
        help="Project to compare. It defaults to mozilla-central",
    )

    args = parser.parse_args()

    th_instance = TreeherderClient(server_url=HOSTS[args.host])
    th_instance_pushid = th_instance.get_pushes(args.project, revision=args.revision)[0]["id"]
    th_instance_jobs = (
        th_instance.get_jobs(args.project, push_id=th_instance_pushid, count=None) or []
    )

    production = TreeherderClient(server_url=HOSTS["production"])
    production_pushid = production.get_pushes(args.project, revision=args.revision)[0]["id"]
    production_jobs = production.get_jobs(args.project, push_id=production_pushid, count=None)

    production_dict = {}
    for job in production_jobs:
        production_dict[job["job_guid"]] = job

    th_instance_dict = {}
    th_instance_not_found = []
    for job in th_instance_jobs:
Example #3
def retrieve_test_logs(repo, revision, platform='linux64',
                       cache_dir=None, use_cache=True,
                       warning_re=WARNING_RE):
    """
    Retrieves and processes the test logs for the given revision.

    Returns list of processed files.
    """
    if not cache_dir:
        cache_dir = "%s-%s-%s" % (repo, revision, platform)

    cache = logspam.cache.Cache(cache_dir, warning_re)

    cache_dir_exists = os.path.isdir(cache_dir)
    if cache_dir_exists and use_cache:
        # We already have logs for this revision.
        print "Using cached data"
        try:
            return cache.read_results()
        except logspam.cache.CacheFileNotFoundException as e:
            print "Cache file for %s not found" % warning_re
            print e

    client = TreeherderClient()
    print "getting result set"
    pushes = client.get_pushes(repo, revision=revision)
    print "pushes = client.get_pushes('%s', revision='%s')" % (repo, revision)
    print "got pushes"
    if not pushes:
        print "Failed to find %s in %s" % (revision, repo)
        return None

    print "getting jobs"
    for x in range(5):
        try:
            # option_collection_hash is just the convoluted way of specifying
            # we want a debug build.
            print "jobs = client.get_jobs('%s',result_set_id=%d, count=5000, platform='%s', option_collection_hash='%s')" % (
                    repo, pushes[0]['id'], platform, DEBUG_OPTIONHASH)
            jobs = client.get_jobs(repo,
                                   result_set_id=pushes[0]['id'],
                                   count=5000, # Just make this really large to avoid pagination
                                   platform=platform,
                                   option_collection_hash=DEBUG_OPTIONHASH,
                                   state='completed')
            break
        except requests.exceptions.ConnectionError:
            pass

    if not jobs:
        print "No jobs found for %s %s" % (revision, platform)
        import traceback
        traceback.print_exc()
        return None

    print "got jobs"

    print "getting %d job log urls" % len(jobs)
    job_ids = [ job['id'] for job in jobs ]
    print job_ids
    job_logs = []
    for x in range(5):
        logs = []
        try:
            # Fetch log URLs in batches of 100 job ids to keep each request small.
            for y in range(0, len(job_ids), 100):
                logs = logs + client.get_job_log_url(repo, job_id=job_ids[y:y+100])
            job_logs = logs
            break
        except requests.exceptions.ConnectionError:
            pass
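A short usage sketch for retrieve_test_logs; the revision hash below is an illustrative placeholder, and WARNING_RE plus the logspam cache module are assumed to be available as in the snippet:

# Hypothetical call; with no cache_dir the function derives one as
# "<repo>-<revision>-<platform>" and caches parsed results there.
results = retrieve_test_logs('mozilla-central', '0123456789ab',
                             platform='linux64', use_cache=True)
if results is None:
    raise SystemExit('no pushes or jobs found for that revision')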
Example #4
class Treeherder(object):
    """Wrapper class for TreeherderClient to ease the use of its API."""

    def __init__(self, application, branch, platform, server_url=TREEHERDER_URL):
        """Create a new instance of the Treeherder class.

        :param application: The name of the application to download.
        :param branch: Name of the branch.
        :param platform: Platform of the application.
        :param server_url: The URL of the Treeherder instance to access.
        """
        self.logger = logging.getLogger(__name__)

        self.client = TreeherderClient(server_url=server_url)
        self.application = application
        self.branch = branch
        self.platform = platform

    def get_treeherder_platform(self, platform):
        """Return the internal Treeherder platform identifier.

        :param platform: Platform of the application.
        """
        try:
            return PLATFORM_MAP[platform]
        except KeyError:
            raise NotSupportedError('Platform "{}" is not supported.'.format(platform))

    def query_builds_by_revision(self, revision, job_type_name='Build', debug_build=False):
        """Retrieve build folders for a given revision with the help of Treeherder.

        :param revision: Revision of the build to download.
        :param job_type_name: Name of the job to look for. For builds it should be
            'Build', 'Nightly', or 'L10n Nightly'. Defaults to 'Build'.
        :param debug_build: Download a debug build.
        """
        builds = set()

        try:
            self.logger.info('Querying {url} for list of builds for revision: {revision}'.format(
                             url=self.client.server_url, revision=revision))

            # Retrieve the option hash to filter for type of build (opt, and debug for now)
            option_hash = None
            for key, values in self.client.get_option_collection_hash().iteritems():
                for value in values:
                    if value['name'] == ('debug' if debug_build else 'opt'):
                        option_hash = key
                        break
                if option_hash:
                    break

            resultsets = self.client.get_pushes(self.branch, revision=revision)

            # Set filters to speed up querying jobs
            kwargs = {
                'option_collection_hash': option_hash,
                'job_type_name': job_type_name,
                'exclusion_profile': False,
            }
            kwargs.update(self.get_treeherder_platform(self.platform))

            for resultset in resultsets:
                kwargs.update({'result_set_id': resultset['id']})
                jobs = self.client.get_jobs(self.branch, **kwargs)
                for job in jobs:
                    log_urls = self.client.get_job_log_url(self.branch, job_id=job['id'])
                    for log_url in log_urls:
                        if self.application in log_url['url']:
                            self.logger.debug('Found build folder: {}'.format(log_url['url']))
                            builds.update([log_url['url']])

        except Exception:
            self.logger.exception('Failure occurred when querying Treeherder for builds')

        return list(builds)
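A hedged sketch of driving this wrapper; the application, branch, and revision values are illustrative, and PLATFORM_MAP plus TREEHERDER_URL are assumed to be defined in the surrounding module as the class suggests:

# Hypothetical usage: collect candidate build folders for one revision.
th = Treeherder('firefox', 'mozilla-central', 'linux64')
for url in th.query_builds_by_revision('0123456789ab', job_type_name='Nightly'):
    th.logger.info('candidate build folder: {}'.format(url))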
Example #5
class GetBuild(object):
    ARCHIVE_URL = "https://archive.mozilla.org"
    NIGHTLY_LATEST_URL_FOLDER = "/pub/firefox/nightly/latest-mozilla-central/"
    PLATFORM_FN_MAPPING = {
        'linux32': {
            'key': 'linux-i686',
            'ext': 'tar.bz2',
            'trydl': 'linux',
            'job': ['linux32']
        },
        'linux64': {
            'key': 'linux-x86_64',
            'ext': 'tar.bz2',
            'trydl': 'linux64',
            'job': ['linux64']
        },
        'mac': {
            'key': 'mac',
            'ext': 'dmg',
            'trydl': 'macosx64',
            'job': ['osx']
        },
        'win32': {
            'key': 'win32',
            'ext': 'zip',
            'trydl': 'win32',
            'job': ['windows', '32']
        },
        'win64': {
            'key': 'win64',
            'ext': 'zip',
            'trydl': 'win64',
            'job': ['windows', '64']
        }
    }

    def __init__(self, repo, platform, status_check):
        self.repo = repo
        self.platform = platform
        self.platform_option = 'opt'
        self.pushes = []
        self.skip_status_check = status_check
        self.thclient = TreeherderClient()

    def fetch_push(self, user_email, build_hash, default_count=500):
        tmp_pushes = self.thclient.get_pushes(self.repo, count=default_count)
        for push in tmp_pushes:
            if push['author'].lower() == user_email.lower():
                self.pushes.append(push)
                if build_hash is None:
                    return push
                elif push['revision'] == build_hash:
                    return push
        print "Can't find the specify build hash [%s] in resultsets!!" % build_hash
        return None

    def get_job(self, resultset, platform_keyword_list):
        jobs = self.thclient.get_jobs(self.repo, result_set_id=resultset['id'])
        for job in jobs:
            cnt = 0
            for platform_keyword in platform_keyword_list:
                if platform_keyword in job['platform']:
                    cnt += 1
            if job['platform_option'] == self.platform_option and cnt == len(
                    platform_keyword_list):
                return job
        print "Can't find the specify platform [%s] and platform_options [%s] in jobs!!!" % (
            self.platform, self.platform_option)
        return None

    def get_files_from_remote_url_folder(self, remote_url_str):
        return_dict = {}
        try:
            response_obj = urllib2.urlopen(remote_url_str)
            if response_obj.getcode() == 200:
                for line in response_obj.readlines():
                    match = re.search(r'(?<=href=").*?(?=")', line)
                    if match:
                        href_link = match.group(0)
                        f_name = href_link.split("/")[-1]
                        return_dict[f_name] = href_link
            else:
                print "ERROR: fetch remote file list error with code [%s]" % str(
                    response_obj.getcode())
        except Exception as e:
            print "ERROR: [%s]" % e.message
        return return_dict

    def download_file(self, output_dp, download_link):
        print "Prepare to download the build from link [%s]" % download_link
        response = requests.get(download_link, verify=False, stream=True)
        download_fn = download_link.split("/")[-1]
        if os.path.exists(output_dp) is False:
            os.makedirs(output_dp)
        download_fp = os.path.join(output_dp, download_fn)
        try:
            try:
                total_len = int(response.headers['content-length'])
            except (KeyError, ValueError):
                total_len = None
            chunk_size = 512 * 1024
            # tqdm accepts total=None when the content length is unknown.
            total_chunks = total_len / chunk_size if total_len else None
            with open(download_fp, 'wb') as fh:
                for data in tqdm(response.iter_content(chunk_size=chunk_size),
                                 total=total_chunks):
                    fh.write(data)
            return download_fp
        except Exception as e:
            print "ERROR: [%s]" % e.message
            return None

    def download_from_remote_url_folder(self, remote_url_str, output_dp):
        # get latest nightly build list from remote url folder
        remote_file_dict = self.get_files_from_remote_url_folder(
            remote_url_str)

        # filter by platform, and return the file name with its extension
        if len(remote_file_dict.keys()) == 0:
            print "ERROR: can't get the remote file list; could be a network error, or the url path [%s] may be wrong!" % remote_url_str
            return False
        else:
            if self.platform not in self.PLATFORM_FN_MAPPING:
                print "ERROR: the platform [%s] you specified is not currently supported!" % self.platform
                print "Currently supported platform tags: [%s]" % self.PLATFORM_FN_MAPPING.keys()
                return False
            else:
                matched_keyword = self.PLATFORM_FN_MAPPING[
                    self.platform]['key'] + "." + self.PLATFORM_FN_MAPPING[
                        self.platform]['ext']
                matched_file_list = [
                    fn for fn in remote_file_dict.keys()
                    if matched_keyword in fn and "firefox" in fn
                    and not fn.endswith('.asc')  # skip detached signature files
                ]
                if len(matched_file_list) != 1:
                    print "WARN: the possible match file list is not equal 1, list as below: [%s]" % matched_file_list
                    if len(matched_file_list) < 1:
                        return False
                    matched_file_list = sorted(matched_file_list)[-1:]
                    print "WARN: select following file [%s]" % matched_file_list

        # combine file name with json
        matched_file_name = matched_file_list[0]
        json_file_name = matched_file_name.replace(
            self.PLATFORM_FN_MAPPING[self.platform]['key'] + "." +
            self.PLATFORM_FN_MAPPING[self.platform]['ext'],
            self.PLATFORM_FN_MAPPING[self.platform]['key'] + ".json")
        if json_file_name not in remote_file_dict:
            print "ERROR: can't find the json file[%s] in remote file list[%s]!" % (
                json_file_name, remote_file_dict)
            return False
        else:
            print "DEBUG: matched file name: [%s], json_file_name: [%s]" % (
                matched_file_name, json_file_name)

        # download files
        download_fx_url = self.ARCHIVE_URL + remote_file_dict[matched_file_name]
        download_fx_fp = self.download_file(output_dp, download_fx_url)
        download_json_url = self.ARCHIVE_URL + remote_file_dict[json_file_name]
        download_json_fp = self.download_file(output_dp, download_json_url)

        # check download status
        if download_fx_fp and download_json_fp:
            print "SUCCESS: build files download in [%s], [%s] " % (
                download_fx_fp, download_json_fp)
            return True
        else:
            print "ERROR: build files download in [%s,%s] " % (
                download_fx_fp, download_json_fp)
            return False

    def get_try_build(self, user_email, build_hash, output_dp):
        resultset = self.fetch_push(user_email, build_hash)

        # check result set
        if resultset:
            # if the build hash is not provided, use the latest revision as the build hash value
            if build_hash is None:
                build_hash = resultset['revision']
            print "Result set found; the build hash is [%s]" % build_hash

            # compose remote folder url
            build_folder_url_template = "%s/pub/firefox/%s-builds/%s-%s/%s-%s/"
            build_folder_url = build_folder_url_template % (
                self.ARCHIVE_URL, self.repo, user_email, build_hash, self.repo,
                self.PLATFORM_FN_MAPPING[self.platform]['trydl'])

            # skipping the status check retrieves the file list straight from the remote folder url
            if self.skip_status_check:
                return self.download_from_remote_url_folder(
                    build_folder_url, output_dp)
            else:
                job = self.get_job(
                    resultset, self.PLATFORM_FN_MAPPING[self.platform]['job'])
                if job:
                    if job['result'].lower() == "success":
                        return self.download_from_remote_url_folder(
                            build_folder_url, output_dp)
                    else:
                        "Current job status is [%s] !!" % job['result'].lower()
                        return False
                else:
                    print "ERROR: can't find the job!"
                    return False
        else:
            print "ERROR: can't get result set! skip download build from try server, [%s, %s]" % (
                user_email, build_hash)
            return False

    def get_nightly_build(self, output_dp):
        remote_url_str = self.ARCHIVE_URL + self.NIGHTLY_LATEST_URL_FOLDER
        return self.download_from_remote_url_folder(remote_url_str, output_dp)
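A sketch of using GetBuild for the two download paths it supports; the output directory and email are placeholders. Note that the status_check constructor argument is stored as skip_status_check, so passing True skips the job-status check:

# Hypothetical usage: fetch the latest nightly, then a try build by author.
getter = GetBuild(repo='try', platform='linux64', status_check=True)
getter.get_nightly_build('/tmp/builds')
getter.get_try_build('someone@example.com', None, '/tmp/builds')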
Example #6
parser = argparse.ArgumentParser()
parser.add_argument("-v", "--version",
                    help="test against Thunderbird version",
                    default=None,
                    required=True)
args = parser.parse_args()

tb_version = args.version
tb_branch = branches[tb_version]

with open("testapps.json", "r") as jf:
    data = json.load(jf)

nightly_data = data[tb_version]

pushes = client.get_pushes(tb_branch)  # gets the last 10 pushes by default
for platform in nightly_data:
    platform_data = nightly_data[platform]
    found_artifacts = False
    platform_data['testzip'] = \
        platform_data['testzip'].replace('.zip', '').replace('.tar.gz', '')

    for push in pushes:
        jobs = client.get_jobs(tb_branch, push_id=push['id'])

        for job in jobs:
            logging.debug(job['job_type_name'])
            if (
                    job['state'] == 'completed' and
                    job['job_type_name'] ==
                    mapping_builds[tb_version][platform]
Example #7
class TriggerBuild(object):
    ARCHIVE_URL = "https://archive.mozilla.org"
    NIGHTLY_LATEST_URL_FOLDER = "/pub/firefox/nightly/latest-mozilla-central/"
    PLATFORM_FN_MAPPING = {
        'linux32': {
            'key': 'linux-i686',
            'ext': 'tar.bz2',
            'trydl': 'linux',
            'job': ['linux32']
        },
        'linux64': {
            'key': 'linux-x86_64',
            'ext': 'tar.bz2',
            'trydl': 'linux64',
            'job': ['linux64']
        },
        'mac': {
            'key': 'mac',
            'ext': 'dmg',
            'trydl': 'macosx64',
            'job': ['osx']
        },
        'win32': {
            'key': 'win32',
            'ext': 'zip',
            'trydl': 'win32',
            'job': ['windows', '32']
        },
        'win64': {
            'key': 'win64',
            'ext': 'zip',
            'trydl': 'win64',
            'job': ['windows', '64']
        }
    }
    ENV_KEY_TRY_REPO_USER_EMAIL = "EMAIL"
    ENV_KEY_ENABLE_WIN32 = "WIN32_FLAG"
    ENV_KEY_SKIP_STATUS_CHECK = "SKIP_STATUS_CHECK"
    ENV_KEY_OUTPUT_DP = "OUTPUT_DP"
    ENV_KEY_BUILD_HASH = "BUILD_HASH"
    ENV_KEY_BUILD_TAG = "BUILD_TAG"
    REPO_NAME = {'TRY': "try", "NIGHTLY": "nightly"}
    DEFAULT_AGENT_CONF_DIR_LINUX = "/home/hasal/Hasal/agent"
    DEFAULT_AGENT_CONF_DIR_MAC = "/Users/hasal/Hasal/agent"
    DEFAULT_AGENT_CONF_DIR_WIN = "C:\\Users\\user\\Hasal\\agent"
    DEFAULT_AGENT_STATUS_DIR = "agent_status"
    DEFAULT_AGENT_JOB_STATUS = {
        'BEGIN': 'begin',
        'FINISH': 'finish',
        'EXCEPTION': 'exception'
    }
    DEFAULT_AGENT_JOB_WATCH_TIMEOUT = 180

    def __init__(self, input_env_data):
        self.platform_option = 'opt'
        self.thclient = TreeherderClient()
        self.pushes = []
        self.env_data = {
            key.upper(): value
            for key, value in input_env_data.items()
        }
        self.dispatch_variables(self.env_data)

    def dispatch_variables(self, input_env_data):
        # if the user email is not in the environment data, the repo falls back to nightly
        if self.ENV_KEY_TRY_REPO_USER_EMAIL in input_env_data.keys():
            self.user_email = input_env_data[self.ENV_KEY_TRY_REPO_USER_EMAIL]
            self.repo = self.REPO_NAME['TRY']
        else:
            self.repo = self.REPO_NAME['NIGHTLY']

        # check the current platform; on Windows, double-check whether the win32 flag is enabled
        if sys.platform == "linux2":
            self.platform = "linux64"
        elif sys.platform == "darwin":
            self.platform = "mac"
        else:
            if self.ENV_KEY_ENABLE_WIN32 in input_env_data.keys(
            ) and input_env_data[self.ENV_KEY_ENABLE_WIN32] == 'true':
                self.platform = "win32"
            else:
                self.platform = "win64"

        # assign skip status check to variable
        if self.ENV_KEY_SKIP_STATUS_CHECK in input_env_data.keys(
        ) and input_env_data[self.ENV_KEY_SKIP_STATUS_CHECK] == 'true':
            self.skip_status_check = True
        else:
            self.skip_status_check = False

        # assign build hash to variable
        if self.ENV_KEY_BUILD_HASH in input_env_data.keys():
            self.build_hash = input_env_data[self.ENV_KEY_BUILD_HASH]
        else:
            self.build_hash = None

        # assign output dp to variable
        if self.ENV_KEY_OUTPUT_DP in input_env_data.keys():
            self.output_dp = input_env_data[self.ENV_KEY_OUTPUT_DP]
        else:
            self.output_dp = os.getcwd()

        # assign the build tag to a variable
        if self.ENV_KEY_BUILD_TAG in input_env_data.keys():
            self.jenkins_build_tag = input_env_data[self.ENV_KEY_BUILD_TAG]
        else:
            self.jenkins_build_tag = "jenkins-unknown-0"
        self.HASAL_JSON_FN = str(self.jenkins_build_tag) + ".json"

    def check_agent_status(self):
        for i in range(0, self.DEFAULT_AGENT_JOB_WATCH_TIMEOUT):
            # extract job id from agent_status dir
            agent_status_dir_path = os.path.join(os.getcwd(),
                                                 self.DEFAULT_AGENT_STATUS_DIR)
            print "INFO: housekeeping the agent status folder [%s]" % agent_status_dir_path
            if not os.path.exists(agent_status_dir_path):
                os.mkdir(agent_status_dir_path)
            agent_status_file_list = os.listdir(agent_status_dir_path)
            print "DEBUG: current agent status file list [%s]" % agent_status_file_list

            # get latest agent id
            job_id_list = [
                os.path.splitext(id)[0] for id in agent_status_file_list
            ]
            job_id_list.sort(key=lambda x: int(x.rsplit('-', 1)[1]))
            if len(job_id_list) > 0:
                current_id = job_id_list[-1]
            else:
                current_id = 0

            # get latest agent status
            # statuses sort alphabetically, so the last file is the latest status
            job_status_list = [
                os.path.splitext(status)[1].split(os.path.extsep)[1]
                for status in agent_status_file_list
                if os.path.splitext(status)[0] == str(current_id)
            ]
            job_status_list.sort()
            if len(job_status_list) > 0:
                current_job_status = job_status_list[-1]
            else:
                return True

            if current_job_status == self.DEFAULT_AGENT_JOB_STATUS['FINISH']:
                for target_name in agent_status_file_list:
                    check_target = os.path.join(agent_status_dir_path,
                                                target_name)
                    os.remove(check_target)
                return True
            else:
                time.sleep(10)
        return False

    def trigger(self):

        # check agent status folder
        if self.check_agent_status() is False:
            sys.exit(1)

        # download build
        if self.repo == self.REPO_NAME['TRY']:
            download_fx_fp, download_json_fp = self.get_try_build(
                self.user_email, self.build_hash, self.output_dp)
        else:
            download_fx_fp, download_json_fp = self.get_nightly_build(
                self.output_dp)

        if download_fx_fp is None or download_json_fp is None:
            print "ERROR: something wrong with your build download process, please check the setting and job status."
            sys.exit(1)
        else:
            current_platform_release = platform.release().strip()
            # generate hasal.json data
            with open(download_json_fp) as dl_json_fh:
                dl_json_data = json.load(dl_json_fh)
                perfherder_revision = dl_json_data['moz_source_stamp']
                build_pkg_platform = dl_json_data['moz_pkg_platform']
                # map the perfherder pkg platform to the builddot nomenclature
                builddot_mapping_platform = {
                    "linux-i686": {
                        "_": "linux32"
                    },
                    "linux-x86_64": {
                        "_": "linux64"
                    },
                    "mac": {
                        "_": "osx-10-10"
                    },
                    "win32": {
                        "_": "windows7-32"
                    },
                    "win64": {
                        "_": "windows8-64",
                        "7": "windows8-64",
                        "10": "windows10-64"
                    }
                }
                with open(self.HASAL_JSON_FN, "w") as write_fh:
                    write_data = copy.deepcopy(self.env_data)
                    write_data['FX-DL-PACKAGE-PATH'] = download_fx_fp
                    write_data['FX-DL-JSON-PATH'] = download_json_fp
                    write_data['PERFHERDER-REVISION'] = perfherder_revision
                    if current_platform_release in builddot_mapping_platform[
                            build_pkg_platform].keys():
                        write_data[
                            'PERFHERDER-PKG-PLATFORM'] = builddot_mapping_platform[
                                build_pkg_platform][current_platform_release]
                    else:
                        write_data[
                            'PERFHERDER-PKG-PLATFORM'] = builddot_mapping_platform[
                                build_pkg_platform]["_"]
                    json.dump(write_data, write_fh)

            if os.path.exists(os.path.join(os.getcwd(), self.HASAL_JSON_FN)):
                print "INFO: current json file created at [%s]" % os.path.join(
                    os.getcwd(), self.HASAL_JSON_FN)
            else:
                print "ERROR: json file not exist in expected path [%s]" % os.path.join(
                    os.getcwd(), self.HASAL_JSON_FN)

            # create agent status folder
            if os.path.exists(
                    os.path.join(os.getcwd(),
                                 self.DEFAULT_AGENT_STATUS_DIR)) is False:
                os.mkdir(
                    os.path.join(os.getcwd(), self.DEFAULT_AGENT_STATUS_DIR))

            # move to agent config folder
            if sys.platform == "linux2":
                new_hasal_json_fp = os.path.join(
                    self.DEFAULT_AGENT_CONF_DIR_LINUX, self.HASAL_JSON_FN)
            elif sys.platform == "darwin":
                new_hasal_json_fp = os.path.join(
                    self.DEFAULT_AGENT_CONF_DIR_MAC, self.HASAL_JSON_FN)
            else:
                new_hasal_json_fp = os.path.join(
                    self.DEFAULT_AGENT_CONF_DIR_WIN, self.HASAL_JSON_FN)
            os.rename(self.HASAL_JSON_FN, new_hasal_json_fp)

            if os.path.exists(new_hasal_json_fp):
                print "INFO: hasal json file move to new location [%s]" % new_hasal_json_fp
            else:
                print "ERROR: hasal json file in not in new location [%s]" % new_hasal_json_fp
            sys.exit(0)

    def fetch_push(self, user_email, build_hash, default_count=500):
        tmp_pushes = self.thclient.get_pushes(self.repo, count=default_count)
        for push in tmp_pushes:
            if push['author'].lower() == user_email.lower():
                self.pushes.append(push)
                if build_hash is None:
                    return push
                elif push['revision'] == build_hash:
                    return push
        print "Can't find the specify build hash [%s] in resultsets!!" % build_hash
        return None

    def get_job(self, resultset, platform_keyword_list):
        jobs = self.thclient.get_jobs(self.repo, result_set_id=resultset['id'])
        for job in jobs:
            cnt = 0
            for platform_keyword in platform_keyword_list:
                if platform_keyword in job['platform']:
                    cnt += 1
            if job['platform_option'] == self.platform_option and cnt == len(
                    platform_keyword_list):
                return job
        print "Can't find the specify platform [%s] and platform_options [%s] in jobs!!!" % (
            self.platform, self.platform_option)
        return None

    def get_files_from_remote_url_folder(self, remote_url_str):
        return_dict = {}
        try:
            response_obj = urllib2.urlopen(remote_url_str)
            if response_obj.getcode() == 200:
                for line in response_obj.readlines():
                    match = re.search(r'(?<=href=").*?(?=")', line)
                    if match:
                        href_link = match.group(0)
                        f_name = href_link.split("/")[-1]
                        return_dict[f_name] = href_link
            else:
                print "ERROR: fetch remote file list error with code [%s]" % str(
                    response_obj.getcode())
        except Exception as e:
            print "ERROR: [%s]" % e.message
        return return_dict

    def download_file(self, output_dp, download_link):
        print "Prepare to download the build from link [%s]" % download_link
        response = requests.get(download_link, verify=False, stream=True)
        download_fn = download_link.split("/")[-1]
        if os.path.exists(output_dp) is False:
            os.makedirs(output_dp)
        download_fp = os.path.join(output_dp, download_fn)
        try:
            try:
                total_len = int(response.headers['content-length'])
            except (KeyError, ValueError):
                total_len = None
            chunk_size = 512 * 1024
            # tqdm accepts total=None when the content length is unknown.
            total_chunks = total_len / chunk_size if total_len else None
            with open(download_fp, 'wb') as fh:
                for data in tqdm(response.iter_content(chunk_size=chunk_size),
                                 total=total_chunks):
                    fh.write(data)
            return download_fp
        except Exception as e:
            print "ERROR: [%s]" % e.message
            return None

    def download_from_remote_url_folder(self, remote_url_str, output_dp):
        # get latest nightly build list from remote url folder
        remote_file_dict = self.get_files_from_remote_url_folder(
            remote_url_str)

        # filter by platform, and return the file name with its extension
        if len(remote_file_dict.keys()) == 0:
            print "ERROR: can't get the remote file list; could be a network error, or the url path [%s] may be wrong!" % remote_url_str
            return False
        else:
            if self.platform not in self.PLATFORM_FN_MAPPING:
                print "ERROR: the platform [%s] you specified is not currently supported!" % self.platform
                print "Currently supported platform tags: [%s]" % self.PLATFORM_FN_MAPPING.keys()
                return False
            else:
                matched_keyword = self.PLATFORM_FN_MAPPING[
                    self.platform]['key'] + "." + self.PLATFORM_FN_MAPPING[
                        self.platform]['ext']
                matched_file_list = [
                    fn for fn in remote_file_dict.keys()
                    if ((matched_keyword in fn) and ('firefox' in fn) and (
                        not fn.endswith('.asc')))
                ]
                if len(matched_file_list) != 1:
                    print "WARN: the possible match file list is not equal 1, list as below: [%s]" % matched_file_list
                    if len(matched_file_list) < 1:
                        return False
                    matched_file_list = sorted(matched_file_list)[-1:]
                    print "WARN: select following file [%s]" % matched_file_list

        # combine file name with json
        matched_file_name = matched_file_list[0]
        json_file_name = matched_file_name.replace(
            self.PLATFORM_FN_MAPPING[self.platform]['key'] + "." +
            self.PLATFORM_FN_MAPPING[self.platform]['ext'],
            self.PLATFORM_FN_MAPPING[self.platform]['key'] + ".json")
        if json_file_name not in remote_file_dict:
            print "ERROR: can't find the json file[%s] in remote file list[%s]!" % (
                json_file_name, remote_file_dict)
            return False
        else:
            print "DEBUG: matched file name: [%s], json_file_name: [%s]" % (
                matched_file_name, json_file_name)

        # download files
        download_fx_url = self.ARCHIVE_URL + remote_file_dict[matched_file_name]
        download_fx_fp = self.download_file(output_dp, download_fx_url)
        download_json_url = self.ARCHIVE_URL + remote_file_dict[json_file_name]
        download_json_fp = self.download_file(output_dp, download_json_url)

        # check download status
        if download_fx_fp and download_json_fp:
            print "SUCCESS: build files download in [%s], [%s] " % (
                download_fx_fp, download_json_fp)
            return (download_fx_fp, download_json_fp)
        else:
            print "ERROR: build files download in [%s,%s] " % (
                download_fx_fp, download_json_fp)
            return None

    def get_try_build(self, user_email, build_hash, output_dp):
        resultset = self.fetch_push(user_email, build_hash)

        # check result set
        if resultset:
            # if the build hash is not provided, use the latest revision as the build hash value
            if build_hash is None:
                build_hash = resultset['revision']
            print "Result set found; the build hash is [%s]" % build_hash

            # compose remote folder url
            build_folder_url_template = "%s/pub/firefox/%s-builds/%s-%s/%s-%s/"
            build_folder_url = build_folder_url_template % (
                self.ARCHIVE_URL, self.repo, user_email, build_hash, self.repo,
                self.PLATFORM_FN_MAPPING[self.platform]['trydl'])

            # skipping the status check retrieves the file list straight from the remote folder url
            if self.skip_status_check:
                return self.download_from_remote_url_folder(
                    build_folder_url, output_dp)
            else:
                job = self.get_job(
                    resultset, self.PLATFORM_FN_MAPPING[self.platform]['job'])
                if job:
                    if job['result'].lower() == "success":
                        return self.download_from_remote_url_folder(
                            build_folder_url, output_dp)
                    else:
                        print "WARNING: Current job status is [%s] !! Your build will download when job status is success" % job[
                            'result'].lower()
                        return (None, None)
                else:
                    print "ERROR: can't find the job!"
                    return (None, None)
        else:
            print "ERROR: can't get result set! skip download build from try server, [%s, %s]" % (
                user_email, build_hash)
            return (None, None)

    def get_nightly_build(self, output_dp):
        remote_url_str = self.ARCHIVE_URL + self.NIGHTLY_LATEST_URL_FOLDER
        return self.download_from_remote_url_folder(remote_url_str, output_dp)
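Finally, a sketch of kicking off TriggerBuild from the process environment, matching the ENV_KEY_* constants above; reading the whole environment here is an assumption about how the class is meant to be fed:

import os

# Hypothetical invocation; keys are upper-cased by __init__, and trigger()
# exits the process after moving <BUILD_TAG>.json into the agent config dir.
env_data = dict(os.environ)  # e.g. EMAIL, BUILD_HASH, OUTPUT_DP, BUILD_TAG
TriggerBuild(env_data).trigger()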