Example #1
def iso_to_directory(iso_file, directory):
    """Extract content of an ISO image to destination directory."""
    if not os.path.exists(directory):
        os.makedirs(directory)
    execute_command('bsdtar -C %(directory)s -xf %(iso_file)s',
                    {'directory': directory, 'iso_file': iso_file})
    execute_command('chmod -R u+w %(directory)s', {'directory': directory})
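The examples in this listing come from different projects, each with its own execute_command helper. For the template-style calls used here and in the other ISO/initrd helpers (Examples #6, #12, #27, #51), a minimal sketch of what such a helper might look like, assuming it simply formats the template and runs it through the shell:

import subprocess

def execute_command(template, params=None):
    # Hypothetical helper: substitute the parameters into the shell template
    # and run it, raising CalledProcessError on a non-zero exit code.
    cmd = template % (params or {})
    subprocess.run(cmd, shell=True, check=True)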
Example #2
def generate_html_report(release, spec, versions):
    """Generate html format of the report.

    :param release: Release string of the product.
    :param spec: Specification read from the specification file.
    :param versions: List of versions to generate.
    :type release: str
    :type spec: Specification
    :type versions: list
    """

    logging.info("  Generating the html report, give me a few minutes, please "
                 "...")

    cmd = HTML_BUILDER.format(
        release=release,
        date=datetime.datetime.utcnow().strftime('%m/%d/%Y %H:%M UTC'),
        working_dir=spec.environment["paths"]["DIR[WORKING,SRC]"],
        build_dir=spec.environment["paths"]["DIR[BUILD,HTML]"])
    execute_command(cmd)

    with open(spec.environment["paths"]["DIR[CSS_PATCH_FILE]"], "w") as \
            css_file:
        css_file.write(THEME_OVERRIDES)

    with open(spec.environment["paths"]["DIR[CSS_PATCH_FILE2]"], "w") as \
            css_file:
        css_file.write(THEME_OVERRIDES)

    logging.info("  Done.")
Example #3
def build_distro(repo_dir, target):
    utils.print_group("Building distro {} in {}".format(target, repo_dir))
    os.chdir(repo_dir)

    process = utils.execute_command("bazel", "build", target)
    if process.returncode:
        raise Exception("Failed to build {}: {}".format(
            target, process.stderr))

    print(process.stdout)

    process = utils.execute_command("find", "-L",
                                    os.path.join(repo_dir, "bazel-bin"),
                                    "-name", "*.tar.gz")
    if process.returncode:
        raise Exception("Unable to find tar.gz output file: {}".format(
            process.stderr))

    lines = process.stdout.split("\n")
    files = [l for l in lines if l.strip()]
    if len(files) != 1:
        raise Exception("Expected exactly one tar.gz file, not {}: {}".format(
            len(files), ", ".join(files)))

    return files[0]
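build_distro expects utils.execute_command to take the command as separate arguments and to return an object exposing returncode, stdout, and stderr instead of raising. A minimal sketch consistent with that calling convention (an assumption, not the project's actual helper):

import subprocess

def execute_command(*args):
    # Hypothetical variant: run the argument list and hand back the
    # CompletedProcess so callers can inspect returncode/stdout/stderr.
    return subprocess.run(args, capture_output=True, text=True)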
Example #4
def replay(trace, background=False, offset=0, delay=0, release_timeout=0.09):
    # args
    exe_local_path = 'mysendevent-arm64'
    exe_remote_path = '/data/local/tmp/mysendevent-arm64'
    trace_local_path = trace
    trace_remote_path = '/data/local/tmp/' + trace

    if option.event == "":
        touch_screen_event = find_touchscreen()
    else:
        touch_screen_event = option.event

    execute_command('adb shell ' + 'cd ' + '/data/local/tmp/')
    push(exe_local_path, exe_remote_path, force=True)
    push(trace_local_path, trace_remote_path, force=True)
    execute_command('adb shell ' + 'chmod +x ' +
                    exe_remote_path)  # 'Permission denied'
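    # API 23+ devices are driven through 'adb exec-out', which returns the
    # command's raw output without allocating a pty; older devices fall back
    # to plain 'adb shell'.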
    if get_api_version() < 23:
        cmd_string = 'adb shell ' + exe_remote_path + ' -e ' + touch_screen_event + ' -t ' + trace_remote_path + \
           ' -o ' + str(offset) + ' -r ' + str(release_timeout) + ' -w '
    else:
        cmd_string = 'adb exec-out ' + exe_remote_path + ' -e ' + touch_screen_event + ' -t ' + trace_remote_path + \
           ' -o ' + str(offset) + ' -r ' + str(release_timeout) + ' -w '
    if option.huawei:
        cmd_string += ' -m huawei '
    if option.debug:
        cmd_string += ' -v '

    if background:
        execute_background(cmd_string, delay=delay, outfile=sys.stdout)
    else:
        execute_intercept(cmd_string, delay=delay, outfile=sys.stdout)
Example #5
def runMultiRank(city):
    process_time = time.time()
    input = city + "-feature"
    commond = "spark-submit --master yarn --deploy-mode cluster --name " + city + "-PoiRankTask --class cluster.task.PoiRankTask --jars " + libjars + " --executor-memory 5G --num-executors 2 --executor-cores 5 --driver-memory 1G --driver-cores 1 --conf spark.default.parallelism=1 --conf spark.storage.memoryFraction=0.3 --conf spark.shuffle.memoryFraction=0.5  --conf spark.shuffle.consolidateFiles=true " + jar_path + "poi-rank-1.0-SNAPSHOT.jar  " + input + " " + output
    utils.execute_command(commond, shell=True)
    end_time = time.time()
    logger.info("%s poirank_task finished, used time:%s s", city, str(end_time - process_time))
Example #6
def directory_to_initrd(directory, initrd_file):
    """Compress directory as initrd.gz file."""
    assert(initrd_file.endswith('.gz'))
    initrd_file = initrd_file[:-3]
    execute_command("cd %(in)s && find . | cpio --create --format='newc' > "
                    "%(out)s", {'in': directory, 'out': initrd_file})
    execute_command('gzip %(file)s', {'file': initrd_file})
Example #7
def start_bisecting(project_name, platform_name, git_repo_location, commits_list, needs_clean):
    left = 0
    right = len(commits_list)
    while left < right:
        mid = (left + right) // 2
        mid_commit = commits_list[mid]
        print_expanded_group(":bazel: Test with Bazel built at " + mid_commit)
        eprint("Remaining suspected commits are:\n")
        for i in range(left, right):
            eprint(commits_list[i] + "\n")
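        # Bisection assumes failures are monotonic: once one commit fails, every
        # later commit fails as well. Invariant: commits before `left` passed,
        # commits at or after `right` failed, so the loop converges on the first
        # bad commit at index `right`.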
        if test_with_bazel_at_commit(
            project_name, platform_name, git_repo_location, mid_commit, needs_clean
        ):
            print_collapsed_group(":bazel: Succeeded at " + mid_commit)
            left = mid + 1
        else:
            print_collapsed_group(":bazel: Failed at " + mid_commit)
            right = mid

    print_expanded_group(":bazel: Bisect Result")
    if right == len(commits_list):
        eprint("first bad commit not found, every commit succeeded.")
    else:
        first_bad_commit = commits_list[right]
        eprint("first bad commit is " + first_bad_commit)
        os.chdir(BAZEL_REPO_DIR)
        execute_command(["git", "--no-pager", "log", "-n", "1", first_bad_commit])
Example #8
 def _gitflow_release_finish(self):
     if self.data['tag_already_exists']:
         return
     cmd = self.vcs.cmd_gitflow_release_finish(self.data['version'])
     print cmd
     if utils.ask("Run this command"):
         print utils.execute_command(cmd)
Example #9
def install_vps(connection, swap_supported=False):
    try:
        cmds_create_swap = [
            "touch /var/swap.img", "chmod 600 /var/swap.img",
            "dd if=/dev/zero of=/var/swap.img bs=1024k count=2000",
            "mkswap /var/swap.img", "swapon /var/swap.img",
            "echo \"/var/swap.img none swap sw 0 0\" >> /etc/fstab"
        ]

        cmds_apt_get = [
            "apt-get update -y", "apt-get upgrade -y",
            "apt-get dist-upgrade -y", "apt-get install nano htop git -y",
            "apt-get install build-essential libtool autotools-dev automake pkg-config libssl-dev libevent-dev bsdmainutils software-properties-common -y",
            "apt-get install libboost-all-dev -y",
            "add-apt-repository ppa:bitcoin/bitcoin -y", "apt-get update -y",
            "apt-get install libdb4.8-dev libdb4.8++-dev -y"
        ]

        if swap_supported:
            logging.info("Create SWAP file !")
            for cmd in cmds_create_swap:
                execute_command(connection, '{}'.format(cmd))

        logging.info("Download dependencies !")
        for cmd in cmds_apt_get:
            execute_command(connection, '{}'.format(cmd))
    except Exception as e:
        logging.error('Could not install vps', exc_info=e)
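install_vps issues every command over an SSH connection object whose helper is not shown in this listing. A minimal sketch, assuming the connection is a paramiko SSHClient (purely illustrative; the original project may use a different SSH library):

import logging

def execute_command(connection, cmd):
    # Hypothetical helper: run `cmd` over an open paramiko SSHClient connection.
    stdin, stdout, stderr = connection.exec_command(cmd)
    exit_status = stdout.channel.recv_exit_status()  # block until the command finishes
    if exit_status != 0:
        logging.warning("%s exited with %s: %s", cmd, exit_status,
                        stderr.read().decode(errors="replace"))
    return stdout.read().decode(errors="replace")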
Example #10
 def _gitflow_release_start(self):
     logger.info('Location: ' + utils.execute_command('pwd'))
     self.vcs.gitflow_check_branch("develop", switch=True)
     cmd = self.vcs.cmd_gitflow_release_start(self.data['new_version'])
     print cmd
     if utils.ask("Run this command"):
         print utils.execute_command(cmd)
Example #11
def main():
    pipeline_slug = os.getenv("BUILDKITE_PIPELINE_SLUG")
    git_repository = os.getenv("BUILDKITE_REPO")
    last_green_commit = get_last_green_commit(git_repository, pipeline_slug)
    current_commit = subprocess.check_output(["git", "rev-parse",
                                              "HEAD"]).decode("utf-8").strip()
    if last_green_commit:
        execute_command(["git", "fetch", "-v", "origin", last_green_commit])
        result = (subprocess.check_output([
            "git", "rev-list",
            "%s..%s" % (last_green_commit, current_commit)
        ]).decode("utf-8").strip())

    # If current_commit is newer than last_green_commit, `git rev-list A..B` will output a bunch of
    # commits, otherwise the output should be empty.
    if not last_green_commit or result:
        execute_command(
            [
                "echo %s | %s cp - %s" % (
                    current_commit,
                    gsutil_command(),
                    bazelci_last_green_commit_url(git_repository,
                                                  pipeline_slug),
                )
            ],
            shell=True,
        )
    else:
        eprint(
            "Updating abandoned: last green commit (%s) is not older than current commit (%s)."
            % (last_green_commit, current_commit))
Example #12
def initrd_to_directory(initrd_file, directory):
    """Extract initrd.gz file to destination subdirectory."""
    assert(initrd_file.endswith('.gz'))
    execute_command('gunzip %(file)s', {'file': initrd_file})
    initrd_file = initrd_file[:-3]
    execute_command('cd %(dir)s ; cpio -id < %(file)s', {'dir': directory,
                                                         'file': initrd_file})
Example #15
    def _multi_part_put(self, file_path, destination_path, file_size):
        """
            Uploads file in chunks using Swift Tool (st) command
            http://bazaar.launchpad.net/~hudson-openstack/swift/1.2/view/head:/bin/st

        """
        logger.info("RackspaceCloudFilesTarget: Starting multi-part put "
                    "for %s " % file_path)

        # calculate chunk size
        # split into 10 chunks if possible
        chunk_size = int(file_size / 10)
        if chunk_size > MAX_SPLIT_SIZE:
            chunk_size = MAX_SPLIT_SIZE

        st_exe = which("st")
        st_command = [
            st_exe, "-A", "https://auth.api.rackspacecloud.com/v1.0", "-U",
            self.username, "-K", self.api_key, "upload", "--segment-size",
            str(chunk_size), self.container_name, destination_path
        ]
        logger.info("RackspaceCloudFilesTarget: Executing command: %s" %
                    " ".join(st_command))
        working_dir = os.path.dirname(file_path)
        execute_command(st_command, cwd=working_dir)
        logger.info("RackspaceCloudFilesTarget: Multi-part put for %s "
                    "completed successfully!" % file_path)
Example #16
def parse_excel_upload():
    """
    解析特征阀值并上传
    :return:
    """

    download_rank_config_commond = 'wget "http://svn.sogou-inc.com/svn/go2map/data/poi/edit/trunk/rank/poi-rank.xlsx"  --user=svnsogoumap --password="******" -O /search/odin/taoyongbo/rank/input/poi-rank.xlsx'
    utils.execute_command(download_rank_config_commond, shell=True)

    # Parse the Excel file and generate the feature-threshold and weight configuration files
    parse_excel_time = time.time()
    parse_commond = "java  -Xms800M -Xmx2g -jar " + constant.java_jar_path + "excelparse.jar"
    utils.execute_command(parse_commond, shell=True)
    logger.info("parse_excel finished, used time:%s s", str(time.time() - parse_excel_time))

    utils.rm_mkdir(current_rank_version + "/config/")
    mv_config_rank_commond = "cp /search/odin/taoyongbo/rank/result/poi-threshold.txt /search/odin/taoyongbo/rank/result/poi-weight.txt " + current_rank_version + "/config/"
    utils.execute_command(mv_config_rank_commond, shell=True)

    upload_config_time = time.time()
    utils.rm_mkdir(rank_output_path, constant.cluster_sign)
    utils.rm_mkdir(rank_output_path + "/config/", constant.cluster_sign)

    # Upload the feature-threshold file
    upload_threshold_commond = "hadoop fs -put " + current_rank_version + constant.poi_threshold_path + " " + rank_output_path + "/config/poi-threshold.txt"

    utils.execute_command(upload_threshold_commond, shell=True)

    # Upload the weight file
    upload_weight_commond = "hadoop fs -put " + current_rank_version + constant.weight_path + " " + rank_output_path + "/config/poiWeight.txt"
    utils.execute_command(upload_weight_commond, shell=True)

    logger.info("upload_threshold finished,used  time:%s s", str(time.time() - upload_config_time))
Example #17
def generate_cpta(spec, data):
    """Generate all formats and versions of the Continuous Performance Trending
    and Analysis.

    :param spec: Specification read from the specification file.
    :param data: Full data set.
    :type spec: Specification
    :type data: InputData
    """

    logging.info("Generating the Continuous Performance Trending and Analysis "
                 "...")

    ret_code = _generate_all_charts(spec, data)

    cmd = HTML_BUILDER.format(
        date=datetime.utcnow().strftime('%m/%d/%Y %H:%M UTC'),
        working_dir=spec.environment["paths"]["DIR[WORKING,SRC]"],
        build_dir=spec.environment["paths"]["DIR[BUILD,HTML]"])
    execute_command(cmd)

    with open(spec.environment["paths"]["DIR[CSS_PATCH_FILE]"], "w") as \
            css_file:
        css_file.write(THEME_OVERRIDES)

    with open(spec.environment["paths"]["DIR[CSS_PATCH_FILE2]"], "w") as \
            css_file:
        css_file.write(THEME_OVERRIDES)

    archive_input_data(spec)

    logging.info("Done.")

    return ret_code
Example #19
def _cut(path):
    video_dir = os.path.join(path, 'video')
    yitu_dir = os.path.join(path, 'yitu_orgin')
    output_dir = os.path.join(os.path.dirname(path), 'cluster', 'output')
    cmd = './do_cut.sh {} {} {} {}'.format(path, video_dir, yitu_dir, output_dir)
    kwargs = {'cwd': '/home/kklt/train'}
    utils.execute_command(cmd, **kwargs)
Example #20
def main(argv=None):
    if argv is None:
        argv = sys.argv[1:]

    utils.PRINT_COMMANDS = True

    try:
        repo = utils.get_meta_data(REPO_META_DATA_KEY)
        gh_org = utils.get_meta_data("prerelease-gh-org")
        gh_repo = utils.get_meta_data("prerelease-gh-repo")
        commit = utils.get_meta_data("prerelease-commit")
        target = utils.get_meta_data("prerelease-distro-target")

        text = f"Testing {repo} distro (<a href='https://github.com/{gh_org}/{gh_repo}/commit/{commit}'>{gh_org}/{gh_repo} @ {commit}</a>)"
        utils.execute_command("buildkite-agent", "annotate", "--style", "info",
                              "--context", "distro", text)

        repo_dir = download_repository(gh_org, gh_repo, commit)
        distro_path = build_distro(repo_dir, target)
        save_distro(distro_path)
        request_repo_patching()
    except Exception as ex:
        utils.eprint("".join(
            traceback.format_exception(None, ex, ex.__traceback__)))
        return 1

    return 0
Example #21
def execute_bazel_test(bazel_binary, platform, flags, targets, bep_file,
                       monitor_flaky_tests, incompatible_flags):
    print_expanded_group(":bazel: Test")

    aggregated_flags = [
        "--flaky_test_attempts=3",
        "--build_tests_only",
        "--local_test_jobs=" + concurrent_test_jobs(platform),
    ]
    # Don't enable remote caching if the user enabled remote execution / caching themselves
    # or flaky test monitoring is enabled, as remote caching makes tests look less flaky than
    # they are.
    aggregated_flags += compute_flags(
        platform,
        flags,
        incompatible_flags,
        bep_file,
        enable_remote_cache=not monitor_flaky_tests)

    try:
        execute_command([bazel_binary] + common_startup_flags(platform) +
                        ["test"] + aggregated_flags + targets)
    except subprocess.CalledProcessError as e:
        raise Exception("bazel test failed with exit code {}".format(
            e.returncode))
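In contrast to Example #3, where execute_command returns a CompletedProcess, the Bazel CI helpers here and in Examples #25-#26 expect it to raise subprocess.CalledProcessError on failure. A minimal sketch matching that expectation (an assumption, not necessarily the project's real implementation):

import subprocess

def execute_command(args, shell=False):
    # Hypothetical variant: raise CalledProcessError on a non-zero exit code,
    # which the callers above translate into their own exceptions.
    return subprocess.run(args, shell=shell, check=True)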
Example #22
def matchCount_distcp():
    logger.info("matchCount_distcp process")
    rm_commond = "hadoop fs -rmr " + constant.yarn_matchCount_input_path
    utils.execute_command(rm_commond, shell=True)

    commond = "hadoop distcp   -update -skipcrccheck -m 50 " + constant.yarn_matchCount_output_path + " " + constant.yarn_matchCount_input_path
    utils.execute_command(commond, shell=True)
    logger.info("matchCount_distcp finished")
Example #23
def gps_distcp():
    logger.info("gps_distcp process")
    rm_commond = "hadoop fs -rmr " + constant.yarn_gps_input_path
    utils.execute_command(rm_commond, shell=True)

    commond = "hadoop distcp -overwrite  -m 50 " + constant.zeus_path + constant.zeus_gps_path + " " + constant.yarn_gps_input_path
    utils.execute_command(commond, shell=True)
    logger.info("gps_distcp finished")
Example #24
def reencode_single_file(movie_full_path):
    filename_only = movie_full_path[movie_full_path.rindex('/') + 1:]
    new_filename = filename_only.replace('.mp4', '%s.mp4' % NEW_FILE_SUFFIX)
    new_full_path = movie_full_path.replace(filename_only, '/' + OUTPUT_DIR + '/' + new_filename)
    execute_command(AVCONV_TEMPLATE % (movie_full_path, new_full_path))
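    # After re-encoding, move the original file aside into DONE_DIR.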
    new_path_for_old_file = movie_full_path.replace(filename_only, '/' + \
            DONE_DIR + '/' + filename_only)
    os.rename(movie_full_path, new_path_for_old_file)
Example #25
def execute_bazel_run(bazel_binary, platform, targets, incompatible_flags):
    if not targets:
        return
    print_collapsed_group("Setup (Run Targets)")
    for target in targets:
        execute_command([bazel_binary] + common_startup_flags(platform) +
                        ["run"] + common_build_flags(None, platform) +
                        (incompatible_flags or []) + [target])
Example #26
def execute_bazel_clean(bazel_binary, platform):
    print_expanded_group(":bazel: Clean")

    try:
        execute_command([bazel_binary] + common_startup_flags(platform) +
                        ["clean", "--expunge"])
    except subprocess.CalledProcessError as e:
        raise Exception("bazel clean failed with exit code {}".format(
            e.returncode))
Example #27
def directory_to_iso(directory, iso_file):
    """Create ISO image from directory."""
    output_dir = os.path.dirname(os.path.abspath(iso_file))
    if not os.path.exists(output_dir):
        os.makedirs(output_dir)
    execute_command('genisoimage -o %(iso)s -r -J -no-emul-boot '
                    ' -boot-load-size 4 -boot-info-table '
                    '-b isolinux/isolinux.bin -c isolinux/boot.cat %(dir)s',
                    {'iso': iso_file, 'dir': directory})
Example #28
def install_gyp():
    if os.path.exists(os.path.join("vendor", "gyp")):
        return

    print "Clone gyp"
    utils.execute_command([
        "git", "clone", "https://chromium.googlesource.com/external/gyp.git",
        "vendor/gyp"
    ])
Example #29
 def compute_image_mean(self, test_id, backend, lmdb_path,
                        binary_proto_path):
     image_mean_bin = caffe_home() + "/build/tools/compute_image_mean"
     lmdb_path = os.path.abspath(lmdb_path)
     binary_proto_path = os.path.abspath(binary_proto_path)
     command = [
         image_mean_bin, "-backend=" + backend, lmdb_path, binary_proto_path
     ]
     command = ' '.join(command)
     utils.execute_command(test_id, command)
Example #30
def featureCombine_task(environment='beta'):
    logger.info("spark featureCombine_task process:{environment}".format(
        environment=environment))
    scala_jar_path = root_path + environment + '/scala_spark/'
    scala_libjars_path = scala_jar_path + 'lib'
    fileList, dirList = utils.get_files(scala_libjars_path)
    libjars = ",".join(fileList)
    commond = "spark-submit --master yarn --deploy-mode cluster --name FeatureCombineTask --class cluster.task.FeatureCombineTask --jars " + libjars + " --executor-memory 4G --num-executors 26 --executor-cores 6 --driver-memory 6G --conf spark.default.parallelism=350 --conf spark.storage.memoryFraction=0.5 --conf spark.shuffle.memoryFraction=0.3 " + scala_jar_path + "poi-rank-1.0-SNAPSHOT.jar  " + input + " " + output
    utils.execute_command(commond, shell=True)
    logger.info("spark featureCombine_task finished")
Example #31
def polygonRank_task(environment='beta'):
    logger.info("spark polygonRank_task process:{environment}".format(
        environment=environment))
    scala_jar_path = root_path + environment + '/scala_spark/'
    scala_libjars_path = scala_jar_path + 'lib'
    fileList, dirList = utils.get_files(scala_libjars_path)
    libjars = ",".join(fileList)
    commond = "spark-submit --master yarn --deploy-mode cluster --name PolygonRankTask --class cluster.task.PolygonRankTask --jars " + libjars + " --executor-memory 11520M --num-executors 20 --executor-cores 15  --driver-memory 25G --driver-cores 10 --conf spark.default.parallelism=1 --conf spark.storage.memoryFraction=0.4 --conf spark.shuffle.memoryFraction=0.6 --conf spark.shuffle.consolidateFiles=true " + scala_jar_path + "poi-rank-1.0-SNAPSHOT.jar  " + input + " " + output
    utils.execute_command(commond, shell=True)
    logger.info("spark polygonRank_task finished")
Example #32
def matchcount_task(environment='beta'):
    logger.info("spark matchcount_task process:{environment}".format(
        environment=environment))
    scala_jar_path = root_path + environment + '/scala_spark/'
    scala_libjars_path = scala_jar_path + 'lib'
    fileList, dirList = utils.get_files(scala_libjars_path)
    libjars = ",".join(fileList)
    commond = "spark-submit --master yarn --deploy-mode cluster --name MatchCountTask --class cluster.task.MatchCountTask --executor-memory 4G --num-executors 19 --executor-cores 5  --conf spark.default.parallelism=3000 " + scala_jar_path + "poi-rank-1.0-SNAPSHOT.jar  " + input + " " + output
    utils.execute_command(commond, shell=True)
    logger.info("spark matchcount_task finished")
Example #33
def keep_device_connected(device_name, delay=3):
    execute_command('adb start-server', print_to_stdout=True)

    res_string = execute_command('adb devices')[:-1]  # remove '\n'
    connected_devices = res_string.splitlines()[1:]
    while _device_checker(connected_devices, device_name) is False:
        print('[adbhelper] Retry after ' + str(delay) + 's')
        time.sleep(delay)
        res_string = execute_command('adb devices')[:-1]  # remove '\n'
        connected_devices = res_string.splitlines()[1:]
Example #34
def rank_optimize_task(environment='beta',
                       rank_output_path=constant.default_rank_output_path):
    logger.info("spark rank_optimization process:{environment}".format(
        environment=environment))
    scala_jar_path = root_path + environment + '/scala_spark/'
    scala_libjars_path = scala_jar_path + 'lib'
    fileList, dirList = utils.get_files(scala_libjars_path)
    libjars = ",".join(fileList)
    commond = "spark-submit --master yarn --deploy-mode cluster --name RankOptimizeTask --class cluster.task.RankOptimizeTask  --jars " + libjars + " --executor-memory 4G --num-executors 19 --executor-cores 5 --conf spark.default.parallelism=350 " + scala_jar_path + "poi-rank-1.0-SNAPSHOT.jar  " + input + " " + rank_output_path
    utils.execute_command(commond, shell=True)
    logger.info("spark rank_optimization finished")
Example #35
def structure_distcp(zeus_structure_path=None):
    if zeus_structure_path is not None:
        constant.zeus_structure_path = zeus_structure_path
    logger.info("structure_distcp process")

    rm_commond = "hadoop fs -rmr " + constant.yarn_structure_input_path
    utils.execute_command(rm_commond, shell=True)

    commond = "hadoop distcp -overwrite " + constant.zeus_path + constant.zeus_structure_path + " " + constant.yarn_structure_input_path
    utils.execute_command(commond, shell=True)
    logger.info("structure_distcp finished")
Example #36
def poiXml_myself_distcp(zeus_myself_path=None):
    logger.info("poiXml_myself_distcp process")

    if zeus_myself_path is not None:
        constant.zeus_myself_path = zeus_myself_path

    rm_commond3 = "hadoop fs -rmr " + constant.yarn_myself_input_path
    utils.execute_command(rm_commond3, shell=True)

    commond3 = "hadoop distcp -overwrite -m 30 " + constant.zeus_path + constant.zeus_myself_path + " " + constant.yarn_myself_input_path
    utils.execute_command(commond3, shell=True)
    logger.info("poiXml_myself_distcp finished")
Example #37
def kill_app():
    cmd = "yarn application -list | awk -F ' ' '{print $1,$4}'"
    output = utils.get_shell_output(cmd)
    kill_cmd = "yarn application -kill "

    for line in output:
        field = line.strip().split(" ")
        appId = field[0]
        user = field[1]
        if user == "go2data_rank":
            kill_app = kill_cmd + appId
            utils.execute_command(kill_app)
            logger.info("application:{app} kill done".format(app=appId))
Example #38
    def tar_backup(self, backup, dump_dir, file_name):
        tar_exe = which("tar")

        tar_cmd = [tar_exe, "-cvzf", file_name, dump_dir]
        cmd_display = " ".join(tar_cmd)
        workspace = self.get_task_workspace_dir(backup)
        try:
            logger.info("Running tar command: %s" % cmd_display)
            execute_command(tar_cmd, cwd=workspace)
            self._delete_dump_dir(backup, dump_dir)
        except CalledProcessError, e:
            last_log_line = e.output.split("\n")[-1]
            errors.raise_archive_error(e.returncode, last_log_line)
Example #39
 def _install_app_task(self):
     # Install the application into emulator
     cmd = [ self.adb_bin, '-e', 'install', '-r', self._get_aligned_apk() ]
     if execute_command(cmd, cwd=self.project_path, os_env=self.os_environ):
         log.info('Installed application "%s.%s.apk" to emulator'%(self.project_name,
                                                                   self.zipped_prefix))
     self._install_app_POST()
Example #40
    def extract_restore_source_backup(self, restore):
        working_dir = self.get_task_workspace_dir(restore)
        file_reference = restore.source_backup.target_reference
        logger.info("Extracting tar file '%s'" % file_reference.file_name)

        tarx_cmd = [
            which("tar"),
            "-xf",
            file_reference.file_name
        ]

        logger.info("Running tar extract command: %s" % tarx_cmd)
        try:
            execute_command(tarx_cmd, cwd=working_dir)
        except CalledProcessError, cpe:
            logger.error("Failed to execute extract command: %s" % tarx_cmd)
            raise ExtractError(cause=cpe)
Example #41
    def run_mongo_restore(self, restore, destination_uri, dump_dir, source_database_name,
                          log_file_name, dump_log_file_name,
                          exclude_system_users=None,
                          exclude_admin_system_users=None,
                          exclude_system_roles=None,
                          options=None):

        if source_database_name:
            source_dir = os.path.join(dump_dir, source_database_name)
        else:
            source_dir = dump_dir

        workspace = self.get_task_workspace_dir(restore)
        # IMPORTANT delete dump log file so the restore command would not break
        dump_log_path = os.path.join(workspace, dump_dir, dump_log_file_name)
        if os.path.exists(dump_log_path):
            os.remove(dump_log_path)

        if exclude_system_users:
            self._delete_system_users_from_dump(restore, source_dir)

        if exclude_admin_system_users:
            self._delete_admin_system_users_from_dump(restore, source_dir)

        if exclude_system_roles:
            self._delete_roles_from_dump(restore, source_dir)

        working_dir = workspace
        log_path = os.path.join(workspace, log_file_name)

        restore_cmd = [
            which("mongoctl"),
            "restore",
            destination_uri,
            source_dir
        ]

        if options:
            restore_cmd.extend(options)

        restore_cmd_display = restore_cmd[:]

        restore_cmd_display[restore_cmd_display.index("restore") + 1] = mask_mongo_uri(destination_uri)

        logger.info("Running mongoctl restore command: %s" %
                    " ".join(restore_cmd_display))

        returncode = execute_command_wrapper(restore_cmd,
                                             output_path=log_path,
                                             cwd=working_dir)

        # read the last dump log line
        last_line_tail_cmd = [which('tail'), '-1', log_path]
        last_log_line = execute_command(last_line_tail_cmd)

        if returncode:
            raise RestoreError(returncode, last_log_line)
Example #42
    def _create_dex_task(self):
        # Creates Dalvik executable
        cmd = [self.dx_bin, '--dex',
               '--output=%s/bin/classes.dex' % (self.project_path),
               '%s/obj'%(self.project_path),
               '%s/libs' % (self.project_path)]

        if execute_command(cmd, cwd=self.project_path, os_env=self.os_environ):
            log.info('Generated DEX executable')
        self._create_dex_POST()
Example #43
 def _create_R_java_task(self):
     # Generate R.java
     cmd = [self.aapt_bin,
             'package', '-f', '-m',
             '-S', '%s/res'%(self.project_path),
             '-M', '%s'%(self.app_manifest),
             '-I', '%s'%(self.android_jar),
             '-J', '%s/src'%(self.project_path)]
     if execute_command(cmd, cwd=self.project_path, os_env=self.os_environ):
         log.info('Generated R.java')
     self._create_R_java_POST()
Example #44
    def _compile_java_code_task(self):
        # Compile java sources
        cmd = [self.javac_bin,
                '-d', '%s/obj'%(self.project_path),
                '-classpath', '%s'%(self.android_jar),
                '-sourcepath', '%s/src'%(self.project_path)]
        cmd.extend([_s for _s in self._get_sources()])

        if execute_command(cmd, cwd=self.project_path, os_env=self.os_environ):
            log.info('Compiled sources')
        self._compile_java_code_POST()
Example #45
 def _zip_align_apk_task(self):
     # Zip align the apk
     cmd = [ self.zipalign_bin,
             '-f', '4',
             '%s/bin/%s.%s.apk'%(self.project_path,
                                 self.project_name,
                                 self.signed_prefix),
             '%s/bin/%s.%s.apk'%(self.project_path,
                                 self.project_name,
                                 self.zipped_prefix)]
     if execute_command(cmd, cwd=self.project_path, os_env=self.os_environ):
         log.info('Zip aligned application "%s.%s.apk"'%(self.project_name,
                                                         self.zipped_prefix))
     self._zip_align_apk_POST()
Example #46
    def _create_unsigned_apk_task(self):
        # Create unsigned APK
        cmd = [ self.aapt_bin,
                'package', '-f',
                '-M', '%s'%(self.app_manifest),
                '-I', '%s'%(self.android_jar),
                '-F', '%s/bin/%s.%s.apk'%(self.project_path,
                                          self.project_name,
                                          self.unsigned_prefix),
                '-S', '%s/res' % (self.project_path),
                '%s/bin'%(self.project_path)]

        if execute_command(cmd, cwd=self.project_path, os_env=self.os_environ):
            log.info('Created %s.%s.apk' % (self.project_name,
                                            self.unsigned_prefix))
        self._create_unsigned_apk_POST()
Example #47
    def _sign_apk_task(self):
        # Sign the apk
        cmd = [ self.jarsigner_bin,
                '-keystore', self.key_store,
                '-storepass', self.store_pass,
                '-keypass', self.key_pass,
                '-signedjar', '%s/bin/%s.%s.apk'%(self.project_path,
                                                  self.project_name,
                                                  self.signed_prefix),
                '%s/bin/%s.%s.apk'%(self.project_path,
                                    self.project_name,
                                    self.unsigned_prefix),
                self.key_alias ]

        if execute_command(cmd, cwd=self.project_path, os_env=self.os_environ):
            log.info('Signed application "%s.%s.apk"' % (self.project_name,
                                                         self.signed_prefix))
        self._sign_apk_POST()
Example #48
 def create_project_task(self):
     # Create/update basic project
     cmd = [ self.android_bin,
             'update' if os.path.exists(self.project_path) else 'create', 'project',
             '--path', self.project_path ]
     if not os.path.exists(self.project_path):
         cmd.extend([
             '--name', self.project_name,
             '--activity',  self.activity_name,
             '--package', self.package_name,
             '--target', self.target
         ])
     if execute_command(cmd, cwd=self.project_path):
         if cmd[1] == 'update':
             log.info('Updating project "%s" ALREADY created in "%s"' % (self.project_name,
                                                                   self.project_path))
         else:
             log.info('Project "%s" is created in "%s"' % (self.project_name,
                                                           self.project_path))
     self.create_project_POST()
Example #49
 def execute(self):
     """ Do the actual releasing.
     """
     logger.info('Location: ' + utils.execute_command('pwd'))
     if utils.has_extension(self.vcs, 'gitflow'):
         if self.vcs.gitflow_check_prefix("release"):
             self._gitflow_release_finish()
             current = self.vcs.current_branch()
             logger.info(
                 ('Switching from ' + current +
                  ' to master branch for egg generation.'))
             self.vcs.gitflow_check_branch("master", switch=True)
             self._release()
             logger.info('Switching back to ' + current + ' branch.')
             self.vcs.gitflow_switch_to_branch(current)
         else:
             logger.critical(
                 "You are not on a release branch, first run a prerelease "
                 "or gitflow release.")
             sys.exit(1)
     else:
         self._make_tag()
         self._release()
Example #50
 def _launch_app_task(self):
     # Launch the application into emulator
     cmd = [ self.adb_bin, 'shell',  'am',  'start',  '-n', self.app_activity ]
     if execute_command(cmd, cwd=self.project_path, os_env=self.os_environ):
         log.info('Launched the application in the emulator')
     self._launch_app_POST()
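Examples #39, #42-#48, and #50 are individual steps of one command-line Android build. A hypothetical driver showing the order in which those task methods would typically be chained (the builder object holding them is assumed):

def build_sign_and_launch(builder):
    # Typical ordering of the task methods shown in this listing.
    builder.create_project_task()        # Example #48: create or update the project
    builder._create_R_java_task()        # Example #43: aapt generates R.java
    builder._compile_java_code_task()    # Example #44: javac compiles the sources
    builder._create_dex_task()           # Example #42: dx produces classes.dex
    builder._create_unsigned_apk_task()  # Example #46: aapt packages the unsigned apk
    builder._sign_apk_task()             # Example #47: jarsigner signs it
    builder._zip_align_apk_task()        # Example #45: zipalign aligns the signed apk
    builder._install_app_task()          # Example #39: adb installs it into the emulator
    builder._launch_app_task()           # Example #50: am start launches the activity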
Example #51
def rebuild_md5sum(directory):
    """Rebuild md5sum.txt file in the given directory."""
    execute_command('cd %(dir)s ; md5sum `find ! -name "md5sum.txt" ! '
                    '-path "./isolinux/*" -follow -type f` > md5sum.txt ;',
                    {'dir': directory})
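The ISO and initrd helpers spread across this listing (Examples #1, #6, #12, #27, and #51) compose into a small image-remastering flow. A hypothetical driver, assuming the helpers live in one module with os imported; the initrd location inside the image is illustrative:

def remaster_iso(iso_in, iso_out, work_dir):
    # Hypothetical flow built only from helpers shown in this listing.
    iso_dir = os.path.join(work_dir, 'iso')
    initrd_dir = os.path.join(work_dir, 'initrd')
    initrd_path = os.path.join(iso_dir, 'install', 'initrd.gz')  # illustrative path
    os.makedirs(initrd_dir, exist_ok=True)
    iso_to_directory(iso_in, iso_dir)             # Example #1
    initrd_to_directory(initrd_path, initrd_dir)  # Example #12
    # ... modify files under initrd_dir and iso_dir here ...
    directory_to_initrd(initrd_dir, initrd_path)  # Example #6
    rebuild_md5sum(iso_dir)                       # Example #51
    directory_to_iso(iso_dir, iso_out)            # Example #27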
Example #52
 def current_branch(self):
     return utils.execute_command("git rev-parse --abbrev-ref HEAD").strip()
Example #53
 def gitflow_switch_to_branch(self, branch, silent=True):
     if not silent:
         logger.info(
             'You are not on the "%s" branch, switching now.' % branch)
     utils.execute_command(self.cmd_checkout_from_tag(branch, '.'))