Example #1
def muta_binary():
    with util.chdir(conf.config["muta"]["path"]):
        subprocess.call("cargo build --release --example muta-chain",
                        shell=True)
        with util.chdir("./devtools/keypair"):
            subprocess.call("cargo build --release", shell=True)

        subprocess.call("rm -rf build", shell=True)
        subprocess.call("mkdir -p build", shell=True)
        subprocess.call("cp ./target/release/examples/muta-chain ./build",
                        shell=True)
        subprocess.call("cp ./target/release/muta-keypair ./build", shell=True)
Example #2
def parse_traces(path):
    util.chdir(path)
    files = [f for f in os.listdir('.') if os.path.isfile(f)]
    for filename in files:
        if filename.startswith('Trace_'):
            # Trace files are named Trace_<task_id>; index the lines by task id.
            with open(filename, 'r') as trace_file:
                lines = trace_file.read().splitlines()
            und_index = filename.index('_')
            task_id = filename[und_index+1:]
            traces[int(task_id)] = lines

        if filename.startswith('br_'):
            with open(filename, 'r') as br_file:
                lines = br_file.read().splitlines()
            branch_checks.extend(lines)
Example #3
    def install(self):
        """Places build files into the install directory.

        Whatever is in the install directory at the completion of this
        command is packaged by the builder for release.

        """
        with chdir(self.config['build_dir']), umask(0o22):
            self.cmd('make', 'DESTDIR={install_dir_abs}', 'install')

        # And remove any .la files
        for root, _, files in os.walk('{install_dir}'.format(**self.config)):
            for f in files:
                if f.endswith('.la'):
                    os.unlink(os.path.join(root, f))

        # Remove the headers from any man page
        with umask(0o22):
            man_dir = self.j('{prefix_dir}', 'share', 'man')
            for root, _, files in os.walk(man_dir):
                for f in files:
                    man_remove_header(os.path.join(root, f))

        self.strip_libiberty()
        self.strip_silly_info()
        self.strip_info_dir()
Example #4
def generate(name, parent, **kw):
    """
    create a Dockerfile
    """
    path = join('library', name)
    if exists(path):
        error("dir/file at path {0} exists!".format(path))

    try:
        # prefer a supervisord.conf in the current directory, falling back
        # to the one shipped alongside this module
        confpath = abspath("supervisord.conf")
        open(confpath).close()
    except IOError:
        confpath = abspath(join(dirname(__file__), "supervisord.conf"))

    os.makedirs(path)
    with util.chdir(path):
        shutil.copyfile(confpath, "supervisord.conf")

        with open("Dockerfile", 'w') as f:
            # this one (in practice) should be ignored
            print("FROM {0}".format(parent), file=f)
            # install supervisor and/or pip according to platform
            for dep in platforms.dependencies(parent):
                print("RUN {0}".format(dep), file=f)
            print("RUN mkdir -p /etc/supervisor", file=f)
            print("ADD supervisord.conf /etc/supervisor/supervisord.conf", file=f)
            print("ENV ETCD http://172.17.42.1:4001", file=f)

            if kw.get('inject'):
                print("# injected service pooling script", file=f)
                print("RUN which espb || pip install {0}".format(ESPB_SCRIPT), file=f)
                print('CMD ["/usr/local/bin/espb", "register", "{0}"]'.format(name), file=f)

    print(join(path, "Dockerfile"))
Example #5
def create_check_formula():
    create_f_inits()
    create_mo_consts()
    create_rw_consts()
    create_sync_consts()
    create_branch_consts()
    create_spsync_consts()

    print(const_s)

    sat_check = str(const_s.check())
    print(sat_check)
    util.chdir(ref_cwd)
    with open('sat_check.out', 'w') as f:
        f.write(sat_check + "\n")
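const_s here behaves like a Z3 Solver: check() returns sat, unsat, or unknown, and the stringified result is what lands in sat_check.out. A tiny illustration under that assumption (const_s and the create_* helpers are defined elsewhere in the original project):

import z3

solver = z3.Solver()
x = z3.Int('x')
solver.add(x > 0, x < 5)      # satisfiable constraints
print(str(solver.check()))    # prints "sat"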
Example #6
def main():
    user = os.environ['USER']
    subprocess.check_call(
        ['sudo', 'yum', 'install', '-y', 'mz_bench', 'mz_bench_dev'])
    with util.chdir(os.path.join(dirname, '..')):

        if '--local' in sys.argv:
            run_command = ['mz-bench-dev', 'run-local']
        else:
            run_command =\
                ['mz-bench'
                , 'run'
                , '--user_repo=' + user 
                , '--exclusive_node_usage=false'
                ]

        with server.background_server():
            subprocess.check_call(
                run_command +
                [ 'examples/http_post.erl'
                , '--env=host=' + hostname
                , '--env=port=' + str(server.port)
                , '--env=max_rps=2'
                , '--env=endpoint=/update-db'
                ])
            subprocess.check_call(
                run_command +
                [ 'examples/http_get.erl'
                , '--env=host=' + hostname
                , '--env=port=' + str(server.port)
                , '--env=max_rps=2'
                , '--env=endpoint=/index.html'
                ])
Example #7
def call( cmdline, targetDir=".", suppressErrors=False, grabStdOut=False, verbose=True ):
    print_command_line( cmdline )

    useShell = os.name != "posix"
    cmds = build_command_list(cmdline, useShell)

    with util.chdir(targetDir):
        if verbose:
            print( ' '.join(map(bytes.decode,cmds)))
        if grabStdOut:
            p = Popen( cmdline, stdout=PIPE, stderr=PIPE, shell=useShell )
        else:
            p = Popen( cmdline, stderr=PIPE, shell=useShell )
        (out, err) = p.communicate()
        if grabStdOut and verbose:
            print(out)
        if p.returncode != 0:
            # If this is not a source build, there will be no 'assemble' task for the root gradle to complete, so it
            # will throw an exception. Rather than have the script determine whether a source build is necessary before
            # executing the command, we choose to run it anyway and catch the exception.
            gradleTask = "clean" if command_options.should_clean else "assembleDebug" if command_options.is_debug_build else "assembleRelease"
            err_decoded = err.decode("utf-8")
            if ("Task '%s' not found in root project 'OculusRoot'" % gradleTask) in err_decoded:
              raise NoSourceException( targetDir )
            error_string = "command (%s) failed with returncode: %d" % (cmdline, p.returncode)
            if verbose:
                print(err_decoded)
            if suppressErrors:
                print(error_string)
            else:
                raise BuildFailedException(error_string)

    return ( p.returncode, out, err )
Example #8
    def _build(self, reconfigure, force, force_recursive, variant):
        if self.group_only:
            self.ensure_dir('{install_dir}')
            noprefix_dir = self.j('{install_dir}', 'noprefix')
            if self.exists(noprefix_dir):
                if os.path.islink(noprefix_dir):
                    os.unlink(noprefix_dir)
                else:
                    self.rmtree(noprefix_dir)
            os.symlink(self.j('..', '..', '{devtree_dir}'), noprefix_dir)
            self._package()
            return

        # Download
        self._download()
        # Configure
        self._configure(reconfigure)
        # Make
        with chdir(self.config['build_dir']):
            self.make()
        # Install
        self.rmtree('{install_dir}')
        self.ensure_dir('{install_dir}')
        self.install()
        # Package
        self._package()
Example #9
def run_gradle_task(opts, task, args=None):
    """
    Forks a sub-process to execute a gradle build task

    :param opts: Parsed command line options
    :param task: Gradle task name
    :param args: Array of additional arguments to supply to gradle build
    """
    flags = [task]
    flags.append('--daemon' if opts.use_gradle_daemon else '--no-daemon')
    # lifecycle logging is enabled when a log level is not specified.
    if opts.loglevel != "lifecycle": flags.append('-%s' % opts.loglevel)
    if opts.profile: flags.append('--profile')
    if opts.scan: flags.append('--scan')
    if opts.disable_sig_check: flags.append('-Pdisable_sig_check')
    if opts.clear_logcat: flags.append('-Pclear_logcat')
    if opts.build_cache: flags.append('--build-cache')
    if opts.configure_on_demand: flags.append('--configure-on-demand')
    if opts.parallel: flags.append('--parallel')

    gradle_file_path = find_gradle_root_project()
    with util.chdir(os.path.dirname(gradle_file_path)):
        beginTime = time.time()
        command = [ gradle_command() ] + flags + (args or [])
        call(command)
        endTime = time.time()
        deltaTime = endTime - beginTime
        print("Gradle took %f seconds" % deltaTime)
Example #10
def muta_docker():
    with util.chdir(conf.config["muta"]["path"]):
        commit_id = subprocess.getoutput('git log -1 --pretty="%h"')
        image_tag = f"{conf.config['muta']['docker_username']}/muta:{commit_id}"
        subprocess.call(f"docker build -t {image_tag} .", shell=True)
        subprocess.call(f"docker push {image_tag}", shell=True)
    return {"image_tag": image_tag}
Example #11
def hard_link_in_cwd(filename):
	with util.directory_created("hardlink_scratch") as directory:
		with util.chdir(directory):
			util.touch("a")
			os.link("a", "b")
			with util.archive_created(os.path.join("..", "hardlink_archive.xar"), ".") as path:
				with util.directory_created(os.path.join("..", "hardlink_extracted")) as extracted:
					subprocess.check_call(["xar", "-x", "-C", extracted, "-f", path])
					_assert_same_inodes(os.path.join(extracted, "a"), os.path.join(extracted, "b"))
Example #12
def _run_converter_and_yarn_session(input_dir, output_dir, n_nodes, jar_path):
    # setup properties file
    run_dir = tempfile.mkdtemp(prefix="bclconverter_run_dir")
    try:
        # start by preparing the properties file (at the moment the program
        # doesn't accept command-line arguments)
        tmp_conf_dir = os.path.join(run_dir, "conf")
        os.makedirs(tmp_conf_dir)
        props_file = os.path.join(tmp_conf_dir, GlobalConf['props_filename'])
        with open(props_file, 'w') as f:
            f.write("root = {}/\n".format(input_dir.rstrip('/')))
            f.write("fout = {}/\n".format(output_dir.rstrip('/')))
            f.write("numTasks = {:d}\n".format(GlobalConf['tasksPerNode'] *
                                               n_nodes))
            f.write("flinkpar = {:d}\n".format(GlobalConf['flinkpar']))
            f.write("jnum = {:d}\n".format(GlobalConf['jnum']))

        logger.info("Wrote properties in file %s", props_file)
        if logger.isEnabledFor(logging.DEBUG):
            with open(props_file) as f:
                logger.debug(
                    "\n=============================\n%s\n=====================\n",
                    f.read())
        # now run the program
        logger.debug("Running flink cwd %s", run_dir)
        cmd = [
            get_exec("flink"),
            "run",
            "-m",
            "yarn-cluster",
            '-yn',
            n_nodes,
            '-yjm',
            GlobalConf['job_manager_mem'],  # job manager memory
            '-ytm',
            GlobalConf['task_manager_mem'],  # task manager memory
            '-ys',
            GlobalConf['slots'],
            "-c",
            "bclconverter.bclreader.test",  # class name
            jar_path
        ]
        logger.debug("executing command: %s", cmd)
        with chdir(run_dir):
            logger.debug("In CWD, where we're going to run flink")
            logger.debug("cat conf/bclconverter.properties gives:")
            subprocess.check_call("cat conf/bclconverter.properties",
                                  shell=True)
            logger.debug("Now running flink")
            subprocess.check_call(map(str, cmd), cwd=run_dir)
    finally:
        logger.debug("Removing run directory %s", run_dir)
        try:
            shutil.rmtree(run_dir)
        except IOError as e:
            logger.debug("Error cleaning up temporary dir %s", run_dir)
            logger.debug(str(e))  # IOError has no .message attribute in Python 3
Example #13
def tar_gz(output, tree):
    """Create a tar.gz file named `output` from a specified directory tree.

    When creating the tar.gz a standard set of meta-data will be used to
    help ensure things are consistent.

    """
    with tarfile.open(output, 'w:gz', format=tarfile.GNU_FORMAT) as tf:
        with chdir(tree):
            for f in os.listdir('.'):
                tf.add(f, filter=tar_info_filter)
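tar_gz hands each entry to a filter callable via TarFile.add; tar_info_filter itself is not shown, but the docstring's "standard set of meta-data" suggests a normalizing filter along these lines (a sketch, not the project's actual code):

import tarfile

def tar_info_filter(info: tarfile.TarInfo) -> tarfile.TarInfo:
    # Pin ownership and timestamps so repeated builds of the same tree
    # produce byte-identical archives.
    info.uid = info.gid = 0
    info.uname = info.gname = 'root'
    info.mtime = 0
    return info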
Example #14
def build_in_dir(targetDir, args=None):
    with util.chdir(targetDir):
        print('\n\nbuilding in ' + targetDir)
        if os.path.exists( 'build.gradle' ):
            if command_options.should_clean:
                run_gradle_task(command_options, 'clean')
            elif command_options.is_debug_build:
                run_gradle_task(command_options, 'assembleDebug', args)
            else:
                run_gradle_task(command_options, 'assembleRelease', args)
        print('\n\nfinished building in ' + targetDir)
Example #15
def deploy_binary():
    pool = make_pool()
    with util.chdir(conf.config["muta"]["path"]):
        subprocess.call("python3 -m zipfile -c build.zip build", shell=True)
        pool.run("killall muta-chain | true")
        pool.run("rm -rf build")
        for e in pool:
            print(f"Upload build.zip {e.host}")
            e.put("build.zip", "build.zip")
    pool.run(
        "python3 -m zipfile -e build.zip . && cd build && chmod +x muta-chain")
Example #16
def download_hls_stream(master_playlist_uri, id='.', num_workers=10, refresh_interval=0, num_refreshes=1):
    '''
    Download an HLS stream to the local folder indicated by id
    @param master_playlist_uri
    @param id Default: CWD
    @param num_workers Number of downloader workers
    @param refresh_interval unit: second, Default 0
    @param num_refreshes Default 1
    '''
    print('master_playlist_uri: {master_playlist_uri}'.format(master_playlist_uri=master_playlist_uri))
    local_root = id

    print('downloading {uri} to {local}'.format(uri=master_playlist_uri, local=local_root))
    old_cwd = util.chdir(local_root)

    downloader.start(num_workers)

    for r in range(0, num_refreshes):
        master_playlist = download_master_playlist(master_playlist_uri)

        if master_playlist.is_variant:
            host_root, subpath, master_playlist_file = util.parse_uri(master_playlist_uri)
            # download resource from stream playlist
            for playlist in master_playlist.playlists:
                if util.is_full_uri(playlist.uri):
                    download_stream(playlist.uri)
                else:
                    playlist_uri = host_root + '/' + subpath + '/' + playlist.uri
                    download_stream(playlist_uri, os.path.dirname(playlist.uri))

            # download resource from media
            for m in master_playlist.media:
                if not m.uri:
                    continue
                if util.is_full_uri(m.uri):
                    download_stream(m.uri)
                else:
                    media_uri = host_root + '/' + subpath + '/' + m.uri
                    download_stream(media_uri, os.path.dirname(m.uri))
        else:
            download_stream(master_playlist_uri)

        if refresh_interval:
            print('Refreshing the master playlist in {interval} seconds'.format(interval=refresh_interval))

        time.sleep(refresh_interval)

    downloader.join()
    downloader.stop()
    os.chdir(old_cwd)
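Note that this example does not use a with-block: here util.chdir changes directory immediately and returns the previous working directory, which the caller restores by hand with os.chdir(old_cwd). A plausible sketch of that variant (an assumption; the helper is not shown):

import os

def chdir(path):
    # Enter `path` and hand back the old cwd so the caller can restore it later.
    old_cwd = os.getcwd()
    os.chdir(path)
    return old_cwd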
Example #17
def systemRefresh():
    """
    Up
    """
    log('UPDATING SYSTEM SPICE')

    chdir('/home/ec2-user/spicerackclient')
    cmd('git pull')
    cmd('tar -cvf /home/ec2-user/system.tar system')
    chdir('/home/ec2-user')
    
    currentversion = dget('systemversion', 0)
    currentversion = 1 + int(currentversion)
    put('systemversion', currentversion)

    with open('systemversion.txt', 'w') as f:
        f.write(str(currentversion))

    cmd('tar --append --file=system.tar systemversion.txt')

    log('UPDATED SYSTEM SPICE TO VERSION: %s' % currentversion)
    return 'success'
Example #18
def hard_link_in_cwd(filename):
    with util.directory_created("hardlink_scratch") as directory:
        with util.chdir(directory):
            util.touch("a")
            os.link("a", "b")
            with util.archive_created(
                    os.path.join("..", "hardlink_archive.xar"), ".") as path:
                with util.directory_created(
                        os.path.join("..", "hardlink_extracted")) as extracted:
                    subprocess.check_call(
                        ["xar", "-x", "-C", extracted, "-f", path])
                    _assert_same_inodes(os.path.join(extracted, "a"),
                                        os.path.join(extracted, "b"))
Example #19
def coalesce_heap(filename):
	with util.directory_created("scratch") as directory:
		shutil.copy("/bin/ls", os.path.join(directory, "ls"))
		shutil.copy(os.path.join(directory, "ls"), os.path.join(directory, "foo"))
		with util.chdir(directory):
			with util.archive_created(os.path.join("..", "heap.xar"), ".", "--coalesce-heap") as path:
				# Verify file offsets are as we expect
				offsets = _file_offsets_for_archive(path, os.path.join("..", "heap1.xsl"))
				(f1, o1) = offsets[0]
				(f2, o2) = offsets[1]
				
				# Make sure extraction goes all right
				with util.directory_created("extracted") as extracted:
					subprocess.check_call(["xar", "-x", "-f", path, "-C", extracted])
Example #20
    def _configure(self, reconfigure):
        configured_flag = self.j('{build_dir}', '.configured')
        if self.exists(configured_flag):
            if reconfigure:
                logger.info("{pkg_name} already configured. Reconfiguring.".format(**self.config))
                os.unlink(configured_flag)
            else:
                logger.info("{pkg_name} already configured. Continuing".format(**self.config))
                return
        self.ensure_dir('{build_dir}')
        with chdir(self.config['build_dir']):
            self.configure()

        touch(configured_flag)
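The .configured sentinel is created with a touch helper once configuration succeeds; a typical implementation (assumed, not shown in the source):

import os

def touch(path):
    # Create the file if missing and update its modification time.
    with open(path, 'a'):
        os.utime(path, None)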
Example #21
    def _run_attempt(self, rep_num, retry_num):
        if rep_num > 1 or retry_num > 1:
            # not the first time we run; sleep to give the yarn cluster time to "recuperate"
            logger.info("Sleeping %d seconds between runs", GlobalConf['sleep_between_runs'])
            time.sleep(GlobalConf['sleep_between_runs'])

        run_dir = os.path.join(self._results_dir, "rep_{:d}_attempt_{:d}".format(rep_num, retry_num))
        logger.debug("Making run directory %s for repetition %s / attempt %s", run_dir, rep_num, retry_num)
        os.makedirs(run_dir)

        with chdir(run_dir):
            logger.info("Starting attempt")
            attempt_info = self._workflow.execute()
        attempt_info.repeat_num = rep_num
        attempt_info.attempt_num = retry_num
        return attempt_info
Example #22
def normal_heap(filename):
	with util.directory_created("scratch") as directory:
		shutil.copy("/bin/ls", os.path.join(directory, "ls"))
		shutil.copy(os.path.join(directory, "ls"), os.path.join(directory, "foo"))
		with util.chdir(directory):
			with util.archive_created(os.path.join("..", "heap.xar"), ".") as path:
				# Verify file offsets are as we expect
				offsets = _file_offsets_for_archive(path, os.path.join("..", "heap1.xsl"))
				(f1, o1) = offsets[0]
				(f2, o2) = offsets[1]
				
				assert o1 < o2, "offset for first file \"{f1}\" ({o1}) greater than or equal to offset for last file \"{f2}\" ({o2})".format(f1=f1, o1=o1, f2=f2, o2=o2)
				
				# Make sure extraction goes all right
				with util.directory_created("extracted") as extracted:
					subprocess.check_call(["xar", "-x", "-f", path, "-C", extracted])
Example #23
def main():

    with util.chdir(os.path.join(dirname, '../')):
        serverStatus = checkServer()

        if '--local' in sys.argv:
            if serverStatus == 1:
                print("Please stop MZBench server before running local mode")
                sys.exit()
            run_command = ['../../bin/mzbench', 'run_local']
        else:
            if serverStatus == -1:
                print("Non-MZBench server is listening on 4800")
                sys.exit()
            if serverStatus == 0:
                if 'MZBENCH_REPO' in os.environ:
                    mzbench_git_param = '{{mzbench_git, "{0}"}}'.format(os.environ['MZBENCH_REPO'])
                else:
                    mzbench_git_param = ''

                with open(dirname + '/mzbench_server.config', 'w') as config:
                    config.write('[{{mzbench_api, [{0}]}}].'.format(mzbench_git_param))

                subprocess.check_call(['../../bin/mzbench', 'start_server', '--config', dirname + '/mzbench_server.config'])

            run_command = ['../../bin/mzbench', 'run']

        with server.background_server():
            subprocess.check_call(
                run_command +
                [ 'examples/http_post.erl'
                , '--env=host=' + hostname
                , '--env=port=' + str(server.port)
                , '--env=max_rps=2'
                , '--env=endpoint=/update-db'
                ])
            subprocess.check_call(
                run_command +
                [ 'examples/http_get.erl'
                , '--env=host=' + hostname
                , '--env=port=' + str(server.port)
                , '--env=max_rps=2'
                , '--env=endpoint=/index.html'
                ])
        if ('--local' not in sys.argv) and (serverStatus == 0):
            subprocess.check_call(['../../bin/mzbench', 'stop_server'])
Example #24
def main():

    with util.chdir(os.path.join(dirname, '../')):
        serverStatus = checkServer()

        if '--local' in sys.argv:
            if serverStatus == 1:
                print("Please stop MZBench server before running local mode")
                sys.exit()
            run_command = ['../../bin/mzbench', 'run_local']
        else:
            if serverStatus == -1:
                print("Non-MZBench server is listening on 4800")
                sys.exit()
            if serverStatus == 0:
                if 'MZBENCH_REPO' in os.environ:
                    mzbench_git_param = '{{mzbench_git, "{0}"}}'.format(
                        os.environ['MZBENCH_REPO'])
                else:
                    mzbench_git_param = ''

                with open(dirname + '/mzbench_server.config', 'w') as config:
                    config.write(
                        '[{{mzbench_api, [{0}]}}].'.format(mzbench_git_param))

                subprocess.check_call([
                    '../../bin/mzbench', 'start_server', '--config',
                    dirname + '/mzbench_server.config'
                ])

            run_command = ['../../bin/mzbench', 'run']

        with server.background_server():
            subprocess.check_call(run_command + [
                'examples/http_post.erl', '--env=host=' +
                hostname, '--env=port=' + str(server.port), '--env=max_rps=2',
                '--env=endpoint=/update-db'
            ])
            subprocess.check_call(run_command + [
                'examples/http_get.erl', '--env=host=' +
                hostname, '--env=port=' + str(server.port), '--env=max_rps=2',
                '--env=endpoint=/index.html'
            ])
        if ('--local' not in sys.argv) and (serverStatus == 0):
            subprocess.check_call(['../../bin/mzbench', 'stop_server'])
Example #25
def _run_converter_and_yarn_session(input_dir, output_dir, n_nodes, jar_path):
    # setup properties file
    run_dir = tempfile.mkdtemp(prefix="bclconverter_run_dir")
    try:
        # start by preparing the properties file (at the moment the program
        # doesn't accept command-line arguments)
        tmp_conf_dir = os.path.join(run_dir, "conf")
        os.makedirs(tmp_conf_dir)
        props_file = os.path.join(tmp_conf_dir, GlobalConf['props_filename'])
        with open(props_file, 'w') as f:
            f.write("root = {}/\n".format(input_dir.rstrip('/')))
            f.write("fout = {}/\n".format(output_dir.rstrip('/')))
            f.write("numTasks = {:d}\n".format(GlobalConf['tasksPerNode'] * n_nodes))
            f.write("flinkpar = {:d}\n".format(GlobalConf['flinkpar']))
            f.write("jnum = {:d}\n".format(GlobalConf['jnum']))

        logger.info("Wrote properties in file %s", props_file)
        if logger.isEnabledFor(logging.DEBUG):
            with open(props_file) as f:
                logger.debug("\n=============================\n%s\n=====================\n", f.read())
        # now run the program
        logger.debug("Running flink cwd %s", run_dir)
        cmd = [ get_exec("flink"), "run",
                "-m", "yarn-cluster",
                 '-yn',  n_nodes,
                 '-yjm', GlobalConf['job_manager_mem'], # job manager memory
                 '-ytm', GlobalConf['task_manager_mem'], # task manager memory
                 '-ys',  GlobalConf['slots'],
                "-c", "bclconverter.bclreader.test", # class name
                jar_path ]
        logger.debug("executing command: %s", cmd)
        with chdir(run_dir):
            logger.debug("In CWD, where we're going to run flink")
            logger.debug("cat conf/bclconverter.properties gives:")
            subprocess.check_call("cat conf/bclconverter.properties", shell=True)
            logger.debug("Now running flink")
            subprocess.check_call(map(str, cmd), cwd=run_dir)
    finally:
        logger.debug("Removing run directory %s", run_dir)
        try:
            shutil.rmtree(run_dir)
        except IOError as e:
            logger.debug("Error cleaning up temporary dir %s", run_dir)
            logger.debug(str(e))  # IOError has no .message attribute in Python 3
Example #26
    def _run_attempt(self, rep_num, retry_num):
        if rep_num > 1 or retry_num > 1:
            # not the first time we run; sleep to give the yarn cluster time to "recuperate"
            logger.info("Sleeping %d seconds between runs",
                        GlobalConf['sleep_between_runs'])
            time.sleep(GlobalConf['sleep_between_runs'])

        run_dir = os.path.join(
            self._results_dir,
            "rep_{:d}_attempt_{:d}".format(rep_num, retry_num))
        logger.debug("Making run directory %s for repetition %s / attempt %s",
                     run_dir, rep_num, retry_num)
        os.makedirs(run_dir)

        with chdir(run_dir):
            logger.info("Starting attempt")
            attempt_info = self._workflow.execute()
        attempt_info.repeat_num = rep_num
        attempt_info.attempt_num = retry_num
        return attempt_info
Example #27
def download_song(url):
    if TESTING:
        return True

    # remove previous song if it exists
    if os.path.exists(get_song_path()):
        os.remove(get_song_path())

    # download and move to correct play path (for some versions only)
    with util.chdir(util.BASE_PATH):
        logger.info("Downloading {}".format(url))
        ydl_opts = {
            'format': 'bestaudio/best',
            'outtmpl': 'song.mkv',
            'noplaylist': True
        }
        ydl = youtube_dl.YoutubeDL(ydl_opts)
        ydl.download([url])
    song_path = get_song_path()
    logger.info("Downloaded %s to %s" % (url, song_path))
    return os.path.exists(get_song_path())
Example #28
def mkrepo():
    with chdir(Config.repo):
        # collects the Package metadata
        # https://bugs.debian.org/cgi-bin/bugreport.cgi?bug=299035
        awkcheck = 'BEGIN{ok=1} { if ($0 ~ /^E: /) { ok = 0 }; print } END{exit !ok}'
        run(f'apt-ftparchive packages . | awk {shlex.quote(awkcheck)} > Packages')

        run('rm -rf by-hash')
        run('bzip2 -kf Packages')
        run('apt-ftparchive -o APT::FTPArchive::AlwaysStat="true" -o APT::FTPArchive::Release::Codename=./ -o APT::FTPArchive::Release::Acquire-By-Hash="yes" release . > Release')
        run(f'gpg --yes -abs --local-user {Config.maintainer.gpgkey} -o Release.gpg --digest-algo sha256 Release')
        run(f'gpg --yes -abs --local-user {Config.maintainer.gpgkey} --clearsign -o InRelease --digest-algo sha256 Release')

        for hsh in ['MD5Sum', 'SHA1', 'SHA256', 'SHA512']:
            run(f'mkdir -p by-hash/{hsh}')
            for pkg in ['Packages', 'Packages.bz2']:
                run(f'cp {pkg} by-hash/{hsh}/`{hsh.lower().replace("sum", "")}sum {pkg} '
                    + " | awk '{print $1}'`")
Example #29
    def parse(self):
        with util.chdir(self.path):
            with open("Dockerfile") as f:
                self.lines.append("\n\n########## {0}".format(self.name))
                for line in (raw.strip() for raw in f):
                    try:
                        tokens = line.strip().split()
                        token = tokens.pop(0).lower()
                        text = " ".join(tokens)
                    except (IndexError, AttributeError):
                        continue
                    if token == 'from':
                        self.parent = text
                        #line = "##" + line
                        continue
                    if token == 'cmd':
                        if 'supervisord' in text:
                            print("!! dropping supervisord invocation from {0}".format(self.path), file=sys.stderr)
                        else:
                            self.cmd = text
                        line = "#% " + line
                    if token == 'entrypoint':
                        self.entrypoint = text
                        line = "#% " + line
                    if token == 'add':
                        # rewrite line with qualified path
                        src, dst = text.split()
                        rewritten = join(self.path, src)
                        self.paths.append(rewritten)
                        line = "ADD {0} {1}".format(rewritten, dst)
                    if token == 'env':
                        self.env.append(text)
                    if token == 'volume':
                        self.volumes.append(text)
                    if token == 'expose':
                        self.ports.extend(text.split())

                    self.lines.append(line)
Example #30
    files = [f for f in os.listdir('.') if os.path.isfile(f)]
    op_file = open('all_graphs.html', 'w')
    for filename in files:
        if filename.endswith(".html") and filename != "all_graphs.html":
            ip_file = open(filename, 'r')
            while True:
                data = ip_file.read(100000)
                if data == '':
                    break
                op_file.write(data)
            ip_file.close()
    op_file.close()
             
    
#read each file in paths folder
util.chdir(PATHS_FOLDER)
all_paths = []
all_xdata = []
all_hover_text = []
#all_colors = []
max_y_axis = 0
min_x_axis = 0
max_x_axis = 0
node_ids = []
files = [f for f in os.listdir('.') if os.path.isfile(f)]
for filename in files:
    if filename.endswith(".log"):
        und_index = filename.index('_')
        dot_index = filename.index('.')
        src_node_id = filename[und_index+1:dot_index]
        #paths, hover_text, colors, file_max_y_axis = generate_graph_data(filename)
Example #31
def spiceMerge(spices, username):
    """
    Manages the merger of a spice into the development repository.

    @param spices: list of spice directory names
    """
    log('[spicemanager] In func:spiceMerge')
    chdir('/home/ec2-user/spicerackdev')
    cmd('git pull')
    chdir('/home/ec2-user')

    payload = {}
    for dirname in spices:
        with open('/home/ec2-user/spicerackdev/%s/metadata.json' % dirname) as f:
            data = json.load(f)
        spicename = data['name'] if 'name' in data else data['spicename']

        mid = get('spice-id:%s' % spicename.replace(' ', '-'))
        log('[spicemanager] Processing %s %s' % (spicename, mid))
        log('[spicemanager] Spice %s has mid %s' % (spicename.replace(' ', '-'), mid))
        if mid is None:
            mid = str(uuid.uuid4())
            put('spice-id:%s' % spicename.replace(' ', '-'), mid)

        version = 1 + int(dget('version:%s' % mid, 0))
        put('version:%s' % mid, version)
        log('[spicemanager] Version: %s' % version)

        payload[mid] = {
            'spicename' : data['name'] if 'name' in data else data['spicename'],
            'gituser' : username,
            'author' : data['author'],
            'tagline' : data['description'],
            'description' : data['description'],
            'version' : version,
            'hotkey' : False,
            'main' : data['main']
            }
        
        if 'height' in data:
            payload[mid]['height'] = data['height']
        if 'width' in data:
            payload[mid]['width'] = data['width']

        put('metadata:%s' % mid, payload[mid])
        
        with open('/home/ec2-user/spicerackdev/%s/metadata.json' % dirname, 'w') as f:
            json.dump(payload[mid], f)

        if 'height' in payload[mid]:
            del payload[mid]['height']
        if 'width' in payload[mid]:
            del payload[mid]['width']

        # Tar the spice directory into /spices/{mid}.tar.gz
        cmd('tar -cvf /home/ec2-user/spices/%s.tar.gz /home/ec2-user/spicerackdev/%s' % (mid, dirname))
        
        # Copy the icon file into /icons
        iconpath = '/home/ec2-user/spicerackdev/%s/icon.png' % dirname
        cmd('cp -f %s /home/ec2-user/icons/%s.png' % (iconpath, mid))
        
        iconpath = '/home/ec2-user/%s/icon.png' % dirname
        if not os.path.exists(iconpath):
            cmd('tar -cvf /home/ec2-user/spices/%s.tar.gz /home/ec2-user/spicerackdev/%s' % (mid, dirname))
        else:
            iconpath = os.path.dirname(iconpath)  # str.strip removes characters, not a suffix
            cmd('tar -cvf /home/ec2-user/spices/%s.tar.gz /home/ec2-user/spicerackdev/%s --directory=%s icon.png' % (mid, dirname, iconpath))        

        chdir('/home/ec2-user/spicerackdev')
        cmd('git stash')
        chdir('/home/ec2-user')

        resp, content = send_post(payload, 'spices/refresh')
        log('[spicemanager] Heroku response: %s' % str(resp))

    return 'success'
Example #32
#!/usr/bin/env python

"""
Start

"""

import os, json, socket, sys, util

path = os.path.split(os.path.realpath(__file__))[0]; os.chdir(path); sys.path.insert(0, path)

os.environ['DEVEL']='yes'
os.environ['PGHOST']=os.path.join(path, 'postgres_data/socket')

util.chdir()

ports    = util.get_ports()
base_url = util.base_url(ports['hub-share-2'])

print('''\n\nBASE URL: {}\n\n'''.format(base_url))

share_path = os.path.join(os.environ['SMC_ROOT'], 'data/projects/[project_id]')

cmd = "cd ../../ && . smc-env &&  service_hub.py --share_path={share_path} --foreground --hostname=0.0.0.0 --port=0 --share_port={share_port} --proxy_port=0 --gap=0 --base_url={base_url} start".format(
    base_url   = base_url,
    share_port = ports['hub-share-2'],
    share_path = share_path)

util.cmd(cmd)

Example #33
#!/usr/bin/env python

import os, sys

path = os.path.split(os.path.realpath(__file__))[0]; os.chdir(path); sys.path.insert(0, path)

import util

util.chdir()
util.cmd("cd ../../; npm run webpack-watch-map")
Example #34
def muta_config():
    c_poolsize = get_int("poolsize", 200000)
    c_timeout_gap = get_int("timeout_gap", 999999)
    c_cycles_limit = get_int("cycles_limit", 630000000)
    c_tx_num_limit = get_int("tx_num_limit", 30000)

    with open("./res/muta_config_request.toml") as f:
        face_config_list = toml.load(f)["node"]

    with util.chdir(conf.config["muta"]["path"]):
        r = subprocess.getoutput(
            f"./build/muta-keypair -n {len(face_config_list)}")
        with open("./build/keypairs.json", "w") as f:
            f.write(r)
        keypairs = json.loads(r)

        assert "common_ref" in keypairs
        for e in keypairs["keypairs"]:
            assert "private_key" in e
            assert "public_key" in e
            assert "address" in e
            assert "bls_public_key" in e

        genesis = toml.load(conf.config["muta"]["genesis_template"])
        assert genesis["services"][1]["name"] == "metadata"
        payload = json.loads(genesis["services"][1]["payload"])
        payload["common_ref"] = keypairs["common_ref"]
        payload["timeout_gap"] = c_timeout_gap
        payload["cycles_limit"] = c_cycles_limit
        payload["tx_num_limit"] = c_tx_num_limit
        payload["verifier_list"] = []

        for i, e in enumerate(keypairs["keypairs"]):
            if i >= (len(face_config_list) -
                     conf.config["muta"]["sync_node_number"]):
                break
            a = {
                "bls_pub_key": e["bls_public_key"],
                "address": e["address"],
                "propose_weight": 1,
                "vote_weight": 1,
            }
            payload["verifier_list"].append(a)
        genesis["services"][1]["payload"] = json.dumps(payload)
        with open("./build/genesis.toml", "w") as f:
            toml.dump(genesis, f)

        node_config_raw = toml.load(conf.config["muta"]["config_template"])
        for i in range(len(face_config_list)):
            node_config = copy.deepcopy(node_config_raw)
            host_config = face_config_list[i]
            keypair = keypairs["keypairs"][i]
            node_config["privkey"] = keypair["private_key"]
            node_config["data_path"] = host_config["data"]
            node_config["graphql"]["listening_address"] = "0.0.0.0:" + str(
                host_config["api_port"])
            node_config["network"]["listening_address"] = "0.0.0.0:" + str(
                host_config["p2p_port"])
            node_config["logger"]["log_path"] = os.path.join(
                host_config["data"], "logs")
            node_config["mempool"]["pool_size"] = c_poolsize
            node_config["network"]["bootstraps"] = [{
                "pubkey":
                keypairs["keypairs"][0]["public_key"],
                "address":
                face_config_list[0]["host"] + ":" +
                str(face_config_list[0]["p2p_port"]),
            }]

            with open(f"./build/config_{i+1}.toml", "w") as f:
                toml.dump(node_config, f)
Example #35
#args: path to binary(p),binary name(b),arguments to binary(a)
parser = argparse.ArgumentParser(description='Differential profiling of task parallel programs to identify secondary effects')
parser.add_argument('-p', type=str, default='.',  help='path/to/input/program/executable. Absolute path, not relative path')
parser.add_argument('-b', type=str, help='Executable file name')
#parser.add_argument('-a', type=str, default='', help='argument string of input program')

args = parser.parse_args()

ref_cwd = os.getcwd()

regions = {}

cp = 0

util.chdir(args.p)
exists = os.path.isfile('diff_profile.csv')
if exists:
    util.run_command("rm diff_profile.csv", verbose=False)

#remove step_work.csv sc_ov.csv and step_nodes_serial
util.run_command("rm -rf step_work_*.csv sched_ov_*.csv step_nodes_serial", verbose=False)  
for counter in counter_args:
    #run serial program
    #run_string = "taskset -c 0 ./" + args.b + " " + args.a + " -- " + counter
    run_string = "./" + args.b + " 1 -- " + counter
    util.run_command(run_string, verbose=False)

    #mkdir step_nodes_serial
    util.run_command("mkdir step_nodes_serial", verbose=False)
    util.run_command("mv *.csv step_nodes_serial", verbose=False)
    "-r 1 -o /tmp/ofile564664_442514 -t 16 ../graphData/data/randLocalGraph_J_5_10000000",
    "-r 1 -o /tmp/ofile470293_748866 -t 16 ../graphData/data/randLocalGraph_J_5_10000000",
    "-r 1 -o /tmp/ofile685095_551810 -t 16 ../graphData/data/rMatGraph_E_5_10000000",
    "-r 1 -o /tmp/ofile897171_477798 -t 16 ../graphData/data/rMatGraph_WE_5_10000000",
    "-r 1 -o /tmp/ofile780084_677212 -t 16 ../graphData/data/randLocalGraph_E_5_10000000",
    "-r 1 -o /tmp/ofile974877_207802 -t 16 ../geometryData/data/3DonSphere_1000000",
]

# BTRACK_CONFIG = "./configure --enable-tbb --disable-threads --disable-openmp --prefix=" + ref_cwd + "/bodytrack CXXFLAGS=\"-O3 -funroll-loops -fprefetch-loop-arrays -fpermissive -fno-exceptions -static-libgcc -Wl,--hash-style=both,--as-needed -DPARSEC_VERSION=3.0-beta-20150206 -fexceptions -I" + TBBROOT + "/include -I" + TP_ROOT + "/include\" LDFLAGS=\"-L" + TBBROOT + "\obj -L" + TP_ROOT + "/obj\" LIBS=\"-ltbb -ltprof\" VPATH=\".\""

f = open('runtimes_prof_ser_FINAL.txt', 'w')

for config in configs:
    util.log_heading(config["NAME"], character="-")
    if do_clean:
        util.chdir(TP_ROOT)
        util.run_command("make clean", verbose=False)
        util.chdir(TBBROOT)
        util.run_command("make clean", verbose=False)
    if do_build:
        util.chdir(TP_ROOT)
        util.run_command("make", verbose=False)
        util.chdir(TBBROOT)
        util.run_command("make", verbose=False)
    #util.chdir(PARSEC_ROOT)

    for benchmark in benchmarks:
        util.chdir(ref_cwd)
        runtimes = []
        for i in range(0, config["NUM_RUNS"]):
            try:
Example #37
total_count = 0
failed_count = 0
failed_tests = []
passed_count = 0
crashed_count = 0
crashed_tests = []

ref_cwd = os.getcwd()
arch = platform.machine()
full_hostname = platform.node()
hostname = full_hostname

for config in configs:
    util.log_heading(config["NAME"], character="-")
    if do_clean:
        util.chdir(TD_ROOT)
        util.run_command("make clean", verbose=False)
        util.chdir(TBBROOT)
        util.run_command("make clean", verbose=False)
    if do_build:
        util.chdir(TD_ROOT)
        util.run_command("make", verbose=False)
        util.chdir(TBBROOT)
        util.run_command("make", verbose=False)
    util.chdir(ref_cwd)
    
    try:
        clean_string = config["CLEAN_LINE"]
        util.run_command(clean_string, verbose=False)
    except Exception:
        print("Clean failed")