Example #1
def setup_container(builddir, container):
    with local.cwd(builddir):
        container_filename = str(container).split(os.path.sep)[-1]
        container_in = os.path.join("container-in", container_filename)
        Copy(container, container_in)
        uchroot = uchroot_no_args()

        with local.cwd("container-in"):
            uchroot = uchroot["-E", "-A", "-u", "0", "-g", "0", "-C", "-r",
                              "/", "-w", os.path.abspath("."), "--"]

        # Check if we need erlent support for this archive.
        has_erlent = bash[
            "-c", "tar --list -f './{0}' | grep --silent '.erlent'".format(
                container_in)]
        has_erlent = (has_erlent & TF)  # & TF runs the command and returns success as a bool

        # Unpack input container to: container-in
        if not has_erlent:
            cmd = local["/bin/tar"]["xf"]
            cmd = uchroot[cmd[container_filename]]
        else:
            cmd = tar["xf"]
            cmd = cmd[os.path.abspath(container_in)]

        with local.cwd("container-in"):
            cmd("--exclude=dev/*")
        rm(container_in)
    return os.path.join(builddir, "container-in")
Example #2
 def setup_prepare(self):
     """Unpacks loads local docker images and generates the topology.
     """
     # Delete old artifacts, if any.
     cmd.rm("-rf", self.test_state.artifacts)
     cmd.mkdir(self.test_state.artifacts)
     print("artifacts dir: %s" % self.test_state.artifacts)
     for tar in self.test_state.containers_tars:
         print(cmd.docker("image", "load", "-i", tar))
     for loader in self.test_state.container_loaders:
         parts = loader.split("#")
         if len(parts) != 2:
             logger.error("Invalid container loader argument: %s, ignored" %
                          loader)
             continue
         tag, script = parts[0], parts[1]
         o = subprocess.check_output([script]).decode("utf-8")
         idx = o.index("as ")
         if idx < 0:
             logger.error("extracting tag from loader script %s" % loader)
             continue
         bazel_tag = o[idx + len("as "):].strip()
         logger.info("docker tag %s %s" % (bazel_tag, tag))
         subprocess.run(["docker", "tag", bazel_tag, tag], check=True)
     # Define where coredumps will be stored.
     print(
         cmd.docker("run", "--rm", "--privileged", "alpine", "sysctl", "-w",
                    "kernel.core_pattern=/share/coredump"))
     self._setup_generate()
Example #3
def source_required(src_file, src_root):
    """
    Check if a download is required.

    Args:
        src_file: The filename to check for.
        src_root: The path we find the file in.

    Returns:
        True if we need to download something, False otherwise.
    """
    from os import path

    # Check if we need to do something
    src_dir = path.join(src_root, src_file)
    hash_file = path.join(src_root, src_file + ".hash")

    required = True
    if path.exists(src_dir) and path.exists(hash_file):
        new_hash = get_hash_of_dirs(src_dir)
        with open(hash_file, 'r') as h_file:
            old_hash = h_file.readline()
        required = new_hash != old_hash
        if required:
            from plumbum.cmd import rm
            rm("-r", src_dir)
            rm(hash_file)
    return required
Example #4
    def download(self):
        super(PrepareStage3, self).download()

        with local.cwd(self.builddir + "/usr"):
            Wget(self.src_uri_portage, self.src_file_portage)
            run(tar["xfj", self.src_file_portage])
            rm(self.src_file_portage)
Example #5
    def configure(self):
        from plumbum.cmd import mkdir, rm
        sandbox_dir = path.join(self.builddir, "run")
        if path.exists(sandbox_dir):
            rm("-rf", sandbox_dir)

        mkdir(sandbox_dir)
Example #6
def main(options):
    tool = options.tool
    bunzip2 = local['bunzip2']
    bzip2 = local['bzip2']
    tar = local['tar']

    for project, v in iter_versions(options.restrict_project, options.restrict_version, old=True, minimum=True):
        tarname = get_name_of_tar(tool, project)
        with tarfile.open(str(tarname), "r") as t:

            def mapping(inv, v=v):
                assert inv == v
                _, _, result = d4()('match-commits', '-p', project, '-v', '{0}f'.format(inv),  '-c', 'fse-commit-dbs').rstrip().partition('-> ')
                return int(result)


            for source, dest in get_extract_list(tool, project, v, mapping=mapping):
                alt_source = source[:-len('.bak')]
                try:
                    extract(t, source, OUT_DIR / dest)
                    # check for broken files
                    fixed_file = t.extractfile(alt_source)
                    with tarfile.open(fileobj=fixed_file) as t_fixed:
                        broken_files = [name[:-len('.broken')] for name in t_fixed.getnames() if name.endswith('.broken')]
                    fixed_file.close()

                    # now we have to remove the broken files from the archive
                    if broken_files:
                        plain_name = str(OUT_DIR / dest)[:-len('.bz2')]
                        bunzip2(str(OUT_DIR / dest))

                        # get number of .java currently in archive
                        with tarfile.open(plain_name) as t_current:
                            java_files = [name for name in t_current.getnames()
                                if name.endswith('.java') and not name.endswith('_scaffolding.java')]

                        if len(broken_files) == len(java_files):
                            # we are going to remove them all.
                            rm(plain_name)
                            touch(plain_name[:-len('.tar')] + '.empty')

                        else:
                            for broken_file in broken_files:
                                tar('--delete', '-f', plain_name, './' + broken_file)
                            bzip2(plain_name)

                        print "+ {source} -> {dest} ({broken} / {total} broken)".format(
                                source=source, dest=dest, broken=len(broken_files), total=len(java_files)
                            )
                    else:
                        print "+ {source} -> {dest} (none broken)".format(source=source, dest=dest)

                except KeyError:
                    try:
                        # no .bak file was ever created, so we are good.
                        extract(t, alt_source, OUT_DIR / dest)
                        print "* {source} -> {dest} (as is)".format(source=alt_source, dest=dest)
                    except KeyError:
                        print "- {source} -> missing".format(source=alt_source)
                        touch(str(OUT_DIR / dest)[:-len('.tar.bz2')] + '.missing')
Example #7
    def generate_flamegraph(self, runnum):
        fl_log = os.path.join(self.get_res_dir(), "fg_log.txt")

        # stackcollapse
        st_pl = os.path.join(self.pc["fg_path"], "stackcollapse-perf.pl")
        perfdata = self.get_perfdata_path(runnum)
        folded = "out.perf-folded"

        fold = cat["out-{0}.perf-script".format(
            runnum)] | local[st_pl] > folded
        self.log("Running {}".format(fold))
        # plumbum doesn't seem to allow append-redirecting stderr to a file,
        # so we capture it and write it to the log manually.
        retcode, out, fold_stderr = fold.run(retcode=None)
        (echo[fold_stderr] >> fl_log)()

        # Remove perf.data if requested.
        if self.pc.get("rmperfdata") == "true":
            rm("-f", perfdata)
        if retcode != 0:
            self.log("stackcollapse-perf failed, check out fg_log.txt")
            return

        # and generate
        fl_pl = os.path.join(self.pc["fg_path"], "flamegraph.pl")
        svg = os.path.join(self.get_res_dir(),
                           "{0}-{1}.svg".format(self.query, runnum))
        fl = local[fl_pl][folded] > svg
        self.log("Running {}".format(fl))
        retcode, out, fl_stderr = fl.run(retcode=None)
        (echo[fl_stderr] >> fl_log)()
        rm(folded)
        if retcode != 0:
            self.log("flamegraph failed, check out fg_log.txt")
Example #8
 def __call__(self):
     if not CFG['clean'].value():
         return
     if not self._obj:
         return
     obj_builddir = os.path.abspath(self._obj.builddir)
     if os.path.exists(obj_builddir):
         rm("-rf", obj_builddir)
    def handleSystemImage(self, filename):
        logger.debug('simg2img: convert %s to system.ext4.img' % filename)
        simg2img = local['./simg2img/simg2img']
        simg2img(filename, 'system.ext4.img')
        logger.debug('done')

        self.handleSystemExt4Image('system.ext4.img')
        rm('-rf', 'system.ext4.img')
Example #10
    def build(self):
        from plumbum.cmd import make, rm
        from benchbuild.utils.run import run
        povray_dir = path.join(self.builddir, self.src_dir)
        povray_binary = path.join(povray_dir, "unix", self.name)

        with local.cwd(povray_dir):
            rm("-f", povray_binary)
            run(make["clean", "all"])
Example #11
def sif():
    sif = "support/sif"
    os.makedirs(sif, exist_ok=True)
    with local.cwd(sif):
        wget(
            "https://github.com/frankier/finn-wsd-eval/releases/download/bins/sif.zip"
        )
        unzip("sif.zip")
        rm("sif.zip")
Example #12
def un_block():
	local.cwd.chdir(home)
	local.cwd.chdir("../../private/etc/")
	touch("hosts")
	rm("hosts")
	touch("hosts")
	f = open("hosts", "r+")
	f.write(hosts_default)
	killall["-HUP", "mDNSResponder"]
Example #13
def clean_directories(builddir, in_dir=True, out_dir=True):
    with local.cwd(builddir):
        if in_dir and os.path.exists("container-in") and ask(
                "Should I delete '{0}'?".format(os.path.abspath(
                    "container-in"))):
            rm("-rf", "container-in")
        if out_dir and os.path.exists("container-out") and ask(
                "Should I delete '{0}'?".format(os.path.abspath(
                    "container-out"))):
            rm("-rf", "container-out")
Example #14
    def download(self):
        from pprof.settings import config
        with local.cwd(self.builddir):
            Wget(self.src_uri, self.src_file)

            cp(config["sourcedir"] + "/bin/uchroot", "uchroot")
            run(fakeroot["tar", "xfj", self.src_file])
            rm(self.src_file)
            with local.cwd(self.builddir + "/usr"):
                Wget(self.src_uri_portage, self.src_file_portage)
                run(tar["xfj", self.src_file_portage])
                rm(self.src_file_portage)
Example #15
def run_with_likwid(project, experiment, config, jobs, run_f, args, **kwargs):
    """
    Run the given file wrapped by likwid.

    Args:
        project: The pprof.project.
        experiment: The pprof.experiment.
        config: The pprof.settings.config.
        jobs: Number of cores we should use for this execution.
        run_f: The file we want to execute.
        args: List of arguments that should be passed to the wrapped binary.
        **kwargs: Dictionary with our keyword args. We support the following
            entries:

            project_name: The real name of our project. This might not
                be the same as the configured project name, if we got wrapped
                with ::pprof.project.wrap_dynamic
            has_stdin: Signals whether we should take care of stdin.
    """
    from pprof.settings import config as c
    from pprof.utils import run as r
    from pprof.utils.db import persist_likwid, persist_config
    from pprof.likwid import get_likwid_perfctr
    from plumbum.cmd import rm

    c.update(config)
    project_name = kwargs.get("project_name", project.name)
    likwid_f = project_name + ".txt"

    for group in ["CLOCK"]:
        likwid_path = path.join(c["likwiddir"], "bin")
        likwid_perfctr = local[path.join(likwid_path, "likwid-perfctr")]
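        # likwid-perfctr flags (see the likwid docs): -O CSV output, -o output
        # file, -m use the marker API, -C pin to cores 0..jobs, -g event group.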
        run_cmd = \
            likwid_perfctr["-O", "-o", likwid_f, "-m",
                           "-C", "0-{:d}".format(jobs),
                           "-g", group, run_f]
        run_cmd = r.handle_stdin(run_cmd[args], kwargs)

        with local.env(POLLI_ENABLE_LIKWID=1):
            run, session, _, _, _ = \
                r.guarded_exec(run_cmd, project_name, experiment.name,
                               project.run_uuid)

        likwid_measurement = get_likwid_perfctr(likwid_f)
        """ Use the project_name from the binary, because we
            might encounter dynamically generated projects.
        """
        persist_likwid(run, session, likwid_measurement)
        persist_config(run, session, {
            "cores": str(jobs),
            "likwid.group": group
        })
        rm("-f", likwid_f)
Example #16
 def setup_prepare(self):
     """Unpacks the topology and loads local docker images.
     """
     # Delete old artifacts, if any.
     cmd.rm("-rf", self.test_state.artifacts)
     cmd.mkdir(self.test_state.artifacts)
     print('artifacts dir: %s' % self.test_state.artifacts)
     self._unpack_topo()
     print(cmd.docker('image', 'load', '-i',
                      self.test_state.containers_tar))
     # Define where coredumps will be stored.
     print(
         cmd.docker("run", "--rm", "--privileged", "alpine", "sysctl", "-w",
                    "kernel.core_pattern=/share/coredump"))
Example #17
 def setup_prepare(self):
     """Unpacks loads local docker images and generates the topology.
     """
     # Delete old artifacts, if any.
     cmd.rm("-rf", self.test_state.artifacts)
     cmd.mkdir(self.test_state.artifacts)
     print("artifacts dir: %s" % self.test_state.artifacts)
     for tar in self.test_state.containers_tars:
         print(cmd.docker("image", "load", "-i", tar))
     # Define where coredumps will be stored.
     print(
         cmd.docker("run", "--rm", "--privileged", "alpine", "sysctl", "-w",
                    "kernel.core_pattern=/share/coredump"))
     self._setup_generate()
Example #18
    def handleSamsungStockImage(self, filename):
        logger.debug('Unzipping "%s" from image file "%s"..' %
                     (self.samsungPattern, filename))
        unzip('-o', filename, self.samsungPattern)
        apFilename = self.findFileByPattern(os.listdir('.'),
                                            self.samsungPattern)
        logger.debug('done: %s', apFilename)

        logger.debug('Extracting system.img from tarball')
        tar('xf', apFilename, 'system.img')
        logger.debug('done')

        self.handleSystemImage('system.img')
        rm('-rf', 'system.img')
Example #19
def parse_table(name):
    log('Starting parsing for "{}" table.'.format(name))

    url = 's3://sva.s2-new.opendsp.com/user=man/table={}/{}/'.format(name, s3date)
    log('Using remote path: {}'.format(url))

    directory = '{}/{}'.format(temp, time())
    log('Using local path: {}'.format(directory))

    mkdir('-p', directory)
    log('Directory "{}" has been created.'.format(directory))

    log('Downloading logs.')
    print(s3cmd('sync', url, directory))

    log('Unzipping logs.')
    print(gunzip('-rv', directory))

    all_logs = local.path(directory) // 'man.{}.*.log'.format(name)
    log('Available logs:\n{}'.format('\n'.join(all_logs)))

    # Count occurrences of each value in field 8 (the status column).
    stdout = (cut['-f8', all_logs] | sort | uniq['-c'])()
    log('Status from log:\n{}'.format(stdout))

    errors = finditer(r'(?P<amount>\d+)\s+ERROR:\s+(?P<code>\d+)', stdout)

    log('Parsing lines.')
    for err in errors:
        code = int(err.group('code'))
        amount = int(err.group('amount'))

        allowed = checks.get(code)
        log('Code {} -> Amount {} | Allowed {}'.format(code, amount, allowed))

        if allowed is not None and amount > allowed:
            error = 'Date: {}. File: "man.{}.enr.log". Error Code {}: amount {} exceeded allowed {}.' \
                    ''.format(date.strftime('%Y/%m/%d'), name, code, amount, allowed)
            log(error)
            send_notification(error)
        else:
            log('Everything is ok.')

    log('Removing "{}" directory.'.format(directory))
    rm('-rf', directory)

    log('Done for "{}" table.'.format(name))

    print('\n')
Example #20
 def main(self):
     repo = 'https://github.com/demianw/tract_querier.git'
     with TemporaryDirectory() as tmpdir:
         clone = local.path(tmpdir) / "tract_querier"
         if not self.githash:
             git("clone", "--depth", "1", repo, clone)
         else:
             git("clone", repo, clone)
         clone_hash = git("rev-parse", "--short",
                          "HEAD")[:-1]  # remove trailing \n
         # save 70M of space
         rm('-r', clone / 'doc')
         rm('-r', clone / '.git')
         out = self.prefix / "tract_querier-" + clone_hash
         clone.move(out)
         chmod('-R', 'a-w', out)
Example #21
 def uninstall(self):
     """
     Uninstalls the program.
     """
     from plumbum.cmd import rm  # pylint: disable=E0401
     print("Uninstalling " + self.name)
     with local.cwd(self.install_loc):
         for cmd in self.uninstall_cmds:
             exec_cmd = local[cmd["cmd"]]
             args = cmd["args"] if "args" in cmd else []
             args = [arg.replace("$HOME", local.env["HOME"]) \
                     for arg in args]
             exec_cmd = exec_cmd[args]
             with local.env(**cmd["env"] if "env" in cmd else {}):
                 print(exec_cmd())
     rm("-rf", self.install_loc)
    def handleLineageOsImage(self, filename):
        logger.debug(
            'Unzipping system.transfer.list and system.new.dat from image file "%s"..'
            % filename)
        unzip('-o', filename, 'system.transfer.list', 'system.new.dat')
        logger.debug('done')

        logger.debug('Building system.img via sdat2img..')
        python = local['python']
        sdat2img = python['sdat2img/sdat2img.py', 'system.transfer.list',
                          'system.new.dat', 'system.img']
        sdat2img()
        logger.debug('done')

        self.handleSystemExt4Image('system.img')

        rm('-rf', 'system.transfer.list', 'system.new.dat', 'system.img')
Example #23
  def make_crt(self):
    print("Paste the following CSR to CAcert:")
    (cat < self._getFilename('csr')) & FG
    rm('-f', self._getFilename('crt'))

    print("Enter certificate:")
    crt = ''
    line = ''

    while line != "-----END CERTIFICATE-----":
      line = input()
      crt += line + '\n'

    fn = self._getFilename('crt')
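    # plumbum: feed the pasted certificate to cat as a here-string and
    # redirect its stdout into the crt file.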
    (cat << crt > fn)()

    return fn
Example #24
    def handleSystemExt4Image(self, systemImageFilename):
        logger.debug('Searching for files which match pattern "%s"' %
                     self.broadcomPattern)
        directory = './mounted-image'
        mkdir(directory)
        mount(systemImageFilename, directory)

        firmwares = find(directory, '-iname',
                         self.broadcomPattern).splitlines()

        for firmware in firmwares:
            print('Found firmware: %s (Size: %d)' % (firmware,
                                                     os.path.getsize(firmware)))
            versionCommand = strings[firmware] | tail['-1']
            print(versionCommand())

        if not firmwares:
            print('No firmwares found.')

        umount(directory)
        rm('-rf', directory)
Example #25
def build_server(settings):
    force = strtobool(settings.get('FORCE_CERT_REGEN', 'false'))
    with local.env(**settings):
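        # Bind easy-rsa's pkitool and the system openssl as plumbum commands.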
        pkitool = local[os.path.join(local.env['EASY_RSA'], 'pkitool')]
        openssl = local['openssl']

        if force:
            print('FORCE_CERT_REGEN=true, regenerating {}'.format(
                local.env['KEY_DIR']))
            rm(['-rf', local.env['KEY_DIR']])

        if not local.path(local.env['KEY_DIR']).exists():
            print('KEY_DIR does not exist, creating')
            mkdir.run(['-p', local.env['KEY_DIR']], retcode=0)
            # see if this needs to be separate
            touch(os.path.join(local.env['KEY_DIR'], 'index.txt'), retcode=0)
            with open(os.path.join(local.env['KEY_DIR'], 'serial'),
                      'w') as serial:
                serial.write('01')

        ca_files = glob(os.path.join(local.env['KEY_DIR'], 'ca.*'))
        server_files = glob(os.path.join(local.env['KEY_DIR'], 'server.*'))

        if ca_files:
            print('Root CA exists, skipping')
        else:
            pkitool.run('--initca', retcode=0, stderr=sys.stdout)

        if server_files:
            print('Server cert exists, skipping')
        else:
            pkitool.run(['--server', 'server'], retcode=0, stderr=sys.stdout)

        dh_pem = os.path.join(local.env['KEY_DIR'],
                              'dh' + local.env['KEY_SIZE'] + '.pem')
        if local.path(dh_pem).exists():
            print('DH param exists, skipping')
        else:
            openssl.run(['dhparam', '-out', dh_pem, local.env['KEY_SIZE']],
                        stderr=sys.stdout)
Example #26
    def test_redirection(self):
        from plumbum.cmd import cat, ls, grep, rm

        chain = (ls | grep["\\.py"]) > "tmp.txt"
        chain()

        chain2 = (cat < "tmp.txt") | grep["local"]
        self.assertTrue("test_local.py" in chain2().splitlines())
        rm("tmp.txt")

        chain3 = (cat << "this is the\nworld of helloness and\nspam bar and eggs") | grep["hello"]
        self.assertTrue("world of helloness and" in chain3().splitlines())

        rc, _, err = (grep["-Zq5"] >= "tmp2.txt").run(["-Zq5"], retcode = None)
        self.assertEqual(rc, 2)
        self.assertFalse(err)
        self.assertTrue("Usage" in (cat < "tmp2.txt")())
        rm("tmp2.txt")

        rc, out, _ = (grep["-Zq5"] >= ERROUT).run(["-Zq5"], retcode = None)
        self.assertEqual(rc, 2)
        self.assertTrue("Usage" in out)
Example #27
    def download(self):
        from benchbuild.utils.run import uchroot_no_args
        with local.cwd(self.builddir):
            Wget(self.src_uri, self.src_file)
            uchroot = uchroot_no_args()
            uchroot = uchroot["-E", "-A", "-C", "-r", "/", "-w", path.abspath(
                "."), "--"]

            # Check if we need erlent support for this archive.
            has_erlent = bash[
                "-c", "tar --list -f './{0}' | grep --silent '.erlent'".format(
                    self.src_file)]
            has_erlent = (has_erlent & TF)

            cmd = local["/bin/tar"]["xf"]
            if not has_erlent:
                cmd = uchroot[cmd["./" + path.basename(self.src_file)]]
            else:
                cmd = cmd[self.src_file]

            run(cmd["--exclude=dev/*"])
            rm(self.src_file)
Example #28
def build_server(settings):
    force = strtobool(settings.get('FORCE_CERT_REGEN', 'false'))
    with local.env(**settings):
        pkitool = local[os.path.join(local.env['EASY_RSA'], 'pkitool')]
        openssl = local['openssl']

        if force:
            print "FORCE_CERT_REGEN=true, regenerating {}".format(local.env['KEY_DIR'])
            rm(['-rf', local.env['KEY_DIR']])

        if not local.path(local.env['KEY_DIR']).exists():
            print "KEY_DIR does not exist, creating"
            mkdir.run(['-p', local.env['KEY_DIR']], retcode=0)
            # see if this needs to be separate
            touch(os.path.join(local.env['KEY_DIR'], 'index.txt'), retcode=0)
            with open(os.path.join(local.env['KEY_DIR'], 'serial'),
                      "w") as serial:
                serial.write("01")

        ca_files = glob(os.path.join(local.env['KEY_DIR'], 'ca.*'))
        server_files = glob(os.path.join(local.env['KEY_DIR'], 'server.*'))

        if ca_files:
            print('Root CA exists, skipping')
        else:
            pkitool.run("--initca", retcode=0, stderr=sys.stdout)

        if server_files:
            print('Server cert exists, skipping')
        else:
            pkitool.run(["--server", "server"], retcode=0, stderr=sys.stdout)

        dh_pem = os.path.join(local.env['KEY_DIR'],
                              'dh' + local.env['KEY_SIZE'] + '.pem')
        if local.path(dh_pem).exists():
            print('DH param exists, skipping')
        else:
            openssl.run(["dhparam", "-out", dh_pem, local.env['KEY_SIZE']],
                        stderr=sys.stdout)
Example #30
    def test_redirection(self):
        from plumbum.cmd import cat, ls, grep, rm

        chain = (ls | grep["\\.py"]) > "tmp.txt"
        chain()

        chain2 = (cat < "tmp.txt") | grep["local"]
        assert "test_local.py" in chain2().splitlines()
        rm("tmp.txt")

        chain3 = (cat << "this is the\nworld of helloness and\nspam bar and eggs") | grep["hello"]
        assert "world of helloness and" in chain3().splitlines()

        rc, _, err = (grep["-Zq5"] >= "tmp2.txt").run(["-Zq5"], retcode = None)
        assert rc == 2
        assert not err
        assert "usage" in (cat < "tmp2.txt")().lower()
        rm("tmp2.txt")

        rc, out, _ = (grep["-Zq5"] >= ERROUT).run(["-Zq5"], retcode = None)
        assert rc == 2
        assert "usage" in out.lower()
Example #31
def conf(work_dir,
         vec_path="",
         dest=None,
         use_vec=False,
         use_surrounding_words=True):
    from finntk.wordnet.reader import fiwn_resman

    if dest is not None and not exists(dest):
        makedirs(dest, exist_ok=True)
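        # Symlink the stock supWSD distribution into dest, then drop its
        # bundled resources/ and supconfig.xml, which are regenerated below
        # (retcode=None tolerates rm failing when they are absent).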
        ln("-s", *glob(abspath("systems/supWSD") + "/*"), dest)
        rm("-r", pjoin(dest, "resources"), retcode=None)
        rm(pjoin(dest, "supconfig.xml"), retcode=None)

    if dest is None:
        dest = "systems/supWSD"

    fiwn_path = fiwn_resman.get_res("")
    makedirs(pjoin(dest, "resources/wndictionary"), exist_ok=True)
    for src_fn, dst_fn in [
        ("jwnl-properties.xml", "resources/wndictionary/prop.xml"),
        ("supconfig.xml", "supconfig.xml"),
    ]:
        content = Template(
            open("support/supWSD/{}.tmpl".format(src_fn)).read()).substitute({
                "FIWN_PATH": fiwn_path,
                "WORK_DIR": work_dir,
                "VEC_PATH": vec_path,
                "USE_VEC": "true" if use_vec else "false",
                "USE_SURROUNDING_WORDS": "true" if use_surrounding_words else "false",
            })

        with open("{}/{}".format(dest, dst_fn), "w") as dst_f:
            dst_f.write(content)
Example #32
def block_sites():
	print "works"
	local.cwd.chdir(home)
	local.cwd.chdir("Dropbox/Docs")
	blocked = []
	with open('blocked_sites.csv', newline='') as csvfile:
		reader = csv.reader(csvfile, delimiter=',', quotechar='|')
		for row in reader:
			for r in row:
				blocked.append(r)
	print(blocked)
	local.cwd.chdir(home)
	local.cwd.chdir("../../private/etc/")
	rm("hosts")
	touch("hosts")
	with open("hosts", "r+") as hosts:
		hosts.write(hosts_default)
		# hosts.write("\n")
		for item in blocked:
			hosts.write("127.0.0.1 " + item + "\n")
			hosts.write("127.0.0.1 www." + item + "\n")
	killall("-HUP", "mDNSResponder")
Example #33
def load(data, udf, data_dir, overwrite):
    """Load Ibis test data and build/upload UDFs"""
    print(str(ENV))

    con = make_ibis_client()

    # validate our environment before performing possibly expensive operations
    if not can_write_to_hdfs(con):
        raise IbisError('Failed to write to HDFS; check your settings')
    if udf and not can_build_udfs():
        raise IbisError('Build environment does not support building UDFs')

    # load the data files
    if data:
        tmp_dir = tempfile.mkdtemp(prefix='__ibis_tmp_')
        try:
            load_impala_data(con, str(data_dir), overwrite)
        finally:
            rm('-rf', tmp_dir)
    else:
        print('Skipping Ibis test data load (--no-data)')

    # build and upload the UDFs
    if udf:
        already_loaded = is_udf_loaded(con)
        print('Attempting to build and load test UDFs')
        if already_loaded and not overwrite:
            print('UDFs already loaded and not overwriting; moving on')
        else:
            if already_loaded:
                print('UDFs already loaded; attempting to overwrite')
            print('Building UDFs')
            build_udfs()
            print('Uploading UDFs')
            upload_udfs(con)
    else:
        print('Skipping UDF build/load (--no-udf)')
Example #34
    def main(self, srcDir, dstBucket, dstDir):
        # protect to prevent deleting of all backups
        if self.keepCount < 2:
            self.keepCount = 2

        s3DirPath = "s3://" + dstBucket + "/" + dstDir
        if self.verbose:
            print("Sending backups from", srcDir, "to", s3DirPath, flush = True)

        # check if bucket exists and create if not
        lines = (s3["ls"] | grep[dstBucket])().splitlines()
        if not lines:
            if self.verbose:
                print("Bucket doesn't exist. Creating...")
            (s3["mb", "s3://" + dstBucket] > stdout)()

        # create dir for processed backup files (if not exists)
        processedDir = join(srcDir, "_processed")
        mkdir("-p", processedDir)

        # process new files
        for f in listdir(srcDir):
            fullPath = join(srcDir, f)
            if isfile(fullPath) and getsize(fullPath) > 0:
                datePrefix = time.strftime("%Y-%m-%d-", time.localtime(getctime(fullPath)))
                processedFileName = datePrefix + f
                mv(fullPath, join(processedDir, processedFileName))

        # remove old backups, keep only requested count (--keep)
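        # ls -c sorts by status-change time, newest first, so the slice keeps
        # only entries beyond the newest keepCount backups.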
        for f in ls("-c", processedDir).splitlines()[self.keepCount:]:
            if self.verbose:
                print("Removing old backup", f, flush = True)
            rm(join(processedDir, f))

        # sync to s3
        (s3["sync", processedDir, s3DirPath, "--storage-class", "STANDARD_IA", "--delete"] > stdout)()
Example #35
@contextmanager  # needs: from contextlib import contextmanager
def refresh_dir(dir, cleanup=True, cleanup_anyways=False):
    rm('-rf', dir)
    mkdir('-p', dir)
    with local.cwd(dir):
        try:
            yield
            if cleanup:
                rm('-rf', dir)
        except:
            if cleanup_anyways:
                rm('-rf', dir)
            raise
Example #36
 def clean(self):
     """ Clean the project build directory. """
     if path.exists(self.builddir) and listdir(self.builddir) == []:
         rmdir(self.builddir)
     elif path.exists(self.builddir) and listdir(self.builddir) != []:
         rm("-rf", self.builddir)
Example #37
 def tearDownClass(cls):
     if os.path.exists(cls.tmp_dir):
         rm("-r", cls.tmp_dir)
Example #38
from blessings import Terminal
from plumbum.cmd import rm
import os
import glob
import csv
import re

# Setup the terminal object for blessings
term = Terminal()

# Get setup
data_path = '../data'
database_file_name = 'piction'
print(term.bold('Processing data for the Digital Scrapbook'))

# Cleanup from last time
print(rm('-rf', database_file_name + '.csv'))

# Get all the sheets into CSVs
print(term.yellow('Extracting the CSVs'))

sheets = [
    '1-Anza', '2-Cleve', '13-SDCst', '19-L.A.', '7-Moj', '12-Bern', '3-Color',
    '4-Imp', '5-Josh', '6-Torr', '8-NorthSD', '9-OC', '10-Palo', '11-Salton',
    '14-SDIn', '15-Gabr', '16-Jac', '17-Mon', '18-SoSD', '20-River',
    '21-Channel'
]

# Write a CSV for each sheet in the Excel files and save the filepaths
# in an array for merging
# We have to save these to files since it appears that csvfix only takes
# file input.
Example #39
# Reconstructed preamble (assumed from the truncated snippet's usage below):
import io
from glob import glob

import lz4.frame
import pandas as pd
import pyarrow as pa
import pyarrow.parquet as pq
from numpy import dtype
from plumbum.cmd import rm

dtypes = {
    'Event ID': dtype('int64'),
    'Event Date': dtype('O'),
    'Event Time': dtype('O'),
    'Event Millis': dtype('int64'),
    'Order ID': dtype('int64'),
    'Execution Options': dtype('O'),
    'Event Type': dtype('O'),
    'Symbol': dtype('O'),
    'Order Type': dtype('O'),
    'Side': dtype('O'),
    'Limit Price (USD)': dtype('float64'),
    'Original Quantity (BTC)': dtype('float64'),
    'Gross Notional Value (USD)': dtype('float64'),
    'Fill Price (USD)': dtype('float64'),
    'Fill Quantity (BTC)': dtype('float64'),
    'Total Exec Quantity (BTC)': dtype('float64'),
    'Remaining Quantity (BTC)': dtype('float64'),
    'Avg Price (USD)': dtype('float64')
}

for x in sorted(glob('cboe/parquet_BTCUSD/BTCUSD*.csv.lz4')):
    print(x)
    #df = pandas.read_csv(io.TextIOWrapper(lz4.frame.open(x)), dtype=dtypes) # low_memory=False,
    #df = pd.read_csv('BTCUSD_order_book_20171021.csv', low_memory=False)
    df = pd.read_csv(io.TextIOWrapper(lz4.frame.open(x)), low_memory=False)
    df = df.astype(dtype=dtypes)
    table = pa.Table.from_pandas(df)
    outfile = x.replace('.csv.lz4', '.parquet')
    pq.write_table(table, outfile, compression='snappy')
    rm(x)
Example #40
import plumbum
from plumbum import local
import plumbum.cmd
from plumbum.cmd import rm

python = local["python"]

rm("db.sqlite3")
rm("-rf", "e_riding_app/migrations")
python("manage.py", "makemigrations", "e_riding_app")
python("manage.py", "migrate")

import os
import django

os.environ.setdefault("DJANGO_SETTINGS_MODULE", "e_riding.settings")
django.setup()


from django.contrib.auth import get_user_model

User = get_user_model()

u = User(username='******')
u.set_password('admin')
u.is_superuser = True
u.is_staff = True
u.save()

python("manage.py", *"loadtestdata e_riding_app.CustomUser:10 "
                     "e_riding_app.VetCard:10 "
Example #42
 def _setup_artifacts(self):
     # Delete old artifacts, if any.
     cmd.rm("-rf", self.artifacts)
     cmd.mkdir(self.artifacts)
     print("artifacts dir: %s" % self.artifacts)
Example #43
# change working directory
local.cwd.chdir(srcDir)

# create dir for archived backup files (if not exists)
archiveDir = "_archived"
mkdir("-p", archiveDir)

for f in listdir(srcDir):
    # just in case
    if f == "" or f == '/': continue
    if isdir(f) and f != archiveDir and f != "_processed":
        # define archive file name
        datePrefix = time.strftime("%Y-%m-%d-", time.localtime(getctime(f)))
        archivedFileName = datePrefix + 'upsource-backup.tar.gz'
        archivePath = join(archiveDir, archivedFileName)

        # try to archive
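        # (cmd & TF runs the command and returns True/False instead of raising)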
        tarOk = tar["zcf", archivePath, f] & TF
        # check if archiving completed ok
        if tarOk:
            testOk = gunzip["-t", archivePath] & TF

        # remove the processed directory if everything checked out
        if tarOk and testOk:
            print("archive ok, removing directory...", f)
            rm("-rf", f)
        else:
            print("ERROR during archiving upsource backup directory:",
                  f,
                  file=stderr)
Example #44
def get_tar_gz_str(files, out='output.tar.gz'):
    rm('-rf', out)
    local['tar']['cfz', out](*files)  # tar czf <out> <files...>
    with open(out, 'rb') as f:  # binary mode: the archive is not text
        result = f.read()
    return result
Example #45
from blessings import Terminal
from plumbum.cmd import rm
import os
import glob
import csv
import re

# Setup the terminal object for blessings
term = Terminal()

# Get setup
data_path = "../data"
database_file_name = "piction"
print term.bold("Processing data for the Digital Scrapbook")

# Cleanup from last time
print rm("-rf", database_file_name + ".csv")

# Get all the sheets into CSVs
print term.yellow("Extracting the CSVs")

sheets = [
    "1-Anza",
    "2-Cleve",
    "13-SDCst",
    "19-L.A.",
    "7-Moj",
    "12-Bern",
    "3-Color",
    "4-Imp",
    "5-Josh",
    "6-Torr",
Example #46
def get_tar_gz_file(files, out='output.tar.gz'):
    rm('-rf', out)
    local['tar']['cfz', out](*files)  # tar czf <out> <files...>
    with open(out, 'rb') as f:  # binary mode: the archive is not text
        yield f
Example #47
#!/usr/bin/env python3

from glob import glob
from plumbum.cmd import gunzip, lz4, rm

for x in glob('cboe/lz4/*.csv.gz'):
  print(x)
  gunzip[x]()
  csvfile = x.replace('.csv.gz', '.csv')
  outfile = x.replace('.csv.gz', '.csv.lz4')
  (lz4['-9', csvfile] > outfile)()
  rm(csvfile)
Example #48
            return

        # and generate
        fl_pl = os.path.join(self.pc["fg_path"], "flamegraph.pl")
        svg = os.path.join(self.get_res_dir(),
                           "{0}-{1}.svg".format(self.query, runnum))
        fl = local[fl_pl][folded] > svg
        self.log("Running {}".format(fl))
        retcode, out, fl_stderr = fl.run(retcode=None)
        (echo[fl_stderr] >> fl_log)()
        rm(folded)
        if retcode != 0:
            self.log("flamegraph failed, check out fg_log.txt")


if __name__ == "__main__":
    with open("tmp_conf.json") as f:
        conf = json.load(f)
    rm("tmp_conf.json")
    pc = PgtpchConf(conf)

    if pc["runner"] == "standard":
        runner = StandardRunner(pc)
    elif pc["runner"] == "perfer":
        runner = PerfRunner(pc)
    else:
        print("Wrong runner: {}".format(pc["runner"]))
        sys.exit(1)

    runner.run()