Example #1
def clean():
    proj()
    print(". cleaning up build and dist")
    try:
        sh.rm("-r", sh.glob("dist/*"), sh.glob("build/*"))
    except sh.ErrorReturnCode:
        print(".. already clean")
Example #2
# needs: import contextlib, os, tempfile, sh
@contextlib.contextmanager
def restore_file(filename):
    ''' Context manager restores a file to its previous state.

        If the file exists on entry, it is backed up and restored.

        If the file does not exist on entry and does exist on exit,
        it is deleted.
    '''

    exists = os.path.exists(filename)

    if exists:
        # we just want the pathname, not the handle
        # tiny chance of race if someone else gets the temp filename
        handle, backup = tempfile.mkstemp()
        os.close(handle)
        sh.cp('--archive', filename, backup)

    try:
        yield

    finally:
        if os.path.exists(filename):
            sh.rm(filename)
        if exists:
            # restore to original state
            sh.mv(backup, filename)
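Since restore_file is a generator wrapped in contextlib.contextmanager, it is driven by a with statement. A hypothetical usage sketch (some_config.ini is an invented filename):

with restore_file('some_config.ini'):
    # scribble freely; the original file comes back on exit
    with open('some_config.ini', 'w') as f:
        f.write('[temporary]\n')
# here some_config.ini is restored (or removed, if it never existed)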
Example #3
def trial(num_bins=1, size_bin=500, after_rm=None, max_delta=0.05):
    import glob

    from sh import imgbase, rm, ls

    def img_free():
        return float(imgbase("layout", "--free-space"))

    imgbase = imgbase.bake("--debug")

    a = img_free()

    for B in range(num_bins):
        dd(B, size_bin)
    print("Files which were created")
    print(ls("-shal", *glob.glob("/var/tmp/*.bin")))
    b = img_free()

    print("Files are getting removed")
    rm("-f", *glob.glob("/var/tmp/*.bin"))
    after_rm()
    c = img_free()

    ratio = a / c
    print(a, b, c, ratio)
    delta = 1 - ratio
    assert delta < max_delta, \
        "Delta %s is larger than %s" % (delta, max_delta)
Example #4
def build_opencv():
    sh.pip.install("numpy")
    clone_if_not_exists("opencv", "https://github.com/PolarNick239/opencv.git", branch="stable_3.0.0)
    clone_if_not_exists("opencv_contrib", "https://github.com/PolarNick239/opencv_contrib.git", branch="stable_3.0.0")
    sh.rm("-rf", "build")
    sh.mkdir("build")
    sh.cd("build")
    python_path = pathlib.Path(sh.pyenv.which("python").stdout.decode()).parent.parent
    version = "{}.{}".format(sys.version_info.major, sys.version_info.minor)
    sh.cmake(
        "..",
        "-DCMAKE_BUILD_TYPE=RELEASE",
        "-DCMAKE_INSTALL_PREFIX={}/usr/local".format(python_path),
        "-DWITH_CUDA=OFF",
        "-DWITH_FFMPEG=OFF",
        "-DINSTALL_C_EXAMPLES=OFF",
        "-DBUILD_opencv_legacy=OFF",
        "-DBUILD_NEW_PYTHON_SUPPORT=ON",
        "-DBUILD_opencv_python3=ON",
        "-DOPENCV_EXTRA_MODULES_PATH=~/opencv_contrib-3.4.1/modules",
        "-DBUILD_EXAMPLES=ON",
        "-DPYTHON_EXECUTABLE={}/bin/python".format(python_path),
        "-DPYTHON3_LIBRARY={}/lib/libpython{}m.so".format(python_path, version),
        "-DPYTHON3_PACKAGES_PATH={}/lib/python{}/site-packages/".format(python_path, version),
        "-DPYTHON3_NUMPY_INCLUDE_DIRS={}/lib/python{}/site-packages/numpy/core/include".format(python_path, version),
        "-DPYTHON_INCLUDE_DIR={}/include/python{}m".format(python_path, version),
        _out=sys.stdout,
    )
    sh.make("-j4", _out=sys.stdout)
    sh.make.install(_out=sys.stdout)
Example #5
File: tmp.py Project: drcloud/arx
# needs: from contextlib import contextmanager
@contextmanager
def tmpdir():
    path = mkdtemp(prefix='arx.')
    try:
        log.debug('Temporary directory is: %s', path)
        yield path
    finally:
        rm('-rf', path)
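A hypothetical usage sketch: the directory only exists inside the block and is force-removed afterwards, even if the body raises.

import os

with tmpdir() as path:
    with open(os.path.join(path, 'scratch.txt'), 'w') as f:
        f.write('transient data\n')
# the 'arx.' temporary directory is already gone here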
Example #6
    def _generate(self, **kwargs):
        uni_key = 'University'
        uni_ext = '.nt'
        get_uni_id = lambda uni_file: int(uni_file.replace(uni_key, '').replace(uni_ext, '').strip())

        universities_rdf = {
            get_uni_id(f): os.path.join(self.output_path, f)
            for f in os.listdir(self.output_path)
            if f.startswith(uni_key)
        }

        pool = Pool(processes=self.num_workers)
        for uni_id, uni_rdf in universities_rdf.items():
            pool.apply_async(self.distributor(uni_id, uni_rdf),
                             kwds=self._distributor_kwargs(uni_id, uni_rdf))
        pool.close()
        pool.join()

        # concat files
        site_files = lambda site_id: re.findall(r'site_{0}_uni_[0-9]+\.nt'.format(site_id),
                                                ' '.join(os.listdir(self._output_path)))
        for site in range(self.num_sites):
            site_parts = site_files(site)
            logger.info('[site = %s] site file parts = %s', site, site_parts)

            with io.open(self.site_path(site), 'w+') as SITE:
                for spart in site_parts:
                    spart_file = os.path.join(self._output_path, spart)
                    with io.open(spart_file, 'r+') as SPART:
                        SITE.write(SPART.read())
                    sh.rm(spart_file)
Example #7
def get_lambda_code(func_name, retries=1, cache_time=AWS_LAMBDA_CODE_CACHE_TIMEOUT):
    if MOCK_OBJ:
        return ''
    cmd = 'aws lambda get-function --function-name %s' % func_name
    out = run(cmd, cache_duration_secs=cache_time)
    out = json.loads(out)
    loc = out['Code']['Location']
    hash = md5(loc)
    # print("Location %s %s" % (hash, func_name))
    folder = TMP_DOWNLOAD_FILE_PATTERN.replace('*', '%s') % hash
    filename = 'archive.zip'
    archive = '%s/%s' % (folder, filename)
    try:
        run('mkdir -p %s' % folder)
        if not os.path.isfile(archive):
            print("Downloading %s" % archive)
            run("wget -O %s '%s'" % (archive, loc))
        if len(os.listdir(folder)) <= 1:
            print("Unzipping %s/%s" % (folder, filename))
            run("cd %s && unzip -o %s" % (folder, filename))
    except Exception as e:
        print("WARN: %s" % e)
        sh.rm('-f', archive)
        if retries > 0:
            return get_lambda_code(func_name, retries=retries - 1, cache_time=1)
        else:
            print("WARNING: Unable to retrieve lambda code: %s" % e)
Example #8
def packaging_lib(libmace_output_dir, project_name):
    print("* Package libs for %s" % project_name)
    tar_package_name = "libmace_%s.tar.gz" % project_name
    project_dir = "%s/%s" % (libmace_output_dir, project_name)
    tar_package_path = "%s/%s" % (project_dir, tar_package_name)
    if os.path.exists(tar_package_path):
        sh.rm("-rf", tar_package_path)

    print("Start packaging '%s' libs into %s" % (project_name,
                                                 tar_package_path))
    which_sys = platform.system()
    if which_sys == "Linux":
        sh.tar(
            "cvzf",
            "%s" % tar_package_path,
            glob.glob("%s/*" % project_dir),
            "--exclude",
            "%s/_tmp" % project_dir,
            _fg=True)
    elif which_sys == "Darwin":
        sh.tar(
            "--exclude",
            "%s/_tmp" % project_dir,
            "-cvzf",
            "%s" % tar_package_path,
            glob.glob("%s/*" % project_dir),
            _fg=True)
    print("Packaging Done!\n")
    return tar_package_path
Example #9
def test_console_script(cli):
    TEST_COMBINATIONS = (
        # quote_mode, var_name, var_value, expected_result
        ("always", "HELLO", "WORLD", 'HELLO="WORLD"\n'),
        ("never", "HELLO", "WORLD", 'HELLO=WORLD\n'),
        ("auto", "HELLO", "WORLD", 'HELLO=WORLD\n'),
        ("auto", "HELLO", "HELLO WORLD", 'HELLO="HELLO WORLD"\n'),
    )
    with cli.isolated_filesystem():
        for quote_mode, variable, value, expected_result in TEST_COMBINATIONS:
            sh.touch(dotenv_path)
            sh.dotenv('-f', dotenv_path, '-q', quote_mode, 'set', variable, value)
            output = sh.cat(dotenv_path)
            assert output == expected_result
            sh.rm(dotenv_path)

    # should fail for a non-existent file
    result = cli.invoke(dotenv.cli.set, ['my_key', 'my_value'])
    assert result.exit_code != 0

    # should fail for a non-existent file
    result = cli.invoke(dotenv.cli.get, ['my_key'])
    assert result.exit_code != 0

    # should fail for a non-existent file
    result = cli.invoke(dotenv.cli.list, [])
    assert result.exit_code != 0
Example #10
def add_sudoers_option(line):
    """
    Adds a option to /etc/sudoers file in safe manner.

    Generate a bash script which will be invoke itself as visudo EDITOR.

    http://stackoverflow.com/a/3706774/315168
    """

    from sh import chmod, rm

    with sudo:

        if not has_line("/etc/sudoers", line):

            print "Updating /etc/sudoers to enable %s" % line

            tmp = tempfile.NamedTemporaryFile(mode="wt", delete=False)
            # Generate visudo EDITOR which adds the line
            # https://www.ibm.com/developerworks/mydeveloperworks/blogs/brian/entry/edit_sudoers_file_from_a_script4?lang=en
            script = ADD_LINE_VISUDO.format(line=line)
            tmp.write(script)
            tmp.close()
            chmod("u+x", tmp.name)
            Command(tmp.name)()
            rm(tmp.name)
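ADD_LINE_VISUDO itself is not shown in the snippet. Judging from the linked Stack Overflow answer, it is a shell script template that re-invokes visudo with itself as EDITOR; a plausible reconstruction (an assumption, not the project's actual template):

# Assumption: when run without arguments the script re-runs visudo with
# itself as EDITOR; visudo then calls it back with the sudoers temp file
# as $1, where the line gets appended.
ADD_LINE_VISUDO = """#!/bin/sh
if [ -z "$1" ]; then
    EDITOR="$0" visudo
else
    echo '{line}' >> "$1"
fi
"""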
Example #11
    def before_job(self, provider, ctx={}, *args, **kwargs):
        log_dir = provider.log_dir
        self.ensure_log_dir(log_dir)
        log_file = provider.log_file.format(
            date=datetime.now().strftime("%Y-%m-%d_%H-%M"))
        ctx['log_file'] = log_file
        if log_file == "/dev/null":
            return

        log_link = os.path.join(log_dir, "latest")
        ctx['log_link'] = log_link

        lfiles = [os.path.join(log_dir, lfile)
                  for lfile in os.listdir(log_dir)
                  if lfile.startswith(provider.name)]

        lfiles_set = set(lfiles)
        # sort to get the newest 10 files
        lfiles_ts = sorted(
            [(os.path.getmtime(lfile), lfile) for lfile in lfiles],
            key=lambda x: x[0],
            reverse=True)
        lfiles_keep = set([x[1] for x in lfiles_ts[:self.limit]])
        lfiles_rm = lfiles_set - lfiles_keep
        # remove old files
        for lfile in lfiles_rm:
            try:
                sh.rm(lfile)
            except Exception:
                # ignore failures when pruning old logs
                pass

        # create a soft link
        self.create_link(log_link, log_file)
Example #12
    def update_cache(self):
        if not self.test_cache():
            rm(self.cache_dir, '-rf')
            self.cache_dir = mkdtemp()
            self.cache_uuid = uuid4()
            mkdir(os.path.join(self.cache_dir, 'repodata'))

            index_file_url = '/'.join([self.repo_url, self.index_file])
            index_file_path = os.path.join(self.cache_dir, self.index_file)

            try:
                print("Downloading index file '{0}' --> '{1}' ...".format(
                    index_file_url, index_file_path
                ))
                wget(index_file_url, '-O', index_file_path)
            except Exception:
                self.broken = True
                return

            try:
                xmlroot = etree.parse(index_file_path).getroot()
                xmlns = xmlroot.nsmap[None]
                for item in xmlroot.findall("{{{0}}}data".format(xmlns)):
                    for subitem in item.findall("{{{0}}}location".format(xmlns)):
                        location = subitem.get('href')
                        url = '/'.join([self.repo_url, location])
                        path = '/'.join([self.cache_dir, location])
                        print("Downloading file '{0}' --> '{1}' ...".format(
                            url, path
                        ))
                        wget(url, '-O', path)
            except Exception:
                self.broken = True
Example #13
    def post_syslog(self, message, response):
        output = status.tar_syslog_files(
            "/run/shm/syslog-%s.tar.gz" %
            (datetime.datetime.now().strftime("%Y%m%d%H%M")))
        headers = message.data.get("headers", {})
        # use a context manager so the archive handle is closed after upload
        with open(output, "rb") as payload:
            r = requests.post(
                message.data["url"],
                files={output: payload},
                headers=headers,
                verify=False
            )

        if r.status_code != requests.codes.ok:
            return response(
                code=r.status_code,
                data={"message": "Can't upload config."}
            )

        sh.rm("-rf", sh.glob("/run/shm/syslog-*.tar.gz"))
        resp = r.json()
        if "url" not in resp:
            return response(
                code=500, data={"message": "Can't get file link."})

        return response(data={"url": resp["url"]})
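One subtlety: in requests, the key of the files dict becomes the multipart field name, so this uploads the archive under its full path. A more conventional spelling would pin the field name (a sketch; "file" is an assumed name, and the receiving service may rely on the original path-shaped field):

with open(output, "rb") as payload:
    # "file" is a hypothetical field name, not what the snippet's server expects
    r = requests.post(message.data["url"],
                      files={"file": ("syslog.tar.gz", payload)},
                      headers=headers, verify=False)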
Example #14
def favicon():
    """generate the favicon... ugly"""
    proj()
    print(". generating favicons...")
    sizes = [16, 32, 64, 128]

    tmp_file = lambda size: "/tmp/favicon-%s.png" % size

    for size in sizes:
        print("... %sx%s" % (size, size))
        sh.convert(
            "design/logo.svg",
            "-resize",
            "%sx%s" % (size, size),
            tmp_file(size))

    print(".. generating bundle")

    sh.convert(
        *[tmp_file(size) for size in sizes] + [
            "-colors", 256,
            "static/img/favicon.ico"
        ]
    )

    print(".. cleaning up")
    sh.rm(sh.glob("/tmp/favicon-*.png"))
Example #15
def xxd_diff(old, new):
    '''
    just for fun
    '''

    # xxd -p for pure hexdump
    # -p must pass in before file name

    shell = check_shell()

    # support : bash, zsh
    # not support : dash
    cmd = 'diff <(xxd -p {}) <(xxd -p {})'.format(old, new)

    if shell['bash']:
        return sh.bash('-c', cmd, _ok_code=[0, 1])
    elif shell['zsh']:
        return sh.zsh('-c', cmd, _ok_code=[0, 1])
    else:
        tmp_old = '/var/tmp/old_hex'
        tmp_new = '/var/tmp/new_hex'
        sh.xxd('-p', old, _out=tmp_old)
        sh.xxd('-p', new, _out=tmp_new)
        # diff the hexdumps, not the original files
        patch = sh.diff(tmp_old, tmp_new, _iter=True, _ok_code=[0, 1])
        sh.rm('-f', tmp_old)
        sh.rm('-f', tmp_new)
        return patch
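diff exits 1 when its inputs differ, which sh would normally raise as ErrorReturnCode; _ok_code=[0, 1] whitelists that status. A tiny illustration:

import sh

# 0 (same), 1 (different) and 2 (trouble) are diff's exit codes; accepting
# 1 keeps a normal "files differ" result from raising ErrorReturnCode.
out = sh.diff('/etc/hostname', '/etc/hostname', _ok_code=[0, 1])
print(out.exit_code)   # 0 here; 1 would also pass without raising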
Example #16
def pull_file_from_device(serial_num, file_path, file_name, output_dir):
    if not os.path.exists(output_dir):
        sh.mkdir("-p", output_dir)
    output_path = "%s/%s" % (output_dir, file_path)
    if os.path.exists(output_path):
        sh.rm('-rf', output_path)
    adb_pull(file_path + '/' + file_name, output_dir, serial_num)
Example #17
def make_random_fast():
    ''' Link /dev/random to /dev/urandom.

        See
            Myths about /dev/urandom
                http://www.2uo.de/myths-about-urandom/
            Hacker news discussion where cperciva does not disagree with "Myths about /dev/urandom"
                https://news.ycombinator.com/item?id=10149019

        The risk of using urandom is that it can be deterministic.
        If you know the seed, you may know all of urandom's output forever.

        That is why we prefer to add entropy at runtime, using e.g. haveged.
        But does haveged affect urandom? The urandom seed may be set just
        once and saved, or set at boot before haveged runs.
    '''
    
    # delete /dev/random
    if os.path.exists('/dev/random'):
        if os.path.isfile('/dev/random') or os.path.islink('/dev/random'):
            os.remove('/dev/random')
        else:
            try:
                sh.umount('/dev/random')
            except sh.ErrorReturnCode:
                sh.rm('--force', '--recursive', '/dev/random')

    # "rngd -r /dev/urandom" should only be used during testing, if ever
    # if not is_program_running('rngd'):
    #     sh.rngd('-r', '/dev/urandom')

    sh.ln('--symbolic', '/dev/urandom', '/dev/random')
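A quick sanity check after the relink might look like this (a sketch; it assumes the code above ran with root privileges):

import os

assert os.path.islink('/dev/random')
assert os.path.realpath('/dev/random') == '/dev/urandom'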
Example #18
def clean_directory(directory):
    """Remove all files in a directory"""

    for filename in os.listdir(directory):
        f = os.path.join(directory, filename)
        if os.path.isfile(f):
            sh.rm(f)
Example #19
def initialize():
    # noinspection PyUnresolvedReferences
    from sh import wget, tar, rm, shasum
    if not os.path.exists(prefix):
        os.makedirs(prefix)

    if (not os.path.exists(dirs['inputs'])) or (not os.path.exists(dirs['intermediates'])):
        try:
            if not os.path.exists(prefix):
                logger.info("Creating {DIR}".format(DIR=prefix))
                os.makedirs(prefix)
            logger.info("Downloading data from {URL} to {DIR}".format(URL=data_url, DIR=prefix))
            tar(wget(data_url, "-qO-", _piped=True), "xz", _cwd=prefix)
            logger.info("Checking checksums of downloaded files")
            for line in shasum("-c", _cwd=prefix, _in=checksums, _iter=True):
                logger.info(line)
        except Exception as e:
            logger.info("Error: {}".format(e.message))
            logger.info("Deleting {DIR}".format(DIR=dirs['inputs']))
            rm(dirs['inputs'], '-rf')
            logger.info("Deleting {DIR}".format(DIR=dirs['intermediates']))
            rm(dirs['intermediates'], '-rf')
            raise

    # make sure all those directories exist
    for d in (dirs['outputs'], dirs['plots']):
        if not os.path.exists(d):
            logger.info("Creating {DIR}".format(DIR=d))
            os.makedirs(d)
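The tar(wget(...), "xz", ...) call above uses sh's piping support: passing one command as the first argument of another feeds the inner command's stdout into the outer one's stdin, and _piped=True streams the data instead of buffering it all in memory. A self-contained illustration with everyday tools:

import sh

# stream `ls -1 /tmp` into `wc -l`, the way the snippet streams wget into tar
line_count = sh.wc(sh.ls('-1', '/tmp', _piped=True), '-l')
print(str(line_count).strip())   # number of entries in /tmp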
Example #20
File: mv.py Project: yunify/qsctl
def step_impl(context):
    for row in context.input:
        assert_that(bucket.head_object(row["name"]).status_code
                    ).is_equal_to(404)
        assert_that(os.path.isfile("tmp/" + row["name"])).is_equal_to(True)

    sh.rm("-rf", "tmp").wait()
Example #21
def copy_assets():
    """copy assets for static serving"""
    proj()

    print(". copying assets ...")

    copy_patterns = {
        "dist": ["./static/lib/jquery-1.8.3.min.js"] +
        sh.glob("./static/config/*.json") +
        sh.glob("./static/fragile-min.*"),

        "dist/font": sh.glob("./static/lib/awesome/font/*"),
        "dist/svg": sh.glob("./static/svg/*.svg"),
        "dist/img": sh.glob("./static/img/*.*") or [],
        
        "dist/docs/assets": sh.glob("./docs/assets/*.*") or [],
    }

    for dst, copy_files in copy_patterns.items():
        if not os.path.exists(dst):
            sh.mkdir("-p", dst)

        for c_file in copy_files:
            print "... copying", c_file, dst
            sh.cp("-r", c_file, dst)

    wa_cache = "./dist/.webassets-cache"

    if os.path.exists(wa_cache):
        sh.rm("-r", wa_cache)
Example #22
    def tree_construction(self, root=None, sccs=False):
        threads = 16
        print("build a tree")
        if os.path.exists(self.base + "RAxML/"):
            sh.rm("-r", self.base + "RAxML/")
        os.makedirs(self.base + "RAxML/")

        if self.seq_type == "proteins" :
            model = "PROTGAMMALG"
        else:
            model = "GTRGAMMA"

        alignment = self.base + "_scc_cat_align.fasta" if sccs else self.base + "_cat_align.fasta"
        
        sh.raxmlHPC_PTHREADS_AVX("-w", self.base + "RAxML/", "-T", threads-2, "-m", model, "-p", self.seed, "-#", 20, "-s", alignment, "-n", "T13", "-o", root) 
        print "boostrap dat tree"
        sh.raxmlHPC_PTHREADS_AVX("-w", self.base + "RAxML/", "-T", threads-2, "-m", model, "-p", self.seed, "-b", self.seed, "-#", 100, "-s", alignment, "-n", "T14", "-o", root)
        print "combine"
        sh.raxmlHPC_AVX("-m", "GTRCAT", "-w", self.base + "RAxML/", "-p", self.seed, "-f", "b", "-t", self.base + "RAxML/"+"RAxML_bestTree.T13", "-z",self.base + "RAxML/"+ "RAxML_bootstrap.T14", "-n", "T15", "-o", root)
        print "clean up"
        if os.path.exists(self.base + "_branches_labeled.tree"):
            os.remove(self.base + "_branches_labeled.tree")
            os.remove(self.base + "_nodes_labeled.tree")
        sh.ln("-s",  self.base + "RAxML/RAxML_bipartitionsBranchLabels.T15", self.base +"_branches_labeled.tree")
        sh.ln("-s",  self.base + "RAxML/RAxML_bipartitions.T15", self.scg_tree)
Example #23
def download_package(destination, product, version, compiler):
  remove_existing_package(destination, product, version)

  label = get_release_label()
  file_name = "{0}-{1}-{2}-{3}.tar.gz".format(product, version, compiler, label)
  url_path="/{0}/{1}-{2}/{0}-{1}-{2}-{3}.tar.gz".format(product, version, compiler, label)
  download_path = HOST + url_path

  print "URL {0}".format(download_path)
  print "Downloading {0} to {1}".format(file_name, destination)
  # --no-clobber avoids downloading the file if a file with the name already exists
  sh.wget(download_path, directory_prefix=destination, no_clobber=True)
  print "Extracting {0}".format(file_name)
  sh.tar(z=True, x=True, f=os.path.join(destination, file_name), directory=destination)
  sh.rm(os.path.join(destination, file_name))

  if product == "kudu":
    # The Kudu tarball is actually a renamed parcel. Rename the contents to match the
    # naming convention.
    kudu_dirs = glob.glob("{0}/KUDU*{1}*".format(destination, version))
    if not kudu_dirs:
      raise Exception("Could not find contents of Kudu tarball")
    if len(kudu_dirs) > 1:
      raise Exception("Found too many Kudu folders: %s" % (kudu_dirs, ))
    new_dir = "{0}/{1}-{2}".format(destination, product, version)
    if os.path.exists(new_dir):
      shutil.rmtree(new_dir)
    os.rename(kudu_dirs[0], new_dir)

  write_version_file(destination, product, version, compiler, label)
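Note how sh renders keyword arguments as long options here: directory_prefix=destination becomes --directory-prefix=<destination> and no_clobber=True becomes a bare --no-clobber flag (underscores map to dashes). The wget call above is equivalent to:

sh.wget(download_path, '--directory-prefix=%s' % destination, '--no-clobber')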
Example #24
    def close_stream(self):
        """ Close the device and remove temporary files.
        """
        if self.source_mount is not None:
            sh.sync()
            sh.umount(self.source_mount, self.destination_mount)
            sh.rm(self.mount_folder, '-rf')
Example #25
    def setup(self, portno=8000):

        self.load_port()
        # print self.port_used
        some_no = portno
        conf_file = self.real_proj_d + '.conf'
        while True:
            if some_no in self.port_used:
                some_no += 1
            else:
                print('the port allocated is:', some_no)
                break

        conf = open(conf_file, 'w')
        conf.write('server {\n')
        conf.write('    listen' + ' ' + str(some_no) + ';\n')
        conf.write('    access_log /var/log/nginx/access.log;\n')
        conf.write('    error_log /var/log/nginx/error.log;\n')
        conf.write('    location / {\n')
        conf.write('        uwsgi_pass unix:' + self.load_sock() + ';\n')
        conf.write('        include uwsgi_params;\n')
        conf.write('    }\n')
        conf.write('    location ~ ^/static/ {\n')
        conf.write('        root ' + self.load_static() + ';\n')
        conf.write('    }\n')
        conf.write('}')
        conf.close()

        cp(conf_file, '/etc/nginx/conf.d')
        rm(conf_file)
        system("service nginx restart")

        return
Example #26
def get_receive_errors(rev_old, rev_new, file_type, function):
  checkable = True
  if file_type == "js":
    checkable = config.getboolean("receive", "CHECK_JAVASCRIPT")
  elif file_type == "php":
    checkable = config.getboolean("receive", "CHECK_PHP")

  if not checkable:
    return None

  files = _get_receive_files(rev_old, rev_new, file_type)
  if not files:
    return None

  tmp_dir = config.get("receive", "TMP_DIR")
  errors = []
  for file_path in files:
    mkdir("-p", "/".join((tmp_dir + file_path).split("/")[:-1]))
    system("git show %s:%s > %s" % (rev_new, file_path, tmp_dir + file_path))
    if path.exists(tmp_dir + file_path):
      file_error = function(tmp_dir + file_path)
      if file_error:
        errors.append(file_path + file_error)

  rm("-rf", tmp_dir)
  return "\n".join(errors)
Example #27
    def pdf_output(self):
        try:
            out = self.gdoc_to(*(self.options + ['-f', 'tex', self.test_file]),
                               _err="/dev/null")
            assert out.stdout.startswith('% -*- TeX-engine')
            csum = checksum(out.stdout)
        except ErrorReturnCode:
            csum = checksum(str(random.random()))

        sha = self.test_file + '.tex.sha'
        if self.cache and os.path.isfile(sha) and slurp(sha) == csum:
            self.say(yellow('Same as cached TeX output. Skipping...'))
        else:
            sh.rm('-f', sha)
            spit(csum, sha)
            try:
                self.gdoc_to(*(self.options + ['-f', 'pdf', self.test_file]),
                             _err=self.test_err + ".pdf.log",
                             _out=self.test_out + ".pdf")
                assert mimetype(self.test_out + '.pdf') == 'application/pdf'
            except ErrorReturnCode:
                self.say(red("PDF generation failed!"))
                self.say(red("\tSee {}.pdf.log"), self.test_err)
                self.say(red("\tRan {} as {} {} -f pdf"),
                         self.test_file, GDOC_TO,
                         shell_join(self.options))
                self.failed = True
Example #28
    def html_output(self):
        ext = '.html'
        today = datetime.date.today().isoformat()
        sha = self.test_file + ".html.sha"
        # cannot recover if generating html fails
        options = (['--zip'] + self.options
                   + ['-f', 'html', self.test_file,
                      self.test_out + ext + '.zip'])
        try:
            self.gdoc_to(*options,
                         _err=self.test_err + ".html.log")
            # XXX it hangs without -n, didn't have time to figure out why
            out_dir = os.path.dirname(self.test_out)
            sh.unzip('-n', '-d', out_dir, self.test_out + ext + '.zip')
            sh.sed('-i', '-e', 's/%s/TODAYS_DATE/g' % today,
                   self.test_out + ext)
            test_result = slurp('%s.html' % self.test_out)
        except ErrorReturnCode as e:
            self.say(red("gdoc-to failed: {}. See {}.html.log"),
                     e, self.test_err)
            self.say(red("Ran in {}"), os.getcwd())
            self.failed = True
            sh.rm('-f', sha)
            return
        try:
            html5check(self.test_out + ext,
                       _out=self.test_out + ".html.errors")
        except ErrorReturnCode:
            self.say(red("Test output did not validate as XHTML5!"))
            self.say(red("\tSee {}.html.errors"), self.test_out)
            self.failed = True

        if test_result != slurp(self.test_file + ext):
            # the file changed, but the change might be okay
            spit(self._canonical_body(self.test_out + ext),
                 self.test_out + ".body")
            spit(self._canonical_body(self.test_file + ext),
                 self.test_out + ".canon.body")

            if (slurp(self.test_out + '.body')
                    == slurp(self.test_out + '.canon.body')):
                self.say(yellow("File changed. Updating canonical file."))
                sh.cp(self.test_out + ext, self.test_file + ext)
            else:
                self.say(red("HTML body changed!"))
                self.say(red("\tSee {}.*"), fail_path(self.test_name))
                sh.cp(self.test_out + ext, fail_path(self.test_name + ext))
                sh.diff('-u', self.test_file + ext, self.test_out + ext,
                        _out=fail_path(self.test_name + ".html.diff"),
                        _ok_code=[0, 1])
                sh.cp(self.test_out + ".body",
                      fail_path(self.test_name + ".body"))
                sh.cp(self.test_out + ".canon.body",
                      fail_path(self.test_name + ".body.expected"))
                sh.diff('-u', self.test_out + ".canon.body",
                        self.test_out + ".body",
                        _out=fail_path(self.test_name + '.body.diff'),
                        _ok_code=[0, 1])
                self.failed = True
Example #29
def wget_and_unpack_package(download_path, file_name, destination, wget_no_clobber):
  print("URL {0}".format(download_path))
  print("Downloading {0} to {1}".format(file_name, destination))
  # --no-clobber avoids downloading the file if a file with the name already exists
  sh.wget(download_path, directory_prefix=destination, no_clobber=wget_no_clobber)
  print "Extracting {0}".format(file_name)
  sh.tar(z=True, x=True, f=os.path.join(destination, file_name), directory=destination)
  sh.rm(os.path.join(destination, file_name))
Example #30
def cleaner():
    sh.cd(IMG_PATH)
    while True:
        try:
            sh.rm('-f', glob.glob("[0-9]*.jpg"))
        except sh.ErrorReturnCode:
            pass
        sleep(10)
Example #31
def build_example(configs, target_abi, toolchain,
                  enable_openmp, mace_lib_type, cl_binary_to_code, device):
    library_name = configs[YAMLKeyword.library_name]
    hexagon_mode = get_hexagon_mode(configs)

    build_tmp_binary_dir = get_build_binary_dir(library_name, target_abi)
    if os.path.exists(build_tmp_binary_dir):
        sh.rm("-rf", build_tmp_binary_dir)
    os.makedirs(build_tmp_binary_dir)

    if cl_binary_to_code:
        sh_commands.gen_opencl_binary_cpps(
            get_opencl_binary_output_path(
                library_name, target_abi, device),
            get_opencl_parameter_output_path(
                library_name, target_abi, device),
            OPENCL_CODEGEN_DIR + '/opencl_binary.cc',
            OPENCL_CODEGEN_DIR + '/opencl_parameter.cc')
    else:
        sh_commands.gen_opencl_binary_cpps(
            "", "",
            OPENCL_CODEGEN_DIR + '/opencl_binary.cc',
            OPENCL_CODEGEN_DIR + '/opencl_parameter.cc')

    symbol_hidden = True

    libmace_target = LIBMACE_STATIC_TARGET
    if mace_lib_type == MACELibType.dynamic:
        symbol_hidden = False
        libmace_target = LIBMACE_SO_TARGET

    sh_commands.bazel_build(libmace_target,
                            abi=target_abi,
                            toolchain=toolchain,
                            enable_openmp=enable_openmp,
                            enable_opencl=get_opencl_mode(configs),
                            enable_quantize=get_quantize_mode(configs),
                            hexagon_mode=hexagon_mode,
                            address_sanitizer=flags.address_sanitizer,
                            symbol_hidden=symbol_hidden)

    if os.path.exists(LIB_CODEGEN_DIR):
        sh.rm("-rf", LIB_CODEGEN_DIR)
    sh.mkdir("-p", LIB_CODEGEN_DIR)

    build_arg = ""
    if configs[YAMLKeyword.model_graph_format] == ModelFormat.code:
        mace_check(os.path.exists(ENGINE_CODEGEN_DIR),
                   ModuleName.RUN,
                   "You should convert model first.")
        model_lib_path = get_model_lib_output_path(library_name,
                                                   target_abi)
        sh.cp("-f", model_lib_path, LIB_CODEGEN_DIR)
        build_arg = "--per_file_copt=mace/examples/cli/example.cc@-DMODEL_GRAPH_FORMAT_CODE"  # noqa

    if mace_lib_type == MACELibType.dynamic:
        example_target = EXAMPLE_DYNAMIC_TARGET
        sh.cp("-f", LIBMACE_DYNAMIC_PATH, LIB_CODEGEN_DIR)
    else:
        example_target = EXAMPLE_STATIC_TARGET
        sh.cp("-f", LIBMACE_STATIC_PATH, LIB_CODEGEN_DIR)

    sh_commands.bazel_build(example_target,
                            abi=target_abi,
                            toolchain=toolchain,
                            enable_openmp=enable_openmp,
                            enable_opencl=get_opencl_mode(configs),
                            enable_quantize=get_quantize_mode(configs),
                            hexagon_mode=hexagon_mode,
                            address_sanitizer=flags.address_sanitizer,
                            extra_args=build_arg)

    target_bin = "/".join(sh_commands.bazel_target_to_bin(example_target))
    sh.cp("-f", target_bin, build_tmp_binary_dir)
    if os.path.exists(LIB_CODEGEN_DIR):
        sh.rm("-rf", LIB_CODEGEN_DIR)
Example #32
def Delete(filePath):
    sh.rm(filePath)
Example #33
PROTOTYPE_URL = "https://github.com/gregtmobile/gitbook-prototype.git"
PROTOTYPE_NAME = "gitbook-prototype"

######################################################################
# DO NOT EDIT BELOW THIS LINE.


def prompt(s):
    sys.stdout.write(s + ': ')
    sys.stdout.flush()
    return input()


original_working_directory = os.getcwd()
sh.rm('-rf', 'gitbook-prototype.git')

username = prompt('GitHub Username: ')
password = prompt('GitHub Password: ')
book_name = prompt('Name for New Book: ')

book_library = 'dropbox'
Example #34
def main():
    log = logging.getLogger("zulip-provisioner")

    if platform.architecture()[0] == '64bit':
        arch = 'amd64'
        phantomjs_arch = 'x86_64'
    elif platform.architecture()[0] == '32bit':
        arch = "i386"
        phantomjs_arch = 'i686'
    else:
        log.critical(
            "Only x86 is supported; ping [email protected] if you want another architecture."
        )
        sys.exit(1)

    vendor, version, codename = platform.dist()

    if not (vendor in SUPPORTED_PLATFORMS
            and codename in SUPPORTED_PLATFORMS[vendor]):
        log.critical("Unsupported platform: {} {}".format(vendor, codename))

    with sh.sudo:
        sh.apt_get.update(**LOUD)

        sh.apt_get.install(*APT_DEPENDENCIES["trusty"],
                           assume_yes=True,
                           **LOUD)

    temp_deb_path = sh.mktemp("package_XXXXXX.deb", tmpdir=True)

    sh.wget("{}/{}_{}_{}.deb".format(
        TSEARCH_URL_BASE,
        TSEARCH_PACKAGE_NAME["trusty"],
        TSEARCH_VERSION,
        arch,
    ),
            output_document=temp_deb_path,
            **LOUD)

    with sh.sudo:
        sh.dpkg("--install", temp_deb_path, **LOUD)

    with sh.sudo:
        PHANTOMJS_PATH = "/srv/phantomjs"
        PHANTOMJS_BASENAME = "phantomjs-1.9.8-linux-%s" % (phantomjs_arch, )
        PHANTOMJS_TARBALL_BASENAME = PHANTOMJS_BASENAME + ".tar.bz2"
        PHANTOMJS_TARBALL = os.path.join(PHANTOMJS_PATH,
                                         PHANTOMJS_TARBALL_BASENAME)
        PHANTOMJS_URL = "https://bitbucket.org/ariya/phantomjs/downloads/%s" % (
            PHANTOMJS_TARBALL_BASENAME, )
        sh.mkdir("-p", PHANTOMJS_PATH, **LOUD)
        if not os.path.exists(PHANTOMJS_TARBALL):
            sh.wget(PHANTOMJS_URL, output_document=PHANTOMJS_TARBALL, **LOUD)
        sh.tar("xj", directory=PHANTOMJS_PATH, file=PHANTOMJS_TARBALL, **LOUD)
        sh.ln(
            "-sf",
            os.path.join(PHANTOMJS_PATH, PHANTOMJS_BASENAME, "bin",
                         "phantomjs"), "/usr/local/bin/phantomjs", **LOUD)

    with sh.sudo:
        sh.rm("-rf", VENV_PATH, **LOUD)
        sh.mkdir("-p", VENV_PATH, **LOUD)
        sh.chown("{}:{}".format(os.getuid(), os.getgid()), VENV_PATH, **LOUD)

    sh.virtualenv(VENV_PATH, **LOUD)

    # Add the ./tools and ./scripts/setup directories inside the repository root to
    # the system path; we'll reference them later.
    orig_path = os.environ["PATH"]
    os.environ["PATH"] = os.pathsep.join(
        (os.path.join(ZULIP_PATH,
                      "tools"), os.path.join(ZULIP_PATH, "scripts",
                                             "setup"), orig_path))

    # Put Python virtualenv activation in our .bash_profile.
    with open(os.path.expanduser('~/.bash_profile'), 'w+') as bash_profile:
        bash_profile.writelines([
            "source .bashrc\n",
            "source %s\n" % (os.path.join(VENV_PATH, "bin", "activate"), ),
        ])

    # Switch current Python context to the virtualenv.
    activate_this = os.path.join(VENV_PATH, "bin", "activate_this.py")
    execfile(activate_this, dict(__file__=activate_this))

    sh.pip.install(requirement=os.path.join(ZULIP_PATH, "requirements.txt"),
                   **LOUD)

    with sh.sudo:
        sh.cp(REPO_STOPWORDS_PATH, TSEARCH_STOPWORDS_PATH, **LOUD)

    # npm install and management commands expect to be run from the root of the project.
    os.chdir(ZULIP_PATH)

    sh.npm.install(**LOUD)

    os.system("tools/download-zxcvbn")
    os.system("tools/emoji_dump/build_emoji")
    os.system("generate_secrets.py -d")
    if "--travis" in sys.argv:
        os.system("sudo service rabbitmq-server restart")
        os.system("sudo service redis-server restart")
        os.system("sudo service memcached restart")
    elif "--docker" in sys.argv:
        os.system("sudo service rabbitmq-server restart")
        os.system("sudo pg_dropcluster --stop 9.3 main")
        os.system("sudo pg_createcluster -e utf8 --start 9.3 main")
        os.system("sudo service redis-server restart")
        os.system("sudo service memcached restart")
    sh.configure_rabbitmq(**LOUD)
    sh.postgres_init_dev_db(**LOUD)
    sh.do_destroy_rebuild_database(**LOUD)
    sh.postgres_init_test_db(**LOUD)
    sh.do_destroy_rebuild_test_database(**LOUD)
    return 0
Example #35
    clean_old_backups(buck, path, False, name)

    #
    # Kodebasen
    #

    print("Cloning git codebase and sending to S3...")
    name = 'sherpa'
    path = 'backups/kodebase/'
    filename = '%s-%s.txz' % (name, now.strftime(dateformat))

    git('clone', 'https://github.com/Turistforeningen/sherpa.git')
    tar('-cJf', filename, 'sherpa')

    key = boto.s3.key.Key(buck, name="%s%s" % (path, filename))
    key.set_contents_from_filename(filename)

    rm('-r', 'sherpa/')
    rm(filename)
    clean_old_backups(buck, path, True, name)

except Exception as e:
    if hasattr(e, 'message'):
        message = e.message
    else:
        message = "(no message)"
    print((u"Exception (sent to Sentry): %s" % message).encode('utf-8'))
    raven.captureException()
    time.sleep(10)  # Give raven time to send the error to Sentry
Example #36
def convert_func(flags):
    configs = config_parser.parse(flags.config)
    print(configs)
    library_name = configs[YAMLKeyword.library_name]
    if not os.path.exists(BUILD_OUTPUT_DIR):
        os.makedirs(BUILD_OUTPUT_DIR)
    elif os.path.exists(os.path.join(BUILD_OUTPUT_DIR, library_name)):
        sh.rm("-rf", os.path.join(BUILD_OUTPUT_DIR, library_name))
    os.makedirs(os.path.join(BUILD_OUTPUT_DIR, library_name))
    if not os.path.exists(BUILD_DOWNLOADS_DIR):
        os.makedirs(BUILD_DOWNLOADS_DIR)

    model_output_dir = \
        '%s/%s/%s' % (BUILD_OUTPUT_DIR, library_name, MODEL_OUTPUT_DIR_NAME)
    model_header_dir = \
        '%s/%s/%s' % (BUILD_OUTPUT_DIR, library_name, MODEL_HEADER_DIR_PATH)
    # clear output dir
    if os.path.exists(model_output_dir):
        sh.rm("-rf", model_output_dir)
    os.makedirs(model_output_dir)
    if os.path.exists(model_header_dir):
        sh.rm("-rf", model_header_dir)

    if os.path.exists(MODEL_CODEGEN_DIR):
        sh.rm("-rf", MODEL_CODEGEN_DIR)
    if os.path.exists(ENGINE_CODEGEN_DIR):
        sh.rm("-rf", ENGINE_CODEGEN_DIR)

    if flags.quantize_stat:
        configs[YAMLKeyword.quantize_stat] = flags.quantize_stat

    if flags.model_data_format:
        model_data_format = flags.model_data_format
    else:
        model_data_format = configs.get(YAMLKeyword.model_data_format, "file")
    embed_model_data = model_data_format == ModelFormat.code

    if flags.model_graph_format:
        model_graph_format = flags.model_graph_format
    else:
        model_graph_format = configs.get(YAMLKeyword.model_graph_format,
                                         "file")
    embed_graph_def = model_graph_format == ModelFormat.code
    if flags.enable_micro:
        mace_check((not embed_model_data) and (not embed_graph_def),
                   ModuleName.YAML_CONFIG,
                   "You should specify file mode to convert micro model.")
    if embed_graph_def:
        os.makedirs(model_header_dir)
        sh_commands.gen_mace_engine_factory_source(
            configs[YAMLKeyword.models].keys(), embed_model_data)
        sh.cp("-f", glob.glob("mace/codegen/engine/*.h"), model_header_dir)

    convert.convert(configs, MODEL_CODEGEN_DIR, flags.enable_micro)

    for model_name, model_config in configs[YAMLKeyword.models].items():
        if flags.enable_micro:
            data_type = model_config.get(YAMLKeyword.data_type, "")
            mace_check(
                data_type == FPDataType.fp32_fp32.value
                or data_type == FPDataType.bf16_fp32.value,
                ModuleName.YAML_CONFIG,
                "You should specify fp32_fp32 or bf16_fp32 data type "
                "to convert micro model.")
        model_codegen_dir = "%s/%s" % (MODEL_CODEGEN_DIR, model_name)
        encrypt.encrypt(
            model_name, "%s/model/%s.pb" % (model_codegen_dir, model_name),
            "%s/model/%s.data" % (model_codegen_dir, model_name),
            config_parser.parse_device_type(model_config[YAMLKeyword.runtime]),
            model_codegen_dir, bool(model_config.get(YAMLKeyword.obfuscate,
                                                     1)),
            model_graph_format == "code", model_data_format == "code")

        if model_graph_format == ModelFormat.file:
            sh.mv("-f", '%s/model/%s.pb' % (model_codegen_dir, model_name),
                  model_output_dir)
            sh.mv("-f", '%s/model/%s.data' % (model_codegen_dir, model_name),
                  model_output_dir)
            if flags.enable_micro:
                sh.mv(
                    "-f", '%s/model/%s_micro.tar.gz' %
                    (model_codegen_dir, model_name), model_output_dir)
        else:
            if not embed_model_data:
                sh.mv("-f",
                      '%s/model/%s.data' % (model_codegen_dir, model_name),
                      model_output_dir)

            sh.cp("-f", glob.glob("mace/codegen/models/*/code/*.h"),
                  model_header_dir)

        MaceLogger.summary(
            StringFormatter.block("Model %s converted" % model_name))

    if model_graph_format == ModelFormat.code:
        build_model_lib(configs, flags.address_sanitizer, flags.debug_mode)

    print_library_summary(configs)
Example #37
def build_run_throughput_test(abi,
                              serialno,
                              vlog_level,
                              run_seconds,
                              merged_lib_file,
                              model_input_dir,
                              embed_model_data,
                              input_nodes,
                              output_nodes,
                              input_shapes,
                              output_shapes,
                              cpu_model_tag,
                              gpu_model_tag,
                              dsp_model_tag,
                              phone_data_dir,
                              strip="always",
                              input_file_name="model_input"):
    six.print_("* Build and run throughput_test")

    model_tag_build_flag = ""
    if cpu_model_tag:
        model_tag_build_flag += "--copt=-DMACE_CPU_MODEL_TAG=%s " % \
                                cpu_model_tag
    if gpu_model_tag:
        model_tag_build_flag += "--copt=-DMACE_GPU_MODEL_TAG=%s " % \
                                gpu_model_tag
    if dsp_model_tag:
        model_tag_build_flag += "--copt=-DMACE_DSP_MODEL_TAG=%s " % \
                                dsp_model_tag

    sh.cp("-f", merged_lib_file, "mace/benchmark/libmace_merged.a")
    sh.bazel("build",
             "-c",
             "opt",
             "--strip",
             strip,
             "--verbose_failures",
             "//mace/benchmark:model_throughput_test",
             "--crosstool_top=//external:android/crosstool",
             "--host_crosstool_top=@bazel_tools//tools/cpp:toolchain",
             "--cpu=%s" % abi,
             "--copt=-std=c++11",
             "--copt=-D_GLIBCXX_USE_C99_MATH_TR1",
             "--copt=-Werror=return-type",
             "--copt=-O3",
             "--define",
             "neon=true",
             "--define",
             "openmp=true",
             model_tag_build_flag,
             _fg=True)

    sh.rm("mace/benchmark/libmace_merged.a")
    sh.adb("-s", serialno, "shell", "mkdir", "-p", phone_data_dir)
    adb_push(
        "%s/%s_%s" % (model_input_dir, input_file_name, ",".join(input_nodes)),
        phone_data_dir, serialno)
    adb_push("bazel-bin/mace/benchmark/model_throughput_test", phone_data_dir,
             serialno)
    if not embed_model_data:
        adb_push("codegen/models/%s/%s.data" % (cpu_model_tag, cpu_model_tag),
                 phone_data_dir, serialno)
        adb_push("codegen/models/%s/%s.data" % (gpu_model_tag, gpu_model_tag),
                 phone_data_dir, serialno)
        adb_push("codegen/models/%s/%s.data" % (dsp_model_tag, dsp_model_tag),
                 phone_data_dir, serialno)
    adb_push("third_party/nnlib/libhexagon_controller.so", phone_data_dir,
             serialno)

    sh.adb(
        "-s",
        serialno,
        "shell",
        "LD_LIBRARY_PATH=%s" % phone_data_dir,
        "MACE_CPP_MIN_VLOG_LEVEL=%s" % vlog_level,
        "MACE_RUN_PARAMETER_PATH=%s/mace_run.config" % phone_data_dir,
        "%s/model_throughput_test" % phone_data_dir,
        "--input_node=%s" % ",".join(input_nodes),
        "--output_node=%s" % ",".join(output_nodes),
        "--input_shape=%s" % ":".join(input_shapes),
        "--output_shape=%s" % ":".join(output_shapes),
        "--input_file=%s/%s" % (phone_data_dir, input_file_name),
        "--cpu_model_data_file=%s/%s.data" % (phone_data_dir, cpu_model_tag),
        "--gpu_model_data_file=%s/%s.data" % (phone_data_dir, gpu_model_tag),
        "--dsp_model_data_file=%s/%s.data" % (phone_data_dir, dsp_model_tag),
        "--run_seconds=%s" % run_seconds,
        _fg=True)

    six.print_("throughput_test done!\n")
Example #38
def setup_heroku_server(task_name):
    print("Heroku: Collecting files...")
    # Install Heroku CLI
    os_name = None
    bit_architecture = None

    # Get the platform we are working on
    platform_info = platform.platform()
    if 'Darwin' in platform_info:  # Mac OS X
        os_name = 'darwin'
    elif 'Linux' in platform_info:  # Linux
        os_name = 'linux'
    else:
        os_name = 'windows'

    # Find our architecture
    bit_architecture_info = platform.architecture()[0]
    if '64bit' in bit_architecture_info:
        bit_architecture = 'x64'
    else:
        bit_architecture = 'x86'

    # Remove existing heroku client files
    existing_heroku_directory_names = \
        glob.glob(os.path.join(parent_dir, 'heroku-cli-*'))
    if len(existing_heroku_directory_names) == 0:
        if os.path.exists(os.path.join(parent_dir, 'heroku.tar.gz')):
            os.remove(os.path.join(parent_dir, 'heroku.tar.gz'))

        # Get the heroku client and unzip
        os.chdir(parent_dir)
        sh.wget(shlex.split('{}-{}-{}.tar.gz -O heroku.tar.gz'.format(
            heroku_url,
            os_name,
            bit_architecture
        )))
        sh.tar(shlex.split('-xvzf heroku.tar.gz'))

    heroku_directory_name = \
        glob.glob(os.path.join(parent_dir, 'heroku-cli-*'))[0]
    heroku_directory_path = os.path.join(parent_dir, heroku_directory_name)
    heroku_executable_path = \
        os.path.join(heroku_directory_path, 'bin', 'heroku')

    server_source_directory_path = \
        os.path.join(parent_dir, server_source_directory_name)
    heroku_server_directory_path = os.path.join(parent_dir, '{}_{}'.format(
        heroku_server_directory_name,
        task_name
    ))

    # Delete old server files
    sh.rm(shlex.split('-rf ' + heroku_server_directory_path))

    # Copy over a clean copy into the server directory
    shutil.copytree(server_source_directory_path, heroku_server_directory_path)

    print("Heroku: Starting server...")

    os.chdir(heroku_server_directory_path)
    sh.git('init')

    # get heroku credentials
    heroku_user_identifier = None
    while not heroku_user_identifier:
        try:
            subprocess.check_output(
                shlex.split(heroku_executable_path + ' auth:token')
            )
            heroku_user_identifier = (
                netrc.netrc(os.path.join(os.path.expanduser("~"), '.netrc'))
                     .hosts['api.heroku.com'][0]
            )
        except subprocess.CalledProcessError:
            raise SystemExit(
                'A free Heroku account is required for launching MTurk tasks. '
                'Please register at https://signup.heroku.com/ and run `{} '
                'login` at the terminal to login to Heroku, and then run this '
                'program again.'.format(heroku_executable_path)
            )

    heroku_app_name = ('{}-{}-{}'.format(
        user_name,
        task_name,
        hashlib.md5(heroku_user_identifier.encode('utf-8')).hexdigest()
    ))[:30]

    while heroku_app_name[-1] == '-':
        heroku_app_name = heroku_app_name[:-1]

    # Create or attach to the server
    try:
        subprocess.check_output(shlex.split(
            '{} create {}'.format(heroku_executable_path, heroku_app_name)
        ), stderr=subprocess.STDOUT)
    except subprocess.CalledProcessError as e:
        error_text = bytes.decode(e.output)
        if "Name is already taken" in error_text:  # already running this app
            do_continue = input(
                'An app is already running with that name, do you want to '
                'restart a new run with it? (y/N): '
            )
            if do_continue != 'y':
                raise SystemExit('User chose not to re-run the app.')
            else:
                delete_heroku_server(task_name)
                try:
                    subprocess.check_output(shlex.split(
                        '{} create {}'.format(heroku_executable_path,
                                              heroku_app_name)
                    ), stderr=subprocess.STDOUT)
                except subprocess.CalledProcessError as e:
                    error_text = bytes.decode(e.output)
                    sh.rm(shlex.split('-rf {}'.format(
                        heroku_server_directory_path)))
                    print(error_text)
                    raise SystemExit(
                        'Something unexpected happened trying to set up the '
                        'heroku server - please use the above printed error '
                        'to debug the issue however necessary.'
                    )
        elif "Delete some apps" in error_text:  # too many apps running
            sh.rm(shlex.split('-rf {}'.format(heroku_server_directory_path)))
            raise SystemExit(
                'You have hit your limit on concurrent apps with heroku, '
                'which are required to run multiple concurrent tasks.\nPlease '
                'wait for some of your existing tasks to complete. If you '
                'have no tasks running, login to heroku.com and delete some '
                'of the running apps or verify your account to allow more '
                'concurrent apps.'
            )
        else:
            sh.rm(shlex.split('-rf {}'.format(heroku_server_directory_path)))
            print(error_text)
            raise SystemExit(
                'Something unexpected happened trying to set up the heroku '
                'server - please use the above printed error to debug the '
                'issue however necessary.'
            )

    # Enable WebSockets
    try:
        subprocess.check_output(shlex.split(
            '{} features:enable http-session-affinity'.format(
                heroku_executable_path
            )
        ))
    except subprocess.CalledProcessError:  # Already enabled WebSockets
        pass

    # commit and push to the heroku server
    os.chdir(heroku_server_directory_path)
    sh.git(shlex.split('add -A'))
    sh.git(shlex.split('commit -m "app"'))
    sh.git(shlex.split('push -f heroku master'))
    subprocess.check_output(shlex.split('{} ps:scale web=1'.format(
        heroku_executable_path)
    ))
    os.chdir(parent_dir)

    # Clean up heroku files
    if os.path.exists(os.path.join(parent_dir, 'heroku.tar.gz')):
        os.remove(os.path.join(parent_dir, 'heroku.tar.gz'))

    sh.rm(shlex.split('-rf {}'.format(heroku_server_directory_path)))

    return 'https://{}.herokuapp.com'.format(heroku_app_name)
Example #39
def gen_input(model_output_dir,
              input_nodes,
              input_shapes,
              input_files=None,
              input_ranges=None,
              input_data_types=None,
              input_data_map=None,
              input_file_name="model_input"):
    for input_name in input_nodes:
        formatted_name = common.formatted_file_name(
            input_file_name, input_name)
        if os.path.exists("%s/%s" % (model_output_dir, formatted_name)):
            sh.rm("%s/%s" % (model_output_dir, formatted_name))
    input_file_list = []
    if isinstance(input_files, list):
        input_file_list.extend(input_files)
    else:
        input_file_list.append(input_files)
    if input_data_map:
        for i in range(len(input_nodes)):
            dst_input_file = model_output_dir + '/' + \
                             common.formatted_file_name(input_file_name,
                                                        input_nodes[i])
            input_name = input_nodes[i]
            common.mace_check(input_name in input_data_map,
                              common.ModuleName.RUN,
                              "The preprocessor API in PrecisionValidator"
                              " script should return all inputs of model")
            if input_data_types[i] == 'float32' or \
                    input_data_types[i] == 'float16' or \
                    input_data_types[i] == 'bfloat16':
                input_data = np.array(input_data_map[input_name],
                                      dtype=np.float32)
            elif input_data_types[i] == 'int32':
                input_data = np.array(input_data_map[input_name],
                                      dtype=np.int32)
            else:
                common.mace_check(
                    False,
                    common.ModuleName.RUN,
                    'Do not support input data type %s' % input_data_types[i])
            common.mace_check(
                list(map(int, common.split_shape(input_shapes[i])))
                == list(input_data.shape),
                common.ModuleName.RUN,
                "The shape return from preprocessor API of"
                " PrecisionValidator script is not same with"
                " model deployment file. %s vs %s"
                % (str(input_shapes[i]), str(input_data.shape)))
            input_data.tofile(dst_input_file)
    elif len(input_file_list) != 0:
        input_name_list = []
        if isinstance(input_nodes, list):
            input_name_list.extend(input_nodes)
        else:
            input_name_list.append(input_nodes)
        common.mace_check(len(input_file_list) == len(input_name_list),
                          common.ModuleName.RUN,
                          'If input_files set, the input files should '
                          'match the input names.')
        for i in range(len(input_file_list)):
            if input_file_list[i] is not None:
                dst_input_file = model_output_dir + '/' + \
                                 common.formatted_file_name(input_file_name,
                                                            input_name_list[i])
                if input_file_list[i].startswith("http://") or \
                        input_file_list[i].startswith("https://"):
                    six.moves.urllib.request.urlretrieve(input_file_list[i],
                                                         dst_input_file)
                else:
                    sh.cp("-f", input_file_list[i], dst_input_file)
    else:
        # generate random input files
        input_nodes_str = ",".join(input_nodes)
        input_shapes_str = ":".join(input_shapes)
        input_ranges_str = ":".join(input_ranges)
        input_data_types_str = ",".join(input_data_types)
        generate_input_data("%s/%s" % (model_output_dir, input_file_name),
                            input_nodes_str,
                            input_shapes_str,
                            input_ranges_str,
                            input_data_types_str)
Example #40
def setup_heroku_server(
    task_name,
    task_files_to_copy=None,
    heroku_team=None,
    use_hobby=False,
    tmp_dir=parent_dir,
):

    print("Heroku: Collecting files... for ", tmp_dir)
    # Install Heroku CLI
    os_name = None
    bit_architecture = None

    # Get the platform we are working on
    platform_info = platform.platform()
    if 'Darwin' in platform_info:  # Mac OS X
        os_name = 'darwin'
    elif 'Linux' in platform_info:  # Linux
        os_name = 'linux'
    else:
        os_name = 'windows'

    # Find our architecture
    bit_architecture_info = platform.architecture()[0]
    if '64bit' in bit_architecture_info:
        bit_architecture = 'x64'
    else:
        bit_architecture = 'x86'

    # Remove existing heroku client files
    existing_heroku_directory_names = glob.glob(
        os.path.join(tmp_dir, 'heroku-cli-*'))
    if len(existing_heroku_directory_names) == 0:
        if os.path.exists(os.path.join(tmp_dir, 'heroku.tar.gz')):
            os.remove(os.path.join(tmp_dir, 'heroku.tar.gz'))

        # Get the heroku client and unzip
        os.chdir(tmp_dir)
        sh.wget(
            shlex.split('{}-{}-{}.tar.gz -O heroku.tar.gz'.format(
                heroku_url, os_name, bit_architecture)))
        sh.tar(shlex.split('-xvzf heroku.tar.gz'))

    heroku_directory_name = glob.glob(os.path.join(tmp_dir, 'heroku-cli-*'))[0]
    heroku_directory_path = os.path.join(tmp_dir, heroku_directory_name)
    heroku_executable_path = os.path.join(heroku_directory_path, 'bin',
                                          'heroku')

    server_source_directory_path = os.path.join(parent_dir,
                                                server_source_directory_name)
    heroku_server_development_path = os.path.join(
        tmp_dir, '{}_{}'.format(heroku_server_directory_name, task_name))

    # Delete old server files
    sh.rm(shlex.split('-rf ' + heroku_server_development_path))

    # Copy over a clean copy into the server directory
    shutil.copytree(server_source_directory_path,
                    heroku_server_development_path)

    # Check to see if we need to build
    custom_component_dir = os.path.join(heroku_server_development_path, 'dev',
                                        'components',
                                        'built_custom_components')
    if task_files_to_copy['needs_build'] is not None:
        # Build the directory, then pull the custom component out.
        print('Build: Detected custom package.json, prepping build')
        task_files_to_copy['components'] = []

        frontend_dir = task_files_to_copy['needs_build']

        sh.rm(shlex.split('-rf ' + custom_component_dir))
        shutil.copytree(frontend_dir, custom_component_dir)

        os.chdir(heroku_server_development_path)
        packages_installed = subprocess.call(
            ['npm', 'install', custom_component_dir])
        if packages_installed != 0:
            raise Exception('please make sure npm is installed, otherwise view'
                            ' the above error for more info.')

        os.chdir(custom_component_dir)

        webpack_complete = subprocess.call(['npm', 'run', 'dev'])
        if webpack_complete != 0:
            raise Exception('Webpack appears to have failed to build your '
                            'custom components. See the above for more info.')
    else:
        os.chdir(heroku_server_development_path)
        packages_installed = subprocess.call(
            ['npm', 'install', custom_component_dir])
        if packages_installed != 0:
            raise Exception('please make sure npm is installed, otherwise view'
                            ' the above error for more info.')

    # Move dev resource files to their correct places
    for resource_type in ['css', 'components']:
        target_resource_dir = os.path.join(heroku_server_development_path,
                                           'dev', resource_type)
        for file_path in task_files_to_copy[resource_type]:
            try:
                file_name = os.path.basename(file_path)
                target_path = os.path.join(target_resource_dir, file_name)
                print('copying {} to {}'.format(file_path, target_path))
                shutil.copy2(file_path, target_path)
            except IsADirectoryError:  # noqa: F821
                dir_name = os.path.basename(os.path.normpath(file_path))
                shutil.copytree(file_path,
                                os.path.join(target_resource_dir, dir_name))
            except FileNotFoundError:  # noqa: F821
                pass

    # Compile the frontend
    os.chdir(heroku_server_development_path)

    packages_installed = subprocess.call(['npm', 'install'])
    if packages_installed != 0:
        raise Exception('please make sure npm is installed, otherwise view '
                        'the above error for more info.')

    webpack_complete = subprocess.call(['npm', 'run', 'dev'])
    if webpack_complete != 0:
        raise Exception('Webpack appears to have failed to build your '
                        'frontend. See the above error for more information.')

    # all the important files should've been moved to bundle.js in
    # server/static, now copy the rest into static
    target_resource_dir = os.path.join(heroku_server_development_path,
                                       'server', 'static')
    for file_path in task_files_to_copy['static']:
        try:
            file_name = os.path.basename(file_path)
            target_path = os.path.join(target_resource_dir, file_name)
            shutil.copy2(file_path, target_path)
        except IsADirectoryError:  # noqa: F821 we don't support python2
            dir_name = os.path.basename(os.path.normpath(file_path))
            shutil.copytree(file_path,
                            os.path.join(target_resource_dir, dir_name))
        except FileNotFoundError:  # noqa: F821 we don't support python2
            pass

    hit_config_file_path = os.path.join(tmp_dir, 'hit_config.json')
    sh.mv(hit_config_file_path, target_resource_dir)

    print("Heroku: Starting server...")

    heroku_server_directory_path = os.path.join(heroku_server_development_path,
                                                'server')
    os.chdir(heroku_server_directory_path)
    sh.git('init')

    # get heroku credentials
    heroku_user_identifier = None
    while not heroku_user_identifier:
        try:
            subprocess.check_output(
                shlex.split(heroku_executable_path + ' auth:token'))
            heroku_user_identifier = netrc.netrc(
                os.path.join(os.path.expanduser("~"),
                             '.netrc')).hosts['api.heroku.com'][0]
        except subprocess.CalledProcessError:
            raise SystemExit(
                'A free Heroku account is required for launching MTurk tasks. '
                'Please register at https://signup.heroku.com/ and run `{} '
                'login` at the terminal to login to Heroku, and then run this '
                'program again.'.format(heroku_executable_path))

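    # Heroku app names are capped at 30 characters and cannot end with '-'.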
    heroku_app_name = ('{}-{}-{}'.format(
        user_name,
        task_name,
        hashlib.md5(heroku_user_identifier.encode('utf-8')).hexdigest(),
    ))[:30]

    while heroku_app_name[-1] == '-':
        heroku_app_name = heroku_app_name[:-1]

    # Create or attach to the server
    try:
        if heroku_team is not None:
            subprocess.check_output(
                shlex.split('{} create {} --team {}'.format(
                    heroku_executable_path, heroku_app_name, heroku_team)))
        else:
            subprocess.check_output(
                shlex.split('{} create {}'.format(heroku_executable_path,
                                                  heroku_app_name)))
    except subprocess.CalledProcessError:  # User has too many apps
        sh.rm(shlex.split('-rf {}'.format(heroku_server_directory_path)))
        raise SystemExit(
            'You have hit your limit on concurrent apps with heroku, which are'
            ' required to run multiple concurrent tasks.\nPlease wait for some'
            ' of your existing tasks to complete. If you have no tasks '
            'running, login to heroku and delete some of the running apps or '
            'verify your account to allow more concurrent apps')

    # Enable WebSockets
    try:
        subprocess.check_output(
            shlex.split('{} features:enable http-session-affinity'.format(
                heroku_executable_path)))
    except subprocess.CalledProcessError:  # Already enabled WebSockets
        pass

    # commit and push to the heroku server
    os.chdir(heroku_server_directory_path)
    sh.git(shlex.split('add -A'))
    sh.git(shlex.split('commit -m "app"'))
    sh.git(shlex.split('push -f heroku master'))

    subprocess.check_output(
        shlex.split('{} ps:scale web=1'.format(heroku_executable_path)))

    if use_hobby:
        try:
            subprocess.check_output(
                shlex.split(
                    '{} dyno:type Hobby'.format(heroku_executable_path)))
        except subprocess.CalledProcessError:  # User doesn't have hobby access
            delete_heroku_server(task_name)
            sh.rm(shlex.split('-rf {}'.format(heroku_server_directory_path)))
            raise SystemExit(
                'Server launched with hobby flag but account cannot create '
                'hobby servers.')
    os.chdir(parent_dir)

    # Clean up heroku files
    if os.path.exists(os.path.join(tmp_dir, 'heroku.tar.gz')):
        os.remove(os.path.join(tmp_dir, 'heroku.tar.gz'))

    sh.rm(shlex.split('-rf {}'.format(heroku_server_development_path)))

    return 'https://{}.herokuapp.com'.format(heroku_app_name)
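A minimal usage sketch for the function above, with hypothetical file paths; the task_files_to_copy keys ('needs_build', 'components', 'css', 'static') are the ones the function body reads, and a hit_config.json is expected in tmp_dir:

task_files = {
    'needs_build': None,                    # or a frontend dir to trigger an npm build
    'components': ['/path/to/Widget.jsx'],  # copied into dev/components
    'css': ['/path/to/style.css'],          # copied into dev/css
    'static': ['/path/to/logo.png'],        # copied into server/static
}
url = setup_heroku_server('my-task', task_files_to_copy=task_files)
print(url)  # e.g. https://<user>-my-task-<hash>.herokuapp.com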
Example #41
0
    def _install_android_packages(self):

        # if any of these values in buildozer.spec changes, retry the
        # update
        cache_key = 'android:sdk_installation'
        cache_value = [
            self.android_api, self.android_minapi, self.android_ndk_version,
            self.android_sdk_dir, self.android_ndk_dir
        ]
        if self.buildozer.state.get(cache_key, None) == cache_value:
            return True

        # 3-pass installation.
        if not os.access(self.android_cmd, os.X_OK):
            self.buildozer.cmd('chmod ug+x {}'.format(self.android_cmd))

        # 1. update the tool and platform-tools if needed
        packages = self._android_list_sdk()
        skip_upd = self.buildozer.config.getdefault('app',
                                                    'android.skip_update',
                                                    False)
        if 'tools' in packages or 'platform-tools' in packages:
            if not skip_upd:
                if WSL:
                    # WSL (Windows Subsystem for Linux) allows running
                    # Linux from Windows 10, but some Windows
                    # limitations still apply: you cannot rename a
                    # directory that a running program was started
                    # from, which is exactly what the tools update
                    # does, leaving an empty dir behind. So we run the
                    # updater from a different place; since it still
                    # looks for (and renames) the 'tools' dir, moving
                    # the real dir aside and symlinking it back into
                    # place works.
                    sh.mv(join(self.android_sdk_dir, 'tools'),
                          join(self.android_sdk_dir, 'tools.save'))
                    sh.ln('-s', join(self.android_sdk_dir, 'tools.save'),
                          join(self.android_sdk_dir, 'tools'))
                    old_android_cmd = self.android_cmd
                    self.android_cmd = join(self.android_sdk_dir, 'tools.save',
                                            self.android_cmd.split('/')[-1])

                self._android_update_sdk('tools,platform-tools')

                if WSL:
                    self.android_cmd = old_android_cmd
                    sh.rm('-rf', join(self.android_sdk_dir, 'tools.save'))
            else:
                self.buildozer.info(
                    'Skipping Android SDK update due to spec file setting')

        # 2. install the latest build tool
        v_build_tools = self._read_version_subdir(self.android_sdk_dir,
                                                  'build-tools')
        packages = self._android_list_sdk(include_all=True)
        ver = self._find_latest_package(packages, 'build-tools-')
        if ver and ver > v_build_tools and not skip_upd:
            self._android_update_sdk(
                self._build_package_string('build-tools', ver))
        # 2b. check that aidl can be run
        self._check_aidl(v_build_tools)

        # 3. finally, install the android platform for the current api
        android_platform = join(self.android_sdk_dir, 'platforms',
                                'android-{0}'.format(self.android_api))
        if not self.buildozer.file_exists(android_platform):
            packages = self._android_list_sdk()
            android_package = 'android-{}'.format(self.android_api)
            if android_package in packages and not skip_upd:
                self._android_update_sdk(android_package)

        self.buildozer.info('Android packages installation done.')

        self.buildozer.state[cache_key] = cache_value
        self.buildozer.state.sync()
Example #42
0
    def _remove_workdir(self):
        rm('-rf', self._wd)
Example #43
0
pdfscrSch = os.path.join(here, '..', 'scr', 'pdf-sch.scr')  # assumed: the schematic .scr used below; its definition is missing from this excerpt
pdfscrBrd = os.path.join(here, '..', 'scr', 'pdf-brd.scr')
numberpdf = os.path.join(here, 'number_pdf.sh')
titlepdf = os.path.join(here, 'pdf_titles.py')

# Figure out the name of the schematic to run this on.
for sch in glob('*.sch'):
    sch_name, sch_ext = os.path.splitext(sch)
    brd = sch_name + '.brd'
    if not os.path.exists(brd):
        print("ERR: No .brd file for {}?".format(sch))

    print("Running for {}".format(sch_name))

    # Delete the old pdfs if they exist
    for pdf, title in pdf_files + [('layer_test', '~')]:
        rm('-f', '{}_{}.pdf'.format(sch_name, pdf))

    # Delete the merged version
    rm('-f', '{}.pdf'.format(sch))

    # Something broke in Eagle with the section specifiers and command blocking,
    # so work around it by just making new scr files that include a trailing quit
    print("  Generating schematic pdf")
    with tempfile.NamedTemporaryFile() as temp_scr:
        with open(pdfscrSch) as source:
            for line in source:
                temp_scr.write(line.encode('utf-8'))
        temp_scr.write('\nquit;'.encode('utf-8'))
        temp_scr.flush()

        # Generate the schematic pdfs
Example #44
0
def setup_heroku_server(
    task_name,
    tmp_dir=parent_dir,
):

    print("Heroku: Collecting files... for ", tmp_dir)

    heroku_executable_path = install_heroku_cli(tmp_dir)

    server_source_directory_path = os.path.join(parent_dir, 'app')
    heroku_server_development_path = os.path.join(
        tmp_dir, '{}_{}'.format(heroku_server_directory_name, task_name))

    # Delete old server files
    sh.rm(shlex.split('-rf ' + heroku_server_development_path))

    # Delete existing generated node modules files
    if os.path.exists(
            os.path.join(server_source_directory_path, 'react_app',
                         'node_modules')):
        sh.rm(
            shlex.split('-rf ' + os.path.join(server_source_directory_path,
                                              'react_app', 'node_modules')))
    if os.path.exists(
            os.path.join(server_source_directory_path, 'react_app', 'build')):
        sh.rm(
            shlex.split('-rf ' + os.path.join(server_source_directory_path,
                                              'react_app', 'build')))

    # Copy over a clean copy into the server directory
    shutil.copytree(server_source_directory_path,
                    heroku_server_development_path)

    # Check to see if we need to build
    react_app_dir = os.path.join(heroku_server_development_path, 'react_app')
    # Build the directory, then pull the custom component out.
    print('Build: Detected package.json, prepping build')

    os.chdir(react_app_dir)
    packages_installed = subprocess.call(['npm', 'install', react_app_dir])
    if packages_installed != 0:
        raise Exception('please make sure npm is installed, otherwise view'
                        ' the above error for more info.')
    os.chdir(react_app_dir)

    webpack_complete = subprocess.call(['npm', 'run', 'build'])
    if webpack_complete != 0:
        raise Exception('Webpack appears to have failed to build your '
                        'custom components. See the above for more info.')

    print("Heroku: Starting server...")

    heroku_server_directory_path = heroku_server_development_path
    os.chdir(heroku_server_directory_path)
    sh.rm(shlex.split('-rf .git'))
    sh.git('init')

    # get heroku credentials
    heroku_user_identifier = None
    while not heroku_user_identifier:
        try:
            subprocess.check_output(
                shlex.split(heroku_executable_path + ' auth:token'))
            heroku_user_identifier = netrc.netrc(
                os.path.join(os.path.expanduser("~"),
                             '.netrc')).hosts['api.heroku.com'][0]
        except subprocess.CalledProcessError:
            raise SystemExit(
                'A free Heroku account is required for launching MTurk tasks. '
                'Please register at https://signup.heroku.com/ and run `{} '
                'login` at the terminal to login to Heroku, and then run this '
                'program again.'.format(heroku_executable_path))

    heroku_app_name = ('{}-{}-{}'.format(
        user_name,
        task_name,
        hashlib.md5(heroku_user_identifier.encode('utf-8')).hexdigest(),
    ))[:30]

    while heroku_app_name[-1] == '-':
        heroku_app_name = heroku_app_name[:-1]

    # Save the task name and app name to retrieve data in the future
    os.chdir(parent_dir)
    app_names = {}
    if os.path.exists('appname.json'):
        with open('appname.json', 'r') as f:
            app_names = json.loads(f.read())

    os.chdir(heroku_server_directory_path)
    # Create or attach to the server
    bool_new_app = True
    try:
        subprocess.check_output(
            shlex.split('{} create {}'.format(heroku_executable_path,
                                              heroku_app_name)))
    except Exception:
        # the app may already exist, or the account hit its app limit
        print("This app exists, trying to push new changes...")
        bool_new_app = False
        if task_name in app_names:
            pass
        else:
            sh.rm(shlex.split('-rf {}'.format(heroku_server_directory_path)))
            raise SystemExit(
                'You have hit your limit on concurrent apps with heroku, which are'
                ' required to run multiple concurrent tasks.\nPlease wait for some'
                ' of your existing tasks to complete. If you have no tasks '
                'running, login to heroku and delete some of the running apps or '
                'verify your account to allow more concurrent apps')
    try:
        subprocess.check_output(
            shlex.split('{} git:remote -a {}'.format(heroku_executable_path,
                                                     heroku_app_name)))
    except subprocess.CalledProcessError:
        raise SystemExit(
            'Error setting the git remote! Please check the appname.json file '
            'under the git root path and make sure all the apps are running '
            'in your heroku account.')

    # if this task is not in the app list, add it to the app names json file.
    if task_name not in app_names or (task_name in app_names and
                                      app_names[task_name] != heroku_app_name):
        app_names[task_name] = heroku_app_name
        b = json.dumps(app_names)
        os.chdir(parent_dir)
        with open('appname.json', 'w') as f2:
            f2.write(b)

    os.chdir(heroku_server_directory_path)
    # Enable WebSockets
    try:
        subprocess.check_output(
            shlex.split('{} features:enable http-session-affinity'.format(
                heroku_executable_path)))
    except subprocess.CalledProcessError:  # Already enabled WebSockets
        pass

    # commit and push to the heroku server
    os.chdir(heroku_server_directory_path)
    sh.git(shlex.split('add -A'))
    sh.git(shlex.split('commit -m "app"'))
    sh.git(shlex.split('push -f heroku master'))

    subprocess.check_output(
        shlex.split('{} ps:scale web=1'.format(heroku_executable_path)))

    os.chdir(parent_dir)

    # Clean up heroku files
    if os.path.exists(os.path.join(tmp_dir, 'heroku.tar.gz')):
        os.remove(os.path.join(tmp_dir, 'heroku.tar.gz'))

    sh.rm(shlex.split('-rf {}'.format(heroku_server_development_path)))

    # create the postgresql add on if creating a new app
    if bool_new_app:
        try:
            print("Creating the heroku postgresql addon...")
            subprocess.check_output(
                shlex.split(
                    '{} addons:create heroku-postgresql:hobby-dev --version=10 --app {}'
                    .format(heroku_executable_path, heroku_app_name)))

        except subprocess.CalledProcessError:
            print("Fail to create the heroku postgresql addon")
            pass

    return 'https://{}.herokuapp.com'.format(heroku_app_name)
Example #45
0
def validate_model(abi,
                   device,
                   model_file_path,
                   weight_file_path,
                   docker_image_tag,
                   dockerfile_path,
                   platform,
                   device_type,
                   input_nodes,
                   output_nodes,
                   input_shapes,
                   output_shapes,
                   input_data_formats,
                   output_data_formats,
                   model_output_dir,
                   input_data_types,
                   caffe_env,
                   input_file_name="model_input",
                   output_file_name="model_out",
                   validation_threshold=0.9,
                   backend="tensorflow",
                   validation_outputs_data=[],
                   log_file=""):
    if not validation_outputs_data:
        six.print_("* Validate with %s" % platform)
    else:
        six.print_("* Validate with file: %s" % validation_outputs_data)
    if abi != "host":
        for output_name in output_nodes:
            formatted_name = common.formatted_file_name(
                output_file_name, output_name)
            if os.path.exists("%s/%s" % (model_output_dir, formatted_name)):
                sh.rm("-rf", "%s/%s" % (model_output_dir, formatted_name))
            device.pull_from_data_dir(formatted_name, model_output_dir)

    if platform == "tensorflow" or platform == "onnx":
        validate(platform, model_file_path, "",
                 "%s/%s" % (model_output_dir, input_file_name),
                 "%s/%s" % (model_output_dir, output_file_name), device_type,
                 ":".join(input_shapes), ":".join(output_shapes),
                 ",".join(input_data_formats), ",".join(output_data_formats),
                 ",".join(input_nodes), ",".join(output_nodes),
                 validation_threshold, ",".join(input_data_types), backend,
                 validation_outputs_data, log_file)
    elif platform == "caffe":
        image_name = "mace-caffe:" + docker_image_tag
        container_name = "mace_caffe_" + docker_image_tag + "_validator"

        if caffe_env == common.CaffeEnvType.LOCAL:
            try:
                import caffe
            except ImportError:
                logging.error('There is no caffe python module.')
            validate(platform, model_file_path, weight_file_path,
                     "%s/%s" % (model_output_dir, input_file_name),
                     "%s/%s" % (model_output_dir, output_file_name),
                     device_type, ":".join(input_shapes),
                     ":".join(output_shapes), ",".join(input_data_formats),
                     ",".join(output_data_formats), ",".join(input_nodes),
                     ",".join(output_nodes), validation_threshold,
                     ",".join(input_data_types), backend,
                     validation_outputs_data, log_file)
        elif caffe_env == common.CaffeEnvType.DOCKER:
            docker_image_id = sh.docker("images", "-q", image_name)
            if not docker_image_id:
                six.print_("Build caffe docker")
                sh.docker("build", "-t", image_name, dockerfile_path)

            container_id = sh.docker("ps", "-qa", "-f",
                                     "name=%s" % container_name)
            if container_id and not sh.docker("ps", "-qa", "--filter",
                                              "status=running", "-f",
                                              "name=%s" % container_name):
                sh.docker("rm", "-f", container_name)
                container_id = ""
            if not container_id:
                six.print_("Run caffe container")
                sh.docker("run", "-d", "-it", "--name", container_name,
                          image_name, "/bin/bash")

            for input_name in input_nodes:
                formatted_input_name = common.formatted_file_name(
                    input_file_name, input_name)
                sh.docker("cp",
                          "%s/%s" % (model_output_dir, formatted_input_name),
                          "%s:/mace" % container_name)

            for output_name in output_nodes:
                formatted_output_name = common.formatted_file_name(
                    output_file_name, output_name)
                sh.docker("cp",
                          "%s/%s" % (model_output_dir, formatted_output_name),
                          "%s:/mace" % container_name)
            model_file_name = os.path.basename(model_file_path)
            weight_file_name = os.path.basename(weight_file_path)
            sh.docker("cp", "tools/common.py", "%s:/mace" % container_name)
            sh.docker("cp", "tools/validate.py", "%s:/mace" % container_name)
            sh.docker("cp", model_file_path, "%s:/mace" % container_name)
            sh.docker("cp", weight_file_path, "%s:/mace" % container_name)

            sh.docker("exec",
                      container_name,
                      "python",
                      "-u",
                      "/mace/validate.py",
                      "--platform=caffe",
                      "--model_file=/mace/%s" % model_file_name,
                      "--weight_file=/mace/%s" % weight_file_name,
                      "--input_file=/mace/%s" % input_file_name,
                      "--mace_out_file=/mace/%s" % output_file_name,
                      "--device_type=%s" % device_type,
                      "--input_node=%s" % ",".join(input_nodes),
                      "--output_node=%s" % ",".join(output_nodes),
                      "--input_shape=%s" % ":".join(input_shapes),
                      "--output_shape=%s" % ":".join(output_shapes),
                      "--input_data_format=%s" % ",".join(input_data_formats),
                      "--output_data_format=%s" %
                      ",".join(output_data_formats),
                      "--validation_threshold=%f" % validation_threshold,
                      "--input_data_type=%s" % ",".join(input_data_types),
                      "--backend=%s" % ",".join(backend),
                      "--validation_outputs_data=%s" %
                      ",".join(validation_outputs_data),
                      "--log_file=%s" % log_file,
                      _fg=True)

    six.print_("Validation done!\n")
Example #46
0
def validate_model(abi,
                   serialno,
                   model_file_path,
                   weight_file_path,
                   platform,
                   device_type,
                   input_nodes,
                   output_nodes,
                   input_shapes,
                   output_shapes,
                   model_output_dir,
                   phone_data_dir,
                   input_data_types,
                   caffe_env,
                   input_file_name="model_input",
                   output_file_name="model_out",
                   validation_threshold=0.9):
    six.print_("* Validate with %s" % platform)
    if abi != "host":
        for output_name in output_nodes:
            formatted_name = common.formatted_file_name(
                output_file_name, output_name)
            if os.path.exists("%s/%s" % (model_output_dir, formatted_name)):
                sh.rm("-rf", "%s/%s" % (model_output_dir, formatted_name))
            adb_pull("%s/%s" % (phone_data_dir, formatted_name),
                     model_output_dir, serialno)

    if platform == "tensorflow":
        validate(platform, model_file_path, "",
                 "%s/%s" % (model_output_dir, input_file_name),
                 "%s/%s" % (model_output_dir, output_file_name), device_type,
                 ":".join(input_shapes), ":".join(output_shapes),
                 ",".join(input_nodes), ",".join(output_nodes),
                 validation_threshold, ",".join(input_data_types))
    elif platform == "caffe":
        image_name = "mace-caffe:latest"
        container_name = "mace_caffe_validator"

        if caffe_env == common.CaffeEnvType.LOCAL:
            import imp
            try:
                imp.find_module('caffe')
            except ImportError:
                logger.error('There is no caffe python module.')
            validate(platform, model_file_path, weight_file_path,
                     "%s/%s" % (model_output_dir, input_file_name),
                     "%s/%s" % (model_output_dir, output_file_name),
                     device_type, ":".join(input_shapes),
                     ":".join(output_shapes), ",".join(input_nodes),
                     ",".join(output_nodes), validation_threshold,
                     ",".join(input_data_types))
        elif caffe_env == common.CaffeEnvType.DOCKER:
            docker_image_id = sh.docker("images", "-q", image_name)
            if not docker_image_id:
                six.print_("Build caffe docker")
                sh.docker("build", "-t", image_name, "third_party/caffe")

            container_id = sh.docker("ps", "-qa", "-f",
                                     "name=%s" % container_name)
            if container_id and not sh.docker("ps", "-qa", "--filter",
                                              "status=running", "-f",
                                              "name=%s" % container_name):
                sh.docker("rm", "-f", container_name)
                container_id = ""
            if not container_id:
                six.print_("Run caffe container")
                sh.docker("run", "-d", "-it", "--name", container_name,
                          image_name, "/bin/bash")

            for input_name in input_nodes:
                formatted_input_name = common.formatted_file_name(
                    input_file_name, input_name)
                sh.docker("cp",
                          "%s/%s" % (model_output_dir, formatted_input_name),
                          "%s:/mace" % container_name)

            for output_name in output_nodes:
                formatted_output_name = common.formatted_file_name(
                    output_file_name, output_name)
                sh.docker("cp",
                          "%s/%s" % (model_output_dir, formatted_output_name),
                          "%s:/mace" % container_name)
            model_file_name = os.path.basename(model_file_path)
            weight_file_name = os.path.basename(weight_file_path)
            sh.docker("cp", "tools/common.py", "%s:/mace" % container_name)
            sh.docker("cp", "tools/validate.py", "%s:/mace" % container_name)
            sh.docker("cp", model_file_path, "%s:/mace" % container_name)
            sh.docker("cp", weight_file_path, "%s:/mace" % container_name)

            sh.docker("exec",
                      container_name,
                      "python",
                      "-u",
                      "/mace/validate.py",
                      "--platform=caffe",
                      "--model_file=/mace/%s" % model_file_name,
                      "--weight_file=/mace/%s" % weight_file_name,
                      "--input_file=/mace/%s" % input_file_name,
                      "--mace_out_file=/mace/%s" % output_file_name,
                      "--device_type=%s" % device_type,
                      "--input_node=%s" % ",".join(input_nodes),
                      "--output_node=%s" % ",".join(output_nodes),
                      "--input_shape=%s" % ":".join(input_shapes),
                      "--output_shape=%s" % ":".join(output_shapes),
                      "--validation_threshold=%f" % validation_threshold,
                      "--input_data_type=%s" % ",".join(input_data_types),
                      _fg=True)

    six.print_("Validation done!\n")
Example #47
0
import pytorch_lightning as pl
import transformers
import sh
import os
import torch as th  # th is used below but was missing from the imports
from absl import app, flags, logging
from torch.utils.data import Dataset, DataLoader, random_split
from dataset import trainset, valset, testset

flags.DEFINE_boolean('debug', True, '')
flags.DEFINE_string('device', 'cpu', '')
flags.DEFINE_string('modelname', 'bert-base-uncased', '')
flags.DEFINE_integer('batch_size', 16, '')

FLAGS = flags.FLAGS

sh.rm('-r', '-f', 'logs')
sh.mkdir('logs')


class BERT_SPC(pl.LightningModule):
    def __init__(self, dropout=0.1, bert_dim=768, polarities_dim=3):
        super(BERT_SPC, self).__init__()
        self.bert = transformers.BertModel.from_pretrained('bert-base-uncased')
        self.dropout = th.nn.Dropout(dropout)
        self.dense = th.nn.Linear(bert_dim, polarities_dim)
        self.loss = th.nn.CrossEntropyLoss(reduction='none')

    def forward(self, inputs):
        text_bert_indices, bert_segments_ids = inputs[0], inputs[1]
        _, pooled_output = self.bert(text_bert_indices, bert_segments_ids)
        pooled_output = self.dropout(pooled_output)
Example #48
0
def upload():
    try:
        print("begin upload routine")
        # delete previously uploaded files and start clean
        if len(glob.glob(os.path.join(session['new_files'], "*"))) > 0:
            sh.rm(glob.glob(os.path.join(session['new_files'], "*")))
        if len(glob.glob(os.path.join(session['original_files'], "*"))) > 0:
            sh.rm(glob.glob(os.path.join(session['original_files'], "*")))
        email = request.form.get("email")
        print("email")
        print(email)

        uploaded_files = request.files.getlist('files[]')
        assert len(uploaded_files) > 0, "No files found"
        uploaded_filenames = [
            secure_filename(x.filename) for x in uploaded_files
        ]

        # this is used a few times throughout the script
        file_pat = re.compile(r"(.*)(?=\.)\.(.*)")

        # check to make sure they have only valid file names uploaded
        assert all(
            bool(re.search(file_pat, x)) for x in uploaded_filenames
        ), "Invalid file name {}".format(
            ",".join(
                x for x in uploaded_filenames
                if not bool(re.search(file_pat, x))
            )
        )

        # This should be fine since if it gets here it passed the above assertion
        filetypes = [x.split('.')[-1].lower() for x in uploaded_filenames]
        print(filetypes)

        # Routine for checking the filetypes they submitted, since they can now submit multiple files
        allowed_filetypes = ['xls', 'xlsx', 'png', 'jpg']
        assert set(filetypes) - set(allowed_filetypes) == set(), \
            f"""
            This application will only accept files with the following extensions: {','.join(allowed_filetypes)}
            """


        assert len([x for x in filetypes if 'xls' in x]) < 2, \
            """
            This application has detected multiple excel files. 
            Please only submit one excel file at a time, 
            along with the corresponding images associated with the data.
            """

        assert len([x for x in filetypes if 'xls' in x
                    ]) != 0, "No excel file found"

        # Sort the uploaded files list so that the pictures go first in the loop.
        # if the excel file runs through the loop first, it never sees the photos
        # Because we are only accepting png, jpg and xls or xlsx,
        #   the image filename extensions are always alphabetically before the excel files.
        # Therefore we sort the uploaded files list, letting the key be the extension
        print("uploading images")
        for filename, file in sorted(
                zip(uploaded_filenames, uploaded_files),
                key=lambda pair: pair[0].split('.')[-1].lower()):
            groups = re.search(file_pat, filename).groups()
            print(groups)

            filename = groups[0]
            extension = groups[-1]
            print(
                os.path.join(session['original_files'],
                             '{}.{}'.format(filename, extension.lower())))
            file.save(
                os.path.join(session['original_files'],
                             '{}.{}'.format(filename, extension.lower())))

        if email != '':
            connection = pika.BlockingConnection(
                pika.ConnectionParameters(host='rabbitmq'))
            channel = connection.channel()

            channel.queue_declare(queue='mp_reformat')

            msgbody = json.dumps({
                'original_dir': session['original_files'],
                'new_dir': session['new_files'],
                'base_dir': session['basedir'],
                'email': email,
                'sessionid': session['sessionid']
            })

            channel.basic_publish(exchange='',
                                  routing_key='mp_reformat',
                                  body=msgbody)
            print(f"Sent {msgbody}")
            connection.close()

            raise Exception(f"email will be sent to {email}")

        res = full_reformat(original_dir=session['original_files'],
                            new_dir=session['new_files'],
                            base_dir=session['basedir'],
                            email=email,
                            sessionid=session['sessionid'])

        res = json.loads(res)

        return \
        jsonify(
            message=res['message'],
            unaccounted_photos=res['unaccounted_photos'],
            missing_photos=res['missing_photos']
        )

    except Exception as e:
        print("Exception occurred")
        print(e)
        return jsonify(error="true", message=str(e))
Example #49
0
File: sim.py Project: ipbus/ipbb
def genproject(ictx, aOptimise, aToScript, aToStdout):
    """
    Creates the modelsim project

    \b
    1. Compiles the source code into the 'work' simulation library. A different name can be specified with the `sim.library` dep file setting.
    2. Generates a 'run_sim' wrapper that sets the simulation environment before invoking vsim. The list of design units to run can be specified with the `sim.run_sim.design_units` dep file setting.

    NOTE: The IP/MAC address of ipbus designs implementing an FLI and exposing the IP/MAC addresses via top-level generics can be set by defining the following user settings:

    \b
    - 'ipbus.fli.mac_address': mapped to MAC_ADDR top-level generic
    - 'ipbus.fli.ip_address': mapped to IP_ADDR top-level generic

    """

    # lSessionId = 'genproject'

    # -------------------------------------------------------------------------
    # Must be in a build area
    if ictx.currentproj.name is None:
        raise click.ClickException(
            'Project area not defined. Move into a project area and try again.'
        )

    if ictx.currentproj.settings['toolset'] != 'sim':
        raise click.ClickException(
            f"Project area toolset mismatch. Expected 'sim', found '{ictx.currentproj.settings['toolset']}'"
        )
    # -------------------------------------------------------------------------

    # -------------------------------------------------------------------------
    if not which('vsim'):
        raise click.ClickException(
            'ModelSim (vsim) not found. Please add Modelsim to PATH and execute the command again.'
        )
    # -------------------------------------------------------------------------

    lDepFileParser = ictx.depParser

    lSimLibrary = lDepFileParser.settings.get(f'{_toolset}.library', 'work')

    # Ensure that no parsing errors are present
    ensureNoParsingErrors(ictx.currentproj.name, lDepFileParser)

    # Ensure that all dependencies are resolved
    ensureNoMissingFiles(ictx.currentproj.name, lDepFileParser)

    lSimProjMaker = ModelSimGenerator(ictx.currentproj, lSimLibrary, kIPVivadoProjName, aOptimise)

    lDryRun = aToStdout or aToScript

    if not lDryRun:
        sh.rm('-rf', lSimLibrary)

    try:
        with mentor.ModelSimBatch(aToScript, echo=aToStdout, dryrun=lDryRun) as lSim:
            lSimProjMaker.write(
                lSim,
                lDepFileParser.settings,
                lDepFileParser.packages,
                lDepFileParser.commands,
                lDepFileParser.libs,
            )
    except sh.ErrorReturnCode as e:
        console.log(
            f'ERROR: Sim exit code: {e.exit_code}.\nCommand:\n\n   {e.full_cmd}\n',
            style='red',
        )
        raise click.ClickException("Compilation failed")

    if lDryRun:
        return

    # ----------------------------------------------------------
    # Create a wrapper to force default bindings at load time
    cprint(f"Writing modelsim wrapper '{kVsimWrapper}'")

    lVsimArgStr = f"{lDepFileParser.settings.get(f'{_toolset}.{kVsimWrapper}.design_units', '')}"

    lVsimOpts = collections.OrderedDict()
    lVsimOpts['MAC_ADDR'] = validateMacAddress(
        ictx.currentproj.usersettings.get('ipbus.fli.mac_address', None)
    )
    lVsimOpts['IP_ADDR'] = validateIpAddress(
        ictx.currentproj.usersettings.get('ipbus.fli.ip_address', None)
    )

    lVsimOptStr = ' '.join(
        ['-G{}=\'{}\''.format(k, v) for k, v in lVsimOpts.items() if v is not None]
    )

    lVsimCmd = ' '.join(['vsim', lVsimArgStr, lVsimOptStr])

    lVsimBody = f'''#!/bin/sh

if [ ! -f modelsim.ini ]; then
    echo "WARNING: modelsim.ini not found. Vivado simulation libraries won't be loaded."
fi

export MTI_VCO_MODE=64
export MODELSIM_DATAPATH="mif/"
{lVsimCmd} "$@"
    '''
    with SmartOpen(kVsimWrapper) as lVsimSh:
        lVsimSh(lVsimBody)

    # Make it executable
    os.chmod(kVsimWrapper, 0o755)

    print(f"Vsim wrapper script '{kVsimWrapper}' created")
    if lVsimCmd:
        print(f"   Command: '{lVsimCmd}'")
Example #50
0
import os
import sys
import time

import git  # pip install gitpython

# Get an updated version of sh with proper pushd support
sys.path.insert(0, '/Users/ppannuto/code/sh')
import sh
from sh import mkdir, rm

GRADES_PATH = '../../../../../../f16_grades/'

if 'rerun' not in sys.argv:
    if os.path.exists('/tmp/q1') or os.path.exists(
            '/tmp/q2a') or os.path.exists('/tmp/q2b'):
        input(
            'WARN: About to delete old runs. Use "rerun" to run again. Ctrl-C to quit, enter to continue'
        )
    rm('-r', '-f', '/tmp/q1')
    rm('-r', '-f', '/tmp/q2a')
    rm('-r', '-f', '/tmp/q2b')

for a in sys.argv:
    if '/emails/' in a:
        TIMEDIR = a
        break
else:
    TIME = time.asctime()
    TIMEDIR = '/tmp/emails/' + TIME + '/'

mkdir('-p', TIMEDIR)

ACTUALLY_SEND = False
Example #51
0
    def purge_cache(self):
        if os.path.exists(self.cache_dir):
            rm('-r', '-f', self.cache_dir)
        mkdir('-p', self.cache_dir)
Example #52
0
def merge_components_cd(dir=os.getcwd() + '/'):
    """
	Merge components.

	"""

    print('-------------------------------------------------------------------------')
    print('Searching for combinations inside 3 sigma confidence interval...')
    print('-------------------------------------------------------------------------')

    dir = dir + 'Workspace/Processing/Interp_proc/Degeneracy/'
    os.chdir(dir)
    dir_b1 = dir[:len(dir) - 11]
    dir_b2 = dir[:len(dir_b1) - 33]

    fobs = dir_b2 + 'New_tau_GA.txt'
    fileb = dir_b1 + 'Best_comb.csv'

    tdata0 = pd.read_csv(fobs, sep='\s+', header=None)
    xd = tdata0[0]
    yd = tdata0[1]
    ey = tdata0[2]

    pathb4 = fileb
    tb4 = pd.read_csv(pathb4, sep=',', header=1)
    size4 = tb4.shape[0]
    n_genes = tb4.shape[1] - 3

    path0 = pathb4[:len(pathb4) - 25] + 'Store_interp/'

    n = 0
    for i in range(0, size4, n_genes):
        c_vals = np.loadtxt(pathb4, dtype=float, delimiter=',', usecols=list(range(n_genes)), skiprows=1).T
        cc = []
        for j in range(n_genes):
            cc.append(c_vals[j][i])

        sp_names = np.loadtxt(pathb4, dtype=str, delimiter=',', usecols=(n_genes), skiprows=1).T
        # print sp_names
        step_i = i
        step_f = step_i + n_genes
        sp = []
        sp_id = []

        for k in range(step_i, step_f):
            sp.append(path0 + sp_names[k] + '.dat')
            sp_id.append(sp_names[k])

        if sys.version_info[0] == 3:
            from ENIIGMA.Stats import create3
            create3.create_file3four(sp)
        else:
            import create
            create.create_file2four(sp)

        fileout = dir + 'output_file4.txt'

        t1 = pd.read_csv(dir + 'output_file4.txt', sep='\s+', header=None)
        Ysp = pd.read_csv(dir + 'output_file4.txt', sep='\s+', header=None, usecols=list(range(1, t1.shape[1], 2)))

        crange = list(range(n_genes))
        ysprange = list(range(1, t1.shape[1], 2))

        f0 = 0.
        count = 1
        for i, j in zip(crange, ysprange):
            name = sp_id[i]
            f0 += cc[i] * Ysp[j]

            f0c = cc[i] * Ysp[j]

            Data1 = {str(name): f0c}
            df1 = DataFrame(Data1, columns=[str(name)])
            df1.to_csv('Comp_' + str(count) + '_' + str(name) + '.csv', index=False)
            count = count + 1

        # print 'f0 is:'
        # print f0

        chi_calc = (1. / (len(yd) - 1 - n_genes)) * np.sum(((yd - f0) / ey) ** 2)

        red_chi2_vals = chi_values()

        if chi_calc <= red_chi2_vals[2]:
            # print red_chi2_vals[2]
            Wav = {'Wavelength': t1[0]}
            df_wav = DataFrame(Wav, columns=['Wavelength'])
            df_wav.to_csv('Comp_0_wav.csv', index=False)

            D0 = {'all': f0}
            df_wav = DataFrame(D0, columns=['all'])
            df_wav.to_csv('Comp_' + str(n_genes + 1) + '_all.csv', index=False)

            all_filenames = sorted(glob.glob(dir + 'Comp_*'))
            combined_csv = pd.concat([pd.read_csv(f) for f in all_filenames], axis=1)
            combined_csv.to_csv("Components_" + str(n) + ".csv", index=False, encoding='utf-8-sig')
            sh.rm(sh.glob(dir + 'Comp_*'))
        else:
            sh.rm(sh.glob(dir + 'Comp_*'))

        n = n + 1

    from ENIIGMA.Stats import deconvolution as dcv
    from ENIIGMA.Stats.Merge_colden import mergecd_no_bp

    pathdir = dir + '/'
    files_comp = 'Components_*.csv'
    file_csv = sorted(glob.glob(pathdir + files_comp))

    print('-------------------------------------------------------------------------')
    print(len(file_csv), ' combinations found inside 3 sigma confidence interval')
    print('')
    print('Deconvolving components and merging column densities...')
    print('')
    print('Creating file: All_merge_final.csv')
    print('-------------------------------------------------------------------------')

    count = 0
    for index in range(len(file_csv)):
        file = file_csv[index]
        dcv.deconv_all(file)
        filename = pathdir + 'Column_density_*.csv'
        mergecd_no_bp(filename, pathdir)

        try:
            orig_name = pathdir + 'MergeCD5.csv'
            new_name = pathdir + 'MergeCD5_' + str(count) + '.csv'
            os.rename(orig_name, new_name)

            for f1 in glob.glob(pathdir + 'Analytic*.dat'):
                os.remove(f1)
            for f2 in glob.glob(pathdir + 'Column_*.csv'):
                os.remove(f2)
            for f3 in glob.glob(pathdir + 'trans_*.csv'):
                os.remove(f3)
            for f4 in glob.glob(pathdir + 'Column_density_*.csv'):
                os.remove(f4)
            os.remove(pathdir + 'MergeCD.csv')
            os.remove(pathdir + 'MergeCD2.csv')
            os.remove(pathdir + 'MergeCD3.csv')
            os.remove(pathdir + 'MergeCD4.csv')
        except:
            pass

        count = count + 1

    sp_files = 'MergeCD5*.csv'
    f_sp = sorted(glob.glob(pathdir + sp_files))

    cnt = 0
    for index2 in range(len(f_sp)):
        pd.read_csv(f_sp[index2], header=None).T.to_csv('output_' + str(cnt) + '.csv', header=False, index=False)
        cnt = cnt + 1

    df = pd.concat(list(map(pd.read_csv, glob.glob(os.path.join('', "output_*.csv")))),
                   sort=False)  # sort=False (a boolean, not a string) to silence the pandas concat warning
    df.fillna('nan', inplace=True)
    df.replace(0.0, np.nan, inplace=True)
    df.fillna('nan', inplace=True)
    df = df[~df['Label'].isin(['CDinmix', 'CDpure'])]
    df.insert(0, 'index', list(range(1, len(f_sp) + 1)))
    df = df.drop(['Label'], axis=1)
    df.to_csv('All_merge_final.csv', index=False)

    for f5 in glob.glob(pathdir + 'MergeCD5_*.csv'):
        os.remove(f5)
    for f6 in glob.glob(pathdir + 'output_*.csv'):
        os.remove(f6)
Example #53
0
def main():
    sh.mkdir("-p","log")
    sh.fuser("-k","-n","tcp","4040", _ok_code=[0,1])

    printc("COMPILE BACKEND", color="blue")
    start = time.time()
    try:
        compile(logfile=open("log/compile.txt","wb"), MIX_ENV="prod")
    except:
        printc("ERROR COMPILING", color="red")
        with open("log/compile.txt") as fd:
            print( fd.read() )
            sys.exit(1)
    end = time.time()

    with chdir("frontend"):
        sh.rm("-rf", "shots")
        sh.mkdir("-p", "shots")

    printc("COMPILE FRONTEND", color="blue")
    with chdir("frontend"):
        sh.make("compile", _out="../log/compile.txt")

    token = uuid.uuid4()
    fail = False
    dbname = random_dbname()
    dburl = "postgresql://*****:*****@localhost/%s"%dbname
    fail = False

    with tmpdb(dbname), \
         envset(MIX_ENV="prod", SERVERBOARDS_DATABASE_URL=dburl, SERVERBOARDS_TCP_PORT="4040", SERVERBOARDS_INI="test/plugins.ini"), \
         running("mix", "run", "--no-halt", _out="log/serverboards.txt", _err_to_out=True, _cwd="backend"):
        printc("CREATE USER", color="blue")
        create_user(dburl, token)

        printc("WAIT FOR RUNNING BACKEND", color="blue")
        wait_for_port(8080, timeout=20)

        printc("UI TESTS", color="blue")
        try:
            if show_ui:
                with chdir("frontend"):
                    sh.Command("node_modules/.bin/wdio")("wdio.conf.js", _out="../log/wdio.txt")
            else:
                with chdir("frontend"), running("Xvfb",":5"), envset(DISPLAY=":5"):
                    sh.Command("node_modules/.bin/wdio")("wdio.conf.js", _out="../log/wdio.txt")
            printc("PASS UI TESTS", color="green")
        except:
            with open("log/wdio.txt","r") as fd:
                print(fd.read())
            printc("FAIL UI TESTS", color="red")
            if wait_at_error:
                printc("Server runing at http://localhost:8080. User: [email protected] / asdfasdf")
                printc("WAIT FOR <Crtl + C>", color="yellow")
                try:
                    time.sleep(1000)
                except:
                    printc("STOPPING", color="yellow")
            fail=True
    if fail:
        printc("FAIL", color="red")
        sys.exit(1)
    else:
        printc("SUCCESS", color="green")
    sys.exit(0)
Example #54
0
def extractSeq(fastqDir, outDir, lmdbPath, threads, splitInput, cutoff):
    try:
        os.mkdir(outDir)
    except FileExistsError:
        logger.warning(f"{outDir} already exists!")
    if not splitInput:
        allR1Path = glob.glob(f"{fastqDir}*R1*")
        allR2Path = [x.replace("R1", "R2") for x in allR1Path]
    else:

        fastqTemp = outDir + "tempSplited/"
        try:
            sh.mkdir(fastqTemp)
        except sh.ErrorReturnCode:
            logger.warning(f"{fastqTemp} already exists!")

        allR1Path = glob.glob(f"{fastqDir}*_R1*")
        allR2Path = [x.replace("R1", "R2") for x in allR1Path]
        allSplitedPath = [
            fastqTemp + re.search(r"[\w\W]+?(?=_R1)",
                                  x.split("/")[-1])[0] + "/" for x in allR1Path
        ]

        if allR1Path[0].endswith(".gz"):
            formatGz = True
        else:
            formatGz = False

        splitedNum = threads // len(allSplitedPath)
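        # e.g. 16 threads across 4 fastq pairs -> split each pair into 4 chunks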

        if splitedNum <= 1:
            allR1Path = glob.glob(f"{fastqDir}*R1*")
            allR2Path = [x.replace("R1", "R2") for x in allR1Path]
            if allR1Path[0].endswith(".gz"):
                logger.error("format gz, please uncompress it.")
                1 / 0
        else:
            mPResults = []
            with multiP(threads // 2) as mP:
                for singleR1Path, singleR2Path, singleSplitedPath in zip(
                        allR1Path, allR2Path, allSplitedPath):
                    mPResults.append(
                        mP.submit(
                            sh.seqkit,
                            "split2",
                            "-f",
                            "-1",
                            singleR1Path,
                            "-2",
                            singleR2Path,
                            p=splitedNum,
                            O=singleSplitedPath,
                            j=2,
                        ))

            tempAllSplitedR1Path = glob.glob(f"{fastqTemp}*/*R1*")
            tempAllSplitedR2Path = [
                x.replace("R1", "R2") for x in tempAllSplitedR1Path
            ]
            sampleId = set([
                re.search(r"(?<=tempSplited/)[\w\W]+?(?=_L)", x)[0]
                for x in tempAllSplitedR1Path
            ])

            if len(sampleId) != 1:
                allSample = ", ".join(sampleId)
                logger.warning(f"MORE THAN ONE INPUT SAMPLES: {allSample}")
                sampleId = sampleId.pop()
                logger.warning(f"The prefix will change to {sampleId}")
            else:
                sampleId = sampleId.pop()

            i = 0
            formatGzUseThreadContents = []
            for tempSingleSplitedR1Path, tempSingleSplitedR2Path in zip(
                    tempAllSplitedR1Path, tempAllSplitedR2Path):
                i += 1
                if formatGz:
                    sh.mv(
                        tempSingleSplitedR1Path,
                        f"{fastqTemp}{sampleId}_L{i:03}_R1_001.fastq.gz",
                    )
                    sh.mv(
                        tempSingleSplitedR2Path,
                        f"{fastqTemp}{sampleId}_L{i:03}_R2_001.fastq.gz",
                    )
                    formatGzUseThreadContents.append(
                        sh.gzip(
                            "-d",
                            f"{fastqTemp}{sampleId}_L{i:03}_R1_001.fastq.gz",
                            _bg=True,
                        ))
                    formatGzUseThreadContents.append(
                        sh.gzip(
                            "-d",
                            f"{fastqTemp}{sampleId}_L{i:03}_R2_001.fastq.gz",
                            _bg=True,
                        ))
                else:
                    sh.mv(
                        tempSingleSplitedR1Path,
                        f"{fastqTemp}{sampleId}_L{i:03}_R1_001.fastq",
                    )
                    sh.mv(
                        tempSingleSplitedR2Path,
                        f"{fastqTemp}{sampleId}_L{i:03}_R2_001.fastq",
                    )
            if formatGz:
                [x.wait() for x in formatGzUseThreadContents]

            for singleTempDir in glob.glob(f"{fastqTemp}*/"):
                sh.rmdir(singleTempDir)

            allR1Path = glob.glob(f"{fastqTemp}*R1*")
            allR2Path = [x.replace("R1", "R2") for x in allR1Path]

    allSubProcess = []
    with multiP(threads) as mP:
        for singleR1Path, singleR2Path in zip(allR1Path, allR2Path):
            allSubProcess.append(
                mP.submit(
                    processOneFastq,
                    singleR1Path,
                    singleR2Path,
                    lmdbPath,
                    outDir,
                    cutoff,
                ))
    [x.result() for x in allSubProcess]

    if not splitInput:
        pass
    else:
        sh.rm("-rf", fastqTemp)
Example #55
0
    def clear_modes(self):
        with pushd(self.cache_dir):
            rm('-Rf', 'modes')
Example #56
0
def setup_heroku_server(task_name, task_files_to_copy=None):
    print("Heroku: Collecting files...")
    # Install Heroku CLI
    os_name = None
    bit_architecture = None

    # Get the platform we are working on
    platform_info = platform.platform()
    if 'Darwin' in platform_info:  # Mac OS X
        os_name = 'darwin'
    elif 'Linux' in platform_info:  # Linux
        os_name = 'linux'
    else:
        os_name = 'windows'

    # Find our architecture
    bit_architecture_info = platform.architecture()[0]
    if '64bit' in bit_architecture_info:
        bit_architecture = 'x64'
    else:
        bit_architecture = 'x86'

    # Remove existing heroku client files
    existing_heroku_directory_names = \
        glob.glob(os.path.join(parent_dir, 'heroku-cli-*'))
    if len(existing_heroku_directory_names) == 0:
        if os.path.exists(os.path.join(parent_dir, 'heroku.tar.gz')):
            os.remove(os.path.join(parent_dir, 'heroku.tar.gz'))

        # Get the heroku client and unzip
        os.chdir(parent_dir)
        sh.wget(shlex.split('{}-{}-{}.tar.gz -O heroku.tar.gz'.format(
            heroku_url,
            os_name,
            bit_architecture
        )))
        sh.tar(shlex.split('-xvzf heroku.tar.gz'))

    heroku_directory_name = \
        glob.glob(os.path.join(parent_dir, 'heroku-cli-*'))[0]
    heroku_directory_path = os.path.join(parent_dir, heroku_directory_name)
    heroku_executable_path = \
        os.path.join(heroku_directory_path, 'bin', 'heroku')

    server_source_directory_path = \
        os.path.join(parent_dir, server_source_directory_name)
    heroku_server_directory_path = os.path.join(parent_dir, '{}_{}'.format(
        heroku_server_directory_name,
        task_name
    ))

    # Delete old server files
    sh.rm(shlex.split('-rf '+heroku_server_directory_path))

    # Copy over a clean copy into the server directory
    shutil.copytree(server_source_directory_path, heroku_server_directory_path)

    # Consolidate task files
    task_directory_path = \
        os.path.join(heroku_server_directory_path, task_directory_name)
    sh.mv(
        os.path.join(heroku_server_directory_path, 'html'),
        task_directory_path
    )

    hit_config_file_path = os.path.join(parent_dir, 'hit_config.json')
    sh.mv(hit_config_file_path, task_directory_path)

    # task_files_to_copy defaults to None; treat that as no extra files
    for file_path in (task_files_to_copy or []):
        try:
            shutil.copy2(file_path, task_directory_path)
        except IsADirectoryError:  # noqa: F821 we don't support python2
            dir_name = os.path.basename(os.path.normpath(file_path))
            shutil.copytree(
                file_path, os.path.join(task_directory_path, dir_name))
        except FileNotFoundError:  # noqa: F821 we don't support python2
            pass

    print("Heroku: Starting server...")

    os.chdir(heroku_server_directory_path)
    sh.git('init')

    # get heroku credentials
    heroku_user_identifier = None
    while not heroku_user_identifier:
        try:
            subprocess.check_output(
                shlex.split(heroku_executable_path+' auth:token')
            )
            heroku_user_identifier = (
                netrc.netrc(os.path.join(os.path.expanduser("~"), '.netrc'))
                     .hosts['api.heroku.com'][0]
            )
        except subprocess.CalledProcessError:
            raise SystemExit(
                'A free Heroku account is required for launching MTurk tasks. '
                'Please register at https://signup.heroku.com/ and run `{} '
                'login` at the terminal to log in to Heroku, and then run '
                'this program again.'.format(heroku_executable_path)
            )

    heroku_app_name = ('{}-{}-{}'.format(
        user_name,
        task_name,
        hashlib.md5(heroku_user_identifier.encode('utf-8')).hexdigest()
    ))[:30]

    # Strip any trailing hyphens left over from the 30-character truncation
    heroku_app_name = heroku_app_name.rstrip('-')

    # Create or attach to the server
    try:
        subprocess.check_output(shlex.split(
            '{} create {}'.format(heroku_executable_path, heroku_app_name)
        ))
    except subprocess.CalledProcessError:  # User has too many apps
        sh.rm(shlex.split('-rf {}'.format(heroku_server_directory_path)))
        raise SystemExit(
            'You have hit your limit on concurrent apps with Heroku, which '
            'are required to run multiple concurrent tasks.\nPlease wait for '
            'some of your existing tasks to complete. If you have no tasks '
            'running, log in to Heroku and delete some of the running apps, '
            'or verify your account to allow more concurrent apps.'
        )

    # Enable WebSockets
    try:
        subprocess.check_output(shlex.split(
            '{} features:enable http-session-affinity'.format(
                heroku_executable_path
            )
        ))
    except subprocess.CalledProcessError:  # Already enabled WebSockets
        pass

    # commit and push to the heroku server
    os.chdir(heroku_server_directory_path)
    sh.git(shlex.split('add -A'))
    sh.git(shlex.split('commit -m "app"'))
    sh.git(shlex.split('push -f heroku master'))
    subprocess.check_output(shlex.split('{} ps:scale web=1'.format(
        heroku_executable_path)
    ))
    os.chdir(parent_dir)

    # Clean up heroku files
    if os.path.exists(os.path.join(parent_dir, 'heroku.tar.gz')):
        os.remove(os.path.join(parent_dir, 'heroku.tar.gz'))

    sh.rm(shlex.split('-rf {}'.format(heroku_server_directory_path)))

    return 'https://{}.herokuapp.com'.format(heroku_app_name)
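A note on the shlex.split calls in this example: sh never invokes a shell, but it does flatten list arguments, so shlex.split('-rf ' + path) expands into the two argv entries rm expects. shlex.split will, however, split a path that contains spaces. Passing the arguments individually avoids both the extra parse and that edge case (a sketch with a hypothetical path):

import sh

server_dir = "/tmp/heroku server files"  # hypothetical path with a space
# One argv entry per argument; nothing is re-parsed or word-split.
sh.rm("-rf", server_dir)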
Example #57
0
import os
import sys

import sh
from sh import git, cd, make, rm, sudo


def write_output(line):
    sys.stdout.write(line)


install_env = os.environ.copy()
install_env['CC'] = "gcc"

directory = os.path.dirname(os.path.realpath(__file__))

mongo_c_driver = os.path.join(directory, "mongo-c-driver")

rm("-r", "-f", mongo_c_driver)
autogen_location = os.path.join(mongo_c_driver, "autogen.sh")

git.clone(
    "https://github.com/mongodb/mongo-c-driver.git",
    mongo_c_driver,
    branch="1.1.5",
    depth="1",
    _out=write_output,
)

cd(mongo_c_driver)
autogen = sh.Command(autogen_location)
autogen(prefix="/usr", _out=write_output, _env=install_env)
make(_out=write_output, _env=install_env)
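The _out=write_output keyword streams the child process's stdout to a callback as the command runs; with sh's default line buffering the callback receives one line per call. A minimal self-contained sketch:

import sys
import sh

def write_output(line):
    # Called by sh once per line of the child's stdout.
    sys.stdout.write(line)

sh.echo("streamed through a callback", _out=write_output)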
Example #58
0
            sh.wget(download_path,
                    directory_prefix=destination,
                    no_clobber=wget_no_clobber)
            break
        except Exception as e:
            if attempt == NUM_ATTEMPTS:
                raise
            logging.error("Download failed; retrying after sleep: " + str(e))
            time.sleep(10 +
                       random.random() * 5)  # Sleep between 10 and 15 seconds.
    logging.info("Extracting {0}".format(file_name))
    sh.tar(z=True,
           x=True,
           f=os.path.join(destination, file_name),
           directory=destination)
    sh.rm(os.path.join(destination, file_name))


def download_package(destination, package, compiler, platform_release=None):
    remove_existing_package(destination, package.name, package.version)

    toolchain_build_id = os.environ["IMPALA_TOOLCHAIN_BUILD_ID"]
    label = get_platform_release_label(release=platform_release).toolchain
    format_params = {
        'product': package.name,
        'version': package.version,
        'compiler': compiler,
        'label': label,
        'toolchain_build_id': toolchain_build_id
    }
    file_name = "{product}-{version}-{compiler}-{label}.tar.gz".format(
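The download loop above retries a flaky fetch with a randomized sleep between attempts. A self-contained sketch of the same retry-with-jitter pattern (NUM_ATTEMPTS is assumed to be defined elsewhere in the original module; a fixed value stands in here):

import logging
import random
import time

import sh

NUM_ATTEMPTS = 3  # assumed value for the sketch

def download_with_retries(url, destination):
    # Retry the download, sleeping 10-15 seconds between attempts.
    # sh maps the keyword arguments to --directory-prefix and --no-clobber.
    for attempt in range(1, NUM_ATTEMPTS + 1):
        try:
            sh.wget(url, directory_prefix=destination, no_clobber=True)
            return
        except sh.ErrorReturnCode as e:
            if attempt == NUM_ATTEMPTS:
                raise
            logging.error("Download failed; retrying after sleep: %s", e)
            time.sleep(10 + random.random() * 5)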
Example #59
0
def convert_func(flags):
    configs = config_parser.parse(flags.config)
    print(configs)
    library_name = configs[YAMLKeyword.library_name]
    if not os.path.exists(BUILD_OUTPUT_DIR):
        os.makedirs(BUILD_OUTPUT_DIR)
    elif os.path.exists(os.path.join(BUILD_OUTPUT_DIR, library_name)):
        sh.rm("-rf", os.path.join(BUILD_OUTPUT_DIR, library_name))
    os.makedirs(os.path.join(BUILD_OUTPUT_DIR, library_name))
    if not os.path.exists(BUILD_DOWNLOADS_DIR):
        os.makedirs(BUILD_DOWNLOADS_DIR)

    model_output_dir = \
        '%s/%s/%s' % (BUILD_OUTPUT_DIR, library_name, MODEL_OUTPUT_DIR_NAME)
    model_header_dir = \
        '%s/%s/%s' % (BUILD_OUTPUT_DIR, library_name, MODEL_HEADER_DIR_PATH)
    # clear output dir
    if os.path.exists(model_output_dir):
        sh.rm("-rf", model_output_dir)
    os.makedirs(model_output_dir)
    if os.path.exists(model_header_dir):
        sh.rm("-rf", model_header_dir)

    if os.path.exists(MODEL_CODEGEN_DIR):
        sh.rm("-rf", MODEL_CODEGEN_DIR)
    if os.path.exists(ENGINE_CODEGEN_DIR):
        sh.rm("-rf", ENGINE_CODEGEN_DIR)

    if flags.model_data_format:
        model_data_format = flags.model_data_format
    else:
        model_data_format = configs.get(YAMLKeyword.model_data_format, "file")
    embed_model_data = model_data_format == ModelFormat.code

    if flags.model_graph_format:
        model_graph_format = flags.model_graph_format
    else:
        model_graph_format = configs.get(YAMLKeyword.model_graph_format,
                                         "file")
    if model_graph_format == ModelFormat.code:
        os.makedirs(model_header_dir)
        sh_commands.gen_mace_engine_factory_source(
            configs[YAMLKeyword.models].keys(), embed_model_data)
        sh.cp("-f", glob.glob("mace/codegen/engine/*.h"), model_header_dir)

    convert.convert(configs, MODEL_CODEGEN_DIR)

    for model_name, model_config in configs[YAMLKeyword.models].items():
        model_codegen_dir = "%s/%s" % (MODEL_CODEGEN_DIR, model_name)
        encrypt.encrypt(
            model_name, "%s/model/%s.pb" % (model_codegen_dir, model_name),
            "%s/model/%s.data" % (model_codegen_dir, model_name),
            config_parser.parse_device_type(model_config[YAMLKeyword.runtime]),
            model_codegen_dir, bool(model_config.get(YAMLKeyword.obfuscate,
                                                     1)),
            model_graph_format == "code", model_data_format == "code")

        if model_graph_format == ModelFormat.file:
            sh.mv("-f", '%s/model/%s.pb' % (model_codegen_dir, model_name),
                  model_output_dir)
            sh.mv("-f", '%s/model/%s.data' % (model_codegen_dir, model_name),
                  model_output_dir)
        else:
            if not embed_model_data:
                sh.mv("-f",
                      '%s/model/%s.data' % (model_codegen_dir, model_name),
                      model_output_dir)

            sh.cp("-f", glob.glob("mace/codegen/models/*/code/*.h"),
                  model_header_dir)

        MaceLogger.summary(
            StringFormatter.block("Model %s converted" % model_name))

    if model_graph_format == ModelFormat.code:
        build_model_lib(configs, flags.address_sanitizer, flags.debug_mode)

    print_library_summary(configs)
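The rm -rf / makedirs pairs above recreate output directories from scratch. A portable equivalent that avoids shelling out, should you not want the sh dependency (a sketch; the original uses sh deliberately for brevity):

import os
import shutil

def reset_dir(path):
    # Equivalent of sh.rm("-rf", path) followed by os.makedirs(path).
    if os.path.exists(path):
        shutil.rmtree(path)
    os.makedirs(path)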
Example #60
0
def clear_cached_files(self):
    sh.rm('-f', sh.glob(os.path.join(self.cache_path, '*.shp')))
    sh.rm('-f', sh.glob(os.path.join(self.cache_path, '*.shx')))
    sh.rm('-f', sh.glob(os.path.join(self.cache_path, '*.dbf')))
    sh.rm('-f', sh.glob(os.path.join(self.cache_path, '*.prj')))
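sh.glob is needed here because sh runs programs directly rather than through a shell, so a literal '*.shp' would never be expanded. Looping over the extensions also avoids repeating the call and skips patterns with no matches instead of invoking rm with no operands (a sketch under those assumptions):

import glob
import os
import sh

def clear_cached_files(cache_path, extensions=(".shp", ".shx", ".dbf", ".prj")):
    # Expand each pattern in Python and only call rm when something matched.
    for ext in extensions:
        matches = glob.glob(os.path.join(cache_path, "*" + ext))
        if matches:
            sh.rm("-f", *matches)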