示例#1
0
def gen_private_key(domain, dirname, private_key_name):
    ''' Generate an openssl private key for the domain.

        The key is created with a throwaway passphrase ('secret') because
        genrsa -aes256 demands one, then the passphrase is stripped by
        round-tripping the key through ``openssl rsa``.

        NOTE(review): ``domain`` is unused in the visible body — confirm
        whether callers rely on it.
    '''

    log('starting to generate private key')

    # remove the following DEBUG lines by 2016-09-30
    # NOTE(review): this debug block is past its stated removal date.
    OPENSSL_CNF_FILENAME = os.path.join(dirname, 'openssl.cnf') #DEBUG
    log('openssl.cnf exists: {}'.format(os.path.exists(OPENSSL_CNF_FILENAME)))  #DEBUG
    if os.path.exists(OPENSSL_CNF_FILENAME): #DEBUG
        log('openssl.cnf filesize: {}'.format(os.stat(OPENSSL_CNF_FILENAME).st_size)) #DEBUG

    private_key = os.path.join(dirname, 'private', private_key_name)
    temp_private_key = '{}.tmp'.format(private_key)
    # Responder feeds scripted answers to openssl's interactive prompts.
    kwargs = dict(_clilog=sh_out, _env=minimal_env(), _err=sh_err)

    # openssl asks twice (enter + verify) when creating the key.
    responses = [
        ('Enter pass phrase for {}:'.format(private_key), 'secret'),
        ('Verifying - Enter pass phrase for {}:'.format(private_key), 'secret'),
        ]

    args = ['genrsa', '-aes256', '-out', private_key, '4096']
    #args = ['genpkey', '-out', private_key, '-outform', 'PEM', '-aes256', '-algorithm', 'rsa', '-pkeyopt', 'rsa_keygen_bits:4096']
    Responder(responses, 'openssl', *args, **kwargs)
    assert os.path.exists(private_key), 'could not generate {}'.format(private_key)

    # Decrypt in place: copy aside, re-emit without a passphrase, clean up.
    sh.cp(private_key, temp_private_key)
    responses = [('Enter pass phrase for {}:'.format(temp_private_key), 'secret')]
    args = ['rsa', '-in', temp_private_key, '-out', private_key]
    Responder(responses, 'openssl', *args, **kwargs)
    os.remove(temp_private_key)
示例#2
0
def main():
    """Generate C++ code from every JSON schema in the test data dir,
    write the header/source/main files, build them, and run the
    resulting ``configen_test`` binary for each schema.
    """
    filename = 'my_config'
    include_path = 'inc'
    test_path = '/home/leha/personal/configen/configen/test/data'
    test_files = glob(os.path.join(test_path, '*.json'))
    #test_files = [os.path.join(test_path, 'test_schema.json')]
    # copy header with helper test functions
    sh.cp(os.path.join(test_path, 'serialization_tests.h'), '.')
    # iterate over all files in test directory
    for test_filename in test_files:
        test_name = os.path.basename(test_filename).split('.')[0]
        print('Test file: ' + test_name)
        # use a context manager so the handle is closed promptly
        # (the original leaked the file object returned by open())
        with open(test_filename, 'r') as schema_file:
            string_of_json = schema_file.read()
        code = cg.convert_json(string_of_json, language='c++',
                               namespace=['config'], filename=filename,
                               include_path=include_path)
        # write header, source and main
        with open(os.path.join(include_path, filename + '.h'), 'w') as header:
            header.write(code['header'])
        with open(os.path.join(filename + '.cc'), 'w') as src:
            src.write(code['source'])
        # prefer a schema-specific main if the test data provides one
        main_filename = os.path.join(test_path, test_name + '.cc')
        if os.path.exists(main_filename):
            sh.cp(main_filename, 'main.cc')
        else:
            print('Default main')
            with open('main.cc', 'w') as main_:
                main_.write('\n'.join(DEFAULT_MAIN))
        sh.make()
        # check c code
        run_main = sh.Command('./configen_test')
        check_output(run_main())
示例#3
0
def gen_tmpfile(kb_size, query_size, query_class):
	"""Build a temporary Prolog file: copy the ``<kb_size>.pl`` knowledge
	base, then append one rule per line of the matching query file.

	Returns a list of dicts, one per generated rule, recording the rule's
	argument string, index, and the originating query/parameters.
	"""
	from sh import cp
	from experiments_pl import convertToQuery
	import re

	queryfile = "./{0}-{1}-queries.txt".format(query_class, query_size)
	tmpfile = make_tmpfile_name(kb_size, query_size, query_class)
	cp(
			"{0}.pl".format(kb_size),
			tmpfile
	)

	# hoisted out of the loop: compile the pattern once for all queries
	tweet_pat = re.compile('tweet')

	r = []

	with open(tmpfile, "a") as tmp:
		with open(queryfile) as queries:
			# iterate the file lazily instead of materializing readlines()
			for i, query in enumerate(queries):
				rule = convertToQuery(query.strip())
				# one capitalised variable (A, B, ...) per 'tweet' occurrence
				args = ",".join(chr(65 + n) for n, _ in enumerate(tweet_pat.finditer(rule)))
				pred_sig = make_pred_sig(args, i)
				tmp.write("{0} :- {1}.\n".format(pred_sig, rule))
				r.append({
					'args': args,
					'i': i,
					'kb_size': kb_size,
					'query_size': query_size,
					'query_class': query_class,
					'orig_query': query.strip()
				})

	return r
示例#4
0
def copy_assets():
    """copy assets for static serving"""
    # NOTE(review): proj() presumably switches to the project root — confirm.
    proj()

    print(". copying assets ...")

    # destination dir -> list of source files. sh.glob expands the
    # wildcards (sh.cp itself performs no shell expansion); the
    # ``or []`` guards keep empty glob results iterable.
    copy_patterns = {
        "dist": ["./static/lib/jquery-1.8.3.min.js"] +
        sh.glob("./static/config/*.json") +
        sh.glob("./static/fragile-min.*"),

        "dist/font": sh.glob("./static/lib/awesome/font/*"),
        "dist/svg": sh.glob("./static/svg/*.svg"),
        "dist/img": sh.glob("./static/img/*.*") or [],

        "dist/docs/assets": sh.glob("./docs/assets/*.*") or [],
    }

    for dst, copy_files in copy_patterns.items():
        if not os.path.exists(dst):
            sh.mkdir("-p", dst)

        for c_file in copy_files:
            print "... copying", c_file, dst
            sh.cp("-r", c_file, dst)

    # drop the webassets cache so stale bundles are not served
    wa_cache = "./dist/.webassets-cache"

    if os.path.exists(wa_cache):
        sh.rm("-r", wa_cache)
示例#5
0
    def setup(self, portno=8000):
        """Allocate a free port, write an nginx vhost config for the
        project, install it under /etc/nginx/conf.d, and restart nginx.

        portno -- first port number to try; incremented past any port
        already recorded in self.port_used.
        """
        self.load_port()
        # print self.port_used
        some_no = portno
        conf_file = self.real_proj_d + '.conf'
        # linear scan upward for the first port not already in use
        while 1:
            if some_no in self.port_used:
                some_no += 1
            else:
                print 'the port allocated is: ', some_no
                break

        # Emit the vhost config line by line (exact text matters to nginx).
        conf = open(conf_file, 'w')
        conf.write('server {\n')
        conf.write('    listen' + ' ' + unicode(some_no) + ';\n')
        conf.write('    access_log /var/log/nginx/access.log;\n')
        conf.write('    error_log /var/log/nginx/error.log;\n')
        conf.write('    location / {\n')
        # app traffic is proxied to the project's uwsgi unix socket
        conf.write('        uwsgi_pass unix:' + self.load_sock() + ';\n')
        conf.write('        include uwsgi_params;\n')
        conf.write('    }\n')
        conf.write('    location ~ ^/static/ {\n')
        conf.write('        root ' + self.load_static() + ';\n')
        conf.write('    }\n')
        conf.write('}')
        conf.close()

        # install the vhost, remove the local copy, and reload nginx
        cp(conf_file, '/etc/nginx/conf.d')
        rm(conf_file)
        system("service nginx restart")

        return
示例#6
0
def install_theme(name, profile=None, toolbar=False, jupyter=True):
    """ copy given theme to theme.css and import css in custom.css """
    from sh import cp  # @UnresolvedImport (annotation for pydev)
    source_path = glob('%s/%s.css' % (THEMES_PATH, name))[0]
    paths = install_path(profile, jupyter)

    for i, target_path in enumerate(paths):
        # -- install theme
        themecss_path = '%s/theme.css' % target_path
        customcss_path = '%s/custom.css' % target_path
        cp(source_path, themecss_path)
        cp(source_path, customcss_path)

        print "Installing %s at %s" % (name, target_path)
        # -- check if theme import is already there, otherwise add it
        with open(customcss_path, 'r+a') as customcss:
            if not 'theme.css' in ' '.join(customcss.readlines()):
                customcss.seek(0, os.SEEK_END)
                customcss.write("\n@import url('theme.css');")

        # -- enable toolbar if requested
        if toolbar:
            print "Enabling toolbar"
            with open(themecss_path, 'rs+w') as themefile:
                # TODO do some proper css rewrite
                lines = (line.replace('div#maintoolbar', 'div#maintoolbar_active')
                                  for line in themefile.readlines())
                themefile.seek(0)
                themefile.writelines(lines)
                themefile.truncate()
        else:
            print "Toolbar is disabled. Set -T to enable"
示例#7
0
def restore_file(filename):
    ''' Context manager restores a file to its previous state.

        If the file exists on entry, it is backed up and restored.

        If the file does not exist on entry and does exists on exit,
        it is deleted.

        NOTE(review): body contains a bare ``yield`` — presumably
        decorated with @contextlib.contextmanager above this view;
        confirm.
    '''

    exists = os.path.exists(filename)

    if exists:
        # we just want the pathname, not the handle
        # tiny chance of race if someone else gets the temp filename
        handle, backup = tempfile.mkstemp()
        os.close(handle)
        # --archive preserves mode/ownership/timestamps of the original
        sh.cp('--archive', filename, backup)

    try:
        yield

    finally:
        # whatever the protected block left behind is removed first,
        # then the backup (if any) is moved back into place
        if os.path.exists(filename):
            sh.rm(filename)
        if exists:
            # restore to original state
            sh.mv(backup, filename)
示例#8
0
    def compile( self, source_dir, build_dir, install_dir ):
        """Build and install the package.

        Stages resources via the package's own copyresources.sh, copies
        extras the install target misses, then configures with CMake and
        builds: Xcode on macOS, make elsewhere.
        """
        package_source_dir = os.path.join( source_dir, self.dirname )
        assert( os.path.exists( package_source_dir ) )
        package_build_dir = os.path.join( build_dir, self.dirname )

        sh.cd( os.path.join( package_source_dir, 'scripts/Resources' ) )
        sh.sh( './copyresources.sh' )
        # the install target doesn't copy the stuff that copyresources.sh puts in place
        sh.cp( '-v', os.path.join( package_source_dir, 'bin/Release/Readme.txt' ), os.path.join( install_dir, 'Readme.meshy.txt' ) )
        sh.cp( '-v', '-r', os.path.join( package_source_dir, 'bin/Release_Linux/Resources/' ), install_dir )

        # out-of-source build in <build_dir>/<dirname>
        sh.mkdir( '-p', package_build_dir )
        sh.cd( package_build_dir )
        if ( platform.system() == 'Darwin' ):
            sh.cmake(
                '-G', 'Xcode',
                '-D', 'CMAKE_INSTALL_PREFIX=%s' % install_dir,
                '-D', 'CMAKE_MODULE_PATH=%s' % os.path.join( install_dir, 'CMake' ),
                package_source_dir,
                _out = sys.stdout )
            sh.xcodebuild( '-configuration', 'Release', _out = sys.stdout )
        else:
            sh.cmake(
                '-D', 'CMAKE_INSTALL_PREFIX=%s' % install_dir,
                '-D', 'CMAKE_MODULE_PATH=%s' % os.path.join( install_dir, 'lib/OGRE/cmake' ),
                package_source_dir,
                _out = sys.stdout )
            sh.make( '-j4', 'VERBOSE=1', _out = sys.stdout )
            sh.make.install( _out = sys.stdout )
示例#9
0
def prepare_file(file_path, build_dir):
    """ Move file into build folder before launching actual command (also compile coffeescript)

    Returns the path of the prepared file inside ``build_dir``.
    """
    logger.info("Preparing file {0}".format(file_path))
    if file_path.endswith(".coffee"):
        logger.info("Compiling coffee script")
        js_content = ""
        coffee_content = ""
        # split '#import' directives (kept verbatim as JS) from the
        # coffee source proper; use `with` so the handle is closed
        # (the original leaked both file objects it opened)
        with open(file_path, 'r') as src_file:
            for line in src_file:
                # NOTE(review): `line` keeps its newline, so this appends a
                # second one — preserved as-is; confirm it is intentional.
                content_to_add = "{0}\n".format(line)
                if line.strip().startswith("#import"):
                    js_content += content_to_add
                else:
                    coffee_content += content_to_add

        compiled_content = coffeescript.compile(coffee_content).split('\n')

        # drop the wrapper lines the compiler adds (first, last two)
        js_content += '\n'.join(compiled_content[1:-2])

        logger.debug(js_content)
        file_name = path.splitext(path.basename(file_path))[0] + ".js"
        build_path = path.join(build_dir, file_name)
        with open(build_path, 'w') as out_file:
            out_file.write(js_content)
        return build_path
    else:
        logger.info("Copy JS file to build dir")
        build_path = path.join(build_dir, path.basename(file_path))
        sh.cp(file_path, build_path)

        return build_path
示例#10
0
def handle_task(task, dotfiles_dir):
    """Copy one task's 'src' file or directory into the dotfiles repo."""
    click.echo('handling task: {}'.format(task))

    if 'src' not in task:
        click.echo("you must define at least a 'src' in each task")
        raise click.Abort

    src_path = os.path.expanduser(task['src'])
    if not os.path.exists(src_path):
        click.echo('file not found: {}'.format(src_path))
        raise click.Abort

    base_name = os.path.basename(src_path)
    # '.' is unaffected by rstrip('/'), so the default needs no special case
    sub = task.get('subdir', '.').rstrip('/')
    target = os.path.abspath(os.path.join(dotfiles_dir, sub, base_name))

    # make sure the target directory exists, e.g. .dotfiles/bash/
    mkdir('-p', os.path.dirname(target))

    # copy the files
    msg_fmt = 'copying {}: from [{}] to [{}]'
    if os.path.isdir(src_path):
        click.echo(msg_fmt.format('dir', src_path, target))
        # trailing '/.' copies the directory's contents, not the dir itself
        cp("-r", os.path.join(src_path, "."), target)
    else:
        click.echo(msg_fmt.format('file', src_path, target))
        cp(src_path, target)
示例#11
0
def copy_statics():
    """Replace output/static with a fresh copy of the static directory."""
    # NOTE(review): OUTPUT_DIR is presumably 'output' — confirm.
    if not path.isdir(OUTPUT_DIR):
        os.mkdir(OUTPUT_DIR)

    # clear any previous copy so cp does not nest static inside it
    if path.isdir('output/static'):
        sh.rm('-rf', 'output/static')

    sh.cp('-r', 'static', 'output/static')
示例#12
0
文件: main.py 项目: dnene/nestegg
def update_versions(config, dir_type, pkg_name, pkg_path, versions) :
    """Copy every regular file of a package directory into ``pkg_path``
    and record each file's md5 in ``versions`` (keyed by file name).

    NOTE(review): the unary ``+`` on the path objects presumably
    converts them to plain strings (path-library convention), and
    ``pkg_dir[fname]`` joins a child path — confirm against the path
    type ``config`` provides.
    """
    # e.g. dir_type='source' -> config.source_dir[pkg_name]
    pkg_dir = getattr(config,dir_type + "_dir")[pkg_name]
    if pkg_dir.exists() :
        for fname in os.listdir(+pkg_dir) :
            fpath = pkg_dir[fname]
            if fpath.isfile() :
                cp(+fpath, +pkg_path)
                versions[fname] =  "md5={}".format(file_md5(+fpath))
示例#13
0
 def copy(self, source, dest, dir=False):
     """Copy ``source`` to ``dest``, both relative to the mount point.

     Pass ``dir=True`` to copy recursively.
     """
     src_abs = self.mount_point + source
     dst_abs = self.mount_point + dest
     log.debug("Copying from '{0}' to '{1}'".format(src_abs, dst_abs))
     if dir:
         cp('-r', src_abs, dst_abs)
     else:
         cp(src_abs, dst_abs)
     # give the copy a moment to settle on the mounted filesystem
     time.sleep(1)
示例#14
0
    def setup(self, git_repo_dir):
        """Create the package's log and lib directories and copy the api
        and models source trees into place.
        """
        sh.mkdir("-p", join(self.btd, "var/log", self.pkg_name))

        lib_base = join(self.btd, "usr/lib/api")
        sh.mkdir("-p", lib_base)

        for component in ("api", "models"):
            # cp -T merges the tree when copying into its namesake dir
            sh.cp("-r", join(git_repo_dir, "src", component), lib_base,
                  T=lib_base.endswith(component))
示例#15
0
    def build_compiled_components(self,arch):
        """Build the C++ components via the base recipe, then copy the GNU
        STL shared library into the target's libs dir so the compiled
        extensions can load it at runtime.
        """
        super(CppCompiledComponentsPythonRecipe, self).build_compiled_components(arch)

        # Copy libgnustl_shared.so
        with current_directory(self.get_build_dir(arch.arch)):
            sh.cp(
                "{ctx.ndk_dir}/sources/cxx-stl/gnu-libstdc++/{ctx.toolchain_version}/libs/{arch.arch}/libgnustl_shared.so".format(ctx=self.ctx,arch=arch),
                self.ctx.get_libs_dir(arch.arch)
            )
示例#16
0
文件: local.py 项目: prpole/git-lit
 def copy_files(self):
     """ Copy the LICENSE and CONTRIBUTING files to each folder repo """
     template_root = sh.pwd().strip()
     book_dir = '{0}/'.format(self.book.textdir)
     for doc_name in ('LICENSE.md', 'CONTRIBUTING.md'):
         sh.cp(
             '{0}/templates/{1}'.format(template_root, doc_name),
             book_dir
         )
示例#17
0
 def copy_files(self):
     """ Copy the LICENSE and CONTRIBUTING files to each folder repo """
     # TODO: Add .gitattributes for line endings (and .gitignore?)
     templates_root = sh.pwd().strip()
     repo_dir = '{0}/'.format(self.directory)
     for doc_name in ('LICENSE.md', 'CONTRIBUTING.md'):
         sh.cp(
             '{0}/templates/{1}'.format(templates_root, doc_name),
             repo_dir
         )
def backup_workloads():
  """Copy the workload folder to a temporary directory and returns its name.

  Used to keep workloads from being clobbered by git checkout.
  """
  temp_dir = mkdtemp()
  sh.cp(os.path.join(IMPALA_HOME, "testdata", "workloads"),
        temp_dir, R=True, _out=sys.stdout, _err=sys.stderr)
  print "Backed up workloads to {0}".format(temp_dir)
  return temp_dir
示例#19
0
def pagesBranch():
    """Create an orphan gh-pages branch, seed it with the coming-soon
    template, push it, then return to (or create) the dev branch.
    """
    try:
        git.checkout("--orphan", "gh-pages")
        git.rm("-r", "-f", "--ignore-unmatch", ".")
    except Exception:
        print("No files exist in the gh-pages branch")
    try:
        # sh performs no shell wildcard expansion, so expand the template
        # files explicitly (the original passed a literal '*' to cp,
        # which always failed into the except branch)
        template_glob = os.path.dirname(os.path.realpath(__file__)) + "/www/coming-soon/*"
        sh.cp("-R", sh.glob(template_glob), "./")
    except Exception:
        print("Template not found")
    pushAll("Creating the first instance of the coming-soon site")
    try:
        git.checkout("dev")
    except Exception:
        git.checkout("-b", "dev")
示例#20
0
文件: quickie.py 项目: erik/quickie
def set_repository(repo):
    """Jump to a new directory to avoid overwriting any work that might not
    have been commited yet"""

    # Work in a throwaway directory; copy the repository verbatim
    # (trailing '/.' copies the repo's contents, including dotfiles).
    scratch_dir = tempfile.mkdtemp(prefix='quickie-')
    sh.cd(scratch_dir)
    sh.cp('-R', repo + '/.', scratch_dir)

    return scratch_dir
示例#21
0
    def setup(self, git_repo_dir):
        """Create the harvester's log, lib and var directories and copy
        its source trees into place.
        """
        sh.mkdir("-p", join(self.btd, "var/log", self.pkg_name))
        sh.mkdir("-p", join(self.btd, "var/lib", self.pkg_name))

        lib_base = join(self.btd, "usr/lib/resultharvester")
        sh.mkdir("-p", lib_base)

        for component in ("resultharvester", "models", "thirdparty"):
            # cp -T merges the tree when copying into its namesake dir
            sh.cp("-r", join(git_repo_dir, "src", component), lib_base,
                  T=lib_base.endswith(component))
示例#22
0
文件: runners.py 项目: reem/pyvzutil
 def copy_to(self, src, dest, quiet=False):
     """
     Copy from a location `src` on the local machine to a location
     `dest` on the remote machine.
     """
     if not quiet:
         # stream output to pse while also capturing it (_tee)
         return sh.cp('-r', src, dest,
                      _out=pse, _err=pse, _tee=True)
     return sh.cp('-r', src, dest)
示例#23
0
 def build_arch(self, arch):
     """Build the crypto_sign extension with hostpython inside the build
     dir, then copy it into site-packages and install the package.
     """
     with current_directory(join(self.get_build_dir(arch.arch))):
         env = self.get_recipe_env(arch)
         hostpython = sh.Command(self.ctx.hostpython)
         shprint(
             hostpython, 'ref10/build.py',
             _env=env
         )
         # the library could be `_crypto_sign.cpython-37m-x86_64-linux-gnu.so`
         # or simply `_crypto_sign.so` depending on the platform/distribution
         # sh.glob expands the wildcard; -a preserves file attributes
         sh.cp('-a', sh.glob('_crypto_sign*.so'), self.ctx.get_site_packages_dir())
         self.install_python_package(arch)
示例#24
0
文件: build.py 项目: rootwzrd/docker
def setupConfigDirs(dirname, fullPrimaryEmail, primaryEmail, name, uid):
    """Create a per-user kontact config dir from the template and fill in
    the ``{placeholder}`` tokens in every file via sed.
    """
    basedir = settings.SCRIPT_DIR + "/kontact"
    print(basedir)
    target = "{}/{}".format(basedir, dirname)
    try:
        # remove any stale config dir for this user; the original called
        # sh.rm("-R", ) with no path, which always failed into the except
        sh.rm("-R", target)
    except Exception:
        print("nothing to remove")
    sh.cp("-R", "{}/config".format(basedir), target)
    # substitute each template placeholder in every file of the new dir
    for token, value in (("fullPrimaryEmail", fullPrimaryEmail),
                         ("primaryEmail", primaryEmail),
                         ("name", name),
                         ("uid", uid)):
        sh.find(target, "-type", "f", "-exec", "sed", "-i",
                "s/{" + token + "}/" + value + "/g", "{}", "+")
示例#25
0
    def postbuild_arch(self, arch):
        """After the base postbuild: install the twistd plugins into
        site-packages and drop the ffmpeg binary into the Android
        service directory (only if not already present).
        """
        super(LocalTriblerRecipe, self).postbuild_arch(arch)

        # Install twistd plugins
        cp('-rf', join(self.get_build_dir(arch.arch), 'twisted'),
           join(self.ctx.get_python_install_dir(), 'lib/python2.7/site-packages'))

        # Install ffmpeg binary
        source = self.get_recipe('ffmpeg', self.ctx).get_build_bin(arch)
        target = join(self.src_root, 'android/TriblerService/service/ffmpeg')
        if not exists(target):
            cp('-f', source, target)
示例#26
0
文件: local.py 项目: Git-Lit/git-lit
 def copy_files(self):
     """ Copy the LICENSE and CONTRIBUTING files to each folder repo """
     # TODO: Add .gitattributes for line endings (and .gitignore?)
     # license = resource_filename(__name__, 'templates/LICENSE')
     contributing = resource_filename(__name__, 'templates/CONTRIBUTING.md')
     FILES = [contributing]
     # resource_filename already yields absolute paths, so the old
     # unused sh.pwd() lookup was dead code and has been dropped
     target_dir = '{0}/'.format(self.directory)
     for _file in FILES:
         sh.cp(_file, target_dir)
示例#27
0
def bootstrap(source_app, appname, confirm):
    """Copy ``source_app`` to ``appname``, removing any existing copy first.

    NOTE(review): ``confirm`` is unused in the visible body — confirm callers.
    """
    # remove mypackage.app if it already exists
    print('Copy Kivy.app/source.app if it exists')
    if exists(appname):
        print('{} already exists removing it...'.format(appname))
        sh.rm('-rf', appname)

    # check if Kivy.app exists and copy it
    if not exists(source_app):
        # the original never interpolated the path into the message
        error("source app {} doesn't exist".format(source_app))
    print('copying {} to {}'.format(source_app, appname))
    sh.cp('-a', source_app, appname)
示例#28
0
文件: create_vm.py 项目: bmoar/vm
    def copy_file(self, src="", dest=""):
        """
            Creates a COW copy of src in dest

            --reflink=auto makes a copy-on-write clone where the
            filesystem supports it, falling back to a normal copy
            otherwise; the cp runs under sudo. Exits the process if
            src is missing or dest was not created.
        """
        if not os.path.exists(src):
            sys.exit("Path to base image does not exist")

        with sh.sudo:
            sh.cp('--reflink=auto', "%s" % (src), "%s" % (dest))

        if not os.path.exists(dest):
            sys.exit("Should have created a new image")
示例#29
0
    def prebuild_arch(self, arch):
        """Reset the build container for ``arch`` and seed it with a fresh
        copy of the working-copy source before the base prebuild runs.
        """
        # Remove from site-packages
        super(LocalTriblerRecipe, self).clean_build(arch.arch)

        # Create empty build dir
        container_dir = self.get_build_container_dir(arch.arch)
        mkdir('-p', container_dir)

        with current_directory(container_dir):
            # Copy source from working copy
            cp('-rf', self.src_root, self.name)

        super(LocalTriblerRecipe, self).prebuild_arch(arch)
 def new_meta_output(self):
     """Diff the old-meta output against the current .meta.yml; on any
     difference, copy diff and old file into the failure dir and mark
     this test as failed.
     """
     # _ok_code accepts 1 because diff exits 1 when the files differ
     diff = sh.diff(self.test_out + ".oldmeta.yml",
                    self.test_file + ".meta.yml",
                    _out=self.test_out + ".oldmeta.yml.diff",
                    _ok_code=[0, 1])
     if diff.exit_code != 0:
         self.say(red("Flag --new-meta changed output!"))
         self.say(red("\tSee {}.oldmeta.yml*"), fail_path(self.test_name))
         sh.cp(self.test_out + ".oldmeta.yml.diff",
               fail_path(self.test_name + ".oldmeta.yml.diff"))
         sh.cp(self.test_out + ".oldmeta.yml",
               fail_path(self.test_name + ".oldmeta.yml"))
         self.failed = True
示例#31
0
def main():
    """Provision a Zulip development environment on Ubuntu trusty:
    install apt dependencies, tsearch extras, phantomjs, a virtualenv
    with pip requirements, then initialize rabbitmq/postgres and the
    dev/test databases. Returns 0 on success.
    """
    log = logging.getLogger("zulip-provisioner")

    # Pick package/phantomjs architecture names from the host bitness.
    if platform.architecture()[0] == '64bit':
        arch = 'amd64'
        phantomjs_arch = 'x86_64'
    elif platform.architecture()[0] == '32bit':
        arch = "i386"
        phantomjs_arch = 'i686'
    else:
        log.critical(
            "Only x86 is supported; ping [email protected] if you want another architecture."
        )
        sys.exit(1)

    vendor, version, codename = platform.dist()

    # NOTE(review): only logs on unsupported platforms — does not exit.
    if not (vendor in SUPPORTED_PLATFORMS
            and codename in SUPPORTED_PLATFORMS[vendor]):
        log.critical("Unsupported platform: {} {}".format(vendor, codename))

    with sh.sudo:
        sh.apt_get.update(**LOUD)

        sh.apt_get.install(*APT_DEPENDENCIES["trusty"],
                           assume_yes=True,
                           **LOUD)

    # Fetch and install the tsearch-extras .deb into a temp file.
    temp_deb_path = sh.mktemp("package_XXXXXX.deb", tmpdir=True)

    sh.wget("{}/{}_{}_{}.deb".format(
        TSEARCH_URL_BASE,
        TSEARCH_PACKAGE_NAME["trusty"],
        TSEARCH_VERSION,
        arch,
    ),
            output_document=temp_deb_path,
            **LOUD)

    with sh.sudo:
        sh.dpkg("--install", temp_deb_path, **LOUD)

    # Download phantomjs (skipped if the tarball is cached), unpack it,
    # and symlink the binary onto the PATH.
    with sh.sudo:
        PHANTOMJS_PATH = "/srv/phantomjs"
        PHANTOMJS_BASENAME = "phantomjs-1.9.8-linux-%s" % (phantomjs_arch, )
        PHANTOMJS_TARBALL_BASENAME = PHANTOMJS_BASENAME + ".tar.bz2"
        PHANTOMJS_TARBALL = os.path.join(PHANTOMJS_PATH,
                                         PHANTOMJS_TARBALL_BASENAME)
        PHANTOMJS_URL = "https://bitbucket.org/ariya/phantomjs/downloads/%s" % (
            PHANTOMJS_TARBALL_BASENAME, )
        sh.mkdir("-p", PHANTOMJS_PATH, **LOUD)
        if not os.path.exists(PHANTOMJS_TARBALL):
            sh.wget(PHANTOMJS_URL, output_document=PHANTOMJS_TARBALL, **LOUD)
        sh.tar("xj", directory=PHANTOMJS_PATH, file=PHANTOMJS_TARBALL, **LOUD)
        sh.ln(
            "-sf",
            os.path.join(PHANTOMJS_PATH, PHANTOMJS_BASENAME, "bin",
                         "phantomjs"), "/usr/local/bin/phantomjs", **LOUD)

    # Recreate the virtualenv from scratch, owned by the current user.
    with sh.sudo:
        sh.rm("-rf", VENV_PATH, **LOUD)
        sh.mkdir("-p", VENV_PATH, **LOUD)
        sh.chown("{}:{}".format(os.getuid(), os.getgid()), VENV_PATH, **LOUD)

    sh.virtualenv(VENV_PATH, **LOUD)

    # Add the ./tools and ./scripts/setup directories inside the repository root to
    # the system path; we'll reference them later.
    orig_path = os.environ["PATH"]
    os.environ["PATH"] = os.pathsep.join(
        (os.path.join(ZULIP_PATH,
                      "tools"), os.path.join(ZULIP_PATH, "scripts",
                                             "setup"), orig_path))

    # Put Python virtualenv activation in our .bash_profile.
    with open(os.path.expanduser('~/.bash_profile'), 'w+') as bash_profile:
        bash_profile.writelines([
            "source .bashrc\n",
            "source %s\n" % (os.path.join(VENV_PATH, "bin", "activate"), ),
        ])

    # Switch current Python context to the virtualenv.
    activate_this = os.path.join(VENV_PATH, "bin", "activate_this.py")
    execfile(activate_this, dict(__file__=activate_this))

    sh.pip.install(requirement=os.path.join(ZULIP_PATH, "requirements.txt"),
                   **LOUD)

    with sh.sudo:
        sh.cp(REPO_STOPWORDS_PATH, TSEARCH_STOPWORDS_PATH, **LOUD)

    # npm install and management commands expect to be run from the root of the project.
    os.chdir(ZULIP_PATH)

    sh.npm.install(**LOUD)

    os.system("tools/download-zxcvbn")
    os.system("tools/emoji_dump/build_emoji")
    os.system("generate_secrets.py -d")
    # CI/container environments need their services (re)started explicitly.
    if "--travis" in sys.argv:
        os.system("sudo service rabbitmq-server restart")
        os.system("sudo service redis-server restart")
        os.system("sudo service memcached restart")
    elif "--docker" in sys.argv:
        os.system("sudo service rabbitmq-server restart")
        os.system("sudo pg_dropcluster --stop 9.3 main")
        os.system("sudo pg_createcluster -e utf8 --start 9.3 main")
        os.system("sudo service redis-server restart")
        os.system("sudo service memcached restart")
    # Initialize rabbitmq and the dev/test databases via repo scripts
    # (found on PATH thanks to the PATH update above).
    sh.configure_rabbitmq(**LOUD)
    sh.postgres_init_dev_db(**LOUD)
    sh.do_destroy_rebuild_database(**LOUD)
    sh.postgres_init_test_db(**LOUD)
    sh.do_destroy_rebuild_test_database(**LOUD)
    return 0
示例#32
0
    def build_arch(self, arch):
        """Configure and build OpenCV's python bindings (cv2) for ``arch``
        with CMake, install cv2 directly into site-packages, and copy the
        third-party shared libs into the apk's libs dir.
        """
        build_dir = join(self.get_build_dir(arch.arch), 'build')
        shprint(sh.mkdir, '-p', build_dir)
        with current_directory(build_dir):
            env = self.get_recipe_env(arch)

            # Resolve the python paths/names cmake needs to link cv2
            # against the right interpreter for this target.
            python_major = self.ctx.python_recipe.version[0]
            python_include_root = self.ctx.python_recipe.include_root(
                arch.arch)
            python_site_packages = self.ctx.get_site_packages_dir()
            python_link_root = self.ctx.python_recipe.link_root(arch.arch)
            python_link_version = self.ctx.python_recipe.major_minor_version_string
            if 'python3' in self.ctx.python_recipe.name:
                # python3 library name carries the 'm' ABI suffix
                python_link_version += 'm'
            python_library = join(python_link_root,
                                  'libpython{}.so'.format(python_link_version))
            python_include_numpy = join(python_site_packages, 'numpy', 'core',
                                        'include')

            shprint(
                sh.cmake,
                '-DP4A=ON',
                '-DANDROID_ABI={}'.format(arch.arch),
                '-DANDROID_STANDALONE_TOOLCHAIN={}'.format(self.ctx.ndk_dir),
                '-DANDROID_NATIVE_API_LEVEL={}'.format(self.ctx.ndk_api),
                '-DANDROID_EXECUTABLE={}/tools/android'.format(
                    env['ANDROID_SDK']),
                '-DCMAKE_TOOLCHAIN_FILE={}'.format(
                    join(self.ctx.ndk_dir, 'build', 'cmake',
                         'android.toolchain.cmake')),
                # Make the linkage with our python library, otherwise we
                # will get dlopen error when trying to import cv2's module.
                '-DCMAKE_SHARED_LINKER_FLAGS=-L{path} -lpython{version}'.
                format(path=python_link_root, version=python_link_version),
                '-DBUILD_WITH_STANDALONE_TOOLCHAIN=ON',
                # Force to build as shared libraries the cv2's dependant
                # libs or we will not be able to link with our python
                '-DBUILD_SHARED_LIBS=ON',
                '-DBUILD_STATIC_LIBS=OFF',

                # Disable some opencv's features
                '-DBUILD_opencv_java=OFF',
                '-DBUILD_opencv_java_bindings_generator=OFF',
                # '-DBUILD_opencv_highgui=OFF',
                # '-DBUILD_opencv_imgproc=OFF',
                # '-DBUILD_opencv_flann=OFF',
                '-DBUILD_TESTS=OFF',
                '-DBUILD_PERF_TESTS=OFF',
                '-DENABLE_TESTING=OFF',
                '-DBUILD_EXAMPLES=OFF',
                '-DBUILD_ANDROID_EXAMPLES=OFF',

                # Force to only build our version of python
                '-DBUILD_OPENCV_PYTHON{major}=ON'.format(major=python_major),
                '-DBUILD_OPENCV_PYTHON{major}=OFF'.format(
                    major='2' if python_major == '3' else '3'),

                # Force to install the `cv2.so` library directly into
                # python's site packages (otherwise the cv2's loader fails
                # on finding the cv2.so library)
                '-DOPENCV_SKIP_PYTHON_LOADER=ON',
                '-DOPENCV_PYTHON{major}_INSTALL_PATH={site_packages}'.format(
                    major=python_major, site_packages=python_site_packages),

                # Define python's paths for: exe, lib, includes, numpy...
                '-DPYTHON_DEFAULT_EXECUTABLE={}'.format(self.ctx.hostpython),
                '-DPYTHON{major}_EXECUTABLE={host_python}'.format(
                    major=python_major, host_python=self.ctx.hostpython),
                '-DPYTHON{major}_INCLUDE_PATH={include_path}'.format(
                    major=python_major, include_path=python_include_root),
                '-DPYTHON{major}_LIBRARIES={python_lib}'.format(
                    major=python_major, python_lib=python_library),
                '-DPYTHON{major}_NUMPY_INCLUDE_DIRS={numpy_include}'.format(
                    major=python_major, numpy_include=python_include_numpy),
                '-DPYTHON{major}_PACKAGES_PATH={site_packages}'.format(
                    major=python_major, site_packages=python_site_packages),
                self.get_build_dir(arch.arch),
                _env=env)
            shprint(sh.make, '-j' + str(cpu_count()),
                    'opencv_python' + python_major)
            # Install python bindings (cv2.so)
            shprint(sh.cmake, '-DCOMPONENT=python', '-P',
                    './cmake_install.cmake')
            # Copy third party shared libs that we need in our final apk
            sh.cp('-a', sh.glob('./lib/{}/lib*.so'.format(arch.arch)),
                  self.ctx.get_libs_dir(arch.arch))
示例#33
0
文件: converter.py 项目: overut/mace
def build_example(configs, target_abi, toolchain,
                  enable_openmp, mace_lib_type, cl_binary_to_code, device):
    """Build the MACE example CLI binary for ``target_abi``: generate the
    OpenCL codegen sources, build libmace (static or dynamic), stage the
    needed libs into the codegen dir, build the example target with
    bazel, and copy the binary into the per-library build dir.
    """
    library_name = configs[YAMLKeyword.library_name]
    hexagon_mode = get_hexagon_mode(configs)

    # start from a clean per-abi binary dir
    build_tmp_binary_dir = get_build_binary_dir(library_name, target_abi)
    if os.path.exists(build_tmp_binary_dir):
        sh.rm("-rf", build_tmp_binary_dir)
    os.makedirs(build_tmp_binary_dir)

    # embed real OpenCL binaries/parameters, or empty stubs
    if cl_binary_to_code:
        sh_commands.gen_opencl_binary_cpps(
            get_opencl_binary_output_path(
                library_name, target_abi, device),
            get_opencl_parameter_output_path(
                library_name, target_abi, device),
            OPENCL_CODEGEN_DIR + '/opencl_binary.cc',
            OPENCL_CODEGEN_DIR + '/opencl_parameter.cc')
    else:
        sh_commands.gen_opencl_binary_cpps(
            "", "",
            OPENCL_CODEGEN_DIR + '/opencl_binary.cc',
            OPENCL_CODEGEN_DIR + '/opencl_parameter.cc')

    # dynamic libs must export symbols; static builds hide them
    symbol_hidden = True

    libmace_target = LIBMACE_STATIC_TARGET
    if mace_lib_type == MACELibType.dynamic:
        symbol_hidden = False
        libmace_target = LIBMACE_SO_TARGET

    sh_commands.bazel_build(libmace_target,
                            abi=target_abi,
                            toolchain=toolchain,
                            enable_openmp=enable_openmp,
                            enable_opencl=get_opencl_mode(configs),
                            enable_quantize=get_quantize_mode(configs),
                            hexagon_mode=hexagon_mode,
                            address_sanitizer=flags.address_sanitizer,
                            symbol_hidden=symbol_hidden)

    if os.path.exists(LIB_CODEGEN_DIR):
        sh.rm("-rf", LIB_CODEGEN_DIR)
    sh.mkdir("-p", LIB_CODEGEN_DIR)

    # when the model graph is compiled into code, the example must link
    # the generated model lib and be built with the matching define
    build_arg = ""
    if configs[YAMLKeyword.model_graph_format] == ModelFormat.code:
        mace_check(os.path.exists(ENGINE_CODEGEN_DIR),
                   ModuleName.RUN,
                   "You should convert model first.")
        model_lib_path = get_model_lib_output_path(library_name,
                                                   target_abi)
        sh.cp("-f", model_lib_path, LIB_CODEGEN_DIR)
        build_arg = "--per_file_copt=mace/examples/cli/example.cc@-DMODEL_GRAPH_FORMAT_CODE"  # noqa

    if mace_lib_type == MACELibType.dynamic:
        example_target = EXAMPLE_DYNAMIC_TARGET
        sh.cp("-f", LIBMACE_DYNAMIC_PATH, LIB_CODEGEN_DIR)
    else:
        example_target = EXAMPLE_STATIC_TARGET
        sh.cp("-f", LIBMACE_STATIC_PATH, LIB_CODEGEN_DIR)

    sh_commands.bazel_build(example_target,
                            abi=target_abi,
                            toolchain=toolchain,
                            enable_openmp=enable_openmp,
                            enable_opencl=get_opencl_mode(configs),
                            enable_quantize=get_quantize_mode(configs),
                            hexagon_mode=hexagon_mode,
                            address_sanitizer=flags.address_sanitizer,
                            extra_args=build_arg)

    # stage the built binary and clean up the codegen dir
    target_bin = "/".join(sh_commands.bazel_target_to_bin(example_target))
    sh.cp("-f", target_bin, build_tmp_binary_dir)
    if os.path.exists(LIB_CODEGEN_DIR):
        sh.rm("-rf", LIB_CODEGEN_DIR)
示例#34
0
def copy_apk(flavor, version_name):
    """Copy the built APK for *flavor* into the ``releases`` folder.

    The destination file is named ``wikipedia-<version_name>.apk``.

    Args:
        flavor: build flavor passed through to get_original_apk_file_name().
        version_name: version string embedded in the output file name.
    """
    folder_path = 'releases'
    sh.mkdir("-p", folder_path)
    output_file = '%s/wikipedia-%s.apk' % (folder_path, version_name)
    sh.cp(get_original_apk_file_name(flavor), output_file)
    # Fixed: original used the Python 2 `print` statement, which is a
    # syntax error under Python 3 (the rest of this file is Python 3).
    print(' apk: %s' % output_file)
示例#35
0
def merge_libs(target_soc, serial_num, abi, project_name, build_output_dir,
               library_output_dir, model_build_type, hexagon_mode):
    """Combine the bazel-built MACE component libraries into one archive.

    Writes an ``ar -M`` (MRI) script that creates ``libmace_<project>.a``
    in the ABI-specific output directory and adds every component library,
    then feeds the script to the NDK ``ar`` tool on stdin.
    """
    print("* Merge mace lib")
    project_output_dir = f"{build_output_dir}/{project_name}"
    hexagon_lib_file = "third_party/nnlib/libhexagon_controller.so"
    library_dir = f"{project_output_dir}/{library_output_dir}"
    model_bin_dir = f"{library_dir}/{abi}/"

    if not os.path.exists(model_bin_dir):
        sh.mkdir("-p", model_bin_dir)
    if hexagon_mode:
        sh.cp("-f", hexagon_lib_file, library_dir)

    # Pick the archive name and the component libraries to merge.
    if abi == "host":
        archive_path = f"{model_bin_dir}/libmace_{project_name}.a"
        components = [
            "bazel-bin/mace/codegen/libgenerated_opencl.pic.a",
            "bazel-bin/mace/codegen/libgenerated_tuning_params.pic.a",
            "bazel-bin/mace/codegen/libgenerated_version.pic.a",
            "bazel-bin/mace/core/libcore.pic.lo",
            "bazel-bin/mace/kernels/libkernels.pic.a",
            "bazel-bin/mace/utils/libutils.pic.a",
            "bazel-bin/mace/proto/libmace_cc.pic.a",
            "bazel-bin/external/com_google_protobuf/libprotobuf_lite.pic.a",
            "bazel-bin/mace/ops/libops.pic.lo",
        ]
        if model_build_type == BuildType.code:
            components.append(
                "bazel-bin/mace/codegen/libgenerated_models.pic.a")
    else:
        if not target_soc:
            archive_path = f"{model_bin_dir}/libmace_{project_name}.a"
        else:
            # Archive name carries the device name and SoC when targeting
            # a specific device.
            device_name = adb_get_device_name_by_serialno(serial_num)
            archive_path = (f"{model_bin_dir}/libmace_{project_name}."
                            f"{device_name}.{target_soc}.a")
        components = []
        if model_build_type == BuildType.code:
            components.append("bazel-bin/mace/codegen/libgenerated_models.a")
        components += [
            "bazel-bin/mace/codegen/libgenerated_opencl.a",
            "bazel-bin/mace/codegen/libgenerated_tuning_params.a",
            "bazel-bin/mace/codegen/libgenerated_version.a",
            "bazel-bin/mace/core/libcore.lo",
            "bazel-bin/mace/kernels/libkernels.a",
            "bazel-bin/mace/utils/libutils.a",
            "bazel-bin/mace/proto/libmace_cc.a",
            "bazel-bin/external/com_google_protobuf/libprotobuf_lite.a",
            "bazel-bin/mace/ops/libops.lo",
        ]

    # Assemble the MRI script: create, addlib..., save, end.
    mri_stream = f"create {archive_path}\n"
    for component in components:
        mri_stream += f"addlib {component}\n"
    mri_stream += "save\n"
    mri_stream += "end\n"

    ndk_ar = sh.Command(
        os.environ["ANDROID_NDK_HOME"] +
        "/toolchains/aarch64-linux-android-4.9/prebuilt/linux-x86_64/" +
        "bin/aarch64-linux-android-ar")
    ndk_ar("-M", _in=mri_stream)

    print("Libs merged!\n")
示例#36
0
def build_run_throughput_test(abi,
                              serialno,
                              vlog_level,
                              run_seconds,
                              merged_lib_file,
                              model_input_dir,
                              embed_model_data,
                              input_nodes,
                              output_nodes,
                              input_shapes,
                              output_shapes,
                              cpu_model_tag,
                              gpu_model_tag,
                              dsp_model_tag,
                              phone_data_dir,
                              strip="always",
                              input_file_name="model_input"):
    """Build the model throughput test with bazel and run it on a device.

    Copies the merged library into the benchmark tree, builds
    ``model_throughput_test`` for *abi*, pushes the binary plus model
    data/input files to *phone_data_dir* on the device identified by
    *serialno*, and executes it there via adb.
    """
    six.print_("* Build and run throughput_test")

    # Only define the model-tag macros that were actually supplied.
    model_tag_build_flag = ""
    if cpu_model_tag:
        model_tag_build_flag += "--copt=-DMACE_CPU_MODEL_TAG=%s " % \
                                cpu_model_tag
    if gpu_model_tag:
        model_tag_build_flag += "--copt=-DMACE_GPU_MODEL_TAG=%s " % \
                                gpu_model_tag
    if dsp_model_tag:
        model_tag_build_flag += "--copt=-DMACE_DSP_MODEL_TAG=%s " % \
                                dsp_model_tag

    sh.cp("-f", merged_lib_file, "mace/benchmark/libmace_merged.a")
    sh.bazel("build",
             "-c",
             "opt",
             "--strip",
             strip,
             "--verbose_failures",
             "//mace/benchmark:model_throughput_test",
             "--crosstool_top=//external:android/crosstool",
             "--host_crosstool_top=@bazel_tools//tools/cpp:toolchain",
             "--cpu=%s" % abi,
             "--copt=-std=c++11",
             "--copt=-D_GLIBCXX_USE_C99_MATH_TR1",
             "--copt=-Werror=return-type",
             "--copt=-O3",
             "--define",
             "neon=true",
             "--define",
             "openmp=true",
             model_tag_build_flag,
             _fg=True)

    sh.rm("mace/benchmark/libmace_merged.a")
    sh.adb("-s", serialno, "shell", "mkdir", "-p", phone_data_dir)
    adb_push(
        "%s/%s_%s" % (model_input_dir, input_file_name, ",".join(input_nodes)),
        phone_data_dir, serialno)
    adb_push("bazel-bin/mace/benchmark/model_throughput_test", phone_data_dir,
             serialno)
    if not embed_model_data:
        # BUG FIX: the "%s/%s" format needs a 2-tuple; the original passed
        # the bare tag string, which raised TypeError at runtime.
        adb_push("codegen/models/%s/%s.data" % (cpu_model_tag, cpu_model_tag),
                 phone_data_dir, serialno)
        adb_push("codegen/models/%s/%s.data" % (gpu_model_tag, gpu_model_tag),
                 phone_data_dir, serialno)
        adb_push("codegen/models/%s/%s.data" % (dsp_model_tag, dsp_model_tag),
                 phone_data_dir, serialno)
    adb_push("third_party/nnlib/libhexagon_controller.so", phone_data_dir,
             serialno)

    sh.adb(
        "-s",
        serialno,
        "shell",
        "LD_LIBRARY_PATH=%s" % phone_data_dir,
        "MACE_CPP_MIN_VLOG_LEVEL=%s" % vlog_level,
        "MACE_RUN_PARAMETER_PATH=%s/mace_run.config" % phone_data_dir,
        "%s/model_throughput_test" % phone_data_dir,
        "--input_node=%s" % ",".join(input_nodes),
        "--output_node=%s" % ",".join(output_nodes),
        "--input_shape=%s" % ":".join(input_shapes),
        "--output_shape=%s" % ":".join(output_shapes),
        "--input_file=%s/%s" % (phone_data_dir, input_file_name),
        "--cpu_model_data_file=%s/%s.data" % (phone_data_dir, cpu_model_tag),
        "--gpu_model_data_file=%s/%s.data" % (phone_data_dir, gpu_model_tag),
        "--dsp_model_data_file=%s/%s.data" % (phone_data_dir, dsp_model_tag),
        "--run_seconds=%s" % run_seconds,
        _fg=True)

    six.print_("throughput_test done!\n")
示例#37
0
 def takeADebugPhoto(self, savePath):
     """Simulate taking a photo by copying a canned test image to *savePath*."""
     sh.cp("./apps/DeviceApp/data/testimages/5aa2867e.jpg", savePath)
示例#38
0
def package(ctx):
    '''Package bitfile with address table and file list'''

    env = ctx.obj

    ensureVivado(env)

    lTopProjPath = 'top'

    if not exists(lTopProjPath):
        secho('Vivado project does not exist. Creating the project...',
              fg='yellow')
        ctx.invoke(project)

    # Build the bitfile first if it is missing.
    lBitPath = join(lTopProjPath, 'top.runs', 'impl_1', 'top.bit')
    if not exists(lBitPath):
        secho('Bitfile does not exist. Attempting a build ...', fg='yellow')
        ctx.invoke(bitfile)

    lPkgPath = 'package'
    lSrcPath = join(lPkgPath, 'src')

    # Cleanup first
    sh.rm('-rf', lPkgPath, _out=sys.stdout)

    # Create the folders (ignore "already exists")
    try:
        os.makedirs(join(lSrcPath, 'addrtab'))
    except OSError:
        pass

    # -------------------------------------------------------------------------
    # Generate a json signature file
    import socket
    import time
    secho("Generating summary files", fg='blue')

    # -------------------------------------------------------------------------
    from .dep import hash
    lHash = ctx.invoke(hash, output=join(lSrcPath, 'hashes.txt'), verbose=True)
    # -------------------------------------------------------------------------

    # -------------------------------------------------------------------------
    lSummary = dict(env.projectConfig)
    # BUG FIX: 'time' and 'build host' values were swapped — the hostname
    # was stored under 'time' and the timestamp under 'build host'.
    lSummary.update({
        'time': time.strftime("%a, %d %b %Y %H:%M:%S +0000"),
        'build host': socket.gethostname().replace('.', '_'),
        'md5': lHash.hexdigest(),
    })

    with open(join(lSrcPath, 'summary.txt'), 'w') as lSummaryFile:
        import json
        json.dump(lSummary, lSummaryFile, indent=2)
    echo()
    # -------------------------------------------------------------------------

    # -------------------------------------------------------------------------
    # Copy bitfile and address table into the packaging area
    secho("Collecting bitfile", fg='blue')
    sh.cp('-av', lBitPath, lSrcPath, _out=sys.stdout)
    echo()

    secho("Collecting addresstable", fg='blue')
    # for addrtab in lDepFileParser.CommandList['addrtab']:
    for addrtab in env.depParser.CommandList['addrtab']:
        sh.cp('-av',
              addrtab.FilePath,
              join(lSrcPath, 'addrtab'),
              _out=sys.stdout)
    echo()
    # -------------------------------------------------------------------------

    # -------------------------------------------------------------------------
    # Tar everything up
    secho("Generating tarball", fg='blue')
    lTgzBaseName = '{name}_{host}_{time}'.format(
        name=env.projectConfig['name'],
        host=socket.gethostname().replace('.', '_'),
        time=time.strftime('%y%m%d_%H%M'))
    lTgzPath = join(lPkgPath, lTgzBaseName + '.tgz')

    # Zip everything; --transform renames the top-level src/ folder inside
    # the archive to the versioned package name.
    sh.tar('cvfz',
           abspath(lTgzPath),
           '-C',
           lPkgPath,
           '--transform',
           's/^src/' + lTgzBaseName + '/',
           'src',
           _out=sys.stdout)
    echo()

    echo("Package " + style('%s' % lTgzPath, fg='green') +
         " successfully created.")
def make_source_inversion_directory(iter_number, inversion_directory,
                                    cmtfiles_directory, ref_directory,
                                    data_directory, windows_directory,
                                    data_info_directory, stations_path,
                                    raw_sync_directory, manual_mv):
    """
    Set up the working tree for one source-inversion iteration and return
    the paths of all the directories it uses.
    """
    def _iter_dir(parent, suffix):
        # Per-iteration sub-directory under `parent`.
        return join(inversion_directory, parent, f"iter{iter_number}_{suffix}")

    # Fixed (iteration-independent) structure used by specfem.
    specfem_base = join(inversion_directory, "simulation")
    specfem_cmtfiles = join(inversion_directory, "cmts",
                            "cmtsolutions_current_step")
    specfem_ref = join(inversion_directory, "ref")
    specfem_output = join(inversion_directory, "output")
    specfem_database = join(inversion_directory, "database")
    specfem_stations_adjoint = join(inversion_directory, "stations_adjoint")
    data_processed = join(inversion_directory, "data_processed")
    windows = join(inversion_directory, "windows")
    data_info = join(inversion_directory, "data_info")
    stations = join(inversion_directory, "stations")
    raw_sync = join(inversion_directory, "syncs", "raw_sync")
    raw_cmtfiles = join(inversion_directory, "cmts", "raw_cmtsolutions")

    # Directories that are created anew for every iteration.
    iter_green1_cmt = _iter_dir("cmts", "green1_cmtsolutions")
    iter_green1_sync = _iter_dir("syncs", "green1_sync")
    iter_conv1_processed = _iter_dir("syncs", "conv1_processed_sync")
    iter_misfit_windows = _iter_dir("misfit_windows", "misfit_windows")
    iter_adjoint_source = _iter_dir("adjoint_sources", "adjoint_sources")
    iter_src_frechets = _iter_dir("src_frechets", "src_frechets")
    iter_green2_cmt = _iter_dir("cmts", "green2_cmtsolutions")
    iter_green2_sync = _iter_dir("syncs", "green2_sync")
    iter_next_cmt = _iter_dir("cmts", "next_cmtsolutions")

    if int(iter_number) == 1:
        # First iteration: create the base tree and link in the inputs.
        sh.mkdir("-p", specfem_base)
        sh.mkdir("-p", specfem_stations_adjoint)
        sh.mkdir("-p", join(inversion_directory, "cmts"))
        sh.mkdir("-p", join(inversion_directory, "syncs"))
        sh.ln("-s", stations_path, stations)
        sh.cp("-r", cmtfiles_directory, raw_cmtfiles)
        sh.cp("-r", raw_cmtfiles, specfem_cmtfiles)
        sh.ln("-s", ref_directory, specfem_ref)
        sh.mkdir("-p", specfem_output)
        sh.mkdir("-p", specfem_database)
        for link_source, link_target in ((data_directory, data_processed),
                                         (windows_directory, windows),
                                         (data_info_directory, data_info),
                                         (raw_sync_directory, raw_sync)):
            sh.ln("-s", link_source, link_target)
    elif (int(iter_number) > 1) and (not manual_mv):
        # Later iterations: replace the current-step CMT solutions with
        # the "next" solutions produced by the previous iteration.
        sh.rm("-rf", specfem_cmtfiles)
        iter_last_cmt = join(inversion_directory, "cmts",
                             f"iter{int(iter_number)-1}_next_cmtsolutions")
        sh.cp("-r", iter_last_cmt, specfem_cmtfiles)

    for iteration_dir in (iter_green1_cmt, iter_green1_sync,
                          iter_conv1_processed, iter_misfit_windows,
                          iter_adjoint_source, iter_src_frechets,
                          iter_green2_cmt, iter_green2_sync, iter_next_cmt):
        sh.mkdir("-p", iteration_dir)

    return (specfem_base, specfem_stations_adjoint, specfem_cmtfiles,
            specfem_ref, specfem_output, specfem_database, data_processed,
            windows, data_info, stations, raw_sync, iter_green1_cmt,
            iter_green1_sync, iter_conv1_processed, iter_misfit_windows,
            iter_adjoint_source, iter_src_frechets, iter_green2_cmt,
            iter_green2_sync, iter_next_cmt)
示例#40
0
async def setup(reset_factory=False, force=False):
    """Interactive first-run installer entry point.

    Args:
        reset_factory: restore factory-default settings before configuring
        force: run setup even if it already completed on this machine

    Returns:
        None
    """
    msg = """
    Zillionare-omega (大富翁)\\n
    -------------------------\\n
    感谢使用Zillionare-omega -- 高速分布式行情服务器!\\n
    """

    print(format_msg(msg))

    # A defaults.yaml in the config dir marks a previous successful run.
    if not force:
        cfg_path = os.path.join(get_config_dir(), "defaults.yaml")
        if os.path.exists(cfg_path):
            print(f"{colored('[PASS]', 'green')} 安装程序已在本机上成功运行")
            sys.exit(0)

    if reset_factory:
        import sh

        os.makedirs(get_config_dir(), exist_ok=True)
        factory_defaults = os.path.join(factory_config_dir(), "defaults.yaml")
        local_defaults = os.path.join(get_config_dir(), "defaults.yaml")
        sh.cp("-r", factory_defaults, local_defaults)

    print_title("Step 1. 检测安装环境...")
    settings = load_factory_settings()

    if not check_environment():
        sys.exit(-1)

    print_title("Step 2. 配置日志")
    config_logging(settings)
    print_title("Step 3. 配置上游服务器")
    config_fetcher(settings)
    print_title("Step 4. 配置Redis服务器")
    await config_redis(settings)
    print_title("Step 5. 配置Postgres服务器")
    await config_postgres(settings)
    save_config(settings)

    print_title("Step 6. 下载历史数据")
    cfg4py.init(get_config_dir(), False)
    remove_console_log_handler()

    await start("fetcher")
    await download_archive(None)

    print_title("配置已完成。现在为您启动Omega,开启财富之旅!")

    await start("jobs")
    await status()
示例#41
0
# Actually make the zip

# Create the staging folders used to organize the package contents
for staging_folder in (FAB_FOLDER, ASSEM_FOLDER, IMAGE_FOLDER):
    mkdir(staging_folder)

# Extract the existing zips into the staging folders so their internal
# layout is reused instead of replicated here
unzip(fab_zip, '-d', FAB_FOLDER)
unzip(assem_zip, '-d', ASSEM_FOLDER)

# Stage the images alongside them
for jpg in jpgs:
    cp(jpg, IMAGE_FOLDER)

# Collect the staged filenames
fab_files = glob.glob(f'{FAB_FOLDER}/*')
assem_files = glob.glob(f'{ASSEM_FOLDER}/*')
image_files = glob.glob(f'{IMAGE_FOLDER}/*')

# Everything that goes into the final archive, output name first
combined = [output_name] + schs + brds + pdfs + dxfs + infos + boms + \
                           fab_files + assem_files + image_files

sh.zip(*combined)

# Remove the staging folders now that the archive is built
for staging_folder in (FAB_FOLDER, ASSEM_FOLDER, IMAGE_FOLDER):
    rm('-rf', staging_folder)
 def prebuild_arch(self, arch):
     """Copy the bundled pkg_resources.py into the build's site-packages dir."""
     sh.cp("pkg_resources.py",
         join(self.ctx.site_packages_dir, "pkg_resources.py"))
示例#43
0
def individual_pfabric_run(args):
    """Prepare, run and post-process one pfabric+horovod ns-3 simulation.

    Args:
        args: (index, config) tuple — index staggers the launch time,
              config carries every simulation parameter.
    """
    print(args)
    utilization_interval_ns = ("100ms", 100000000)
    index, config = args
    # Stagger parallel launches by 30 s per run index.
    time.sleep(index * 30)
    waf_bin_path = f"{ns3_basic_sim_dir}/simulator"
    main_program = "main_pfabric_flows_horovod"
    program_name = "pfabric_flows_horovod"
    new_run_dir = (
        f"{program_name}_"
        f"{utilization_interval_ns[0]}_"
        f"arrival_{config.c_flow_arr_rate}_"
        f"runhvd_{config.c_run_horovod}_"
        f"hrvprio_{config.c_horovod_prio}_"
        f"num_hvd_{config.c_hrvd_specifics.num_workers}_"
        f"link_bw_{config.c_link_bw_Mbits/1000}Gbit_"
        f"run_idx_{config.c_run_idx}_"
        f"ctn_{config.hrvd_expected_compute_to_network_ratio}_"
        f"pftohrvd_{config.expected_pfabric_to_hrvd_load_ratio}_"
        f"{curr_date}")

    new_run_dir_path_abs = f"{ns3_basic_sim_dir}/runs/{new_run_dir}"
    sh.mkdir(f"{new_run_dir_path_abs}")

    # Copy the template config/topology files into the run directory.
    new_config_file = f"{new_run_dir_path_abs}/config_ns3.properties"
    sh.cp(f"{config_file}", new_config_file)

    new_hrvd_config_file = f"{new_run_dir_path_abs}/horovod.properties"
    sh.cp(f"{hrvd_config_file}", new_hrvd_config_file)

    new_topology_file = f"{new_run_dir_path_abs}/topology_single.properties"
    sh.cp(topology_file, new_topology_file)

    # Total nodes in the topology is num_workers + one additional ToR.
    generate_leaf_tor_topology(config.c_hrvd_specifics.num_workers + 1,
                               new_topology_file)

    # Fill the [PLACEHOLDER] tokens in config_ns3.properties in place.
    ns3_substitutions = (
        ("ARRIVAL\\-RATE", config.c_flow_arr_rate),
        ("UTILIZATION\\-INTERVAL", utilization_interval_ns[1]),
        ("SIMULATION\\-TIME\\-NS", config.c_simulation_ns),
        ("LINK\\-DATARATE\\-MBITS", config.c_link_bw_Mbits),
        ("PRIORITY\\-HEX", config.c_horovod_prio),
        ("RUN\\-HOROVOD", config.c_run_horovod),
        ("MASTER\\-SEED", config.c_master_seed),
    )
    for token, value in ns3_substitutions:
        sh.sed("-i", f"s/\\[{token}\\]/{value}/g", f"{new_config_file}")

    # Fill the [PLACEHOLDER] tokens in horovod.properties in place.
    hrvd_substitutions = (
        ("NUM\\-WORKER", config.c_hrvd_specifics.num_workers),
        ("NUM\\-LAYERS", config.c_hrvd_specifics.num_layers),
        ("FUSION\\-SIZE", config.c_hrvd_specifics.fusion_buffer_size),
        ("MODEL\\-SIZE", config.c_hrvd_specifics.model_size_in_byte/(10**6)),
        ("ITER\\-TIME", config.c_hrvd_specifics.iteration_time_ms),
    )
    for token, value in hrvd_substitutions:
        sh.sed("-i", f"s/\\[{token}\\]/{value}/g", f"{new_hrvd_config_file}")

    # Write the per-layer sizes and compute-time tables for this run.
    layer_size_file = f"{new_run_dir_path_abs}/layer_weight_model_size_{config.c_hrvd_specifics.model_size_in_byte/(10**6)}MB.csv"
    fp_compute_time_file = f"{new_run_dir_path_abs}/fp_compute_iter_time_{config.c_hrvd_specifics.iteration_time_ms}_ms.csv"
    bp_compute_time_file = f"{new_run_dir_path_abs}/bp_compute_iter_time_{config.c_hrvd_specifics.iteration_time_ms}_ms.csv"
    write_to_csv(config.c_hrvd_specifics.layer_size, layer_size_file)
    write_to_csv(config.c_hrvd_specifics.fp_compute_time, fp_compute_time_file)
    write_to_csv(config.c_hrvd_specifics.bp_compute_time, bp_compute_time_file)

    schedule_file_name = f"schedule_{config.c_flow_arr_rate}.csv"

    # Write the pfabric flow schedule for all worker servers.
    servers = list(range(config.c_hrvd_specifics.num_workers))
    generate_pfabric_flows(f"{new_run_dir_path_abs}/{schedule_file_name}",
                           servers,
                           config.c_flow_arr_rate,
                           config.c_simulation_ns,
                           config.c_link_bw_Mbits,
                           config.c_master_seed)

    # Run the simulation itself.
    run_pfabric_horovod_simulation(waf_bin_path, main_program,
                                   new_run_dir_path_abs)

    # Plot link utilization (num_workers + 1 nodes including the ToR).
    sh.cd(f"{utilization_plot_dir}")
    plot_link_utilization_single_tor(f"{new_run_dir_path_abs}/logs_ns3",
                                     config.c_hrvd_specifics.num_workers + 1)

    # Plot horovod progress and priority samples for every worker.
    for worker_id in range(config.c_hrvd_specifics.num_workers):
        progress_file_abs_path = f"{new_run_dir_path_abs}/logs_ns3/HorovodWorker_{worker_id}_layer_50_port_1024_progress.txt"
        HorovodWorkerProgress(progress_file_abs_path, True)

    sh.cd(f"{config_dir}")
示例#44
0
def convert_func(flags):
    """Convert the models listed in the YAML config into deployable form.

    Parses the config, (re)creates the build output directories, runs the
    converter and encryptor for every model, then moves/copies the
    resulting .pb/.data/header files into the library output tree.
    Finally builds the model library when graph format is `code`.

    Args:
        flags: parsed command-line flags (config path, format overrides,
            quantize_stat, enable_micro, address_sanitizer, debug_mode).
    """
    configs = config_parser.parse(flags.config)

    print(configs)
    library_name = configs[YAMLKeyword.library_name]
    # Reset this library's output directory; keep other libraries intact.
    if not os.path.exists(BUILD_OUTPUT_DIR):
        os.makedirs(BUILD_OUTPUT_DIR)
    elif os.path.exists(os.path.join(BUILD_OUTPUT_DIR, library_name)):
        sh.rm("-rf", os.path.join(BUILD_OUTPUT_DIR, library_name))
    os.makedirs(os.path.join(BUILD_OUTPUT_DIR, library_name))
    if not os.path.exists(BUILD_DOWNLOADS_DIR):
        os.makedirs(BUILD_DOWNLOADS_DIR)

    model_output_dir = \
        '%s/%s/%s' % (BUILD_OUTPUT_DIR, library_name, MODEL_OUTPUT_DIR_NAME)
    model_header_dir = \
        '%s/%s/%s' % (BUILD_OUTPUT_DIR, library_name, MODEL_HEADER_DIR_PATH)
    # clear output dir
    if os.path.exists(model_output_dir):
        sh.rm("-rf", model_output_dir)
    os.makedirs(model_output_dir)
    if os.path.exists(model_header_dir):
        sh.rm("-rf", model_header_dir)

    # Clear previously generated code as well.
    if os.path.exists(MODEL_CODEGEN_DIR):
        sh.rm("-rf", MODEL_CODEGEN_DIR)
    if os.path.exists(ENGINE_CODEGEN_DIR):
        sh.rm("-rf", ENGINE_CODEGEN_DIR)

    if flags.quantize_stat:
        configs[YAMLKeyword.quantize_stat] = flags.quantize_stat

    # Command-line flags override the YAML data/graph format settings.
    if flags.model_data_format:
        model_data_format = flags.model_data_format
    else:
        model_data_format = configs.get(YAMLKeyword.model_data_format,
                                        "file")
    embed_model_data = model_data_format == ModelFormat.code

    if flags.model_graph_format:
        model_graph_format = flags.model_graph_format
    else:
        model_graph_format = configs.get(YAMLKeyword.model_graph_format,
                                         "file")
    embed_graph_def = model_graph_format == ModelFormat.code
    # Micro models require `file` mode for both graph and data.
    if flags.enable_micro:
        mace_check((not embed_model_data) and (not embed_graph_def),
                   ModuleName.YAML_CONFIG,
                   "You should specify file mode to convert micro model.")
    if embed_graph_def:
        # Generate the engine-factory source and export its headers.
        os.makedirs(model_header_dir)
        sh_commands.gen_mace_engine_factory_source(
            configs[YAMLKeyword.models].keys(),
            embed_model_data)
        sh.cp("-f", glob.glob("mace/codegen/engine/*.h"),
              model_header_dir)

    convert.convert(configs, MODEL_CODEGEN_DIR, flags.enable_micro)

    for model_name, model_config in configs[YAMLKeyword.models].items():
        # Micro conversion only supports fp32_fp32 / bf16_fp32 data types.
        if flags.enable_micro:
            data_type = model_config.get(YAMLKeyword.data_type, "")
            mace_check(data_type == FPDataType.fp32_fp32.value or
                       data_type == FPDataType.bf16_fp32.value,
                       ModuleName.YAML_CONFIG,
                       "You should specify fp32_fp32 or bf16_fp32 data type "
                       "to convert micro model.")
        model_codegen_dir = "%s/%s" % (MODEL_CODEGEN_DIR, model_name)
        # Obfuscate/encrypt the converted model (obfuscation on by default).
        encrypt.encrypt(model_name,
                        "%s/model/%s.pb" % (model_codegen_dir, model_name),
                        "%s/model/%s.data" % (model_codegen_dir, model_name),
                        model_codegen_dir,
                        bool(model_config.get(YAMLKeyword.obfuscate, 1)),
                        model_graph_format == "code",
                        model_data_format == "code")

        if model_graph_format == ModelFormat.file:
            # File mode: ship the .pb/.data (and micro tarball) directly.
            sh.mv("-f",
                  '%s/model/%s.pb' % (model_codegen_dir, model_name),
                  model_output_dir)
            sh.mv("-f",
                  '%s/model/%s.data' % (model_codegen_dir, model_name),
                  model_output_dir)
            if flags.enable_micro:
                sh.mv("-f", '%s/model/%s_micro.tar.gz' %
                      (model_codegen_dir, model_name), model_output_dir)
        else:
            # Code mode: ship generated headers; .data only if not embedded.
            if not embed_model_data:
                sh.mv("-f",
                      '%s/model/%s.data' % (model_codegen_dir, model_name),
                      model_output_dir)

            sh.cp("-f", glob.glob("mace/codegen/models/*/code/*.h"),
                  model_header_dir)

        MaceLogger.summary(
            StringFormatter.block("Model %s converted" % model_name))

    if model_graph_format == ModelFormat.code:
        build_model_lib(configs, flags.address_sanitizer, flags.debug_mode)

    print_library_summary(configs)
示例#45
0
文件: converter.py 项目: yyqgood/mace
def convert_func(flags):
    """Convert the models listed in the YAML config into deployable form.

    Variant without micro-model support: parses the config, resets the
    build output directories, converts and encrypts every model, moves
    the outputs into the library tree, and builds the model library when
    graph format is `code`.

    Args:
        flags: parsed command-line flags (config path, format overrides,
            address_sanitizer, debug_mode).
    """
    configs = config_parser.parse(flags.config)
    print(configs)
    library_name = configs[YAMLKeyword.library_name]
    # Reset this library's output directory; keep other libraries intact.
    if not os.path.exists(BUILD_OUTPUT_DIR):
        os.makedirs(BUILD_OUTPUT_DIR)
    elif os.path.exists(os.path.join(BUILD_OUTPUT_DIR, library_name)):
        sh.rm("-rf", os.path.join(BUILD_OUTPUT_DIR, library_name))
    os.makedirs(os.path.join(BUILD_OUTPUT_DIR, library_name))
    if not os.path.exists(BUILD_DOWNLOADS_DIR):
        os.makedirs(BUILD_DOWNLOADS_DIR)

    model_output_dir = \
        '%s/%s/%s' % (BUILD_OUTPUT_DIR, library_name, MODEL_OUTPUT_DIR_NAME)
    model_header_dir = \
        '%s/%s/%s' % (BUILD_OUTPUT_DIR, library_name, MODEL_HEADER_DIR_PATH)
    # clear output dir
    if os.path.exists(model_output_dir):
        sh.rm("-rf", model_output_dir)
    os.makedirs(model_output_dir)
    if os.path.exists(model_header_dir):
        sh.rm("-rf", model_header_dir)

    # Clear previously generated code as well.
    if os.path.exists(MODEL_CODEGEN_DIR):
        sh.rm("-rf", MODEL_CODEGEN_DIR)
    if os.path.exists(ENGINE_CODEGEN_DIR):
        sh.rm("-rf", ENGINE_CODEGEN_DIR)

    # Command-line flags override the YAML data/graph format settings.
    if flags.model_data_format:
        model_data_format = flags.model_data_format
    else:
        model_data_format = configs.get(YAMLKeyword.model_data_format, "file")
    embed_model_data = model_data_format == ModelFormat.code

    if flags.model_graph_format:
        model_graph_format = flags.model_graph_format
    else:
        model_graph_format = configs.get(YAMLKeyword.model_graph_format,
                                         "file")
    if model_graph_format == ModelFormat.code:
        # Generate the engine-factory source and export its headers.
        os.makedirs(model_header_dir)
        sh_commands.gen_mace_engine_factory_source(
            configs[YAMLKeyword.models].keys(), embed_model_data)
        sh.cp("-f", glob.glob("mace/codegen/engine/*.h"), model_header_dir)

    convert.convert(configs, MODEL_CODEGEN_DIR)

    for model_name, model_config in configs[YAMLKeyword.models].items():
        model_codegen_dir = "%s/%s" % (MODEL_CODEGEN_DIR, model_name)
        # Obfuscate/encrypt the converted model (obfuscation on by default).
        encrypt.encrypt(
            model_name, "%s/model/%s.pb" % (model_codegen_dir, model_name),
            "%s/model/%s.data" % (model_codegen_dir, model_name),
            config_parser.parse_device_type(model_config[YAMLKeyword.runtime]),
            model_codegen_dir, bool(model_config.get(YAMLKeyword.obfuscate,
                                                     1)),
            model_graph_format == "code", model_data_format == "code")

        if model_graph_format == ModelFormat.file:
            # File mode: ship the .pb/.data directly.
            sh.mv("-f", '%s/model/%s.pb' % (model_codegen_dir, model_name),
                  model_output_dir)
            sh.mv("-f", '%s/model/%s.data' % (model_codegen_dir, model_name),
                  model_output_dir)
        else:
            # Code mode: ship generated headers; .data only if not embedded.
            if not embed_model_data:
                sh.mv("-f",
                      '%s/model/%s.data' % (model_codegen_dir, model_name),
                      model_output_dir)

            sh.cp("-f", glob.glob("mace/codegen/models/*/code/*.h"),
                  model_header_dir)

        MaceLogger.summary(
            StringFormatter.block("Model %s converted" % model_name))

    if model_graph_format == ModelFormat.code:
        build_model_lib(configs, flags.address_sanitizer, flags.debug_mode)

    print_library_summary(configs)
示例#46
0
文件: vivado.py 项目: kouhanjun/ipbb
def package(ictx, aTag):
    '''Package bitfile with address table and file list

    Stages the bitfile, any memory-configuration files, optional debug
    probes (.ltx) and the address tables into ``package/src`` together
    with hash/summary metadata, then wraps everything into a timestamped
    ``.tgz`` under ``package/``.

    Args:
        ictx: Build context (project, Vivado settings, dependency parser).
        aTag: Optional tag inserted into the tarball name; may be None.
    '''

    ensureVivado(ictx)

    # Create the Vivado project on the fly if it does not exist yet.
    if not exists(ictx.vivadoProjFile):
        cprint('Vivado project does not exist. Creating the project...',
               style='yellow')
        genproject(ictx, True, True, None, False)

    # NOTE(review): lProjName and lTopEntity appear unused below — confirm
    # before removing.
    lProjName = ictx.currentproj.name
    lDepFileParser = ictx.depParser
    lTopEntity = lDepFileParser.settings.get('top_entity', kTopEntity)

    lBaseName = ictx.vivadoProdFileBase
    lBitPath = lBaseName + '.bit'
    # Trigger a full build if the bitfile is missing.
    if not exists(lBitPath):
        cprint('Bitfile does not exist. Starting a build ...', style='yellow')
        bitfile(ictx)

    # Regenerate memory-configuration files if the project requests them and
    # any are missing; a missing 'vivado' settings section simply means there
    # are none to package.
    try:
        lVivadoCfg = lDepFileParser.settings['vivado']
        lActiveMemCfgs = [
            k for k, o in _memCfgKinds.items() if o in lVivadoCfg
        ]
        lMemCfgFiles = [lBaseName + '.' + k for k in lActiveMemCfgs]

        if any([not exists(f) for f in lMemCfgFiles]):
            memcfg(ictx)
    except KeyError as e:
        lMemCfgFiles = []

    # Debug probes are optional; skip them when no .ltx file was produced.
    lDebugProbesPath = lBaseName + '.ltx'
    if not os.path.exists(lDebugProbesPath):
        lDebugProbesPath = None

    lPkgPath = 'package'
    lPkgSrcPath = join(lPkgPath, 'src')

    # Cleanup first
    sh.rm('-rf', lPkgPath, _out=sys.stdout)

    # Create the folders
    try:
        os.makedirs(join(lPkgSrcPath, 'addrtab'))
    except OSError:
        # Directory already exists — nothing to do.
        pass

    # -------------------------------------------------------------------------
    # Generate a json signature file

    console.log("Generating summary files", style='blue')

    # -------------------------------------------------------------------------

    # 'hash' is the project-level hash command (shadows the builtin here).
    lHash = hash(ictx, output=join(lPkgSrcPath, 'hashes.txt'), verbose=True)
    # -------------------------------------------------------------------------

    # -------------------------------------------------------------------------
    # Summary: project settings plus build host, timestamp and source hash.
    lSummary = dict(ictx.currentproj.settings)
    lSummary.update({
        'build host': socket.gethostname().replace('.', '_'),
        'time': time.strftime("%a, %d %b %Y %H:%M:%S +0000"),
        'md5': lHash.hexdigest(),
    })

    with open(join(lPkgSrcPath, 'summary.txt'), 'w') as lSummaryFile:
        yaml.safe_dump(lSummary,
                       lSummaryFile,
                       indent=2,
                       default_flow_style=False)
    # -------------------------------------------------------------------------

    # -------------------------------------------------------------------------
    # Copy bitfile, memcfg, and address table into the packaging area
    console.log("Collecting bitfile", style='blue')
    sh.cp('-av', lBitPath, lPkgSrcPath, _out=sys.stdout)

    for f in lMemCfgFiles:
        console.log("Collecting memcfg {}".format(f), style='blue')
        sh.cp('-av', f, lPkgSrcPath, _out=sys.stdout)

    if lDebugProbesPath:
        console.log("Collecting debug-probes file", style='blue')
        sh.cp('-av', lDebugProbesPath, lPkgSrcPath, _out=sys.stdout)

    console.log("Collecting address tables", style='blue')
    # -L dereferences symlinks so real files end up in the package.
    for addrtab in ictx.depParser.commands['addrtab']:
        sh.cp('-avL',
              addrtab.filepath,
              join(lPkgSrcPath, 'addrtab'),
              _out=sys.stdout)
    # -------------------------------------------------------------------------

    # -------------------------------------------------------------------------
    # Tar everything up
    console.log("Creating tarball", style='blue')

    # Tarball name: <project>[_<tag>]_<host>_<yymmdd_HHMM>.tgz
    lTgzBaseName = '_'.join(
        [ictx.currentproj.settings['name']] +
        ([aTag] if aTag is not None else []) +
        [socket.gethostname().replace('.', '_'),
         time.strftime('%y%m%d_%H%M')])
    lTgzPath = join(lPkgPath, lTgzBaseName + '.tgz')

    # Zip everything; --transform renames the top-level 'src' folder to the
    # tarball base name inside the archive.
    sh.tar(
        'cvfz',
        abspath(lTgzPath),
        '-C',
        lPkgPath,
        '--transform',
        's|^src|' + lTgzBaseName + '|',
        'src',
        _out=sys.stdout,
    )

    console.log(
        f"Package {lTgzPath} successfully created.",
        style='green',
    )
示例#47
0
def gen_input(model_output_dir,
              input_nodes,
              input_shapes,
              input_files=None,
              input_ranges=None,
              input_data_types=None,
              input_data_map=None,
              input_file_name="model_input"):
    """Create one input data file per model input node under *model_output_dir*.

    Data sources, in priority order:
      1. *input_data_map*: dict mapping input names to array-like data,
         written with the dtype given in *input_data_types*.
      2. *input_files*: local paths or http(s) URLs copied/downloaded into
         place, matched positionally with *input_nodes*.
      3. Otherwise random data is generated from *input_shapes*,
         *input_ranges* and *input_data_types*.

    Args:
        model_output_dir: directory receiving the generated files.
        input_nodes: list of model input tensor names.
        input_shapes: list of shape strings (e.g. "1,224,224,3").
        input_files: optional file path/URL, or list thereof.
        input_ranges: list of range strings for random data generation.
        input_data_types: list of dtype names ('float32', 'float16',
            'bfloat16' or 'int32').
        input_data_map: optional dict of pre-computed input arrays.
        input_file_name: base name for the generated files.
    """
    # Remove stale input files from previous runs.
    for input_name in input_nodes:
        formatted_name = common.formatted_file_name(
            input_file_name, input_name)
        if os.path.exists("%s/%s" % (model_output_dir, formatted_name)):
            sh.rm("%s/%s" % (model_output_dir, formatted_name))
    input_file_list = []
    if isinstance(input_files, list):
        input_file_list.extend(input_files)
    elif input_files is not None:
        # Bug fix: previously a bare None was appended here, making the list
        # non-empty and diverting execution into the file-copy branch (or
        # failing its length check) instead of falling through to the
        # random-input generation below.
        input_file_list.append(input_files)
    if input_data_map:
        for i in range(len(input_nodes)):
            dst_input_file = model_output_dir + '/' + \
                             common.formatted_file_name(input_file_name,
                                                        input_nodes[i])
            input_name = input_nodes[i]
            common.mace_check(input_name in input_data_map,
                              common.ModuleName.RUN,
                              "The preprocessor API in PrecisionValidator"
                              " script should return all inputs of model")
            # float16/bfloat16 inputs are serialized as float32 on disk.
            if input_data_types[i] == 'float32' or \
                    input_data_types[i] == 'float16' or \
                    input_data_types[i] == 'bfloat16':
                input_data = np.array(input_data_map[input_name],
                                      dtype=np.float32)
            elif input_data_types[i] == 'int32':
                input_data = np.array(input_data_map[input_name],
                                      dtype=np.int32)
            else:
                common.mace_check(
                    False,
                    common.ModuleName.RUN,
                    'Do not support input data type %s' % input_data_types[i])
            # Validate the provided data against the declared shape.
            common.mace_check(
                list(map(int, common.split_shape(input_shapes[i])))
                == list(input_data.shape),
                common.ModuleName.RUN,
                "The shape return from preprocessor API of"
                " PrecisionValidator script is not same with"
                " model deployment file. %s vs %s"
                % (str(input_shapes[i]), str(input_data.shape)))
            input_data.tofile(dst_input_file)
    elif len(input_file_list) != 0:
        input_name_list = []
        if isinstance(input_nodes, list):
            input_name_list.extend(input_nodes)
        else:
            input_name_list.append(input_nodes)
        common.mace_check(len(input_file_list) == len(input_name_list),
                          common.ModuleName.RUN,
                          'If input_files set, the input files should '
                          'match the input names.')
        for i in range(len(input_file_list)):
            if input_file_list[i] is not None:
                dst_input_file = model_output_dir + '/' + \
                                 common.formatted_file_name(input_file_name,
                                                            input_name_list[i])
                # URLs are downloaded; local paths are force-copied.
                if input_file_list[i].startswith("http://") or \
                        input_file_list[i].startswith("https://"):
                    six.moves.urllib.request.urlretrieve(input_file_list[i],
                                                         dst_input_file)
                else:
                    sh.cp("-f", input_file_list[i], dst_input_file)
    else:
        # Generate random input files from the declared shapes/ranges/types.
        input_nodes_str = ",".join(input_nodes)
        input_shapes_str = ":".join(input_shapes)
        input_ranges_str = ":".join(input_ranges)
        input_data_types_str = ",".join(input_data_types)
        generate_input_data("%s/%s" % (model_output_dir, input_file_name),
                            input_nodes_str,
                            input_shapes_str,
                            input_ranges_str,
                            input_data_types_str)
示例#48
0
    def run(self, binaries_path):
        """Copy platform-specific binaries from *binaries_path* into the bundle.

        Dispatches on the host platform (macOS / Windows / Linux): runtime
        libraries go into the bundle's ``lib`` dir, OpenVPN and helper
        binaries into the platform's app/resource folders, and finally the
        gpg binary into the mail app dir.
        """
        self.log("copying binaries...")
        dest_lib_dir = platform_dir(self._basedir, "lib")

        if IS_MAC:
            # Qt frameworks, dylibs and the Python runtime go into lib/;
            # OpenVPN and helper tools into the .app Resources folder.
            cp(glob(os.path.join(binaries_path, "Qt*")), dest_lib_dir)
            cp(glob(os.path.join(binaries_path, "*.dylib")), dest_lib_dir)
            cp(glob(os.path.join(binaries_path, "Python")), dest_lib_dir)
            resources_dir = os.path.join(self._basedir, "Bitmask",
                                         "Bitmask.app", "Contents",
                                         "Resources")
            cp(glob(os.path.join(binaries_path, "openvpn.leap*")),
               resources_dir)

            mkdir("-p", os.path.join(resources_dir, "openvpn"))
            cp("-r", glob(os.path.join(binaries_path, "openvpn.files", "*")),
               os.path.join(resources_dir, "openvpn"))

            # cocoasudo is the macOS privilege-escalation helper.
            cp(os.path.join(binaries_path, "cocoasudo"), resources_dir)

            cp("-r", os.path.join(binaries_path, "qt_menu.nib"), resources_dir)
            cp("-r", os.path.join(binaries_path, "tuntap-installer.app"),
               resources_dir)
            cp(os.path.join(binaries_path, "Bitmask"),
               platform_dir(self._basedir))
        elif IS_WIN:
            # Windows paths must be converted before being handed to cp.
            root = _convert_path_for_win(os.path.join(self._basedir,
                                                      "Bitmask"))
            for i in glob(os.path.join(binaries_path, "*.dll")):
                cp(_convert_path_for_win(i), root)
            # win32com's "ext" shell extension directory is located relative
            # to the installed win32com package.
            import win32com
            win32comext_path = os.path.split(win32com.__file__)[0] + "ext"
            shell_path = os.path.join(win32comext_path, "shell")
            cp("-r", _convert_path_for_win(shell_path),
               _convert_path_for_win(os.path.join(dest_lib_dir, "win32com")))
            cp(
                _convert_path_for_win(
                    os.path.join(binaries_path, "bitmask.exe")), root)
            cp(
                _convert_path_for_win(
                    os.path.join(binaries_path,
                                 "Microsoft.VC90.CRT.manifest")), root)
            # OpenVPN binary, manifest and TAP driver go under apps/eip.
            cp(
                _convert_path_for_win(
                    os.path.join(binaries_path, "openvpn_leap.exe")),
                _convert_path_for_win(os.path.join(root, "apps", "eip")))
            cp(
                _convert_path_for_win(
                    os.path.join(binaries_path, "openvpn_leap.exe.manifest")),
                _convert_path_for_win(os.path.join(root, "apps", "eip")))
            cp(
                "-r",
                _convert_path_for_win(os.path.join(binaries_path,
                                                   "tap_driver")),
                _convert_path_for_win(os.path.join(root, "apps", "eip")))
        else:
            # Linux: shared objects plus the non-ubuntu Qt libraries.
            cp(glob(os.path.join(binaries_path, "*.so*")), dest_lib_dir)
            cp(glob(os.path.join(binaries_path, "libQt*.non-ubuntu")),
               dest_lib_dir)

            eip_dir = platform_dir(self._basedir, "apps", "eip")
            # cp(os.path.join(binaries_path, "openvpn"), eip_dir)

            cp("-r", glob(os.path.join(binaries_path, "openvpn.files", "*")),
               os.path.join(eip_dir, "files"))
            cp(os.path.join(binaries_path, "bitmask"),
               platform_dir(self._basedir))

        # gpg is copied on every platform; _convert_path_for_win is a no-op
        # outside Windows (presumably — confirm against its definition).
        mail_dir = platform_dir(self._basedir, "apps", "mail")
        cp(_convert_path_for_win(os.path.join(binaries_path, "gpg")),
           _convert_path_for_win(mail_dir))
        self.log("done.")
示例#49
0
def cp_dir(src, desc):
    """Recursively copy the directory *src* to the destination *desc* (``cp -r``)."""
    copy_args = ("-r", src, desc)
    sh.cp(*copy_args)
示例#50
0
文件: ipbus.py 项目: jhegeman/ipbb
def gendecoders(ictx, aCheckUpToDate, aForce):
    """Regenerate ipbus address decoders and sync changed ones into the tree.

    Gathers the project's address tables into a scratch ``decoders``
    directory, runs ``gen_ipbus_addr_decode`` on every top-level table,
    diffs each newly generated decoder against the checked-in copy, and
    (after confirmation) copies the changed ones back into place.

    Args:
        ictx: build context (project path, dependency parser, path maker).
        aCheckUpToDate: when True, exit with an error if any decoder is
            stale instead of updating it (CI check mode).
        aForce: when True, update without asking for confirmation.

    Raises:
        click.ClickException: if the generator script is not on PATH.
        SystemExit: on generation failure, or in check mode when outdated
            decoders are found.
    """

    lDecodersDir = 'decoders'

    # Start from a clean scratch area populated with the address tables.
    with DirSentry(ictx.currentproj.path):
        sh.rm('-rf', lDecodersDir)
        # Gather address tables
        addrtab(ictx, aDest=lDecodersDir)

    lGenScript = 'gen_ipbus_addr_decode'

    if not which(lGenScript):
        raise click.ClickException("'{0}' script not found.".format(lGenScript))

    cprint(f"Using {which(lGenScript)}", style='green')

    # ------------------------------------------------------------------------------

    lUpdatedDecoders = []
    lGen = sh.Command(which(lGenScript))
    lErrors = {}
    with DirSentry(join(ictx.currentproj.path, lDecodersDir)):
        for lAddr in ictx.depParser.commands['addrtab']:
            cprint(f"Processing [blue]{basename(lAddr.filepath)}[/blue]")
            # Interested in top-level address tables only
            if not lAddr.toplevel:
                cprint(
                    f"{lAddr.filepath} is not a top-level address table. Decoder will not be generated.",
                    style='cyan',
                )
                continue

            # Generate a new decoder file; collect failures and keep going so
            # that all errors can be reported in a single pass.
            try:
                lGen(basename(lAddr.filepath), _out=sys.stdout, _err=sys.stderr, _tee=True)
            except Exception as lExc:
                cprint(f"Failed to generate decoder for {basename(lAddr.filepath)}", style='red')
                lErrors[lAddr] = lExc
                continue

            lDecoder = f'ipbus_decode_{splitext(basename(lAddr.filepath))[0]}.vhd'
            lTarget = ictx.pathMaker.getPath(
                lAddr.package, lAddr.component, 'src', lDecoder
            )

            # Prefer colordiff for readable output when it is installed.
            diff = sh.colordiff if which('colordiff') else sh.diff

            # Has anything changed? diff exits non-zero on differences; the
            # generator's banner line is excluded from the comparison.
            try:
                diff('-u', '-I', '^-- START automatically', lTarget, lDecoder)
            except sh.ErrorReturnCode:
                lUpdatedDecoders.append((lDecoder, lTarget))

        if lErrors:
            cprint(
                "\nERROR: decoder generation failed",
                style='red',
            )
            for a in sorted(lErrors):
                cprint(' - ' + basename(a.filepath))
                cprint('   ' + lErrors[a].stdout.decode(DEFAULT_ENCODING, "replace"))
            raise SystemExit(-1)

        # ------------------------------------------------------------------------------
        # If no difference between old and newly generated decoders, quit here.
        if not lUpdatedDecoders:
            console.log(
                f"{ictx.currentproj.name}: All ipbus decoders are up-to-date.",
                style='green',
            )
            return

        # ------------------------------------------------------------------------------
        # Bug fix: lUpdatedDecoders holds (decoder, target) tuples, so the
        # original f-string printed raw tuple reprs; unpack to list just the
        # decoder file names.
        cprint(
            'The following decoders have changed and must be updated:\n'
            + '\n'.join([f" * [blue]{lDec}[/blue]" for lDec, _ in lUpdatedDecoders])
            + '\n'
        )
        if aCheckUpToDate:
            raise SystemExit(-1)

        if not aForce and not Confirm.ask("Do you want to continue?"):
            return

        for lDecoder, lTarget in lUpdatedDecoders:
            cprint(sh.cp('-av', lDecoder, lTarget))

        console.log(
            f"{ictx.currentproj.name}: {len(lUpdatedDecoders)} decoders updated.",
            style='green',
        )
示例#51
0
    def build_arch(self, arch):
        """Configure, build and install ffmpeg for an Android ARM target.

        Starts from '--disable-everything' and selectively re-enables
        features: OpenSSL/TLS when the openssl recipe is in the build
        order, the full codec set when ffpyplayer_codecs is, otherwise a
        minimal mp4 codec subset. Finally builds with make and copies the
        produced shared libraries into the per-arch libs directory.
        """
        with current_directory(self.get_build_dir(arch.arch)):
            env = arch.get_env()

            flags = ['--disable-everything']
            cflags = []
            ldflags = []

            # TLS/https support via the openssl recipe ('nonfree' is
            # required for OpenSSL linkage).
            if 'openssl' in self.ctx.recipe_build_order:
                flags += [
                    '--enable-openssl',
                    '--enable-nonfree',
                    '--enable-protocol=https,tls_openssl',
                ]
                build_dir = Recipe.get_recipe(
                    'openssl', self.ctx).get_build_dir(arch.arch)
                cflags += ['-I' + build_dir + '/include/']
                ldflags += ['-L' + build_dir]

            # Full codec set (x264 + shine) when ffpyplayer_codecs is built.
            if 'ffpyplayer_codecs' in self.ctx.recipe_build_order:
                # libx264
                flags += ['--enable-libx264']
                build_dir = Recipe.get_recipe(
                    'libx264', self.ctx).get_build_dir(arch.arch)
                cflags += ['-I' + build_dir + '/include/']
                ldflags += ['-lx264', '-L' + build_dir + '/lib/']

                # libshine
                flags += ['--enable-libshine']
                build_dir = Recipe.get_recipe(
                    'libshine', self.ctx).get_build_dir(arch.arch)
                cflags += ['-I' + build_dir + '/include/']
                ldflags += ['-lshine', '-L' + build_dir + '/lib/']

                # Enable all codecs:
                flags += [
                    '--enable-parsers',
                    '--enable-decoders',
                    '--enable-encoders',
                    '--enable-muxers',
                    '--enable-demuxers',
                ]
            else:
                # Enable codecs only for .mp4:
                flags += [
                    '--enable-parser=aac,ac3,h261,h264,mpegaudio,mpeg4video,mpegvideo,vc1',
                    '--enable-decoder=aac,h264,mpeg4,mpegvideo',
                    '--enable-muxer=h264,mov,mp4,mpeg2video',
                    '--enable-demuxer=aac,h264,m4v,mov,mpegvideo,vc1',
                ]

            # needed to prevent _ffmpeg.so: version node not found for symbol av_init_packet@LIBAVFORMAT_52
            # /usr/bin/ld: failed to set dynamic section sizes: Bad value
            flags += [
                '--disable-symver',
            ]

            # disable binaries / doc
            flags += [
                '--disable-ffmpeg',
                '--disable-ffplay',
                '--disable-ffprobe',
                '--disable-ffserver',
                '--disable-doc',
            ]

            # other flags:
            flags += [
                '--enable-filter=aresample,resample,crop,adelay,volume',
                '--enable-protocol=file,http',
                '--enable-small',
                '--enable-hwaccels',
                '--enable-gpl',
                '--enable-pic',
                '--disable-static',
                '--enable-shared',
            ]

            # android:
            flags += [
                '--target-os=android',
                '--cross-prefix=arm-linux-androideabi-',
                '--arch=arm',
                '--sysroot=' + self.ctx.ndk_platform,
                '--enable-neon',
                '--prefix={}'.format(realpath('.')),
            ]
            # Soft-float ABI with VFPv3-D16 FPU for broad ARM compatibility.
            cflags += [
                '-mfpu=vfpv3-d16',
                '-mfloat-abi=softfp',
                '-fPIC',
            ]

            env['CFLAGS'] += ' ' + ' '.join(cflags)
            env['LDFLAGS'] += ' ' + ' '.join(ldflags)

            configure = sh.Command('./configure')
            shprint(configure, *flags, _env=env)
            shprint(sh.make, '-j4', _env=env)
            shprint(sh.make, 'install', _env=env)
            # copy libs:
            sh.cp('-a', sh.glob('./lib/lib*.so'),
                  self.ctx.get_libs_dir(arch.arch))
示例#52
0
    def build_arch(self, arch):
        """Configure, build and install ffmpeg for the given Android arch.

        Multi-arch variant: selects the cross prefix and --arch flag from
        arch.arch (aarch64 / x86 / arm), optionally enables OpenSSL and the
        full codec set depending on the recipe build order, builds with
        make, and copies the resulting shared libraries into the per-arch
        libs directory.
        """
        with current_directory(self.get_build_dir(arch.arch)):
            env = arch.get_env()

            flags = ['--disable-everything']
            cflags = []
            ldflags = []

            # TLS/https support via the openssl recipe; the API-compat
            # define pins ffmpeg to the OpenSSL 1.0.2 API surface.
            if 'openssl' in self.ctx.recipe_build_order:
                flags += [
                    '--enable-openssl',
                    '--enable-nonfree',
                    '--enable-protocol=https,tls_openssl',
                ]
                build_dir = Recipe.get_recipe(
                    'openssl', self.ctx).get_build_dir(arch.arch)
                cflags += [
                    '-I' + build_dir + '/include/',
                    '-DOPENSSL_API_COMPAT=0x10002000L'
                ]
                ldflags += ['-L' + build_dir]

            if 'ffpyplayer_codecs' in self.ctx.recipe_build_order:
                # Enable GPL
                flags += ['--enable-gpl']

                # libx264
                flags += ['--enable-libx264']
                build_dir = Recipe.get_recipe(
                    'libx264', self.ctx).get_build_dir(arch.arch)
                cflags += ['-I' + build_dir + '/include/']
                ldflags += ['-lx264', '-L' + build_dir + '/lib/']

                # libshine
                flags += ['--enable-libshine']
                build_dir = Recipe.get_recipe(
                    'libshine', self.ctx).get_build_dir(arch.arch)
                cflags += ['-I' + build_dir + '/include/']
                ldflags += ['-lshine', '-L' + build_dir + '/lib/']
                ldflags += ['-lm']

                # Enable all codecs:
                flags += [
                    '--enable-parsers',
                    '--enable-decoders',
                    '--enable-encoders',
                    '--enable-muxers',
                    '--enable-demuxers',
                ]
            else:
                # Enable codecs only for .mp4:
                flags += [
                    '--enable-parser=aac,ac3,h261,h264,mpegaudio,mpeg4video,mpegvideo,vc1',
                    '--enable-decoder=aac,h264,mpeg4,mpegvideo',
                    '--enable-muxer=h264,mov,mp4,mpeg2video',
                    '--enable-demuxer=aac,h264,m4v,mov,mpegvideo,vc1',
                ]

            # needed to prevent _ffmpeg.so: version node not found for symbol av_init_packet@LIBAVFORMAT_52
            # /usr/bin/ld: failed to set dynamic section sizes: Bad value
            flags += [
                '--disable-symver',
            ]

            # disable binaries / doc
            flags += [
                '--disable-programs',
                '--disable-doc',
            ]

            # other flags:
            flags += [
                '--enable-filter=aresample,resample,crop,adelay,volume,scale',
                '--enable-protocol=file,http,hls',
                '--enable-small',
                '--enable-hwaccels',
                '--enable-pic',
                '--disable-static',
                '--disable-debug',
                '--enable-shared',
            ]

            # Per-arch toolchain selection; assembly is disabled on x86.
            if 'arm64' in arch.arch:
                cross_prefix = 'aarch64-linux-android-'
                arch_flag = 'aarch64'
            elif 'x86' in arch.arch:
                cross_prefix = 'i686-linux-android-'
                arch_flag = 'x86'
                flags += ['--disable-asm']
            else:
                cross_prefix = 'arm-linux-androideabi-'
                arch_flag = 'arm'

            # android:
            # NOTE(review): cross_prefix is only used for --strip here;
            # --cross-prefix itself comes from arch.target — confirm intended.
            flags += [
                '--target-os=android',
                '--enable-cross-compile',
                '--cross-prefix={}-'.format(arch.target),
                '--arch={}'.format(arch_flag),
                '--strip={}strip'.format(cross_prefix),
                '--sysroot={}'.format(
                    join(self.ctx.ndk_dir, 'toolchains', 'llvm', 'prebuilt',
                         'linux-x86_64', 'sysroot')),
                '--enable-neon',
                '--prefix={}'.format(realpath('.')),
            ]

            # 32-bit ARM needs explicit soft-float VFPv3-D16 codegen flags.
            if arch_flag == 'arm':
                cflags += [
                    '-mfpu=vfpv3-d16',
                    '-mfloat-abi=softfp',
                    '-fPIC',
                ]

            env['CFLAGS'] += ' ' + ' '.join(cflags)
            env['LDFLAGS'] += ' ' + ' '.join(ldflags)

            configure = sh.Command('./configure')
            shprint(configure, *flags, _env=env)
            shprint(sh.make, '-j4', _env=env)
            shprint(sh.make, 'install', _env=env)
            # copy libs:
            sh.cp('-a', sh.glob('./lib/lib*.so'),
                  self.ctx.get_libs_dir(arch.arch))
示例#53
0
    def run(self, binary_path, tuf_repo):
        """Stage auxiliary Bitmask files into the bundle.

        Downloads the latest Thunderbird extension, copies launcher/client
        sources, the CA certificate and release notes into the bundle
        layout, writes the TUF launcher config when *tuf_repo* is
        'stable'/'unstable', and seeds the TUF metadata directories from
        *binary_path*.
        """
        self.log("downloading thunderbird extension...")
        ext_path = platform_dir(self._basedir, "apps",
                                "bitmask-thunderbird-latest.xpi")
        # NOTE(review): urllib.urlretrieve is the Python 2 API — confirm
        # this module targets Python 2.
        urllib.urlretrieve(
            "https://downloads.leap.se/thunderbird_extension/"
            "bitmask-thunderbird-latest.xpi", ext_path)
        self.log("done")
        self.log("copying misc files...")
        apps_dir = _convert_path_for_win(platform_dir(self._basedir, "apps"))
        cp(
            _convert_path_for_win(
                os.path.join(self._basedir, "bitmask_launcher", "src",
                             "launcher.py")), apps_dir)
        cp(
            "-r",
            _convert_path_for_win(
                os.path.join(self._basedir, "bitmask_client", "src", "leap")),
            apps_dir)
        lib_dir = _convert_path_for_win(platform_dir(self._basedir, "lib"))
        cp(
            _convert_path_for_win(
                os.path.join(self._basedir, "leap_pycommon", "src", "leap",
                             "common", "cacert.pem")),
            _convert_path_for_win(os.path.join(lib_dir, "leap", "common")))
        # _version.py lives under a build/lib* directory whose exact name
        # varies, hence the glob + first match.
        cp(
            _convert_path_for_win(
                glob(
                    os.path.join(self._basedir, "bitmask_client", "build",
                                 "lib*", "leap", "bitmask",
                                 "_version.py"))[0]),
            os.path.join(apps_dir, "leap", "bitmask"))

        cp(
            _convert_path_for_win(
                os.path.join(self._basedir, "bitmask_client", "relnotes.txt")),
            _convert_path_for_win(os.path.join(self._basedir, "Bitmask")))

        launcher_path = os.path.join(self._basedir, "Bitmask", "launcher.conf")

        # Select the TUF repository URL for the launcher config.
        if tuf_repo == 'stable':
            tuf_config = self.TUF_CONFIG.format(self.TUF_STABLE)
        elif tuf_repo == 'unstable':
            tuf_config = self.TUF_CONFIG.format(self.TUF_UNSTABLE)
        else:
            # a different value than stable/unstable is interpreted as
            # "don't use tuf", so we don't use a launcher file
            tuf_config = None

        if tuf_config is not None:
            with open(launcher_path, "w") as f:
                f.write(tuf_config)

        # Seed the TUF metadata layout with the trusted root.json.
        metadata = os.path.join(self._basedir, "Bitmask", "repo", "metadata")
        mkdir("-p", os.path.join(metadata, "current"))
        mkdir("-p", os.path.join(metadata, "previous"))
        cp(os.path.join(binary_path, "root.json"),
           os.path.join(metadata, "current"))

        self.log("done")
示例#54
0
文件: converter.py 项目: overut/mace
def convert_model(configs, cl_mem_type):
    """Convert all models described in *configs* into MACE files or C++ code.

    Prepares clean build/output/codegen directories, generates the
    engine-factory sources when the graph format is 'code', then for each
    model fetches/validates its files and runs the model code generator.
    Results land in the model output dir ('file' format) or the model
    header dir ('code' format).

    Args:
        configs: parsed deployment-config dict keyed by YAMLKeyword.
        cl_mem_type: OpenCL memory type override; falls back to "image".
    """
    # Remove previous output dirs
    library_name = configs[YAMLKeyword.library_name]
    if not os.path.exists(BUILD_OUTPUT_DIR):
        os.makedirs(BUILD_OUTPUT_DIR)
    elif os.path.exists(os.path.join(BUILD_OUTPUT_DIR, library_name)):
        sh.rm("-rf", os.path.join(BUILD_OUTPUT_DIR, library_name))
    os.makedirs(os.path.join(BUILD_OUTPUT_DIR, library_name))
    if not os.path.exists(BUILD_DOWNLOADS_DIR):
        os.makedirs(BUILD_DOWNLOADS_DIR)

    model_output_dir = \
        '%s/%s/%s' % (BUILD_OUTPUT_DIR, library_name, MODEL_OUTPUT_DIR_NAME)
    model_header_dir = \
        '%s/%s/%s' % (BUILD_OUTPUT_DIR, library_name, MODEL_HEADER_DIR_PATH)
    # clear output dir
    if os.path.exists(model_output_dir):
        sh.rm("-rf", model_output_dir)
    os.makedirs(model_output_dir)
    if os.path.exists(model_header_dir):
        sh.rm("-rf", model_header_dir)

    # 'code' data format means weights are embedded into generated source.
    embed_model_data = \
        configs[YAMLKeyword.model_data_format] == ModelFormat.code

    if os.path.exists(MODEL_CODEGEN_DIR):
        sh.rm("-rf", MODEL_CODEGEN_DIR)
    if os.path.exists(ENGINE_CODEGEN_DIR):
        sh.rm("-rf", ENGINE_CODEGEN_DIR)

    # For 'code' graph format, emit the engine-factory sources and headers.
    if configs[YAMLKeyword.model_graph_format] == ModelFormat.code:
        os.makedirs(model_header_dir)
        sh_commands.gen_mace_engine_factory_source(
            configs[YAMLKeyword.models].keys(),
            embed_model_data)
        sh.cp("-f", glob.glob("mace/codegen/engine/*.h"),
              model_header_dir)

    for model_name in configs[YAMLKeyword.models]:
        MaceLogger.header(
            StringFormatter.block("Convert %s model" % model_name))
        model_config = configs[YAMLKeyword.models][model_name]
        runtime = model_config[YAMLKeyword.runtime]
        # OpenCL memory type defaults to "image" unless overridden.
        if cl_mem_type:
            model_config[YAMLKeyword.cl_mem_type] = cl_mem_type
        else:
            model_config[YAMLKeyword.cl_mem_type] = "image"

        # Download model/weight files and verify their checksums.
        model_file_path, weight_file_path, quantize_range_file_path = \
            get_model_files(
                model_config[YAMLKeyword.model_file_path],
                model_config[YAMLKeyword.model_sha256_checksum],
                BUILD_DOWNLOADS_DIR,
                model_config[YAMLKeyword.weight_file_path],
                model_config[YAMLKeyword.weight_sha256_checksum],
                model_config.get(YAMLKeyword.quantize_range_file, ""))

        data_type = model_config[YAMLKeyword.data_type]
        # TODO(liuqi): support multiple subgraphs
        subgraphs = model_config[YAMLKeyword.subgraphs]

        model_codegen_dir = "%s/%s" % (MODEL_CODEGEN_DIR, model_name)
        sh_commands.gen_model_code(
            model_codegen_dir,
            model_config[YAMLKeyword.platform],
            model_file_path,
            weight_file_path,
            model_config[YAMLKeyword.model_sha256_checksum],
            model_config[YAMLKeyword.weight_sha256_checksum],
            ",".join(subgraphs[0][YAMLKeyword.input_tensors]),
            ",".join(subgraphs[0][YAMLKeyword.input_data_formats]),
            ",".join(subgraphs[0][YAMLKeyword.output_tensors]),
            ",".join(subgraphs[0][YAMLKeyword.output_data_formats]),
            ",".join(subgraphs[0][YAMLKeyword.check_tensors]),
            runtime,
            model_name,
            ":".join(subgraphs[0][YAMLKeyword.input_shapes]),
            ":".join(subgraphs[0][YAMLKeyword.input_ranges]),
            ":".join(subgraphs[0][YAMLKeyword.output_shapes]),
            ":".join(subgraphs[0][YAMLKeyword.check_shapes]),
            model_config[YAMLKeyword.nnlib_graph_mode],
            embed_model_data,
            model_config[YAMLKeyword.winograd],
            model_config[YAMLKeyword.quantize],
            quantize_range_file_path,
            model_config[YAMLKeyword.change_concat_ranges],
            model_config[YAMLKeyword.obfuscate],
            configs[YAMLKeyword.model_graph_format],
            data_type,
            model_config[YAMLKeyword.cl_mem_type],
            ",".join(model_config.get(YAMLKeyword.graph_optimize_options, [])))

        # 'file' format: move the generated .pb/.data into the output dir;
        # 'code' format: copy generated headers (and the .data file unless
        # the weights were embedded).
        if configs[YAMLKeyword.model_graph_format] == ModelFormat.file:
            sh.mv("-f",
                  '%s/%s.pb' % (model_codegen_dir, model_name),
                  model_output_dir)
            sh.mv("-f",
                  '%s/%s.data' % (model_codegen_dir, model_name),
                  model_output_dir)
        else:
            if not embed_model_data:
                sh.mv("-f",
                      '%s/%s.data' % (model_codegen_dir, model_name),
                      model_output_dir)
            sh.cp("-f", glob.glob("mace/codegen/models/*/*.h"),
                  model_header_dir)

        MaceLogger.summary(
            StringFormatter.block("Model %s converted" % model_name))
示例#55
0
from sh import cp, rm
import sh
import os

# Compile the reference solution once; its binary produces the expected
# output (".ok" file) for every grader test case below.
cp('../wow-palcu.cpp', '.')
os.system('g++ wow-palcu.cpp')

filename = 'grader_test'
for i in range(1, 11):
    # Bug fix: the original used the Python 2 print statement, which is a
    # SyntaxError under Python 3 (the rest of this codebase uses f-strings,
    # i.e. Python 3).
    print('Testul ', i)
    cp(filename + str(i) + '.in', 'wow.in')
    os.system('./a.out')
    cp('wow.out', filename + str(i) + '.ok')

# Clean up intermediate files.
for extension in ['in', 'out']:
    rm('wow.' + extension)
rm('wow-palcu.cpp')
rm('a.out')
示例#56
0
    def handle(self, *args, **options):
        """Back up the configuration of every ESXi host in settings.ESXI_BACKUP.

        For each (name, uri) pair: download the config bundle, unpack the
        three nested archives (stage1.tgz -> state.tgz -> local.tgz), drop
        blacklisted files, normalize permissions, and commit the extracted
        tree and the raw tarball into their respective git repositories.
        A failure for one host is logged and the loop continues with the
        next host.
        """
        session = requests.session()
        session.verify = False  # internal network, self-signed cert
        session.headers['User-Agent'] = 'python-requests/ConfigMaster'

        # global lock - remove after Repository integration is done
        global_lock = locking.FileLock("/run/configmaster-esxi-backup")
        global_lock.acquire()

        for name, uri in settings.ESXI_BACKUP:
            self.stdout.write("Acquiring lock for %s..." % name)
            run_lock = locking.FileLock("/run/esxi-%s" % name)
            tempdir = tempfile.mkdtemp('esxi')
            self.stdout.write("Lock acquired, running... (%s)" % tempdir)

            try:
                run_lock.acquire(non_blocking=True)
            except IOError:
                self.stderr.write(
                    "A run is already in progress")
                return

            # NOTE(review): every `continue` below leaves run_lock held and
            # tempdir on disk; consider try/finally cleanup per host.

            try:
                resp = session.get(uri, timeout=5)
            except IOError as e:
                self.stderr.write(
                    "Config backup failure for %s: %s" % (name, str(e)))
                continue

            if resp.status_code != 200:
                self.stderr.write(
                    "Config backup failed for %s: %r" % (name, resp.content))
                continue

            # Stage 1: the raw download is itself a tarball.
            stage_1 = os.path.join(tempdir, 'stage1.tgz')

            with open(stage_1, 'wb') as f:
                f.write(resp.content)

            os.chdir(tempdir)

            try:
                stage1_ret = sh.aunpack('stage1.tgz')
            except sh.ErrorReturnCode:
                self.stderr.write(
                    "Config backup failed for %s: failed to unpack stage1"
                    % name)
                continue

            if not "\nstate.tgz" in stage1_ret.stdout or not os.path.exists(
                    'stage1'):
                self.stderr.write(
                    "Config backup failed for %s: invalid stage1" % name)
                continue

            os.chdir('stage1')

            # Stage 2: state.tgz lives inside stage1.
            try:
                sh.aunpack('state.tgz')
            except sh.ErrorReturnCode:
                self.stderr.write(
                    "Config backup failed for %s: failed to unpack state.tgz"
                    % name)
                continue

            if not os.path.exists('local.tgz'):
                self.stderr.write(
                    "Config backup failed for %s: invalid state.tgz, "
                    "local.tgz missing" % name)
                continue

            # Stage 3: local.tgz holds the actual configuration tree (etc/).
            try:
                sh.aunpack('local.tgz')
            except sh.ErrorReturnCode:
                self.stderr.write(
                    "Config backup failed for %s: "
                    "failed to unpack local.tgz" % name)
                continue

            if not os.path.exists('etc'):
                self.stderr.write(
                    "Config backup failed for %s: "
                    "invalid local.tgz, etc/ missing" % name)
                continue

            # Remove files that must not be archived.
            for path in settings.ESXI_FILE_BLACKLIST:
                if os.path.exists(path):
                    os.unlink(path)

            repo_dir = os.path.join(settings.ESXI_BACKUP_REPO, name)

            if not os.path.exists(repo_dir):
                os.mkdir(repo_dir)

            # Normalize permissions of the extracted tree (cwd) before
            # copying it into the repo: directories 750, files 640.
            # BUGFIX: the command must reach `sh -c` as a single string —
            # the old `.split()` made everything after the first word a
            # positional parameter — and the second find must select files
            # (-type f), not directories, to match the scheme applied to
            # repo_dir below.
            sh.sh('-c', '/usr/bin/find -type d -exec chmod 750 {} \;')
            sh.sh('-c', '/usr/bin/find -type f -exec chmod 640 {} \;')

            # Apply the same permission scheme to the existing repo content.
            for root, dirs, files in os.walk(repo_dir):
                for d in dirs:
                    os.chmod(os.path.join(root,d), 0o750)
                for f in files:
                    os.chmod(os.path.join(root,f), 0o640)

            sh.cp('-r', '.', repo_dir+'/')
            os.chdir(repo_dir)

            try:
                sh.git.add('-u', '.')
                sh.git.add('.')
                if NetworkDeviceConfigBackupHandler._git_commit(
                                "ESXi config change (%s)" % name):
                    sh.git.push()
            except sh.ErrorReturnCode as e:
                self.stderr.write(
                    "Git commit or push failed: " + str(e))
                continue

            # Also keep the untouched stage1 tarball in the raw repo.
            sh.cp(
                os.path.join(tempdir, 'stage1.tgz'),
                os.path.join(settings.ESXI_BACKUP_REPO_RAW, '%s.tgz' % name)
            )

            os.chdir(settings.ESXI_BACKUP_REPO_RAW)

            try:
                sh.git.add('-u', '.')
                sh.git.add('.')
                if NetworkDeviceConfigBackupHandler._git_commit(
                                "ESXi raw config change (%s)" % name):
                    sh.git.push()
            except sh.ErrorReturnCode as e:
                self.stderr.write(
                    "Git commit or push failed: " + str(e))
                continue

            shutil.rmtree(tempdir)
            run_lock.release()

        # Flush any commits left unpushed by earlier (failed) runs.
        for path in (settings.ESXI_BACKUP_REPO_RAW, settings.ESXI_BACKUP_REPO):
            os.chdir(path)
            sh.git.push()

        global_lock.release()
        self.stdout.write("Run completed.")
示例#57
0
    def cb_unlocked(self):
        """React to the persistent storage being unlocked: swap the unlock
        widgets for the unlocked state and load any persisted settings."""
        logging.debug("Storage unlocked")
        # Hide the unlock controls, show the unlocked state.
        for unlock_widget in (self.spinner_storage_unlock,
                              self.entry_storage_passphrase,
                              self.button_storage_unlock,
                              self.infobar_persistence):
            unlock_widget.set_visible(False)
        self.image_storage_state.set_from_icon_name('tails-unlocked',
                                                    Gtk.IconSize.BUTTON)
        self.image_storage_state.set_visible(True)
        self.box_storage_unlocked.set_visible(True)
        self.button_start.set_sensitive(True)

        # Copy every setting out of the "persistent settings directory" into
        # the regular one. This is a workaround for the "Settings were
        # loaded" notification being shown even when nothing was actually
        # loaded, e.g. on the first boot after activating persistence.
        #
        # Background: when persistence is activated, live-persist copies the
        # mount destination (/var/lib/gdm3/settings) to the source directory
        # (/live/persistence/TailsData_unlocked/greeter-settings) if the
        # source does not exist yet. Since settings are written to disk as
        # soon as the user changes them, finding settings in the destination
        # after unlocking is ambiguous: they may have been persisted, or set
        # by the user in this very Welcome Screen session before unlocking.
        # Comparing values does not work either, because e.g. the admin
        # password is hashed on disk but kept in cleartext in memory.
        #
        # Hence the separate "persistent settings directory": settings made
        # before unlocking live only in the "normal" directory, all persisted
        # settings are copied over here after unlocking, and copied back when
        # the Welcome Screen is left. So the persistent directory is empty
        # exactly when nothing was persisted yet.
        #
        # Once #11529 creates the source directory up front, live-persist
        # will never copy destination to source and this workaround (the
        # persistent directory introduced in
        # e5653981228b375c28bf4d1ace9be3367e080900 and its extended usage)
        # can be reverted.
        for persisted_setting in glob.glob(
                os.path.join(persistent_settings_dir, 'tails.*')):
            sh.cp("-a", persisted_setting, settings_dir)

        if os.listdir(settings_dir):
            self.load_settings_cb()
        else:
            self.apply_settings_cb()
示例#58
0
File: common.py  Project: gpetruc/ipbb
def gendecoders(ctx):
    """Regenerate the ipbus address-decoder VHDL files for the current
    project and, after confirmation, copy the decoders that changed back
    into the source tree.

    ctx: click context; ctx.obj is the ipbb environment object.
    """

    lDecodersDir = 'decoders'
    # Extract context
    env = ctx.obj

    with DirSentry(env.projectPath) as lProjDir:
        sh.rm('-rf', lDecodersDir)
        # Gather address tables
        ctx.invoke(addrtab, output=lDecodersDir)

    # ------------------------------------------------------------------------------
    # Make sure the uHAL generator script and its libraries are reachable.
    # This consolidates two previously duplicated lookup blocks: one raised
    # KeyError when LD_LIBRARY_PATH was unset, the other split PATH on
    # whitespace instead of ':' and never wrote its result back to
    # os.environ (a no-op).
    lGenScript = 'gen_ipbus_addr_decode'
    lGenToolPath = '/opt/cactus/bin/uhal/tools'
    lGenToolLibPath = '/opt/cactus/lib'

    if not which(lGenScript):
        lPaths = [p for p in os.environ.get('PATH', '').split(os.pathsep) if p]
        if lGenToolPath not in lPaths:
            os.environ['PATH'] = os.pathsep.join([lGenToolPath] + lPaths)

        if not which(lGenScript):
            raise click.ClickException(
                "'{0}' script not found.".format(lGenScript))

    # The generator needs the cactus libraries at runtime, even when the
    # script itself was already on PATH.
    lLibPaths = [p for p in os.environ.get('LD_LIBRARY_PATH', '').split(os.pathsep) if p]
    if lGenToolLibPath not in lLibPaths:
        os.environ['LD_LIBRARY_PATH'] = os.pathsep.join([lGenToolLibPath] + lLibPaths)
    # ------------------------------------------------------------------------------

    lUpdatedDecoders = []
    lGen = sh.Command(lGenScript)
    with DirSentry(join(env.projectPath, lDecodersDir)) as lProjDir:
        for lAddr in env.depParser.CommandList['addrtab']:
            echo("Processing "+style(basename(lAddr.FilePath), fg='blue'))
            # Interested in top-level address tables only
            if not lAddr.TopLevel:
                continue

            # Generate a new decoder file
            lGen(basename(lAddr.FilePath), _out=sys.stdout, _err_to_out=True)
            lDecoder = 'ipbus_decode_{0}.vhd'.format(
                splitext(basename(lAddr.FilePath))[0])
            lTarget = env.pathMaker.getPath(
                lAddr.Package, lAddr.Component, 'src', lDecoder)

            # Prefer colordiff for readable output when it is installed.
            diff = sh.colordiff if which('colordiff') else sh.diff

            # Has anything changed? diff exits non-zero on differences.
            try:
                diff('-u', '-I', '^-- START automatically', lTarget, lDecoder)
            except sh.ErrorReturnCode as e:
                print (e.stdout)

                lUpdatedDecoders.append((lDecoder, lTarget))

        # ------------------------------------------------------------------------------
        # If no difference between old and newly generated decoders, quit here.
        if not lUpdatedDecoders:
            print ('All ipbus decoders are up-to-date')
            return
        # ------------------------------------------------------------------------------

        echo (
            'The following decoders have changed and must be updated:\n' +
            '\n'.join(map(lambda s: '* ' + style(s[0], fg='blue'), lUpdatedDecoders)) +
            '\n'
        )
        confirm('Do you want to continue?', abort=True)
        for lDecoder, lTarget in lUpdatedDecoders:
            print (sh.cp('-av', lDecoder, lTarget))
示例#59
0
from sh import cp, rm, diff, ErrorReturnCode
import sh
import os

SURSA_VERIFICATA = 'teleport-palcu-back.cpp'

cp('../' + SURSA_VERIFICATA, '.')
os.system('g++ ' + SURSA_VERIFICATA)

filename = 'grader_test'
for i in range(1, 11):
    print 'Testul ', i
    cp(filename + str(i) + '.in', 'teleport.in')
    os.system('./a.out')
    print diff('teleport.out', filename + str(i) + '.ok', _ok_code=[0, 1])

for extension in ['in', 'out']:
    rm('teleport.' + extension)
rm(SURSA_VERIFICATA)
rm('a.out')
示例#60
0
    def run_phil_pipeline(self, base_paramfile, opt_params, run_name):
        """Submit one PHIL simulation run through qsub and block until it
        finishes.

        base_paramfile: name of the base parameter file to read defaults from.
        opt_params: dict of parameter overrides merged over the base set.
        run_name: label used to build the per-run output directory.

        Returns a (tempdir, poe_output_path) tuple for the completed run.
        Raises Exception if the finished job left its lockfile behind or
        wrote a non-empty status file (checked a few times with back-off,
        since the shared filesystem may lag behind the queue).
        """
        tempdir_container = os.path.join(self.wrkdir, 'phil_univax_out', base_paramfile, '%s.%d' % (run_name, randint(0,sys.maxsize)))
        sh.mkdir('-p',tempdir_container)
        tempdir = mkdtemp(prefix='phl-', dir=tempdir_container)
        basename = os.path.basename(tempdir)
        event_report_file = os.path.join(tempdir, 'events.json_lines')
        poe_output_file = os.path.join(tempdir, 'poe_output')
        poe_format = 'csv'
        # Submit held ('-h') so input files can be staged before the job
        # becomes eligible to run.
        qsub = sh.qsub.bake('-h','-v','PHIL_HOME=%s,OMP_NUM_THREADS=16' % self.phil_home)

        param_path = os.path.join(tempdir, 'params')
        params = self.read_phil_base_params_from_file(base_paramfile)
        params.update({
            'outdir': tempdir,
            'event_report_file' : event_report_file,
            'seed': randint(1, 2147483647)
        })
        params.update(opt_params)

        # BUGFIX: write via a context manager so the handle is closed even
        # if a write raises (the original open()/close() pair leaked the
        # file object on error).
        with open(param_path, 'w') as paramfile:
            for param, value in params.items():
                paramfile.write('%s = %s\n' % (param, str(value)))

        lockfile = os.path.join(tempdir, 'lockfile')
        statusfile = os.path.join(tempdir, 'statusfile')

        # Stage the job's input files next to the parameter file.
        sh.cp(params['primary_cases_file[0]'], tempdir)
        sh.cp(params['vaccination_capacity_file'], tempdir)
        sh.cp('config.yaml', tempdir)

        qsub_template_args = dict(
            stdout = os.path.join(tempdir, 'stdout'),
            stderr = os.path.join(tempdir, 'stderr'),
            lockfile = lockfile, statusfile = statusfile,
            tempdir = tempdir, jobname = basename,
            #reservation = 'philo.0',
            paramfile = param_path,
            synthetic_population = self.synthetic_population,
            event_report_file = event_report_file,
            poe_output_file = poe_output_file, poe_format = poe_format)

        with open(self.qsub_template_file, 'r') as f:
            qsub_template = jinja2.Template(f.read())

        qsub_file = os.path.join(tempdir, 'qsub.py')
        with open(qsub_file, 'w') as f:
            f.write(qsub_template.render(qsub_template_args))

        jobid = qsub(qsub_file).strip()
        sh.ln('-s', tempdir, os.path.join(tempdir_container, jobid))
        # Create the lockfile; presumably the job removes it on success —
        # see the completion check below. TODO confirm against the template.
        sh.touch(lockfile)
        sh.qalter('-h','n', jobid)  # release the hold, job may now run

        # Poll until the job leaves the queue (qstat exits 153 once the job
        # id is unknown).
        while sh.qstat('-x', jobid, _ok_code=[0,153]).exit_code == 0:
            time.sleep(randint(1,4))

        # Verify completion; retry a few times to tolerate filesystem lag.
        n_check = 3
        for _n in range(n_check+1):
            try:
                if os.path.isfile(lockfile):
                    raise Exception('Lockfile present but %s not in queue!' % jobid)

                with open(statusfile, 'r') as f:
                    stat = f.read()
                    if len(stat) > 0:
                        raise Exception(stat)
                break
            except Exception as e:
                if _n == n_check:
                    # Bare raise preserves the original traceback
                    # (the original `raise(e)` re-raised without it).
                    raise
                else:
                    time.sleep(randint(10,20))

        return (tempdir, '%s.%s' % (poe_output_file, poe_format))