Example #1
File: cp.py Project: yunify/qsctl
def step_impl(context):
    sh.mkdir("tmp").wait()
    for row in context.table:
        sh.dd(
            "if=/dev/zero", "of=tmp/" + row["name"], "bs=1048576",
            "count=" + row["count"]
        )
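
Note: every example on this page uses the Python sh package, which maps positional arguments onto command-line arguments and reserves underscore-prefixed keywords for execution options. A minimal sketch of those conventions (the /tmp/demo path is purely illustrative):

import sys
import sh

# Positional arguments map one-to-one onto CLI arguments:
# this runs `mkdir -p /tmp/demo/build`.
sh.mkdir("-p", "/tmp/demo/build")

# Underscore-prefixed keywords are sh execution options, not command flags:
# stream stdout into this process, and treat exit code 1 as success too.
sh.ls("/tmp/demo", _out=sys.stdout, _ok_code=[0, 1])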
Example #2
def handle_task(task, dotfiles_dir):
    click.echo('handling task: {}'.format(task))

    if 'src' not in task:
        click.echo("you must define at least a 'src' in each task")
        raise click.Abort

    source = os.path.expanduser(task['src'])
    if not os.path.exists(source):
        click.echo('file not found: {}'.format(source))
        raise click.Abort

    _, filename = os.path.split(source)
    subdir = task['subdir'].rstrip('/') if 'subdir' in task else '.'
    target = os.path.abspath(os.path.join(dotfiles_dir, subdir, filename))
    
    # make sure the target directory exists, e.g. .dotfiles/bash/
    target_path, _ = os.path.split(target)
    mkdir('-p', target_path)

    # copy the files
    msg_fmt = 'copying {}: from [{}] to [{}]'
    if os.path.isdir(source):
        click.echo(msg_fmt.format('dir', source, target))
        cp("-r", os.path.join(source, "."), target)
    else:
        click.echo(msg_fmt.format('file', source, target))
        cp(source, target)    
Example #3
    def __init__(self, username, name, subclass_type=None, dir_yaml=None, dir_diag=None, dir_output=None, img_type=None):
        # init params with default values
        if subclass_type is None:
            subclass_type = self.subclass_type_list[0]
        if dir_yaml is None:
            dir_yaml = self.guessDefaultConfigLocation()
        if dir_diag is None:
            dir_diag = self.guessDefaultDiagLocation()
        if dir_output is None:
            dir_output = self.guessDefaultDiagramLocation()
        if img_type is None:
            img_type = self.image_type_list[0]
        # set login username
        self.set_login_username(username)

        abs_dir_yaml = cm_path_expand(dir_yaml)
        abs_dir_diag = cm_path_expand(dir_diag)
        abs_dir_output = cm_path_expand(dir_output)

        log.debug(
            "The output directory of rack diagram is {0}".format(abs_dir_output))
        # make sure the output directory exists
        if not path.exists(abs_dir_output):
            mkdir("-p", abs_dir_output)

        self.readClustersConfig(abs_dir_yaml)
        self.readRackConfig(name, abs_dir_yaml, abs_dir_diag)
        self.setRackImageType(subclass_type, img_type, abs_dir_output)
        self.initDictServers()
        self.get_map_progress()
Example #4
def clone(root, uniqname, project):
	into='/tmp/'+root
	mkdir('-p', into)
	url='[email protected]:' + uniqname + '/' + project
	to_path=into + '/' + uniqname

	if 'rerun' in sys.argv:
		if os.path.exists(to_path):
			print('cached {}'.format(to_path))
			repo = git.Repo(to_path)
			return repo, to_path

	print('clone {}/{}'.format(uniqname, project))
	try:
		repo = git.Repo.clone_from(
				url=url,
				to_path=to_path,
				)
		return repo, to_path
	except:
		# Fall back to sh path to grab the error
		try:
			sh.git('clone', url, to_path)
		except sh.ErrorReturnCode as e:
			if 'Connection closed by remote host' in e.stderr.decode('utf8'):
				# gitlab rate-limited us
				time.sleep(3)
				return clone(root, uniqname, project)
			raise

		# Since we're hammering the gitlab server, rarely the first will
		# timeout, but this second chance will succeed. Create a repo object
		# from the path in that case.
		repo = git.Repo(to_path)
		return repo, to_path
Example #5
def build_opencv():
    sh.pip.install("numpy")
    clone_if_not_exists("opencv", "https://github.com/PolarNick239/opencv.git", branch="stable_3.0.0)
    clone_if_not_exists("opencv_contrib", "https://github.com/PolarNick239/opencv_contrib.git", branch="stable_3.0.0")
    sh.rm("-rf", "build")
    sh.mkdir("build")
    sh.cd("build")
    python_path = pathlib.Path(sh.pyenv.which("python").stdout.decode()).parent.parent
    version = "{}.{}".format(sys.version_info.major, sys.version_info.minor)
    sh.cmake(
        "..",
        "-DCMAKE_BUILD_TYPE=RELEASE",
        "-DCMAKE_INSTALL_PREFIX={}/usr/local".format(python_path),
        "-DWITH_CUDA=OFF",
        "-DWITH_FFMPEG=OFF",
        "-DINSTALL_C_EXAMPLES=OFF",
        "-DBUILD_opencv_legacy=OFF",
        "-DBUILD_NEW_PYTHON_SUPPORT=ON",
        "-DBUILD_opencv_python3=ON",
        "-DOPENCV_EXTRA_MODULES_PATH=~/opencv_contrib-3.4.1/modules",
        "-DBUILD_EXAMPLES=ON",
        "-DPYTHON_EXECUTABLE={}/bin/python".format(python_path),
        "-DPYTHON3_LIBRARY={}/lib/libpython{}m.so".format(python_path, version),
        "-DPYTHON3_PACKAGES_PATH={}/lib/python{}/site-packages/".format(python_path, version),
        "-DPYTHON3_NUMPY_INCLUDE_DIRS={}/lib/python{}/site-packages/numpy/core/include".format(python_path, version),
        "-DPYTHON_INCLUDE_DIR={}/include/python{}m".format(python_path, version),
        _out=sys.stdout,
    )
    sh.make("-j4", _out=sys.stdout)
    sh.make.install(_out=sys.stdout)
Example #6
    def apply(self, config, containers, storage_entry=None):
        # The "entry point" is the way to contact the storage service.
        entry_point = { 'type' : 'titan' }
    
        # List all the containers in the entry point so that
        # clients can connect to any of them.
        entry_point['ip'] = str(containers[0]['data_ip'])

        config_dirs = []
        try:
            for c in containers:
                host_dir = "/tmp/" + self._generate_config_dir(config.uuid, c)
                try:
                    sh.mkdir('-p', host_dir)
                except:
                    sys.stderr.write('could not create config dir ' + host_dir)

                self._apply_rexster(host_dir, storage_entry, c)
                self._apply_titan(host_dir, storage_entry, c)

                # The config dirs specifies what to transfer over. We want to 
                # transfer over specific files into a directory. 
                config_dirs.append([c['container'], 
                                    host_dir + '/*', 
                                    config.config_directory])

        except IOError as err:
            sys.stderr.write('' + str(err))

        return config_dirs, entry_point
Example #7
 def compile( self, source_dir, build_dir, install_dir ):
     package_source_dir = os.path.join( source_dir, self.dirname )
     assert( os.path.exists( package_source_dir ) )
     package_build_dir = os.path.join( build_dir, self.dirname )
     runpath_dir = os.path.join( package_source_dir, 'RunPath' )
     if ( not os.path.exists( os.path.join( runpath_dir, 'media.zip' ) ) ):
         sh.cd( runpath_dir )
         sh.wget( '--no-check-certificate', 'https://bitbucket.org/jacmoe/ogitor/downloads/media.zip' )
         sh.unzip( 'media.zip' )
     if ( not os.path.exists( os.path.join( runpath_dir, 'projects.zip' ) ) ):
         sh.cd( runpath_dir )
         sh.wget( '--no-check-certificate', 'https://bitbucket.org/jacmoe/ogitor/downloads/projects.zip' )
         sh.unzip( 'projects.zip' )
     sh.mkdir( '-p', package_build_dir )
     sh.cd( package_build_dir )
     if ( platform.system() == 'Darwin' ):
         sh.cmake(
             '-G', 'Xcode',
             '-D', 'CMAKE_INSTALL_PREFIX=%s' % install_dir,
             '-D', 'CMAKE_MODULE_PATH=%s' % os.path.join( install_dir, 'CMake' ),
             package_source_dir,
             _out = sys.stdout )
         sh.xcodebuild( '-configuration', 'Release', _out = sys.stdout )
     else:
         sh.cmake(
             '-D', 'CMAKE_INSTALL_PREFIX=%s' % install_dir,
             '-D', 'CMAKE_MODULE_PATH=%s' % os.path.join( install_dir, 'lib/OGRE/cmake' ),
             package_source_dir,
             _out = sys.stdout )
         sh.make( '-j4', 'VERBOSE=1', _out = sys.stdout )
         sh.make.install( _out = sys.stdout )
Example #8
def split_ann(ann_file):
    if 'tmp' not in ls():
        mkdir('tmp')
    parser = BeautifulSoup(open(ann_file))
    for mistake in parser.find_all('mistake'):
        with open('tmp/%s' % mistake.attrs['nid'], 'a') as f:
            f.write(mistake.__str__())
Example #9
    def _set_rabbitmq_env(self):

        location = path_expand("~/.cloudmesh/rabbitm")

        if sys.platform == "darwin":
            sh.mkdir("-p", location)
            self.rabbit_env["RABBITMQ_MNESIA_BASE"] = location
            self.rabbit_env["RABBITMQ_LOG_BASE"] = location
            os.environ["RABBITMQ_MNESIA_BASE"] = location
            os.environ["RABBITMQ_LOG_BASE"] = location
            self.rabbit_env["rabbitmq_server"] = \
                "/usr/local/opt/rabbitmq/sbin/rabbitmq-server"
            self.rabbit_env["rabbitmqctl"] = \
                "/usr/local/opt/rabbitmq/sbin/rabbitmqctl"
        elif sys.platform == "linux2":
            sh.mkdir("-p", location)
            self.rabbit_env["RABBITMQ_MNESIA_BASE"] = location
            self.rabbit_env["RABBITMQ_LOG_BASE"] = location
            os.environ["RABBITMQ_MNESIA_BASE"] = location
            os.environ["RABBITMQ_LOG_BASE"] = location
            self.rabbit_env["rabbitmq_server"] = "/usr/sbin/rabbitmq-server"
            self.rabbit_env["rabbitmqctl"] = "/usr/sbin/rabbitmqctl"
        else:
            print("WARNING: cloudmesh rabbitmq user install not supported, "
                  "using system install")
Example #10
def generate_template():
    template_file = ""
    if not isdir(build_dir):
        mkdir(build_dir)
    if isdir(build_dir):
        template_file = build_dir + "/dekko.dekkoproject.pot"
        print("TemplateFile: " + template_file)
        cd(build_dir)
        print("Running cmake to generate updated template")
        cmake('..')
        print("Running make")
        make("-j2")
    if isfile(template_file):
        if isdir(po_dir):
            print("Moving template to po dir: " + po_dir)
            mv(template_file, po_dir)
        else:
            print("Couldn't find po dir: " + po_dir)
            cleanup()
            return
    else:
        cleanup()
        print("No template found for: " + template_file)
        return
    print("Cleaning up")
    cleanup()
    print("YeeHaa!")
    print("All done, you need to commit & push this to bitbucket now :-)")
    print("NOTE: this would also be a good time to sync with launchpad, run")
    print("  $ python3 launchpad_sync.py")
Example #11
    def __init__(self, test_file, temp_dir, cache=False):
        self.tic = time.time()
        self.test_file = test_file
        self.temp_dir = temp_dir
        self.cache = cache

        self.failed = False
        self.lines = []
        clean_test_file_name = re.sub('^' + re.escape(test_root('data/')), '',
                                      test_file)
        self.say('{}', test_started(clean_test_file_name))
        self.say("Testing {}...", bold(clean_test_file_name))

        self.style = self._get_style()
        sh.mkdir('-p', fail_path(self.style))
        self.style_args = Test._get_style_options(self.style)
        if self.style_args:
            self.say("\tstyling: {}", shell_join(self.style_args))
        self.bib_args = Test._get_bib_options(test_file)
        if self.bib_args:
            self.say("\tbibliography: {}", self.bib_args)
        self.options = self.style_args + self.bib_args

        self.test_name = os.path.join(self.style, os.path.basename(test_file))
        self.test_out = os.path.join(self.temp_dir, self.test_name)
        self.test_err = self.test_out + '.err'
        _, ext = os.path.splitext(test_file)
        self.test_new = self.test_out + '.new.' + ext
Example #12
    def __init__(self, source, dest, rsync_args='', user=None, **_):
        super().__init__()
        self.user = user
        self.args = (rsync_args, source, dest)

        dest_dir = os.path.split(dest)[0]
        if not os.path.isdir(dest_dir):
            if os.path.exists(dest_dir):
                logger.critical('Destination %s isn\'t valid because a file exists at %s', dest, dest_dir)
                sys.exit(1)
            sh.mkdir('-p', dest_dir)
            if user is not None:
                sh.chown('{}:'.format(user), dest_dir)

        if user is not None:
            self.rsync = sh.sudo.bake('-u', user, 'rsync')
        else:
            self.rsync = sh.rsync
        self.rsync = self.rsync.bake(source, dest, '--no-h', *rsync_args.split(), progress=True)
        self.daemon = True
        self.running = threading.Event()
        self._buffer = ''
        self._status = {
            'running': False,
            'source': source,
            'dest': dest,
        }
        self._status_lock = threading.Lock()
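
Note: Example #12 relies on sh's bake(), which returns a new command object with arguments frozen in; calling the baked object appends further arguments. A small sketch of the same pattern (the paths and the backup user are hypothetical):

import sh

# Freeze common flags into a reusable command object.
rsync = sh.rsync.bake("--archive", "--delete")
rsync("/tmp/src/", "/tmp/dst/")  # runs: rsync --archive --delete /tmp/src/ /tmp/dst/

# Baking through sudo, as in the example above, builds `sudo -u backup rsync ...`.
rsync_as_backup = sh.sudo.bake("-u", "backup", "rsync")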
Example #13
    def compile( self, source_dir, build_dir, install_dir ):
        package_source_dir = os.path.join( source_dir, self.dirname )
        assert( os.path.exists( package_source_dir ) )
        package_build_dir = os.path.join( build_dir, self.dirname )

        sh.cd( os.path.join( package_source_dir, 'scripts/Resources' ) )
        sh.sh( './copyresources.sh' )
        # the install target doesn't copy the stuff that copyresources.sh puts in place
        sh.cp( '-v', os.path.join( package_source_dir, 'bin/Release/Readme.txt' ), os.path.join( install_dir, 'Readme.meshy.txt' ) )
        sh.cp( '-v', '-r', os.path.join( package_source_dir, 'bin/Release_Linux/Resources/' ), install_dir )

        sh.mkdir( '-p', package_build_dir )
        sh.cd( package_build_dir )
        if ( platform.system() == 'Darwin' ):
            sh.cmake(
                '-G', 'Xcode',
                '-D', 'CMAKE_INSTALL_PREFIX=%s' % install_dir,
                '-D', 'CMAKE_MODULE_PATH=%s' % os.path.join( install_dir, 'CMake' ),
                package_source_dir,
                _out = sys.stdout )
            sh.xcodebuild( '-configuration', 'Release', _out = sys.stdout )
        else:
            sh.cmake(
                '-D', 'CMAKE_INSTALL_PREFIX=%s' % install_dir,
                '-D', 'CMAKE_MODULE_PATH=%s' % os.path.join( install_dir, 'lib/OGRE/cmake' ),
                package_source_dir,
                _out = sys.stdout )
            sh.make( '-j4', 'VERBOSE=1', _out = sys.stdout )
            sh.make.install( _out = sys.stdout )
Example #14
def copy_proguard_mapping(flavor, version_name):
    folder_path = 'releases'
    sh.mkdir("-p", folder_path)
    output_file = '%s/wikipedia-%s.mapping.tar.gz' % (folder_path, version_name)
    input_file = 'wikipedia/build/outputs/mapping/%s/release/mapping.txt' % flavor
    sh.tar('czf', output_file, input_file)
    print(' proguard mapping: %s' % output_file)
Example #15
def create(version_number):
    heading1("Creating new version based on Fedora " + version_number + "\n")

    # Update git and create new version.
    heading2("Updating master branch.")
    print(git.checkout("master"))
    print(git.pull())  # Bring branch current.

    heading2("Creating new branch")
    print(git.checkout("-b" + version_number))  # Create working branch.

    # Get kickstart files.
    heading2("Creating fedora-kickstarts directory\n")
    mkdir("-p", (base_dir + "/fedora-kickstarts/"))
    cd(base_dir + "/fedora-kickstarts/")

    heading2("Downloading Fedora kickstart files.")
    ks_base = "https://pagure.io/fedora-kickstarts/raw/f" \
              + version_number + "/f"

    for file in ks_files:
        file_path = ks_base + "/fedora-" + file

        print ("Downloading " + file_path)
        curl("-O", file_path)
Example #16
def main():

    data_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), '../../..', 'data'))
    log('data dir: {}'.format(data_dir))

    # gpg insists on these permissions
    oce_gpg_dir = '{}/oce/.gnupg'.format(data_dir)
    if os.path.exists(oce_gpg_dir):
        sh.chmod('--recursive', 'go-rwx', oce_gpg_dir)
        log('prepared oce')

    if USE_POSTGRESQL:
        # postgres must own its config dir
        sh.chown('-h', '--recursive', 'postgres:postgres', '/etc/postgresql')

    # we need a dir for other persistent data that is not normally in data_dir
    persistent_dir = '{}/persistent'.format(data_dir)
    if not os.path.exists(persistent_dir):
        sh.mkdir('--parents', persistent_dir)
    sh.chown('goodcrypto:goodcrypto', persistent_dir)
    sh.chmod('g+rx', persistent_dir)
    sh.chmod('o-rwx', persistent_dir)
    log('prepared {}'.format(persistent_dir))

    # root should own these subdirectories
    django_dir = '{}/django'.format(persistent_dir)
    if not os.path.exists(django_dir):
        sh.mkdir(django_dir)
    sh.chown('goodcrypto:goodcrypto', django_dir)
    sh.chmod('go-rwx', django_dir)

    return 0
Example #17
    def update_cache(self):
        if self.repo_url == '':
            print("repo_url is not set, cannot update")
            return

        if not self.test_cache():
            self.purge_cache()
            mkdir(os.path.join(self.cache_dir, 'repodata'))

            index_file_url = '/'.join([self.repo_url, self.index_file])
            index_file_path = os.path.join(self.cache_dir, self.index_file)

            try:
                print("Downloading index file '{0}' --> '{1}' ...".format(
                    index_file_url, index_file_path
                ))
                urlretrieve(index_file_url, index_file_path)
            except:
                self.broken = True
                return

            try:
                xmlroot = etree.parse(index_file_path).getroot()
                xmlns = xmlroot.nsmap[None]
                for item in xmlroot.findall("{{{0}}}data".format(xmlns)):
                    for subitem in item.findall("{{{0}}}location".format(xmlns)):
                        location = subitem.get('href')
                        url = '/'.join([self.repo_url, location])
                        path = '/'.join([self.cache_dir, location])
                        print("Downloading file '{0}' --> '{1}' ...".format(
                            url, path
                        ))
                        urlretrieve(url, path)
            except:
                self.broken = True
Example #18
def main():
    sh.mkdir("-p","log")
    sh.fuser("-k","-n","tcp","4040", _ok_code=[0,1])

    printc("COMPILE FRONTEND", color="blue")
    with chdir("frontend"):
        sh.make("compile", _out="../log/compile.txt")

    printc("UNIT TESTS", color="blue")
    fail = False
    try:
        with chdir("frontend"):
            sh.npm.test(_out="../log/unit_tests.txt")
        printc("PASS UNIT TESTS", color="green")
    except:
        with open("log/unit_tests.txt","r") as fd:
            print(fd.read())
        printc("FAIL UNIT TESTS", color="red")
        fail = True

    if fail:
        printc("FAIL", color="red")
        sys.exit(1)
    else:
        printc("SUCCESS", color="green")
    sys.exit(0)
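
Note: the _ok_code list above tells sh which exit codes to accept as success; any other nonzero exit raises sh.ErrorReturnCode with the captured output attached, a pattern several examples on this page depend on. A minimal sketch:

import sh

# fuser exits 1 when no process matched the socket; accept that as success.
sh.fuser("-k", "-n", "tcp", "4040", _ok_code=[0, 1])

try:
    sh.false()  # the false(1) utility always exits 1
except sh.ErrorReturnCode as e:
    # stdout/stderr from the failed command are captured as bytes
    print("failed with code", e.exit_code, e.stderr.decode())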
Example #19
def get_receive_errors(rev_old, rev_new, file_type, function):
  checkable = True
  if file_type == "js":
    checkable = config.getboolean("receive", "CHECK_JAVASCRIPT")
  elif file_type == "php":
    checkable = config.getboolean("receive", "CHECK_PHP")

  if not checkable:
    return None

  files = _get_receive_files(rev_old, rev_new, file_type)
  if not files:
    return None

  tmp_dir = config.get("receive", "TMP_DIR")
  errors = []
  for file_path in files:
    mkdir("-p", "/".join((tmp_dir + file_path).split("/")[:-1]))
    system("git show %s:%s > %s" % (rev_new, file_path, tmp_dir + file_path))
    if path.exists(tmp_dir + file_path):
      file_error = function(tmp_dir + file_path)
      if file_error:
        errors.append(file_path + file_error)

  rm("-rf", tmp_dir)
  return "\n".join(errors)
Example #20
    def test_git_dir_from_subdir(self):
        sh.git('init')
        sh.mkdir('foo')
        expected = os.path.join(os.getcwd(), '.git')

        sh.cd('foo')
        self.assertEqual(expected, git_dir())
Example #21
def git_clone_to_local(dest_directory, webhook_data):
    git = sh.git.bake()
    logger.debug('Making destination directory %s' % dest_directory)
    print ('Making destination directory %s' % dest_directory)
    sh.mkdir('-p', dest_directory)
    sh.cd(dest_directory)
    logger.debug("checking for repo_name %s in %s" % (webhook_data.repo_name, sh.pwd()))
    if not os.path.exists(webhook_data.repo_name):
        logger.debug("Cloning new repository")
        print(git.clone(webhook_data.repo_url, webhook_data.repo_name))
    sh.cd(webhook_data.repo_name)
    print(git.fetch('--all'))

    try:
        git('show-ref', '--heads', webhook_data.branch_name)
        branch_exists = True
    except:
        branch_exists = False

    if branch_exists is False and not webhook_data.is_tag():
        print(git.checkout('-b', webhook_data.branch_name,
                           'origin/%s' % webhook_data.branch_name))
    elif branch_exists:
        git.checkout(webhook_data.branch_name)

    print(git.pull())
    print(webhook_data.before, webhook_data.after)
Example #22
def copy_assets():
    """copy assets for static serving"""
    proj()

    print(". copying assets ...")

    copy_patterns = {
        "dist": ["./static/lib/jquery-1.8.3.min.js"] +
        sh.glob("./static/config/*.json") +
        sh.glob("./static/fragile-min.*"),

        "dist/font": sh.glob("./static/lib/awesome/font/*"),
        "dist/svg": sh.glob("./static/svg/*.svg"),
        "dist/img": sh.glob("./static/img/*.*") or [],
        
        "dist/docs/assets": sh.glob("./docs/assets/*.*") or [],
    }

    for dst, copy_files in copy_patterns.items():
        if not os.path.exists(dst):
            sh.mkdir("-p", dst)

        for c_file in copy_files:
            print "... copying", c_file, dst
            sh.cp("-r", c_file, dst)

    wa_cache = "./dist/.webassets-cache"

    if os.path.exists(wa_cache):
        sh.rm("-r", wa_cache)
Example #23
def ensure_syncer_dir():
    if path.isdir(syncer_dir):
        return

    username = input('GitHub username: ')
    password = getpass('GitHub password: ')
    sh.git('clone', 'https://%s:%s@github.com/%s/%s.git' % (username, password, username, SYNCER_REPO_NAME), syncer_dir)

    needs_commit = False
    sh.cd(syncer_dir)
    if not path.isfile('manifest.json'):
        sh.touch('manifest.json')

    if not path.isdir('content'):
        sh.mkdir('content')

    if not path.isdir('backup'):
        sh.mkdir('backup')

    if not path.isfile('.gitignore'):
        needs_commit = True
        with open('.gitignore', 'w') as gitignore_file:
            gitignore_file.write('backup')

    if needs_commit:
        sh.git('add', '-A')
        sh.git('commit', '-m', 'Setting up scaffolding.')
Example #24
def pull_file_from_device(serial_num, file_path, file_name, output_dir):
    if not os.path.exists(output_dir):
        sh.mkdir("-p", output_dir)
    output_path = "%s/%s" % (output_dir, file_path)
    if os.path.exists(output_path):
        sh.rm('-rf', output_path)
    adb_pull(file_path + '/' + file_name, output_dir, serial_num)
Example #25
 def import_map(self, path, mode='symlink'):
     """
     will populate the maps dir with a SAM or BAM file.
     TODO: other modes might be: copy, move
     """
     mkdir('-p', "%s/maps" % self.sample.path)
     if mode == 'symlink':
         ln('-s', path, self.sam_path)
Example #26
 def verify_dir(self, dirs):
     if os.path.exists(dirs):
         return False
     else:
         if re.search('undefined', dirs):
             return False
         else:
             sh.mkdir(dirs)
             return True
Example #27
def copy_artifacts(flavor):
    folder_path = 'releases'
    sh.mkdir("-p", folder_path)
    version_name = get_version_name_from_apk(get_output_apk_file_name(flavor, 'universal'))
    copy_apk(flavor, version_name, 'universal')
    copy_apk(flavor, version_name, 'armeabi-v7a')
    copy_apk(flavor, version_name, 'arm64-v8a')
    copy_apk(flavor, version_name, 'x86')
    copy_apk(flavor, version_name, 'x86_64')
Example #28
    def setup(self, git_repo_dir):
        sh.mkdir("-p", join(self.btd, "var/log", self.pkg_name))
        
        lib_base = join(self.btd, "usr/lib/api")
        sh.mkdir("-p", lib_base)

        for subdir in ["api", "models"]:
            merge = lib_base.endswith(subdir)
            sh.cp("-r", join(git_repo_dir, "src", subdir), lib_base, T=merge)
Example #29
 def create_dir(self,request, *args, **kwargs):
     detail_set = {}
     dirname = request.DATA['dir']
     newdir = request.DATA['dir_name']
     full_dir_name = dirname + '/' + newdir
     print(full_dir_name)
     sh.mkdir('-p', full_dir_name)
     detail_set = self.common_list_dir(request)
     return Response(detail_set)
Example #30
	def install_tool(self):
		sh.rm("-rf", S3CMD_HOME)
		sh.rm("-f", TOOL_HOME + "/bin/s3cmd")
		sh.mkdir("-p", S3CMD_HOME)
		sh.virtualenv(S3CMD_HOME)
		sh.mkdir("-p", TOOL_HOME + "/bin")
		pip = sh.Command(S3CMD_HOME + "/bin/pip")
		pip("install", "s3cmd")
		sh.ln("-s", S3CMD_HOME + "/bin/s3cmd", TOOL_HOME + "/bin/s3cmd")
Example #31
def gen_model_code(model_codegen_dir,
                   platform,
                   model_file_path,
                   weight_file_path,
                   model_sha256_checksum,
                   weight_sha256_checksum,
                   input_nodes,
                   output_nodes,
                   runtime,
                   model_tag,
                   input_shapes,
                   dsp_mode,
                   embed_model_data,
                   winograd,
                   obfuscate,
                   model_graph_format,
                   data_type,
                   graph_optimize_options):
    bazel_build_common("//mace/python/tools:converter")

    if os.path.exists(model_codegen_dir):
        sh.rm("-rf", model_codegen_dir)
    sh.mkdir("-p", model_codegen_dir)

    sh.python("bazel-bin/mace/python/tools/converter",
              "-u",
              "--platform=%s" % platform,
              "--model_file=%s" % model_file_path,
              "--weight_file=%s" % weight_file_path,
              "--model_checksum=%s" % model_sha256_checksum,
              "--weight_checksum=%s" % weight_sha256_checksum,
              "--input_node=%s" % input_nodes,
              "--output_node=%s" % output_nodes,
              "--runtime=%s" % runtime,
              "--template=%s" % "mace/python/tools",
              "--model_tag=%s" % model_tag,
              "--input_shape=%s" % input_shapes,
              "--dsp_mode=%s" % dsp_mode,
              "--embed_model_data=%s" % embed_model_data,
              "--winograd=%s" % winograd,
              "--obfuscate=%s" % obfuscate,
              "--output_dir=%s" % model_codegen_dir,
              "--model_graph_format=%s" % model_graph_format,
              "--data_type=%s" % data_type,
              "--graph_optimize_options=%s" % graph_optimize_options,
              _fg=True)
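
Note: _fg=True above runs the converter in the foreground, so its output streams straight to this terminal instead of being captured; the default behavior buffers stdout into the return value. A short sketch of the difference:

import sh

# Default: stdout is captured and available as the return value.
listing = str(sh.ls("-la"))

# Foreground: nothing is captured; the child inherits the terminal.
# Useful for long-running builds whose progress should stream live.
sh.ls("-la", _fg=True)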
Example #32
def proxyCommand(host, port, chargingport, glassfishport):
    print("Installing logic proxy")
    name = proxy.get("url").split("/")[-1][:-4]
    cd(name)
    bash('install.sh')
    if not os.path.isfile("config.js"):
        shutil.copy2("config.js.template", "config.js")

        with open("config.js") as f:
            text = f.read()

        text = text.replace("config.port = 80", "config.port = {}".format(port))\
                   .replace("'/proxy'", "''")

        texts = text.split("\n")
        texts = texts[:59] + generate_endpoints(glassfishport, chargingport) + texts[121:]

        text = "\n".join(texts)

        with open("config.js", "w") as f:
            f.write(text)

    if os.path.isdir('indexes'):
        rm('-rf', 'indexes')

    mkdir('indexes')
    node('fill_indexes.js')

    print("""
    Finished!
    Now, go to your IdM instance (e.g. https://account.lab.fiware.org) and create an application with this settings:

    - URL: http://{host}:{port}
    - Callback URL: http://{host}:{port}/auth/fiware/callback

    Create a role called "seller"
    Attach the role to the users you prefer.
    Modify config.js file with:

    - config.oauth2.clientID: The client ID that you got when you created the Application
    - config.oauth2.clientSecret: The client Secret that you got when you created the Application
    - config.oauth2.callbackURL = http://{host}:{port}/auth/fiware/callback

    Please refer to http://business-api-ecosystem.readthedocs.io/en/latest/installation-administration-guide.html#configuration
    for details on configuration settings
    """.format(host=host, port=port))
Example #33
def merge_opencl_parameters(binaries_dirs,
                            cl_parameter_file_name,
                            output_file_path):
    cl_bin_dirs = []
    for d in binaries_dirs:
        cl_bin_dirs.append(os.path.join(d, "opencl_bin"))
    # create opencl binary output dir
    opencl_binary_dir = os.path.dirname(output_file_path)
    if not os.path.exists(opencl_binary_dir):
        sh.mkdir("-p", opencl_binary_dir)
    kvs = {}
    for binary_dir in cl_bin_dirs:
        binary_path = os.path.join(binary_dir, cl_parameter_file_name)
        if not os.path.exists(binary_path):
            continue

        six.print_('generate opencl parameter from', binary_path)
        with open(binary_path, "rb") as f:
            binary_array = np.fromfile(f, dtype=np.uint8)

        idx = 0
        size, = struct.unpack("Q", binary_array[idx:idx + 8])
        idx += 8
        for _ in six.moves.range(size):
            key_size, = struct.unpack("i", binary_array[idx:idx + 4])
            idx += 4
            key, = struct.unpack(
                str(key_size) + "s", binary_array[idx:idx + key_size])
            idx += key_size
            value_size, = struct.unpack("i", binary_array[idx:idx + 4])
            idx += 4
            kvs[key] = binary_array[idx:idx + value_size]
            idx += value_size

    output_byte_array = bytearray()
    data_size = len(kvs)
    output_byte_array.extend(struct.pack("Q", data_size))
    for key, value in six.iteritems(kvs):
        key_size = len(key)
        output_byte_array.extend(struct.pack("i", key_size))
        output_byte_array.extend(struct.pack(str(key_size) + "s", key))
        value_size = len(value)
        output_byte_array.extend(struct.pack("i", value_size))
        output_byte_array.extend(value)

    np.array(output_byte_array).tofile(output_file_path)
Example #34
 def restore_attachments(self, zipfile, docker=False):
     unzip = sh.unzip.bake('-x', '-qq', '-n')
     restore_folder = os.path.join(self.data_dir,
                                   'filestore',
                                   self.target_db)
     sh.mkdir('-p', restore_folder)
     # unzip will place files in <datadir>/filestore/<dbname>/filestore,
     # we create a symlink to <datadir>/filestore/<dbname> so they wind up
     # in the right spot
     restore_folder_faulty = os.path.join(restore_folder, 'filestore')
     sh.ln('-s', restore_folder, restore_folder_faulty)
     unzip(zipfile, 'filestore/*', '-d', restore_folder)
     # cleanup the symlink
     sh.rm(restore_folder_faulty)
     # When running in docker mode, change permissions
     if docker:
         sh.chown('-R', '999:999', self.data_dir)
Example #35
def git_mv_dir(dir_path, package):
    """
    Performs git mv from main package to a <.package> subpackage, when called
    """
    # Need to do the move in two forms, since we can not
    # move a directory to be its own subdirectory in one step
    sh.git.mv(
        p(dir_path, 'src/main/java/org/wikipedia'),
        p(dir_path, 'src/main/java/org/' + package)
    )

    sh.mkdir('-p', p(dir_path, 'src/main/java/org/wikipedia'))

    sh.git.mv(
        p(dir_path, 'src/main/java/org/' + package),
        p(dir_path, 'src/main/java/org/wikipedia')
    )
Example #36
 def test_correct_relative_paths(self, tmpdir):
     # - make a tempdir
     # - add two directories A and B
     # - add a file to A
     # - make a link to A/the_file from in B using python
     # - make a link to A/the_file from in B using relative paths in sh
     # - test that the the two links have the same representation
     with sh.pushd(tmpdir):
         sh.mkdir("A")
         sh.mkdir("B")
         target = "A/some_file"
         sh.touch(target)
         link1 = "B/link1"
         link2 = "B/link2"
         add_relative_symlink(target, link1)
         sh.ln("--symbolic", "--relative", target, link2)
         assert os.readlink(link1) == os.readlink(link2)
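
Note: sh.pushd above is a context manager that changes the working directory only for the body of the with block, unlike sh.cd, which changes it for the rest of the process. A minimal sketch:

import os
import sh

before = os.getcwd()
with sh.pushd("/tmp"):
    sh.touch("pushd_demo")    # runs with /tmp as the working directory
assert os.getcwd() == before  # the original directory is restored on exit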
Example #37
def safely_clean_dir(mount_point, user=False):
    if not os.path.exists(mount_point):
        sh.mkdir('-p', mount_point)
        return 0
    try:
        sh.mountpoint(mount_point, '-q')
        logger.error('Error: Mount point is sill occupied by others: ' +
                     mount_point)
        exit(1)
    except sh.ErrorReturnCode:
        pass
    if user:
        sh.rm('-rf', mount_point, _fg=True)
    else:
        sh.sudo.rm('-rf', mount_point, _fg=True)
    sh.mkdir('-p', mount_point)
    return 0
Example #38
def proxyCommand(host, port, chargingport, glassfishport):
    print("Installing logic proxy")
    name = proxy.get("url").split("/")[-1][:-4]
    cd(name)
    npm("install")
    if not os.path.isfile("config.js"):
        shutil.copy2("config.js.template", "config.js")

        text = ""
        with open("config.js") as f:
            text = f.read()

        text = text.replace("config.port = 80", "config.port = {}".format(port))\
                   .replace("'/proxy'", "''")

        texts = text.split("\n")
        texts = texts[:47] + generate_endpoints(glassfishport,
                                                chargingport) + texts[109:]

        text = "\n".join(texts)

        with open("config.js", "w") as f:
            f.write(text)

        mkdir('indexes')
        node('fill_indexes.js')

    print("""
    Finished!
    Now, go to https://account.lab.fiware.org and create an application with this settings:

    - URL: http://{host}:{port}
    - Callback URL: http://{host}:{port}/auth/fiware/callback

    Create a role called "seller"
    Attach the role to the users you prefer.
    Modify config.js file with:

    - config.oauth2.clientID: The client ID that you got when you created the Application
    - config.oauth2.clientSecret: The client Secret that you got when you created the Application
    - config.oauth2.callbackURL = http://{host}:{port}/auth/fiware/callback

    Once you've done all, execute the proxy with:
    node server.js
    """.format(host=host, port=port))
Example #39
def generate_pep(input_dir, output_dir):
    sh.mkdir("-pv", output_dir)
    len_arr = os.popen(
        "ls -l {}| grep '^-' | wc -l".format(input_dir)).readlines()
    # parse the `wc -l` output (e.g. "42\n") into an int
    length = int(len_arr[0].strip())
    print(length)
    for i in range(1, length + 1):
        f = open("{0}/{1}.fa".format(input_dir, i), 'r')
        g = open("{0}/{1}.pep".format(output_dir, i), 'a+')

        while 1:
            line = f.readline()
            if not line:
                f.close()
                break
            if (line[0] != ">"):
                g.writelines(line)
Example #40
def setup():
    # =================
    sentry_sdk.init(
        "https://[email protected]/1533664")
    # =================

    global HOME, arch, db_conn
    HOME = getenv('HOME', '/home/pi')
    arch = machine()

    signal(SIGUSR1, sigusr1)
    signal(SIGALRM, sigalrm)

    load_settings()
    db_conn = db.conn(settings['database'])

    sh.mkdir(SCREENLY_HTML, p=True)
    html_templates.black_page(BLACK_PAGE)
Example #41
    def prepare_data(self):
        has_cache_files = False

        try:
            cache_dir = sh.ls(FLAGS.cache_dir)
            if 'train.set' in cache_dir and 'valid.set' in cache_dir:
                has_cache_files = True
        except Exception as e:
            logging.error(e)
            sh.mkdir(FLAGS.cache_dir)

        if not has_cache_files:
            data_list = self._preprocess_qa_data(self.tokenizer)
            train_list, valid_list = train_test_split(
                data_list, test_size=FLAGS.test_size, random_state=0)

            torch.save(train_list, os.path.join(FLAGS.cache_dir, 'train.set'))
            torch.save(valid_list, os.path.join(FLAGS.cache_dir, 'valid.set'))
Example #42
def start1(usrID, proID):
    try:
        sh.mkdir("/home/" + usrID)
        sh.chmod("777", "/home/" + usrID)
        sh.docker("run", "-itd", "-v", "/home/" + usrID + ":/home/" + usrID,
                  "--name", proID + usrID, proID)  #创建容器
        sh.docker("exec", proID + usrID, 'service', 'ssh', 'start')  #容器开启ssh服务
        #以下为返回容器id,用户id,容器ip的操作
        containerID = str(
            sh.docker("inspect", "-f", "{{.Config.Hostname}}",
                      proID + usrID)).replace('\n', '').replace('\r', '')
        containerIP = str(
            sh.docker("inspect", "-f", "{{.NetworkSettings.IPAddress}}",
                      proID + usrID)).replace('\n', '').replace('\r', '')
        return {'containerID': containerID, 'containerIP': containerIP}
    except Exception as e:
        error["error"] = str(e)
        return error
Example #43
    def update_cache(self):
        if not self.test_cache():
            rm(self.path, '-rf')
            mkdir('-p', self.path)

            index_file_url = '/'.join(
                [self.repo_url.url.geturl(), 'Packages.gz'])
            index_file_path = os.path.join(self.path, self.index_file)

            print("Downloading index file '{0}' --> '{1}' ...".format(
                index_file_url, index_file_path))
            try:
                with pushd(self.path):
                    wget(index_file_url, '-O', self.index_file + '.gz')
                    gzip('-d', self.index_file + '.gz')
            except Exception as err:
                print(str(err))
                self.broken = True
Example #44
def pull_binaries(abi, serialno, model_output_dirs, cl_built_kernel_file_name):
    compiled_opencl_dir = "/data/local/tmp/mace_run/interior/"
    mace_run_param_file = "mace_run.config"

    cl_bin_dirs = []
    for d in model_output_dirs:
        cl_bin_dirs.append(os.path.join(d, "opencl_bin"))
    cl_bin_dirs_str = ",".join(cl_bin_dirs)
    if cl_bin_dirs:
        cl_bin_dir = cl_bin_dirs_str
        if os.path.exists(cl_bin_dir):
            sh.rm("-rf", cl_bin_dir)
        sh.mkdir("-p", cl_bin_dir)
        if abi != "host":
            adb_pull(compiled_opencl_dir + cl_built_kernel_file_name,
                     cl_bin_dir, serialno)
            adb_pull("/data/local/tmp/mace_run/%s" % mace_run_param_file,
                     cl_bin_dir, serialno)
Example #45
def mifs(env):

    lDepFileParser = env.depParser

    # print (env.depParser.commands)

    lPaths = []
    for c in env.depParser.commands['src']:
        if splitext(c.FilePath)[1] != '.mif':
            continue
        lPaths.append(c.FilePath)

    if not lPaths:
        return

    sh.mkdir('-p', 'mif')
    for p in lPaths:
        sh.cp(p, 'mif/')
Example #46
    def copy_files(self):
        # Files to copy
        files_to_copy = []
        for root, dirnames, filenames in os.walk(self.source_mount):
            for file in filenames:
                files_to_copy.append(root + '/' + file)

        # Modify the path and copy the files
        for index, file in enumerate(files_to_copy):
            match = re.search("pyWinUSB\/\w{40}\/source\/(.*\/)", file)
            dest_file = "/".join(
                [self.destination_mount,
                 match.group(1) if match else ""])

            # Create the directory and copy
            self.event_handler.on_progress(len(files_to_copy), index, file)
            sh.mkdir(dest_file, "-p")
            sh.cp(file, dest_file)
Example #47
	def build(self, branch, sourceDir, buildDir, rebuild):
		msgcmd("Building branch {}".format(branch))
		try:
			if not path.exists(buildDir):
				mkdir("-p", buildDir)
			elif path.exists(buildDir + "/bin") and rebuild:
				rm("-rf", buildDir + "/bin")

			cd(buildDir)
			cmake("-C", self.configFile, sourceDir, _out=debugSh, _err=debugSh)
			cd(sourceDir)
			make("BUILD_DIR=" + buildDir, "NPROCS=4", _out=debugSh, _err=debugSh)
		except ErrorReturnCode as e:
			msgerr("Failed build branch {}".format(branch))
			return False
		else:
			msgstat("Sucess build branch {}".format(branch))
			return True
Example #48
File: device.py Project: lbqin/mace
 def pull(self, src_path, file_name, dst_path='.'):
     if not os.path.exists(dst_path):
         sh.mkdir("-p", dst_path)
     src_file = "%s/%s" % (src_path, file_name)
     dst_file = "%s/%s" % (dst_path, file_name)
     if os.path.exists(dst_file):
         sh.rm('-f', dst_file)
     six.print_("Pull %s to %s" % (src_path, dst_path))
     if self.system == SystemType.android:
         sh_commands.adb_pull(src_file, dst_file, self.address)
     elif self.system == SystemType.arm_linux:
         try:
             sh.scp('-r',
                    '%s@%s:%s' % (self.username, self.address, src_file),
                    dst_file)
         except sh.ErrorReturnCode_1 as e:
             six.print_("Pull Failed !", file=sys.stderr)
             raise e
Example #49
def merge(version_number):
    # Meld kickstart files together.
    heading1("Initiating kickstart file comparisons.")

    mkdir("-p", (base_dir + "/final-kickstarts/"))
    # for file in ks_files:
    #     fedora_ks = base_dir + "/fedora-kickstarts/fedora-" + file
    #     tananda_ks = base_dir + "/tananda-kickstarts/tananda-" + file
    #     final_ks = base_dir + "/final-kickstarts/final-" + file

    #     print(touch(final_ks))

    #     meld("--auto-merge", fedora_ks, final_ks, tananda_ks)

    fedora_ks = base_dir + "/fedora-kickstarts/"
    tananda_ks = base_dir + "/tananda-kickstarts/"

    meld("--auto-compare", "--diff", fedora_ks, tananda_ks)
Example #50
def create(template, project_name):
    templates = get_templates()
    if template not in templates:
        click.echo('%s template not found' % template, err=True)
        return
    project_dir = './' + project_name
    sh.mkdir('-p', project_dir)
    sh.cp('-rf', os.path.join(HERE, f'templates/{template}/'), project_dir)
    for f in sh.find(project_dir, '-name', '*.py'):
        sh.sed('-i', '', '-e', 's/%s/%s/g' % ('proj', project_name), f.strip())
    for f in sh.find(project_dir, '-name', 'Dockerfile*'):
        sh.sed('-i', '', '-e', 's/%s/%s/g' % ('proj', project_name), f.strip())
    # hack: custom model import
    if template == 'simple':
        sh.sed('-i', '', '-e', 's/%s/%s/g' % ('proj', project_name),
               './%s/migrations/script.py.mako' % project_name)
    sh.mv(os.path.join(project_dir, 'proj'),
          os.path.join(project_dir, project_name))
Example #51
    def apply(self, config, containers):
        entry_point = {'type': 'cassandra-client'}
        entry_point['ip'] = containers[0]['manage_ip']

        # Get the storage information.
        storage_entry = self._find_cassandra_storage(containers)
        if not storage_entry:
            # The Cassandra client is currently only compatible with a
            # Cassandra backend. So just return an error.
            return None, None

        # Otherwise record the storage type and get the seed node.
        entry_point['cassandra_url'] = storage_entry['seed']

        # Create a new configuration directory, and place
        # into the template directory.
        config_dirs = []

        try:
            host_dir = "/tmp/" + self._generate_config_dir(config.uuid)
            try:
                sh.mkdir('-p', host_dir)
            except:
                sys.stderr.write('could not create config dir ' + host_dir)

            self._apply_cassandra(host_dir, entry_point, config, containers[0])

            # See if we need to apply
            if 'titan' in storage_entry:
                self._apply_titan(host_dir, storage_entry, containers[0])
                out_file = open(host_dir + '/servers', 'w+')
                out_file.write("%s %s" %
                               (storage_entry['titan']['ip'], 'rexserver'))
                out_file.close()

            # The config dirs specifies what to transfer over. We want to
            # transfer over specific files into a directory.
            for c in containers:
                config_dirs.append(
                    [c['container'], host_dir + '/*', config.config_directory])
        except IOError as err:
            sys.stderr.write('' + str(err))

        return config_dirs, entry_point
Example #52
    def apply(self, config, containers):
        """
        Apply the configuration to the instances
        """
        entry_point = {'type': 'spark'}
        entry_point['ip'] = containers[0]['manage_ip']
        config_dirs = []

        new_config_dir = "/tmp/" + self._generate_config_dir(config.uuid)
        try:
            sh.mkdir('-p', new_config_dir)
        except:
            sys.stderr.write('could not create config dir ' + new_config_dir)

        # This file records all instances so that we can
        # generate the hosts file.
        entry_point['instances'] = []
        for server in containers:
            entry_point['instances'].append(
                [server['data_ip'], server['host_name']])

        if not 'compute' in containers[0]:
            # This is being called as a compute service.
            slave_file = open(new_config_dir + '/slaves', 'w+')
            entry_point['master'] = containers[0]['host_name']
            entry_point['instances'] = []
            master = entry_point['master']
            for server in containers:
                # `server` is a dict, so compare its host name to the master's
                if server['host_name'] != master:
                    slave_file.write("%s\n" % server['host_name'])
            slave_file.close()
        else:
            # This is being called as a client service.
            # For the client, also include the host/IP of the compute service.
            compute = containers[0]['compute'][0]
            entry_point['master'] = compute['master']

        # Transfer the configuration.
        for c in containers:
            config_files = new_config_dir + '/*'
            config_dirs.append(
                [c['container'], config_files, config.config_directory])

        return config_dirs, entry_point
Example #53
class ImagesBatch(object):
    def __init__(self, path, filters):
        self.dp = int(os.path.basename(path))
        self.path = path
        self.svgs = []
        all_svgs = self.find_svg_files(path)
        filtered_svgs = self.filter_filenames(all_svgs, filters)
        self.svgs = self.abspath(filtered_svgs)

    @staticmethod
    def find_svg_files(path):
        return [p for p in glob(os.path.join(path, "*.svg"))]

    @staticmethod
    def filter_filenames(all_svg_files, filters=None):
        relative_svg_files = []
        if filters:
            for filter in filters:
                if os.path.join(source_path, filter) in all_svg_files:
                    relative_svg_files.append(os.path.join(
                        source_path, filter))
        else:
            relative_svg_files = all_svg_files
        return relative_svg_files

    @staticmethod
    def abspath(filenames):
        output = []
        for filename in filenames:
            output.append(os.path.abspath(filename))
        return output

    def _do_export(self, density, input_path, drawable):
        nonspecific = ".nonspecific." in input_path
        noflip = nonspecific or input_path.endswith(".noflip.svg")
        file_name = os.path.basename(
            os.path.splitext(input_path)[0].split(".")[0] + ".png")
        folder_path = os.path.join(OUTPUT_PATH_PREFIX, drawable)
        sh.mkdir("-p", folder_path)
        output_file_path = os.path.join(folder_path, file_name)
        output_precrush_path = output_file_path + "_"
        px = int(DENSITIES[density] * self.dp)
        sh.rsvg_convert(input_path, "-a", h=px, o=output_precrush_path)
        sh.pngcrush("-q", "-reduce", output_precrush_path, output_file_path)
        sh.rm(output_precrush_path)
        return output_file_path, noflip

    def _do_flop(self, density, export_result):
        # tuple parameters in a def signature are Python 2 only; unpack here instead
        input_path, noflip = export_result
        if noflip:
            return
        folder_name = os.path.join(OUTPUT_PATH_PREFIX,
                                   "drawable-ldrtl-" + density)
        output_file_path = os.path.join(folder_name,
                                        os.path.basename(input_path))
        sh.mkdir("-p", folder_name)
        sh.convert(input_path, "-flop", output_file_path)
Example #54
def html():
    proj()
    print ". generating production html"
    sh.mkdir("-p", "dist/frame")

    blockd3_path = os.path.abspath(os.path.dirname(__file__))
    sys.path.append(blockd3_path)

    from app.app import make_app
    app = make_app("prod")

    prod_files = {
        "dist/index.html": "/dist/",
        "dist/frame/index.html": "/dist/frame/"
    }

    for prod_file, url in prod_files.items():
        print ".. writing ", prod_file
        open(prod_file, "w").write(app.test_client().get(url).data)
Example #55
 def setUp(self):
     """ Clone the test repo locally, then add and push a commit."""
     super(ArchiveRepositoryAuthenticatedMethodsTest, self).setUp()
     # Clone test repository localy.
     repo_origin = 'git@bitbucket.org:%s/%s.git' % (self.bb.username, self.bb.repo_slug)
     # TODO : Put the temp folder on the right place for windows.
     repo_folder = os.path.join(
         '/tmp',
         'bitbucket-' + ''.join(random.choice(string.digits + string.letters) for x in range(10)))
     sh.mkdir(repo_folder)
     sh.cd(repo_folder)
     self.pwd = sh.pwd().strip()
     sh.git.init()
     sh.git.remote('add', 'origin', repo_origin)
     # Add commit with empty file.
     sh.touch('file')
     sh.git.add('.')
     sh.git.commit('-m', '"Add empty file."')
     sh.git.push('origin', 'master')
Example #56
File: pupil.py Project: zhoupc/ease
    def plot_traces(self, outdir='./', show=False):
        """
        Plot existing traces to output directory.

        :param outdir: destination of plots
        """
        import seaborn as sns
        import matplotlib.pyplot as plt
        plt.switch_backend('GTK3Agg')

        for key in self.fetch.keys():
            print('Processing', key)
            with sns.axes_style('ticks'):
                fig, ax = plt.subplots(3, 1, figsize=(10, 6), sharex=True)

            r, center, contrast = (TrackedVideo.Frame() & key).fetch('major_r', 'center',
                                                                     'frame_intensity', order_by='frame_id')
            ax[0].plot(r)
            ax[0].set_title('Major Radius')
            c = np.vstack([cc if cc is not None else np.NaN * np.ones(2) for cc in center])

            ax[1].plot(c[:, 0], label='x')
            ax[1].plot(c[:, 1], label='y')
            ax[1].set_title('Pupil Center Coordinates')
            ax[1].legend()

            ax[2].plot(contrast)
            ax[2].set_title('Contrast (frame std)')
            ax[2].set_xlabel('Frames')
            try:
                sh.mkdir('-p', os.path.expanduser(outdir) + '/{animal_id}/'.format(**key))
            except:
                pass

            fig.suptitle(
                'animal id {animal_id} session {session} scan_idx {scan_idx} eye quality {eye_quality}'.format(**key))
            fig.tight_layout()
            sns.despine(fig)
            fig.savefig(outdir + '/{animal_id}/AI{animal_id}SE{session}SI{scan_idx}EQ{eye_quality}.png'.format(**key))
            if show:
                plt.show()
            else:
                plt.close(fig)
Example #57
def extractExonBases(
    bamPath,
    tempDir,
    threads,
    picardPath,
    bedAnnoPath,
    bufferSize,
    fastqDir,
    outDir,
    cutoff,
    splitInput=True,
):
    try:
        sh.mkdir(tempDir)
    except:
        logger.warning("{tempDir} existed")

    ### split bam
    logger.info("split bam start")
    splitedDir = f"{tempDir}splitedDir/"
    splitedCounts = threads
    splitBam(bamPath, splitedDir, splitedCounts, picardPath)
    logger.info("split bam end")

    ### get overlap region
    logger.info("get overlap region and generate lmdb database start")
    tsvPath = f"{tempDir}overlapRegion.tsv"
    getOverlapInfo(splitedDir, tsvPath, threads, bedAnnoPath, bufferSize)
    logger.info("get overlap region and generate lmdb database end")

    ### parse overlap region
    logger.info("parse temp file start")
    lmdbPath = f"{tempDir}overlapRegionLmdb/"
    getUsefulRegion(tsvPath, lmdbPath, threads)
    logger.info("parse temp file end")

    ### extract seq
    logger.info("extract exon base start")
    extractSeq(fastqDir, outDir, lmdbPath, threads, splitInput, cutoff)
    logger.info("extract exon base end")

    ### start empty temp files
    sh.rm("-rf", tempDir)
Example #58
def main(nproc_old, model_tags, region, npts, nproc, history, time,
         old_mesh_dir, old_model_dir, output_path):
    """
    submit a job to convert the mesh bin files to the netcdf file.
    """
    # * firstly we have to make a tmp file to store the
    # temp_directory = tempfile.mkdtemp()
    temp_directory = join(dirname(output_path), "." + basename(output_path))
    sh.mkdir("-p", temp_directory)
    # * generate the ppm model
    result = ""
    # ! fix MPI cache issue
    home = expanduser("~")
    julia_path = join(home, ".julia")
    result += f"export JULIA_DEPOT_PATH={julia_path}\n"
    result += f"TMPDIR=`mktemp -d`\n"
    result += f"mkdir $TMPDIR/compiled\n"
    # if use v1.1
    result += f"rsync -au $JULIA_DEPOT_PATH/compiled/v1.1 $TMPDIR/compiled/\n"
    result += f"export JULIA_DEPOT_PATH=$TMPDIR:$JULIA_DEPOT_PATH\n"
    result += "date; \n"
    # bin2ppm npts use nlat/nlon/ndep
    nlon, nlat, ndep = npts.split("/")
    npts_bin2ppm = "/".join([nlat, nlon, ndep])
    result += bin2ppm(nproc_old, model_tags, region, npts_bin2ppm, nproc,
                      old_mesh_dir, old_model_dir, temp_directory)
    # * convert to netcdf model
    # region in bin2ppm: lon1/lat1/lon2/lat2/dep1/dep2; region in ppm2netcdf: minlon/maxlon/minlat/maxlat/mindep/maxdep
    # we can treat 1 as the min, 2 as the max
    result += "date; \n"
    minlon, minlat, maxlon, maxlat, mindep, maxdep = region.split("/")
    region_ppm2netcdf = "/".join(
        [minlon, maxlon, minlat, maxlat, mindep, maxdep])
    # here the value of nproc should be latnproc*lonnproc
    latnproc, lonnproc = map(int, nproc.split("/"))
    nproc_ppm2netcdf = latnproc * lonnproc
    # ! we should change npts from nlon/nlat/ndep to latnpts/lonnpts/vnpts
    result += ppm2netcdf(temp_directory, region_ppm2netcdf, npts,
                         nproc_ppm2netcdf, model_tags, output_path, history)
    result += "date; \n"
    # * submit the job
    submit_job("bin2netcdf", result, None, nproc_ppm2netcdf, None, time, None,
               "icer_flexiable")
Example #59
def extract_sac(data, ctable, processes):
    """
    extract the downloaded data to the sac and pz format.
    """
    # * get dirname
    thedir = dirname(data)
    # * mkdir for the sac and pz files
    sh.mkdir("-p", join(thedir, "SAC"))
    sh.mkdir("-p", join(thedir, "PZ"))
    # * extract sac
    win32.extract_sac(data,
                      ctable,
                      with_pz=False,
                      outdir=join(thedir, "SAC"),
                      processes=processes)
    # * extract pz
    win32.extract_pz(ctable, outdir=join(thedir, "PZ"))
    # * return the sac and pz directory path
    return join(thedir, "SAC"), join(thedir, "PZ")
Example #60
def get_files():
    '''
    Obtain data files for skipjack as listed at http://www.iotc.org/English/meetings/wp/wpttcurrent.php
    '''

    # Make data directory if it does not yet exist
    sh.mkdir('source-data', p=True)

    # Retreive various files
    base_url = 'http://www.iotc.org/files/proceedings/2013/wptt/'
    for filename in (
            'CAT_TROP13.pdf',  # Data catalogue
            'NCTROP.zip',  # Catch data sources and quality information
            'CEALL.zip',  # All catch and effort data zipped up
            'CE_Reference.zip',  # Information on catch and effort files
            'FL_SKJ.zip',
            'SF_Reference.zip',
    ):
        urllib.urlretrieve(base_url + filename, 'source-data/' + filename)