Code example #1
File: package.py  Project: toxinu/ubik
    def remove(self, ignore_errors=False):
        # TODO: honor the ignore_errors argument (currently unused)
        stream_logger.info("   - %s" % self.name)

        # If the archive has not already been extracted
        if not os.path.exists("%s/%s" % (conf.get("settings", "cache"), self.name)):
            self.unarchive()

        self.import_control()
        # Pre Remove
        stream_logger.info("     | Pre Remove")
        self.control.pre_remove()

        # Remove
        stream_logger.info("     | Remove")
        files_list = open(os.path.join(conf.get("settings", "cache"), self.name, "files.lst")).readlines()
        for _file in files_list:
            try:
                os.remove(os.path.join(conf.get("settings", "packages"), _file.replace("\n", "")))
            except:
                pass
        # Post Remove
        stream_logger.info("     | Post Remove")
        self.control.post_remove()

        stream_logger.info("     | Clean")
        shutil.rmtree(os.path.join(conf.get("settings", "cache"), self.name))
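A note on the removal loop above: it never closes files.lst, and the bare except hides every failure, including KeyboardInterrupt. A minimal sketch of a tighter loop body, assuming the same conf object and cache layout:

import contextlib
import os

files_lst = os.path.join(conf.get("settings", "cache"), self.name, "files.lst")
with open(files_lst) as fh:  # the handle is closed on exit
    for line in fh:
        target = os.path.join(conf.get("settings", "packages"), line.rstrip("\n"))
        with contextlib.suppress(OSError):  # skip only filesystem errors
            os.remove(target)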
Code example #2
File: build.py  Project: teto/ycmd
def BuildYcmdLibs( args ):
  build_dir = mkdtemp( prefix = 'ycm_build.' )

  try:
    full_cmake_args = [ '-G', GetGenerator( args ) ]
    if OnMac():
      full_cmake_args.extend( CustomPythonCmakeArgs() )
    full_cmake_args.extend( GetCmakeArgs( args ) )
    full_cmake_args.append( p.join( DIR_OF_THIS_SCRIPT, 'cpp' ) )

    os.chdir( build_dir )
    subprocess.check_call( [ 'cmake' ] + full_cmake_args )

    build_target = ( 'ycm_support_libs' if 'YCM_TESTRUN' not in os.environ else
                     'ycm_core_tests' )

    build_command = [ 'cmake', '--build', '.', '--target', build_target ]
    if OnWindows():
      build_command.extend( [ '--config', 'Release' ] )
    else:
      build_command.extend( [ '--', '-j', str( NumCores() ) ] )

    subprocess.check_call( build_command )

    if 'YCM_TESTRUN' in os.environ:
      RunYcmdTests( build_dir )
  finally:
    os.chdir( DIR_OF_THIS_SCRIPT )
    rmtree( build_dir, ignore_errors = OnTravisOrAppVeyor() )
Code example #3
File: default.py  Project: markbree/web2py
def delete_plugin():
    """ Object delete handler """
    app=request.args(0)
    plugin = request.args(1)
    plugin_name='plugin_'+plugin
    if 'nodelete' in request.vars:
        redirect(URL('design',args=app))
    elif 'delete' in request.vars:
        try:
            for folder in ['models','views','controllers','static','modules']:
                path=os.path.join(apath(app,r=request),folder)
                for item in os.listdir(path):
                    if item.rsplit('.',1)[0] == plugin_name:
                        filename=os.path.join(path,item)
                        if os.path.isdir(filename):
                            shutil.rmtree(filename)
                        else:
                            os.unlink(filename)
            session.flash = T('plugin "%(plugin)s" deleted',
                              dict(plugin=plugin))
        except Exception:
            session.flash = T('unable to delete file plugin "%(plugin)s"',
                              dict(plugin=plugin))
        redirect(URL('design',args=request.args(0)))
    return dict(plugin=plugin)
Code example #4
File: gtsrb.py  Project: MartinThoma/algorithms
def _maybe_extract(fpath, dirname, descend=True):
    path = os.path.dirname(fpath)
    untar_fpath = os.path.join(path, dirname)
    if not os.path.exists(untar_fpath):
        print('Extracting contents of "{}"...'.format(dirname))
        tfile = zipfile.ZipFile(fpath, 'r')
        try:
            tfile.extractall(untar_fpath)
        except (Exception, KeyboardInterrupt) as e:
            if os.path.exists(untar_fpath):
                if os.path.isfile(untar_fpath):
                    os.remove(untar_fpath)
                else:
                    shutil.rmtree(untar_fpath)
            raise
        tfile.close()
    if descend:
        dirs = [os.path.join(untar_fpath, o)
                for o in os.listdir(untar_fpath)
                if os.path.isdir(os.path.join(untar_fpath, o))]
        if len(dirs) != 1:
            print("Error, found not exactly one dir: {}".format(dirs))
            sys.exit(-1)
        return dirs[0]
    else:
        return untar_fpath
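The pattern above, removing the partial output and re-raising when extraction fails, keeps interrupted runs from leaving a half-extracted directory behind. A generic sketch of the same idea (hypothetical helper, assuming a zip archive):

import shutil
import zipfile

def extract_or_cleanup(archive_path, dest):
    # Never leave a partially extracted dest behind.
    try:
        with zipfile.ZipFile(archive_path) as zf:
            zf.extractall(dest)
    except BaseException:
        # BaseException also covers KeyboardInterrupt, like the
        # (Exception, KeyboardInterrupt) tuple in the example.
        shutil.rmtree(dest, ignore_errors=True)
        raise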
Code example #5
File: kickstart.py  Project: rlayte/kickstart
    def copy_template():
        config_prompt(template)
        shutil.copytree(template, name)

        if os.path.exists('%s/%s' % (name, 'config.yaml')):
            os.remove('%s/%s' % (name, 'config.yaml'))

        for dirname, dirnames, files in os.walk(name):
            for d in dirnames:
                if d == options.template:
                    shutil.copytree('%s/%s' % (dirname, d), '%s/%s' % (dirname, name))
                    shutil.rmtree('%s/%s' % (dirname, d))

        for dirname, dirnames, files in os.walk(name):
            for filename in files:
                f = open('%s/%s' % (dirname, filename), 'r')
                lines = f.readlines()
                f.close()

                first_pass = [re.sub(r'{{\s*(\w+)\s*}}', replace_variable, line) for line in lines]
                new_lines = [re.sub(r'__config_(\w+)__', replace_variable, line) for line in first_pass]

                f = open('%s/%s' % (dirname, filename), 'w')
                f.write(''.join(new_lines))
                f.close()
Code example #6
File: test_util_hg.py  Project: bdacode/build-tools
    def testShareExtraFiles(self):
        shareBase = os.path.join(self.tmpdir, 'share')
        backup = os.path.join(self.tmpdir, 'backup')

        # Clone the original repo
        mercurial(self.repodir, self.wc, shareBase=shareBase)

        clone(self.repodir, backup)

        # Make the working repo have a new file. We need it to have an earlier
        # timestamp to trigger the odd behavior in hg, so use '-d yesterday'
        run_cmd(['touch', '-d', 'yesterday', 'newfile'], cwd=self.wc)
        run_cmd(['hg', 'add', 'newfile'], cwd=self.wc)
        run_cmd(['hg', 'commit', '-m', '"add newfile"'], cwd=self.wc)

        # Reset the share base to remove the 'add newfile' commit. We
        # overwrite repodir with the backup that doesn't have the commit,
        # then clone the repodir to a throwaway dir to create the new
        # shareBase. Now self.wc still points to shareBase, but the
        # changeset that self.wc was on is lost.
        shutil.rmtree(self.repodir)
        shutil.rmtree(shareBase)
        clone(backup, self.repodir)
        throwaway = os.path.join(self.tmpdir, 'throwaway')
        mercurial(self.repodir, throwaway, shareBase=shareBase)

        # Try and update our working copy
        mercurial(self.repodir, self.wc, shareBase=shareBase)

        self.assertFalse(os.path.exists(os.path.join(self.wc, 'newfile')))
Code example #7
 def tearDown(self):
     try:
         shutil.rmtree(FLAGS.volumes_dir)
     except OSError:
         pass
     notifier_api._reset_drivers()
     super(VolumeTestCase, self).tearDown()
Code example #8
File: create-launcher.py  Project: drmrboy/gpodder
def build_launcher(out_path, icon_path, file_desc, product_name, product_version,
                   company_name, entry_point, is_gui):

    src_ico = os.path.abspath(icon_path)
    target = os.path.abspath(out_path)

    file_version = product_version

    dir_ = os.getcwd()
    temp = tempfile.mkdtemp()
    try:
        os.chdir(temp)
        with open("launcher.c", "w") as h:
            h.write(get_launcher_code(entry_point))
        shutil.copyfile(src_ico, "launcher.ico")
        with open("launcher.rc", "w") as h:
            h.write(get_resource_code(
                os.path.basename(target), file_version, file_desc,
                "launcher.ico", product_name, product_version, company_name))

        build_resource("launcher.rc", "launcher.res")
        build_exe("launcher.c", "launcher.res", is_gui, target)
    finally:
        os.chdir(dir_)
        shutil.rmtree(temp)
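The mkdtemp / try / finally-rmtree shape above (and in examples #9, #12, and #29) is what tempfile.TemporaryDirectory automates. A minimal sketch, assuming Python 3.11+ for contextlib.chdir:

import contextlib
import tempfile

with tempfile.TemporaryDirectory() as temp, contextlib.chdir(temp):
    # build inside temp; the old cwd is restored and temp is removed
    # on exit, even if the body raises
    ...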
Code example #9
File: ez_setup.py  Project: BruceZu/compass-core
def _build_egg(egg, tarball, to_dir):
    """build egg."""
    # extracting the tarball
    tmpdir = tempfile.mkdtemp()
    log.warn('Extracting in %s', tmpdir)
    old_wd = os.getcwd()
    try:
        os.chdir(tmpdir)
        tar = tarfile.open(tarball)
        _extractall(tar)
        tar.close()

        # going in the directory
        subdir = os.path.join(tmpdir, os.listdir(tmpdir)[0])
        os.chdir(subdir)
        log.warn('Now working in %s', subdir)

        # building an egg
        log.warn('Building a Setuptools egg in %s', to_dir)
        _python_cmd('setup.py', '-q', 'bdist_egg', '--dist-dir', to_dir)

    finally:
        os.chdir(old_wd)
        shutil.rmtree(tmpdir)

    # returning the result
    log.warn(egg)
    if not os.path.exists(egg):
        raise IOError('Could not build the egg.')
Code example #10
    def test_no_setup_cfg(self):
        # makes sure easy_install as a command (main)
        # doesn't use a setup.cfg file that is located
        # in the current working directory
        dir = tempfile.mkdtemp()
        setup_cfg = open(os.path.join(dir, 'setup.cfg'), 'w')
        setup_cfg.write('[easy_install]\nfind_links = http://example.com')
        setup_cfg.close()
        setup_py = open(os.path.join(dir, 'setup.py'), 'w')
        setup_py.write(SETUP_PY)
        setup_py.close()

        from setuptools.dist import Distribution

        def _parse_command_line(self):
            msg = 'Error: a local setup.cfg was used'
            opts = self.command_options
            if 'easy_install' in opts:
                assert 'find_links' not in opts['easy_install'], msg
            return self._old_parse_command_line()

        Distribution._old_parse_command_line = Distribution.parse_command_line
        Distribution.parse_command_line = _parse_command_line

        old_wd = os.getcwd()
        try:
            os.chdir(dir)
            reset_setup_stop_context(
                lambda: self.assertRaises(SystemExit, main, [])
            )
        finally:
            os.chdir(old_wd)
            shutil.rmtree(dir)
            Distribution.parse_command_line = Distribution._old_parse_command_line
Code example #11
def newprovision(names, session, smbconf, provdir, logger):
    """Create a new provision.

    This provision will be the reference for knowing what has changed
    since the latest upgrade in the current provision.

    :param names: List of provision parameters
    :param session: Session object
    :param smbconf: Path to the smb.conf file
    :param provdir: Directory where the provision will be stored
    :param logger: A Logger
    """
    if os.path.isdir(provdir):
        shutil.rmtree(provdir)
    os.mkdir(provdir)
    logger.info("Provision stored in %s", provdir)
    return provision(logger, session, smbconf=smbconf,
            targetdir=provdir, samdb_fill=FILL_FULL, realm=names.realm,
            domain=names.domain, domainguid=names.domainguid,
            domainsid=names.domainsid, ntdsguid=names.ntdsguid,
            policyguid=names.policyid, policyguid_dc=names.policyid_dc,
            hostname=names.netbiosname.lower(), hostip=None, hostip6=None,
            invocationid=names.invocation, adminpass=names.adminpass,
            krbtgtpass=None, machinepass=None, dnspass=None, root=None,
            nobody=None, users=None,
            serverrole="domain controller",
            backend_type=None, ldapadminpass=None, ol_mmr_urls=None,
            slapd_path=None,
            dom_for_fun_level=names.domainlevel, dns_backend=names.dns_backend,
            useeadb=True, use_ntvfs=True)
Code example #12
File: ez_setup.py  Project: BruceZu/compass-core
def _install(tarball, install_args=()):
    """install tarball."""
    # extracting the tarball
    tmpdir = tempfile.mkdtemp()
    log.warn('Extracting in %s', tmpdir)
    old_wd = os.getcwd()
    try:
        os.chdir(tmpdir)
        tar = tarfile.open(tarball)
        _extractall(tar)
        tar.close()

        # going in the directory
        subdir = os.path.join(tmpdir, os.listdir(tmpdir)[0])
        os.chdir(subdir)
        log.warn('Now working in %s', subdir)

        # installing
        log.warn('Installing Setuptools')
        if not _python_cmd('setup.py', 'install', *install_args):
            log.warn('Something went wrong during the installation.')
            log.warn('See the error message above.')
            # exitcode will be 2
            return 2

    finally:
        os.chdir(old_wd)
        shutil.rmtree(tmpdir)
Code example #13
File: commands.py  Project: 1901/sublime_sync
    def copy_linter(self, name):
        """Copy the template linter to a new linter with the given name."""

        self.name = name
        self.fullname = 'SublimeLinter-contrib-{}'.format(name)
        self.dest = os.path.join(sublime.packages_path(), self.fullname)

        if os.path.exists(self.dest):
            sublime.error_message('The plugin “{}” already exists.'.format(self.fullname))
            return

        src = os.path.join(sublime.packages_path(), persist.PLUGIN_DIRECTORY, 'linter-plugin-template')
        self.temp_dir = None

        try:
            self.temp_dir = tempfile.mkdtemp()
            self.temp_dest = os.path.join(self.temp_dir, self.fullname)
            shutil.copytree(src, self.temp_dest)

            self.get_linter_language(name, self.configure_linter)

        except Exception as ex:
            if self.temp_dir and os.path.exists(self.temp_dir):
                shutil.rmtree(self.temp_dir)

            sublime.error_message('An error occurred while copying the template plugin: {}'.format(str(ex)))
Code example #14
File: commands.py  Project: 1901/sublime_sync
    def configure_linter(self, language):
        """Fill out the template and move the linter into Packages."""

        try:
            if language is None:
                return

            if not self.fill_template(self.temp_dir, self.name, self.fullname, language):
                return

            git = util.which('git')

            if git:
                subprocess.call((git, 'init', self.temp_dest))

            shutil.move(self.temp_dest, self.dest)

            util.open_directory(self.dest)
            self.wait_for_open(self.dest)

        except Exception as ex:
            sublime.error_message('An error occurred while configuring the plugin: {}'.format(str(ex)))

        finally:
            if self.temp_dir and os.path.exists(self.temp_dir):
                shutil.rmtree(self.temp_dir)
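Examples #13 and #14 together form a stage-then-move install: the plugin is assembled in a temporary directory and only moved into Packages once the template fill succeeds, so a failure never leaves a half-written plugin behind. A condensed sketch of the pattern, with hypothetical names:

import os
import shutil
import tempfile

def staged_install(src, dest, fill):
    temp_dir = tempfile.mkdtemp()
    try:
        staging = os.path.join(temp_dir, os.path.basename(dest))
        shutil.copytree(src, staging)
        fill(staging)               # may raise; dest stays untouched
        shutil.move(staging, dest)  # publish only after success
    finally:
        if os.path.exists(temp_dir):
            shutil.rmtree(temp_dir)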
Code example #15
File: standalone3.py  Project: pramttl/tstl
def cleanupFiles():
    # First get rid of modified files
    for l in ["l1", "l2", "l3"]:
        arcpy.Delete_management(l)

    for f in glob.glob("C:\\Arctmp\\*"):
        try:
            shutil.rmtree(f)
        except:
            print "UNABLE TO REMOVE:", f
    # Now remove the old directory
    for i in xrange(0, 1000000):
        new_workspace = "C:\\Arctmp\\workspace." + str(i)
        if not os.path.exists(new_workspace):
            break
    print "TESTING USING WORKSPACE", new_workspace
    # Now move in fresh copies
    shutil.copytree("C:\\Arcbase", new_workspace)
    print "CONTENTS:"
    arcpy.env.workspace = new_workspace
    for f in sorted(glob.glob(arcpy.env.workspace + "\\*.shp")):
        print f
    for f in sorted(glob.glob(arcpy.env.workspace + "\\*.lyr")):
        print f
    for f in sorted(glob.glob(arcpy.env.workspace + "\\*.gdb")):
        print f
Code example #16
def buildComponent(component, componentDir=None):
    """ Build the component. Return a pair of paths
        (pathToBinary, pathToXPTFile)"""
    logger = build_util.getLogger('build_components')
    # Save current working directory to set it back later
    prevDir = os.getcwd()
    # Component Directory
    if componentDir is None:
        componentDir = os.path.join(Settings.prefs.src_dir, "components", component)

    os.chdir(componentDir)
    logger.info("Making build and bin dirs for component %s"%component)
    buildDir = os.path.join(componentDir, "build")
    binDir = os.path.join(componentDir, "bin")
    
    for dir in [buildDir, binDir]:
        try:
            os.mkdir(dir)
        except OSError, err:
            if err.errno == errno.EEXIST:
                logger.warning("Couldn't make %s because it exists."%dir)
                logger.warning("Deleting %s"%dir)
                shutil.rmtree(dir)
                logger.warning("Trying to make %s again"%dir)
                os.mkdir(dir)
            else:
                raise
Code example #17
File: setup.py  Project: dhirajkhatiwada1/uludag
def remove(path):
    if os.path.exists(path):
        print ' removing: ', path
        if os.path.isdir(path):
            shutil.rmtree(path)
        else:
            os.unlink(path)
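One caveat with this remove-anything helper: os.path.isdir follows symlinks, while shutil.rmtree refuses a symlink to a directory, so the helper raises on one. A sketch that routes symlinks to unlink, assuming Python 3:

import os
import shutil

def remove(path):
    if not os.path.lexists(path):
        return
    print(' removing: ', path)
    if os.path.isdir(path) and not os.path.islink(path):
        shutil.rmtree(path)
    else:
        os.unlink(path)  # plain files and symlinks, including dir symlinks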
Code example #18
 def test_compare_values_border_cases(self):
     """tests the condition where BSR values are near the border regions
     differentiated by the function"""
     tdir = tempfile.mkdtemp(prefix="filetest_",)
     fpath = os.path.join(tdir,"group1_pruned")
     fp = open(fpath, "w")
     fp.write("		E2348_69_all\n")
     fp.write("IpaH3 	0.03\n")
     fp.write("LT 	0.00\n")
     fp.write("ST2 	0.00\n")
     fp.write("bfpB 	0.81\n")
     fp.write("stx2a 	0.07")
     fp.close()
     npath = os.path.join(tdir,"group2_pruned")
     np = open(npath, "w")
     np.write("        H10407_all\n")
     np.write("IpaH3   0.03\n")
     np.write("LT      0.80\n")
     np.write("ST2     1.00\n")
     np.write("bfpB    0.00\n")
     np.write("stx2a   0.79")
     np.close()
     self.assertEqual(compare_values(fpath, npath, "0.8", "0.4"), ([0.81], [0.80, 1.00], [0.03, 0.0, 0.0, 0.81, 0.07]))
     shutil.rmtree(tdir)
     os.system("rm group*_out.txt")
Code example #19
 def do_deploy():
     def _run_ansible():
         subprocess.check_call(['ansible-playbook', 'masters.yml'],
                               cwd='result/{}'.format(grid_name))
     parent_deployment._status = 'masters_provision_deploying'
     provision_deployment._status = 'masters_deploying'
     provision_deployment.save()
     parent_deployment.save()
     provision_generator = provision_generators[grid.type][
         grid.provider](grid_name, **kwargs)
     provision_generator.generate_all(
         grid_name, infrastructure_deployment._accessip)
     try:
         _run_ansible()
     except:
         provision_deployment._status = 'masters_deploy_failed'
         parent_deployment._status = 'masters_provision_deploy_failed'
     else:
         provision_deployment._status = 'masters_deployed'
         parent_deployment._status = 'masters_provision_finished'
     finally:
         provision_deployment.save()
         self.unlock(parent_deployment)
         parent_deployment.save()
         os.chdir(cwd)
         try:
             del os.environ['AWS_ACCESS_KEY_ID']
         except:
             print('no such env')
         try:
             del os.environ['AWS_SECRET_ACCESS_KEY']
         except:
             print('no such env')
         shutil.rmtree('result/{}'.format(grid_name))
Code example #20
def test_run_multiproc_nondaemon_with_flag(nondaemon_flag):
    '''
    Start a pipe with two nodes using the multiproc plugin and passing the nondaemon_flag.
    '''
    
    cur_dir = os.getcwd()
    temp_dir = mkdtemp(prefix='test_engine_')
    os.chdir(temp_dir)
    
    pipe = pe.Workflow(name='pipe')
    
    f1 = pe.Node(interface=Function(function=TestInterface.testFunction, input_names=['sum'], output_names=['sum_out']), name='f1')
    f2 = pe.Node(interface=Function(function=TestInterface.testFunction, input_names=['sum'], output_names=['sum_out']), name='f2')

    pipe.connect([(f1,f2,[('sum_out','sum')])])
    pipe.base_dir = os.getcwd()
    f1.inputs.sum = 0
    
    # execute the pipe using the MultiProc plugin with 2 processes and the non_daemon flag
    # to enable child processes which start other multiprocessing jobs
    execgraph = pipe.run(plugin="MultiProc", plugin_args={'n_procs':2, 'non_daemon':nondaemon_flag})
    
    names = ['.'.join((node._hierarchy,node.name)) for node in execgraph.nodes()]
    node = execgraph.nodes()[names.index('pipe.f2')]
    result = node.get_output('sum_out')
    yield assert_equal, result, 180 # n_procs (2) * numberOfThreads (2) * 45 == 180
    os.chdir(cur_dir)
    rmtree(temp_dir)
Code example #21
File: test_sqlite.py  Project: aa-savelyev/OpenMDAO
 def tearDown(self):
     try:
         rmtree(self.dir)
     except OSError as e:
         # If directory already deleted, keep going
         if e.errno not in (errno.ENOENT, errno.EACCES, errno.EPERM):
             raise e
Code example #22
File: util.py  Project: iakinsey/illume
def remove_or_ignore_dir(path):
    """Remove a directory if it exists."""
    try:
        rmtree(path)
    except OSError as e:
        if e.errno != ENOENT:
            raise
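On Python 3 the same selective tolerance can be spelled with FileNotFoundError, which rmtree raises for a missing path; unlike ignore_errors=True, every other OSError stays fatal. A sketch:

from contextlib import suppress
from shutil import rmtree

def remove_or_ignore_dir(path):
    """Remove a directory if it exists."""
    with suppress(FileNotFoundError):  # ENOENT only; anything else raises
        rmtree(path)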
Code example #23
    def test_build_debian(self):
        from . import pkgbuild

        tmpdir = tempfile.mkdtemp()
        try:
            source = PackageSource.objects.get(id=1)
            br = BuildRecord(source=source, build_counter=10, sha='e65b55054c5220321c56bb3dfa96fbe5199f329c')
            br.save()

            basedir = os.path.join(tmpdir, 'd')
            shutil.copytree(os.path.join(os.path.dirname(__file__), 'test_data', 'debian'), basedir)

            orig_stdout = sys.stdout
            sys.stdout = StringIO()
            try:
                pkgbuild.main(['--basedir', basedir, 'version', self.live_server_url + br.get_absolute_url()])
                self.assertEquals(sys.stdout.getvalue(), '0.1+10')
                sys.stdout = StringIO()

                pkgbuild.main(['--basedir', basedir, 'name', self.live_server_url + br.get_absolute_url()])
                self.assertEquals(sys.stdout.getvalue(), 'buildsvctest')

                pkgbuild.main(['--basedir', basedir, 'build', self.live_server_url + br.get_absolute_url()])
            finally:
                sys.stdout = orig_stdout

            self.assertTrue(os.path.exists(os.path.join(basedir, 'buildsvctest_0.1+10_source.changes')))
            self.assertTrue(os.path.exists(os.path.join(basedir, 'buildsvctest_0.1+10_amd64.changes')))
        finally:
            shutil.rmtree(tmpdir)
Code example #24
File: main.py  Project: trustedanalytics/apployer
def _download_artifacts_from_url(url, appstack):
    """
    Does the necessary things to download application artifacts.

    Args:
        url:    address of the artifacts server, e.g. http://artifacts.com/download/{name}
                where 'name' param is dynamically replaced to app name
        appstack: path to appstack file
    """
    if os.path.exists(DEFAULT_ARTIFACTS_PATH):
        shutil.rmtree(DEFAULT_ARTIFACTS_PATH, ignore_errors=True)
    os.makedirs(DEFAULT_ARTIFACTS_PATH)

    with open(appstack) as appstack_file:
        appstack_dict = yaml.load(appstack_file)

    artifacts_names = set()
    for app in appstack_dict['apps']:
        artifacts_names.add(app.get('artifact_name', app.get('name')))

    for artifact_name in artifacts_names:
        artifact_url = url.format(name=artifact_name)
        _log.info('Downloading artifact for %s app from %s', artifact_name, artifact_url)
        proc = subprocess.Popen(['wget', '--no-check-certificate', '--content-disposition',
                                 '-nv', artifact_url], cwd=DEFAULT_ARTIFACTS_PATH)
        if proc.wait() != 0:
            _log.error('Error during download! wget output:\n%s', proc.stdout.read())
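The opening rmtree-then-makedirs move is the usual "reset this directory" idiom. A small helper sketch:

import os
import shutil

def reset_dir(path):
    """Recreate path as an empty directory."""
    shutil.rmtree(path, ignore_errors=True)
    os.makedirs(path, exist_ok=True)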
Code example #25
File: test_client.py  Project: e-heller/hdfs
  def test_upload_cleanup(self):
    dpath = mkdtemp()
    _write = self.client.write

    def write(hdfs_path, *args, **kwargs):
      if 'bar' in hdfs_path:
        raise RuntimeError()
      return _write(hdfs_path, *args, **kwargs)

    try:
      self.client.write = write
      npath = osp.join(dpath, 'hi')
      os.mkdir(npath)
      with open(osp.join(npath, 'foo'), 'w') as writer:
        writer.write('hello!')
      os.mkdir(osp.join(npath, 'bar'))
      with open(osp.join(npath, 'bar', 'baz'), 'w') as writer:
        writer.write('world!')
      try:
        self.client.upload('foo', dpath)
      except RuntimeError:
        ok_(not self._exists('foo'))
      else:
        ok_(False) # This shouldn't happen.
    finally:
      rmtree(dpath)
      self.client.write = _write
Code example #26
File: test_client.py  Project: e-heller/hdfs
  def test_upload_with_progress(self):

    def callback(path, nbytes, history=defaultdict(list)):
      history[path].append(nbytes)
      return history

    dpath = mkdtemp()
    try:
      path1 = osp.join(dpath, 'foo')
      with open(path1, 'w') as writer:
        writer.write('hello!')
      os.mkdir(osp.join(dpath, 'bar'))
      path2 = osp.join(dpath, 'bar', 'baz')
      with open(path2, 'w') as writer:
        writer.write('the world!')
      self.client.upload(
        'up',
        dpath,
        chunk_size=4,
        n_threads=1, # Callback isn't thread-safe.
        progress=callback
      )
      eq_(self._read('up/foo'), b'hello!')
      eq_(self._read('up/bar/baz'), b'the world!')
      eq_(
        callback('', 0),
        {path1: [4, 6, -1], path2: [4, 8, 10, -1], '': [0]}
      )
    finally:
      rmtree(dpath)
Code example #27
File: test_cli.py  Project: heuer/segno
def test_sequence_output():
    directory = tempfile.mkdtemp()
    assert 0 == len(os.listdir(directory))
    cli.main(['--seq', '-v=1', '-e=m', '-o=' + os.path.join(directory, 'test.svg'), 'ABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ'])
    number_of_files = len(os.listdir(directory))
    shutil.rmtree(directory)
    assert 4 == number_of_files
Code example #28
File: createmsi.py  Project: MathieuDuponchelle/meson
    def build_dist(self):
        for sdir in self.staging_dirs:
            if os.path.exists(sdir):
                shutil.rmtree(sdir)
        main_stage, ninja_stage = self.staging_dirs
        modules = [os.path.splitext(os.path.split(x)[1])[0] for x in glob(os.path.join('mesonbuild/modules/*'))]
        modules = ['mesonbuild.modules.' + x for x in modules if not x.startswith('_')]
        modules += ['distutils.version']
        modulestr = ','.join(modules)
        python = shutil.which('python')
        cxfreeze = os.path.join(os.path.dirname(python), "Scripts", "cxfreeze")
        if not os.path.isfile(cxfreeze):
            print("ERROR: This script requires cx_freeze module")
            sys.exit(1)

        subprocess.check_call([python,
                               cxfreeze,
                               '--target-dir',
                               main_stage,
                               '--include-modules',
                               modulestr,
                               'meson.py'])
        if not os.path.exists(os.path.join(main_stage, 'meson.exe')):
            sys.exit('Meson exe missing from staging dir.')
        os.mkdir(ninja_stage)
        shutil.copy(shutil.which('ninja'), ninja_stage)
        if not os.path.exists(os.path.join(ninja_stage, 'ninja.exe')):
            sys.exit('Ninja exe missing from staging dir.')
Code example #29
File: prettyprint.py  Project: briankflau/nbdime
def present_string_diff(a, di, path):
    "Pretty-print a nbdime diff."
    header = ["patch {}:".format(path)]

    if _base64.match(a):
        return header + ['<base64 data changed>']

    b = patch(a, di)
    td = tempfile.mkdtemp()
    cmd = None
    try:
        with open(os.path.join(td, 'before'), 'w') as f:
            f.write(a)
        with open(os.path.join(td, 'after'), 'w') as f:
            f.write(b)
        if which('git'):
            cmd = _git_diff_print_cmd.split()
            heading_lines = 4
        elif which('diff'):
            cmd = ['diff']
            heading_lines = 0
        else:
            dif = ''.join(unified_diff(a.split("\n"),
                                       b.split("\n")))
            heading_lines = 2

        if cmd is not None:
            p = Popen(cmd + ['before', 'after'], cwd=td, stdout=PIPE)
            out, _ = p.communicate()
            dif = out.decode('utf8')

    finally:
        shutil.rmtree(td)
    return header + dif.splitlines()[heading_lines:]
Code example #30
async def handle_ytdl_file_download(e: MessageLike):
    # ytdldfile | format_id | sender_id | suid | is_audio

    data = e.data.decode("UTF-8")
    data = data.split("|")

    if data[2] != str(e.sender_id):
        await e.answer("Not valid user, Dont touch.")
        return
    else:
        await e.answer("Crunching Data.....")

    await e.edit(buttons=None)

    is_audio = False

    path = os.path.join(os.getcwd(), 'userdata', data[3] + ".json")
    if os.path.exists(path):
        with open(path, encoding="UTF-8") as file:
            ytdata = json.loads(file.read())
            yt_url = ytdata.get("webpage_url")
            thumb_path = await get_max_thumb(ytdata, data[3])

            op_dir = os.path.join(os.getcwd(), 'userdata', data[3])
            if not os.path.exists(op_dir):
                os.mkdir(op_dir)
            if data[1].startswith("xxother"):
                data[1] = data[1].replace("xxother", "")
                data[1] = int(data[1])
                j = 0
                for i in ytdata.get("formats"):
                    if j == data[1]:
                        data[1] = i.get("format_id")
                    j += 1
            else:
                for i in ytdata.get("formats"):
                    if i.get("format_id") == data[1]:
                        if i.get("acodec") is not None:
                            is_audio = True

            if data[1].endswith("K"):
                cmd = f"youtube-dl -i --extract-audio --add-metadata --audio-format mp3 --audio-quality {data[1]} -o '{op_dir}/%(title)s.%(ext)s' {yt_url}"

            else:
                if is_audio:
                    cmd = f"youtube-dl --continue --embed-subs --no-warnings --hls-prefer-ffmpeg --prefer-ffmpeg -f {data[1]} -o {op_dir}/%(title)s.%(ext)s {yt_url}"
                else:
                    cmd = f"youtube-dl --continue --embed-subs --no-warnings --hls-prefer-ffmpeg --prefer-ffmpeg -f {data[1]}+bestaudio[ext=m4a]/best -o {op_dir}/%(title)s.%(ext)s {yt_url}"

            out, err = await cli_call(cmd)

            if not err:

                # TODO Fix the original thumbnail
                # rdict = await upload_handel(op_dir,await e.get_message(),e.sender_id,dict(),thumb_path=thumb_path)

                rdict = await upload_handel(op_dir,
                                            await e.get_message(),
                                            e.sender_id,
                                            dict(),
                                            user_msg=e)
                await print_files(e, rdict)

                shutil.rmtree(op_dir)
                os.remove(thumb_path)
                os.remove(path)
            else:
                torlog.error(err)
                omess = await e.get_message()
                omess1 = await omess.get_reply_message()
                if "HTTP Error 429" in err:
                    emsg = "HTTP Error 429: Too many requests try after a while."
                else:
                    emsg = "An error has occured trying to upload any files that are found here."
                await omess.edit(emsg)
                if omess1 is None:
                    await omess.respond(emsg)
                else:
                    await omess1.reply(emsg)

                rdict = await upload_handel(op_dir,
                                            await e.get_message(),
                                            e.sender_id,
                                            dict(),
                                            user_msg=e)
                await print_files(e, rdict)

                try:
                    shutil.rmtree(op_dir)
                    os.remove(thumb_path)
                    os.remove(path)
                except:
                    pass

    else:
        await e.delete()
        await e.answer("Try again something went wrong.", alert=True)
        await e.delete()
Code example #31
File: test_vcs.py  Project: karelyatin/tripleo-common
 def tearDown(self):
     super(GitCleanupActionTest, self).tearDown()
     if os.path.exists(self.temp_test_dir):
         shutil.rmtree(self.temp_test_dir)
Code example #32
 def test_zip_ssmLT(self):
     # zipping a directory containing a ssmLT.xml file
     dtemp = os.path.join(tempfile.mkdtemp(), 'inp')
     shutil.copytree(os.path.dirname(case_21.__file__), dtemp)
     zip_cmd(dtemp)
     shutil.rmtree(dtemp)
Code example #33
 def tearDown(self):
     shutil.rmtree("tests/saved-models/")
Code example #34
 def tearDown(self):
     shutil.rmtree(self.temp_dir)
Code example #35
def handleArchiveDarwin(pArgs):
    originalDir = os.getcwd() # This will be the destination

    pArgs.arCmd.append(pArgs.inputFile)

    # Make temporary directory to extract objects to
    tempDir = ''
    bitCodeFiles = []

    try:


        tempDir = tempfile.mkdtemp(suffix='wllvm')
        os.chdir(tempDir)

        # Extract objects from archive
        try:
            arP = Popen(pArgs.arCmd)
        except OSError as e:
            if e.errno == 2:
                errorMsg = 'Your ar does not seem to be easy to find.\n'
            else:
                errorMsg = f'OS error({e.errno}): {e.strerror}'
            _logger.error(errorMsg)
            raise Exception(errorMsg) from e

        arPE = arP.wait()

        if arPE != 0:
            errorMsg = f'Failed to execute archiver with command {pArgs.arCmd}'
            _logger.error(errorMsg)
            raise Exception(errorMsg)

        _logger.debug(2)

        # Iterate over objects and examine their bitcode inserts
        for (root, _, files) in os.walk(tempDir):
            _logger.debug('Exploring "%s"', root)
            for f in files:
                fPath = os.path.join(root, f)
                if FileType.getFileType(fPath) == pArgs.fileType:

                    # Extract bitcode locations from object
                    contents = pArgs.extractor(fPath)

                    for bcFile in contents:
                        if bcFile != '':
                            if not os.path.exists(bcFile):
                                _logger.warning('%s lists bitcode library "%s" but it could not be found', f, bcFile)
                            else:
                                bitCodeFiles.append(bcFile)
                else:
                    _logger.info('Ignoring file "%s" in archive', f)

        _logger.info('Found the following bitcode file names to build bitcode archive:\n%s', pprint.pformat(bitCodeFiles))

    finally:
        # Delete the temporary folder
        _logger.debug('Deleting temporary folder "%s"', tempDir)
        shutil.rmtree(tempDir)

    #write the manifest file if asked for
    if pArgs.manifestFlag:
        manifestFile = f'{pArgs.inputFile}.llvm.manifest'
        with open(manifestFile, 'w') as output:
            for f in bitCodeFiles:
                output.write(f'{f}\n')

    # Build bitcode archive
    os.chdir(originalDir)

    return buildArchive(pArgs, bitCodeFiles)
Code example #36
import subprocess, os, sys, shutil, glob, timeit

tic = timeit.default_timer()

# Wipe contents of output folder 
# This script is running inside the docker container, so this should be allowed
for g in glob.glob('/output/*'):
    if os.path.isdir(g):
        shutil.rmtree(g)
    else:
        os.remove(g)

# Collect the list of tags to be run
all_tags = []
if os.path.exists('/REFPROP/.just_these_tags'):
    all_tags = [line.strip() for line in open('/REFPROP/.just_these_tags').readlines() if line]
elif '--catchargs' in sys.argv:
    iargs = sys.argv.index('--catchargs')
    args = sys.argv[iargs+1]
    all_tags = args.split(',')
else:
    output = subprocess.run('/REFPROP-tests/build/main -t', shell = True, stdout = subprocess.PIPE).stdout.decode('utf-8')
    for il, line in enumerate(output.split('\n')[1::]):
        if not line or '[' not in line: continue
        tag = '[' + line.split('[')[1]
        if 'veryslow' in tag: continue # Don't run the veryslow tests
    #    if 'predef_mix' not in tag: continue
        all_tags.append(tag)

tag_times = {}
for tag in all_tags:
Code example #37
File: grass_engine.py  Project: yuzhou2013/WebODM
 def __del__(self):
     pass
     # Cleanup
     if os.path.exists(self.get_cwd()):
         shutil.rmtree(self.get_cwd())
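Cleaning up in __del__, as above, is fragile: it may run during interpreter shutdown or not at all. weakref.finalize is the usual replacement; a sketch with a hypothetical class name:

import shutil
import weakref

class GrassSession:
    def __init__(self, cwd):
        self.cwd = cwd
        # Runs once, when the object is collected or at interpreter
        # exit, whichever comes first.
        self._cleanup = weakref.finalize(self, shutil.rmtree, cwd,
                                         ignore_errors=True)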
Code example #38
File: base.py  Project: viewpnt1/gamification-engine
 def tearDown(self):
     self.db.stop()
     shutil.rmtree("/tmp/test_pgdata")
Code example #39
 def clean(self):
     """
     Deletes the temporary directory and everything in it.
     """
     shutil.rmtree(self.temp_dir)
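Note that rmtree removes temp_dir itself, not just its contents. To empty a directory while keeping it (as example #36 does with glob), a sketch:

import os
import shutil

def clear_dir(path):
    """Delete everything inside path but keep the directory itself."""
    for entry in os.scandir(path):
        if entry.is_dir(follow_symlinks=False):
            shutil.rmtree(entry.path)
        else:
            os.remove(entry.path)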
Code example #40
File: logcat_test.py  Project: vijaiks/mobly
 def tearDown(self):
     """Removes the temp dir.
     """
     shutil.rmtree(self.tmp_dir)
Code example #41
File: generate.py  Project: ojcole/exam-templates
path = os.path.dirname(os.path.realpath(__file__))
output = os.path.join(path, "output")
config = os.path.join(path, "config.json")
settings_file = os.path.join(path, "settings.tex")
template = os.path.join(path, "template.tex")
out_pdf = os.path.join(path, "out.pdf")

verbose = sys.argv[1:].__contains__("-v")

if not os.path.exists(config):
    print("Config file not found")
    exit(1)

if os.path.exists(output):
    shutil.rmtree(output)


os.mkdir(output)

with open(config) as f:
    json_data = json.load(f)

university_id = json_data['universityID']
pages = json_data['pages']
modules = json_data['modules']

tex_settings = [f"\\userid{{{university_id}}}\n", f"\\pages{{{pages}}}"]

with open(settings_file, "w+") as f:
    f.writelines(tex_settings)
Code example #42
 def tearDown(self):
     # comment out the line below if you need to debug the test
     shutil.rmtree(self.OUTDIR)
Code example #43
                                 finalstrg = ""
                                 logging.info("Product was not found in the CSVs!")
                                 list_products(records_csv, full_img_path, "not found in the CSVs", "N/A", "N/A", "N/A")
                         print(finalstrg)
                         if not args.O: list_products(records_csv, full_img_path, "Metadata", parse_footprint(theline[8])[1], "N/A",
                                       get_level(full_img_path, theline))
                         logging.info("Metadata was successfully printed to stdout")
                         if args.O:
                             if args.f:
                                 zippedimg = organize(theline, args.output, full_img_path, args.ds, parse_footprint(theline[8])[1])
                             else:
                                 zippedimg = organize(theline, args.output, full_img_path, args.ds, True)
                             logging.info("Product was stored into " + zippedimg)
                             list_products(records_csv, full_img_path, "Re-organized", parse_footprint(theline[8])[1], zippedimg, get_level(full_img_path, theline))
                         elif args.r and unzipped:
                             shutil.rmtree(full_img_path)
                     else:
                         logging.info("---Processing product " + line)
                         logging.info("Product was not found or doesn't have the expected file structure!")
                         list_products(records_csv, line, "not found or no structure", "N/A", "N/A", "N/A")
 elif args.avhrr_file != None:
     img, full_img_path, dest_dir, unzipped = handle_zipped_input(args.avhrr_file)
     if not img == '':
         img_path = full_img_path
         logging.info("---Processing product " + full_img_path)
         ext = os.path.splitext(img_path)[-1]
         if ext in zipped:
             productid = img[:-4]
         else:
             productid = img
         if args.d:
Code example #44
 def setUp(self):
     if exists('/tmp/mycroft'):
         rmtree('/tmp/mycroft')
Code example #45
File: npm.py  Project: Boomatang/scanner
def remove_node_modules(path):
    node_modules = Path(path, 'node_modules')
    print("Checking node_modules folder")
    if node_modules.exists():
        print(f"Removing node_modules folder from {path}")
        shutil.rmtree(node_modules)
Code example #46
    def test_table_import(self):
        # Add some data and checkpoint.
        self.populate(self.ntables, self.nrows)
        self.session.checkpoint()

        original_db_table = 'original_db_table'
        uri = 'table:' + original_db_table
        create_config = 'allocation_size=512,log=(enabled=true),' + self.config
        self.session.create(uri, create_config)

        keys = self.keys
        values = self.values
        ts = [10*k for k in range(1, len(keys)+1)]

        # Add data and perform a checkpoint.
        min_idx = 0
        max_idx = len(keys) // 3
        for i in range(min_idx, max_idx):
            self.update(uri, keys[i], values[i], ts[i])
        self.session.checkpoint()

        # Add more data and checkpoint again.
        min_idx = max_idx
        max_idx = 2*len(keys) // 3
        for i in range(min_idx, max_idx):
            self.update(uri, keys[i], values[i], ts[i])
        self.session.checkpoint()

        # Export the metadata for the table.
        original_db_file_uri = 'file:' + original_db_table + '.wt'
        c = self.session.open_cursor('metadata:', None, None)
        original_db_table_config = c[uri]
        original_db_file_config = c[original_db_file_uri]
        c.close()

        self.printVerbose(3, '\nFile configuration:\n' + original_db_file_config)
        self.printVerbose(3, '\nTable configuration:\n' + original_db_table_config)

        # Construct the config string.
        import_config = '{},import=(enabled,repair=false,file_metadata=({}))'.format(
            original_db_table_config, original_db_file_config)

        # Close the connection.
        self.close_conn()

        # Create a new database and connect to it.
        newdir = 'IMPORT_DB'
        shutil.rmtree(newdir, ignore_errors=True)
        os.mkdir(newdir)
        self.conn = self.setUpConnectionOpen(newdir)
        self.session = self.setUpSessionOpen(self.conn)

        # Make a bunch of files and fill them with data.
        self.populate(self.ntables, self.nrows)
        self.session.checkpoint()

        # Bring forward the oldest to be past or equal to the timestamps we'll be importing.
        self.conn.set_timestamp('oldest_timestamp=' + self.timestamp_str(ts[max_idx]))

        # Copy over the datafiles for the object we want to import.
        self.copy_file(original_db_table + '.wt', '.', newdir)

        # Import the table.
        self.session.create(uri, import_config)

        # Verify object.
        self.session.verify(uri)

        # Check that the previously inserted values survived the import.
        self.check(uri, keys[:max_idx], values[:max_idx])

        # Check against projections when the table is not simple.
        if not self.is_simple:
            self.check_projections(uri, keys[:max_idx], values[:max_idx])

        # Compare configuration metadata.
        c = self.session.open_cursor('metadata:', None, None)
        current_db_table_config = c[uri]
        c.close()
        self.config_compare(original_db_table_config, current_db_table_config)

        # Add some data and check that the table operates as usual after importing.
        min_idx = max_idx
        max_idx = len(keys)
        for i in range(min_idx, max_idx):
            self.update(uri, keys[i], values[i], ts[i])
        self.check(uri, keys, values)
        if not self.is_simple:
            self.check_projections(uri, keys, values)

        # Perform a checkpoint.
        self.session.checkpoint()
Code example #47
File: TestSuite.py  Project: schlom/Praktomat
	def teardown_test_environment(self, **kwargs):
		super(TestSuiteRunner, self).teardown_test_environment(**kwargs)
		try:
			rmtree(self.testSuiteUploadRoot)
		except:
			pass
Code example #48
def run(test, params, env):
    """
    convert specific kvm guest to rhev
    """
    for v in list(params.values()):
        if "V2V_EXAMPLE" in v:
            test.cancel("Please set real value for %s" % v)
    if utils_v2v.V2V_EXEC is None:
        test.error('Missing command: virt-v2v')
    # The guest name might be changed, so we need a new variable to save the
    # original name
    vm_name = params['original_vm_name'] = params.get('main_vm', 'EXAMPLE')
    target = params.get('target')
    input_mode = params.get('input_mode')
    input_file = params.get('input_file')
    output_mode = params.get('output_mode')
    output_format = params.get('output_format')
    output_storage = params.get('output_storage', 'default')
    bridge = params.get('bridge')
    network = params.get('network')
    address_cache = env.get('address_cache')
    v2v_timeout = int(params.get('v2v_timeout', 1200))
    status_error = 'yes' == params.get('status_error', 'no')
    skip_vm_check = params.get('skip_vm_check', 'no')
    pool_name = params.get('pool_name', 'v2v_test')
    pool_type = params.get('pool_type', 'dir')
    pool_target = params.get('pool_target_path', 'v2v_pool')
    pvt = libvirt.PoolVolumeTest(test, params)
    checkpoint = params.get('checkpoint', '')
    datastore = params.get('datastore')
    esxi_host = params.get('esx_hostname')
    esxi_password = params.get('esxi_password')
    hypervisor = params.get("hypervisor")
    input_transport = params.get("input_transport")
    vmx_nfs_src = params.get("vmx_nfs_src")
    # for construct rhv-upload option in v2v cmd
    output_method = params.get("output_method")
    rhv_upload_opts = params.get("rhv_upload_opts")
    storage_name = params.get('storage_name')
    # for get ca.crt file from ovirt engine
    rhv_passwd = params.get("rhv_upload_passwd")
    rhv_passwd_file = params.get("rhv_upload_passwd_file")
    ovirt_engine_passwd = params.get("ovirt_engine_password")
    ovirt_hostname = params.get("ovirt_engine_url").split(
        '/')[2] if params.get("ovirt_engine_url") else None
    ovirt_ca_file_path = params.get("ovirt_ca_file_path")
    local_ca_file_path = params.get("local_ca_file_path")
    vpx_dc = params.get("vpx_dc")
    vpx_hostname = params.get("vpx_hostname")
    vpx_password = params.get("vpx_password")
    src_uri_type = params.get('src_uri_type')
    error_list = []

    # create different sasl_user name for different job
    if output_mode == 'rhev':
        params.update({'sasl_user': params.get("sasl_user") +
                       utils_misc.generate_random_string(3)})
        logging.info('sasl user name is %s' % params.get("sasl_user"))
        if output_method == 'rhv_upload':
            # Create password file for '-o rhv_upload' to connect to ovirt
            with open(rhv_passwd_file, 'w') as f:
                f.write(rhv_passwd)
            # Copy ca file from ovirt to local
            remote.scp_from_remote(ovirt_hostname, 22, 'root',
                                   ovirt_engine_passwd,
                                   ovirt_ca_file_path,
                                   local_ca_file_path)

    def log_fail(msg):
        """
        Log error and update error list
        """
        logging.error(msg)
        error_list.append(msg)

    def check_BSOD():
        """
        Check if boot up into BSOD
        """
        bar = 0.999
        match_img = params.get('image_to_match')
        screenshot = '%s/BSOD_screenshot.ppm' % data_dir.get_tmp_dir()
        if match_img is None:
            test.error('No BSOD screenshot to match!')
        cmd_man_page = 'man virt-v2v|grep -i "Boot failure: 0x0000007B"'
        if process.run(cmd_man_page, shell=True).exit_status != 0:
            log_fail('Man page not contain boot failure msg')
        for i in range(100):
            virsh.screenshot(vm_name, screenshot)
            similar = ppm_utils.image_histogram_compare(screenshot, match_img)
            if similar > bar:
                logging.info('Meet BSOD with similarity %s' % similar)
                return
            time.sleep(1)
        log_fail('No BSOD as expected')

    def check_result(result, status_error):
        """
        Check virt-v2v command result
        """
        libvirt.check_exit_status(result, status_error)
        output = result.stdout + result.stderr
        if not status_error:
            if output_mode == 'rhev':
                if not utils_v2v.import_vm_to_ovirt(params, address_cache,
                                                    timeout=v2v_timeout):
                    test.fail('Import VM failed')
            # Create vmchecker before virsh.start so that the vm can be undefined
            # if started failed.
            vmchecker = VMChecker(test, params, env)
            params['vmchecker'] = vmchecker
            if output_mode == 'libvirt':
                try:
                    virsh.start(vm_name, debug=True, ignore_status=False)
                except Exception as e:
                    test.fail('Start vm failed: %s' % str(e))
            # Check the guest against the checkpoint document after conversion
            if params.get('skip_vm_check') != 'yes':
                if checkpoint != 'win2008r2_ostk':
                    ret = vmchecker.run()
                    if len(ret) == 0:
                        logging.info("All common checkpoints passed")
                if checkpoint == 'win2008r2_ostk':
                    check_BSOD()
                # Merge 2 error lists
                error_list.extend(vmchecker.errors)
        log_check = utils_v2v.check_log(params, output)
        if log_check:
            log_fail(log_check)
        if len(error_list):
            test.fail(
                '%d checkpoints failed: %s' %
                (len(error_list), error_list))

    try:
        v2v_params = {
            'main_vm': vm_name, 'target': target, 'v2v_opts': '-v -x',
            'storage': output_storage, 'network': network, 'bridge': bridge,
            'input_mode': input_mode, 'input_file': input_file,
            'new_name': 'ova_vm_' + utils_misc.generate_random_string(3),
            'datastore': datastore,
            'esxi_host': esxi_host,
            'esxi_password': esxi_password,
            'input_transport': input_transport,
            'vmx_nfs_src': vmx_nfs_src,
            'output_method': output_method,
            'storage_name': storage_name,
            'rhv_upload_opts': rhv_upload_opts
        }
        if input_mode == 'vmx':
            v2v_params.update(
                {'new_name': vm_name + utils_misc.generate_random_string(3),
                 'hypervisor': hypervisor,
                 'vpx_dc': vpx_dc,
                 'password': vpx_password if src_uri_type != 'esx' else esxi_password,
                 'hostname': vpx_hostname})
        # copy ova from nfs storage before v2v conversion
        if input_mode == 'ova':
            src_dir = params.get('ova_dir')
            dest_dir = params.get('ova_copy_dir')
            logging.info('Copy ova from %s to %s', src_dir, dest_dir)
            if not os.path.exists(dest_dir):
                shutil.copytree(src_dir, dest_dir)
            else:
                logging.debug('%s already exists, Skip copying' % dest_dir)
        if output_format:
            v2v_params.update({'output_format': output_format})
        # Create libvirt dir pool
        if output_mode == 'libvirt':
            pvt.pre_pool(pool_name, pool_type, pool_target, '')
        # Build rhev related options
        if output_mode == 'rhev':
            # Create SASL user on the ovirt host
            user_pwd = "[['%s', '%s']]" % (params.get("sasl_user"),
                                           params.get("sasl_pwd"))
            v2v_sasl = utils_sasl.SASL(sasl_user_pwd=user_pwd)
            v2v_sasl.server_ip = params.get("remote_ip")
            v2v_sasl.server_user = params.get('remote_user')
            v2v_sasl.server_pwd = params.get('remote_pwd')
            v2v_sasl.setup(remote=True)
        if output_mode == 'local':
            v2v_params['storage'] = data_dir.get_tmp_dir()

        if checkpoint == 'ova_relative_path':
            logging.debug('Current dir: %s', os.getcwd())
            ova_dir = params.get('ova_dir')
            logging.info('Change to dir: %s', ova_dir)
            os.chdir(ova_dir)

        # Set libguestfs environment variable
        os.environ['LIBGUESTFS_BACKEND'] = 'direct'
        if checkpoint == 'permission':
            os.environ['LIBGUESTFS_BACKEND'] = ''
        process.run('echo $LIBGUESTFS_BACKEND', shell=True)

        v2v_result = utils_v2v.v2v_cmd(v2v_params)

        if 'new_name' in v2v_params:
            vm_name = params['main_vm'] = v2v_params['new_name']

        check_result(v2v_result, status_error)
    finally:
        # Cleanup constant files
        utils_v2v.cleanup_constant_files(params)
        if input_mode == 'ova' and os.path.exists(dest_dir):
            shutil.rmtree(dest_dir)
        if params.get('vmchecker'):
            params['vmchecker'].cleanup()
        if output_mode == 'rhev' and v2v_sasl:
            v2v_sasl.cleanup()
        if output_mode == 'libvirt':
            pvt.cleanup_pool(pool_name, pool_type, pool_target, '')
        if input_mode == 'vmx' and input_transport == 'ssh':
            process.run("ssh-agent -k")
Code example #49
File: backend_pgf.py  Project: zhaohui257/matplotlib
 def cleanup_remaining_tmpdirs():
     for tmpdir in TmpDirCleaner.remaining_tmpdirs:
         error_message = "error deleting tmp directory {}".format(tmpdir)
         shutil.rmtree(tmpdir,
                       onerror=lambda *args: _log.error(error_message))
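Since Python 3.12, the onerror callback used here is deprecated in favor of onexc, which receives the exception object rather than a sys.exc_info() triple. A sketch of the same logging handler on the newer API, reusing the _log and tmpdir names from the example:

import shutil

def _log_rmtree_error(func, path, exc):  # onexc signature: (function, path, exception)
    _log.error("error deleting tmp directory %s: %s", path, exc)

shutil.rmtree(tmpdir, onexc=_log_rmtree_error)  # Python >= 3.12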
Code example #50
File: test_rule.py  Project: shimde/july3
 def tearDown(self):
     shutil.rmtree('test-build', ignore_errors=True)
     sh('yes | pip uninstall toc')
Code example #51
File: test_cli.py  Project: tkruse/wstool
 def tearDownClass(self):
     shutil.rmtree(self.test_root_path)
     os.environ.update(self.environback)
Code example #52
async def download_video(v_url):  # sourcery skip: avoid-builtin-shadow
    """ For .ytdl command, download media from YouTube and many other sites. """
    url = v_url.pattern_match.group(2)
    type = v_url.pattern_match.group(1).lower()
    out_folder = Config.TMP_DOWNLOAD_DIRECTORY + "youtubedl/"

    if not os.path.isdir(out_folder):
        os.makedirs(out_folder)

    await v_url.edit("`Preparing to download...`")

    if type == "a":
        opts = {
            'format': 'bestaudio',
            'addmetadata': True,
            'key': 'FFmpegMetadata',
            'writethumbnail': True,
            'embedthumbnail': True,
            'audioquality': 0,
            'audioformat': 'mp3',
            'prefer_ffmpeg': True,
            'geo_bypass': True,
            'nocheckcertificate': True,
            'postprocessors': [{
                'key': 'FFmpegExtractAudio',
                'preferredcodec': 'mp3',
                'preferredquality': '320',
            }],
            'outtmpl': out_folder+'%(title)s.mp3',
            'quiet': True,
            'logtostderr': False
        }
        video = False
        song = True

    elif type == "v":
        opts = {
            'format': 'best',
            'addmetadata': True,
            'key': 'FFmpegMetadata',
            'writethumbnail': True,
            'write_all_thumbnails': True,
            'embedthumbnail': True,
            'prefer_ffmpeg': True,
            'hls_prefer_native': True,
            'geo_bypass': True,
            'nocheckcertificate': True,
            'postprocessors': [{
                'key': 'FFmpegVideoConvertor',
                'preferedformat': 'mp4'
            }],
            'outtmpl': out_folder+'%(title)s.mp4',
            'logtostderr': False,
            'quiet': True
        }
        song = False
        video = True

    try:
        await v_url.edit("`Fetching data, please wait..`")
        with YoutubeDL(opts) as ytdl:
            ytdl_data = await loop.run_in_executor(None, ytdl.extract_info, url)
        filename = sorted(get_lst_of_files(out_folder, []))
    except DownloadError as DE:
        await v_url.edit(f"`{str(DE)}`")
        return
    except ContentTooShortError:
        await v_url.edit("`The download content was too short.`")
        return
    except GeoRestrictedError:
        await v_url.edit(
            "`Video is not available from your geographic location due to geographic restrictions imposed by a website.`"
        )
        return
    except MaxDownloadsReached:
        await v_url.edit("`Max-downloads limit has been reached.`")
        return
    except PostProcessingError:
        await v_url.edit("`There was an error during post processing.`")
        return
    except UnavailableVideoError:
        await v_url.edit("`Media is not available in the requested format.`")
        return
    except XAttrMetadataError as XAME:
        await v_url.edit(f"`{XAME.code}: {XAME.msg}\n{XAME.reason}`")
        return
    except ExtractorError:
        await v_url.edit("`There was an error during info extraction.`")
        return
    except Exception as e:
        await v_url.edit(f"{str(type(e)): {str(e)}}")
        return
    c_time = time.time()

    # cover_url = f"https://img.youtube.com/vi/{ytdl_data['id']}/0.jpg"
    # thumb_path = wget.download(cover_url, out_folder + "cover.jpg")

    # relevant_path = "./DOWNLOADS/youtubedl"
    # included_extensions = ["mp4","mp3"]
    # file_names = [fn for fn in os.listdir(relevant_path)
    #             if any(fn.endswith(ext) for ext in included_extensions)]

    if song:
        relevant_path = "./DOWNLOADS/youtubedl"
        included_extensions = ["mp3"]
        file_names = [fn for fn in os.listdir(relevant_path)
                      if any(fn.endswith(ext) for ext in included_extensions)]
        img_extensions = ["webp", "jpg", "jpeg"]
        img_filenames = [fn_img for fn_img in os.listdir(relevant_path) if any(
            fn_img.endswith(ext_img) for ext_img in img_extensions)]
        thumb_image = out_folder + img_filenames[0]

        # thumb = out_folder + "cover.jpg"
        file_path = out_folder + file_names[0]
        song_size = file_size(file_path)
        j = await v_url.edit(f"`Preparing to upload song:`\
        \n**{ytdl_data['title']}**\
        \nby *{ytdl_data['uploader']}*")
        await v_url.client.send_file(
            v_url.chat_id,
            file_path,
            caption=ytdl_data['title'] + "\n" + f"`{song_size}`",
            supports_streaming=True,
            thumb=thumb_image,
            attributes=[
                DocumentAttributeAudio(duration=int(ytdl_data['duration']),
                                       title=str(ytdl_data['title']),
                                       performer=str(ytdl_data['uploader']))
            ],
            progress_callback=lambda d, t: asyncio.get_event_loop(
            ).create_task(
                progress(d, t, v_url, c_time, "Uploading..",
                         f"{ytdl_data['title']}.mp3")))
        # os.remove(file_path)
        await asyncio.sleep(DELETE_TIMEOUT)
        os.remove(thumb_image)
        await j.delete()

    elif video:
        relevant_path = "./DOWNLOADS/youtubedl/"
        included_extensions = ["mp4"]
        file_names = [fn for fn in os.listdir(relevant_path)
                      if any(fn.endswith(ext) for ext in included_extensions)]
        img_extensions = ["webp", "jpg", "jpeg"]
        img_filenames = [fn_img for fn_img in os.listdir(relevant_path) if any(
            fn_img.endswith(ext_img) for ext_img in img_extensions)]
        thumb_image = out_folder + img_filenames[0]

        file_path = out_folder + file_names[0]
        video_size = file_size(file_path)
        # thumb = out_folder + "cover.jpg"

        j = await v_url.edit(f"`Preparing to upload video:`\
        \n**{ytdl_data['title']}**\
        \nby *{ytdl_data['uploader']}*")
        await v_url.client.send_file(
            v_url.chat_id,
            file_path,
            supports_streaming=True,
            caption=ytdl_data['title'] + "\n" + f"`{video_size}`",
            thumb=thumb_image,
            progress_callback=lambda d, t: asyncio.get_event_loop(
            ).create_task(
                progress(d, t, v_url, c_time, "Uploading..",
                         f"{ytdl_data['title']}.mp4")))
        os.remove(file_path)
        await asyncio.sleep(DELETE_TIMEOUT)
        os.remove(thumb_image)
        await v_url.delete()
        await j.delete()
    shutil.rmtree(out_folder)
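Both upload branches above take file_names[0] from a sorted os.listdir(), which picks the lexicographically first match rather than the file that was just downloaded; selecting by modification time is more robust once the folder holds more than one file. A hedged sketch (helper name hypothetical):

import os

def newest_file(folder, extensions):
    """Return the most recently modified file in folder matching extensions."""
    candidates = [os.path.join(folder, fn)
                  for fn in os.listdir(folder)
                  if fn.endswith(tuple(extensions))]
    # max() raises ValueError on an empty list; callers should handle "no file".
    return max(candidates, key=os.path.getmtime)

With this, file_path = newest_file(out_folder, [".mp3"]) would replace the listdir/sort/index sequence in both branches.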
Code example #53
0
def main():
    global ANDROID_ROOT  # Android project home directory
    global ICU_VERSION  # ICU version number
    global ICUDATA  # e.g. "icudt42l"
    global ICUDATA_DAT  # e.g. "icudt42l.dat"
    global CLDR_VERSION  # CLDR version. The value can vary by ICU release.
    global TMP_DAT_PATH  # temp directory to store all resource files and
    # intermediate dat files.
    global HELP
    global VERBOSE

    argc = len(sys.argv)
    if argc < 2:
        print "You must provide icu version number."
        print "Example: ./icu_dat_generator.py 4.4"
        sys.exit(1)
    ICU_VERSION = sys.argv[1]
    version = GetIcuVersion(ICU_VERSION)
    if (version == -1):
        print sys.argv[1] + " is not a valid icu version number!"
        sys.exit(1)
    ICUDATA = "icudt" + version + "l"
    CLDR_VERSION = "1.8"
    ANDROID_ROOT = os.environ.get("ANDROID_BUILD_TOP")
    if not ANDROID_ROOT:
        print "$ANDROID_BUILD_TOP not set! Run 'env_setup.sh'."
        sys.exit(1)
    ICUDATA_DAT = ICUDATA + ".dat"
    HELP = False
    VERBOSE = False

    try:
        opts, args = getopt.getopt(sys.argv[2:], 'hv', ['help', 'verbose'])
    except getopt.error:
        print "Invalid option"
        PrintHelp()
        sys.exit(1)
    for opt, arg in opts:
        if opt in ('-h', '--help'):
            PrintHelp()
            sys.exit(1)
        elif opt in ('-v', '--verbose'):
            VERBOSE = True

    # Check for required source files.
    icu_dat_path = os.path.join(ANDROID_ROOT, "external", "icu4c", "stubdata")
    full_data_filename = os.path.join(icu_dat_path, ICUDATA + "-all.dat")
    if not os.path.isfile(full_data_filename):
        print "%s not present." % full_data_filename
        sys.exit(1)

    # Create a temporary working directory.
    TMP_DAT_PATH = os.path.join(ANDROID_ROOT, "external", "icu4c", "tmp")
    if os.path.exists(TMP_DAT_PATH):
        shutil.rmtree(TMP_DAT_PATH)
    os.mkdir(TMP_DAT_PATH)

    # Extract resource files from icudtxxl-all.dat to TMP_DAT_PATH.
    ExtractAllResourceToTempDir()

    datlist = ["us", "us-euro", "default", "us-japan", "zh", "medium", "large"]
    for dat_subtag in datlist:
        MakeDat(icu_dat_path, dat_subtag)
        # Copy icudtxxl.dat to stubdata directory with corresponding subtag.
        shutil.copyfile(
            os.path.join(TMP_DAT_PATH, ICUDATA_DAT),
            os.path.join(icu_dat_path, ICUDATA + "-" + dat_subtag + ".dat"))
        print "Generate ICU data:" + os.path.join(
            icu_dat_path, ICUDATA + "-" + dat_subtag + ".dat")

    # Cleanup temporary working directory and icudtxxl.dat
    shutil.rmtree(TMP_DAT_PATH)
    os.remove(os.path.join(icu_dat_path, ICUDATA_DAT))
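The delete-then-recreate pattern used for TMP_DAT_PATH above is a common way to reset a scratch directory; a small helper keeps the two steps together. A minimal sketch:

import os
import shutil

def recreate_dir(path):
    # Remove the directory if it exists, then create it empty.
    if os.path.exists(path):
        shutil.rmtree(path)
    os.mkdir(path)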
Code example #54
0
File: proxy.py Project: tarantoj/kodi.proxy
def menu(url='', module='default'):
    cmds = ['install', 'uninstall', 'update', 'plugin', 'settings']

    installed_addons = _get_installed_addons()

    split = urlparse.urlsplit(url)
    addon_id = split.netloc.lower()
    cmd = split.scheme.lower()

    if not cmd and not SETTINGS['interactive']:
        return

    _print("")

    if cmd not in cmds:
        for idx, option in enumerate(cmds):
            _print('{}: {}'.format(idx, option))

        selected = int(get_input('\nSelect: '))
        if selected < 0:
            return

        return menu(url='{}://'.format(cmds[selected]))

    if cmd == 'install':
        addons = get_addons()

        if not addon_id:
            _print('{}: {}'.format(0, 'ALL'))

            options = ['ALL']

            for idx, addon_id in enumerate(sorted(
                    addons, key=lambda x: addons[x][1].lower()),
                                           start=1):
                addon = addons[addon_id]
                label = addon[1]
                options.append(addon_id)
                if addon_id in installed_addons:
                    label += ' [INSTALLED]'

                _print('{}: {}'.format(idx, label))

            addon_id = options[int(get_input('\nSelect: '))]
            return menu(url='install://{}'.format(addon_id))

        if addon_id == 'ALL':
            to_install = addons.keys()
        elif addon_id in installed_addons:
            raise ProxyException('{} already installed'.format(addon_id))
        else:
            to_install = [addon_id]

        for addon_id in to_install:
            load_dependencies(addon_id, install_missing=True)

    elif cmd == 'uninstall':
        if not installed_addons:
            raise ProxyException('No addons installed')

        if not addon_id:
            options = installed_addons[:]
            options.insert(0, 'all')

            for idx, addon in enumerate(options):
                _print('{}: {}'.format(idx, addon))

            addon_id = options[int(get_input('\nSelect: '))]
            return menu(url='uninstall://{}'.format(addon_id))

        if addon_id == 'all':
            to_uninstall = installed_addons
        elif addon_id not in installed_addons:
            raise ProxyException('{} is not installed.'.format(addon_id))
        else:
            to_uninstall = [addon_id]

        for addon_id in to_uninstall:
            addon_data = os.path.join(addons_data, addon_id)
            if os.path.exists(addon_data) and int(
                    get_input(
                        '{}\n\n0: Keep addon data\n1: Delete addon data\n\nSelect: '
                        .format(addon_id), 0)) == 1:
                shutil.rmtree(addon_data)

            addon_dir = os.path.join(addons_dir, addon_id)
            shutil.rmtree(addon_dir)

            _print('{} Uninstalled'.format(addon_id))

    elif cmd == 'update':
        if not addon_id:
            options = installed_addons[:]
            options.insert(0, 'all')

            for idx, addon in enumerate(options):
                _print('{}: {}'.format(idx, addon))

            addon_id = options[int(get_input('\nSelect: '))]
            return menu(url='update://{}'.format(addon_id))

        if addon_id == 'all':
            to_update = installed_addons
        else:
            to_update = [addon_id]

        if not to_update:
            raise ProxyException('No addons to update')

        addons = get_addons()
        for addon in to_update:
            addon_xml_path = os.path.join(addons_dir, addon, 'addon.xml')
            tree = ET.parse(addon_xml_path)
            root = tree.getroot()
            version = root.attrib['version']

            if version == addons[addon][0]:
                _print('{} ({}): Up to date'.format(addon, version))
                continue

            install(addon)

    elif cmd == 'settings':
        if not addon_id:
            for idx, addon in enumerate(installed_addons):
                _print('{}: {}'.format(idx, addon))

            addon_id = installed_addons[int(get_input('\nSelect: '))]
            return menu(url='settings://{}'.format(addon_id))

        key = None  #need to parse from url
        value = None  #need to parse from url

        addon = xbmcaddon.Addon(addon_id)

        if key and value:
            addon.setSetting(key, value)
        elif key:
            _print(addon.getSetting(key))
        else:
            addon.openSettings()

    elif cmd == 'plugin':
        if not addon_id:
            for idx, addon in enumerate(installed_addons):
                addon_xml_path = os.path.join(addons_dir, addon, 'addon.xml')
                with open(addon_xml_path) as f:
                    if 'xbmc.python.pluginsource' not in f.read():
                        continue

                _print('{}: {}'.format(idx, addon))

            addon_id = installed_addons[int(get_input('\nSelect: '))]
            return menu(url='plugin://{}'.format(addon_id))

        run(url, module)
        while next_path:
            run(next_path, module)
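The uninstall branch above calls shutil.rmtree(addon_dir) unconditionally, which raises if the directory has already disappeared. A more tolerant sketch (names mirror the snippet; not the original implementation):

import os
import shutil

def remove_addon_dirs(addon_id, addons_dir, addons_data, delete_data=False):
    # ignore_errors tolerates an already-missing directory.
    if delete_data:
        shutil.rmtree(os.path.join(addons_data, addon_id), ignore_errors=True)
    shutil.rmtree(os.path.join(addons_dir, addon_id), ignore_errors=True)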
Code example #55
0
File: test_rule.py Project: shimde/july3
 def tearDown(self):
     shutil.rmtree('test-build', ignore_errors=True)
Code example #56
0
File: test_kgtk_split.py Project: vishalbelsare/kgtk
 def tearDown(self) -> None:
     shutil.rmtree(self.temp_dir)
Code example #57
0
            upscaler.scale_width = args.width
            upscaler.scale_height = args.height
            upscaler.scale_ratio = args.ratio
            upscaler.model_dir = args.model_dir
            upscaler.threads = args.threads
            upscaler.video2x_cache_directory = video2x_cache_directory
            upscaler.image_format = image_format
            upscaler.preserve_frames = preserve_frames

            # run upscaler
            upscaler.create_temp_directories()
            upscaler.run()
            upscaler.cleanup_temp_directories()
    else:
        Avalon.error('Input path is neither a file nor a directory')
        raise FileNotFoundError(f'{args.input} is neither file nor directory')

    Avalon.info(
        f'Program completed, taking {round((time.time() - begin_time), 5)} seconds'
    )
except Exception:
    Avalon.error('An exception has occurred')
    traceback.print_exc()
finally:
    # remove Video2X cache directory
    try:
        if not preserve_frames:
            shutil.rmtree(video2x_cache_directory)
    except FileNotFoundError:
        pass
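The try/except FileNotFoundError in the finally block can be written more compactly with contextlib.suppress; a sketch assuming the same variables:

import contextlib
import shutil

# Equivalent cleanup: tolerate the cache directory already being gone.
with contextlib.suppress(FileNotFoundError):
    if not preserve_frames:
        shutil.rmtree(video2x_cache_directory)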
Code example #58
0
File: format.py Project: spresec/coreclr
def main(argv):
    parser = argparse.ArgumentParser()
    required = parser.add_argument_group('required arguments')
    required.add_argument('-a', '--arch', type=str, 
            default=None, help='architecture to run jit-format on')
    required.add_argument('-o', '--os', type=str,
            default=None, help='operating system')
    required.add_argument('-c', '--coreclr', type=str,
            default=None, help='full path to coreclr')

    args, unknown = parser.parse_known_args(argv)

    if unknown:
        print('Ignoring argument(s): ', ','.join(unknown))

    if args.coreclr is None:
        print('Specify --coreclr')
        return -1
    if args.os is None:
        print('Specify --os')
        return -1
    if args.arch is None:
        print('Specify --arch')
        return -1

    if not os.path.isdir(expandPath(args.coreclr)):
        print('Bad path to coreclr')
        return -1

    coreclr = args.coreclr
    platform = args.os
    arch = args.arch

    my_env = os.environ

    # Download .Net CLI

    dotnetcliUrl = ""
    dotnetcliFilename = ""

    # build.cmd removes the Tools directory, so we need to put our version of jitutils
    # outside of the Tools directory

    dotnetcliPath = os.path.join(coreclr, 'dotnetcli-jitutils')

    # Try to make the dotnetcli-jitutils directory if it doesn't exist

    try:
        os.makedirs(dotnetcliPath)
    except OSError:
        if not os.path.isdir(dotnetcliPath):
            raise

    print("Downloading .Net CLI")
    if platform == 'Linux':
        dotnetcliUrl = "https://go.microsoft.com/fwlink/?LinkID=809129"
        dotnetcliFilename = os.path.join(dotnetcliPath, 'dotnetcli-jitutils.tar.gz')
    elif platform == 'OSX':
        dotnetcliUrl = "https://go.microsoft.com/fwlink/?LinkID=809128"
        dotnetcliFilename = os.path.join(dotnetcliPath, 'dotnetcli-jitutils.tar.gz')
    elif platform == 'Windows_NT':
        dotnetcliUrl = "https://go.microsoft.com/fwlink/?LinkID=809126"
        dotnetcliFilename = os.path.join(dotnetcliPath, 'dotnetcli-jitutils.zip')
    else:
        print('Unknown os ', platform)  # report the parsed value, not the os module
        return -1

    response = urllib2.urlopen(dotnetcliUrl)
    request_url = response.geturl()
    testfile = urllib.URLopener()
    testfile.retrieve(request_url, dotnetcliFilename)

    if not os.path.isfile(dotnetcliFilename):
        print("Did not download .Net CLI!")
        return -1

    # Install .Net CLI

    if platform == 'Linux' or platform == 'OSX':
        tar = tarfile.open(dotnetcliFilename)
        tar.extractall(dotnetcliPath)
        tar.close()
    elif platform == 'Windows_NT':
        with zipfile.ZipFile(dotnetcliFilename, "r") as z:
            z.extractall(dotnetcliPath)

    dotnet = ""
    if platform == 'Linux' or platform == 'OSX':
        dotnet = "dotnet"
    elif platform == 'Windows_NT':
        dotnet = "dotnet.exe"


    if not os.path.isfile(os.path.join(dotnetcliPath, dotnet)):
        print("Did not extract .Net CLI from download")
        return -1

    # Download bootstrap

    bootstrapFilename = ""

    jitUtilsPath = os.path.join(coreclr, "jitutils")

    if os.path.isdir(jitUtilsPath):
        print("Deleting " + jitUtilsPath)
        shutil.rmtree(jitUtilsPath, onerror=del_rw)

    if platform == 'Linux' or platform == 'OSX':
        bootstrapFilename = "bootstrap.sh"
    elif platform == 'Windows_NT':
        bootstrapFilename = "bootstrap.cmd"

    bootstrapUrl = "https://raw.githubusercontent.com/dotnet/jitutils/master/" + bootstrapFilename

    bootstrapPath = os.path.join(coreclr, bootstrapFilename)
    testfile.retrieve(bootstrapUrl, bootstrapPath)

    if not os.path.isfile(bootstrapPath):
        print("Did not download bootstrap!")
        return -1

    # On *nix platforms, we need to make the bootstrap file executable

    if platform == 'Linux' or platform == 'OSX':
        print("Making bootstrap executable")
        os.chmod(bootstrapPath, 0o751)

    print(bootstrapPath)

    # Run bootstrap

    my_env["PATH"] += os.pathsep + dotnetcliPath
    if platform == 'Linux' or platform == 'OSX':
        print("Running bootstrap")
        proc = subprocess.Popen(['bash', bootstrapPath], env=my_env)
        output,error = proc.communicate()
    elif platform == 'Windows_NT':
        proc = subprocess.Popen([bootstrapPath], env=my_env)
        output,error = proc.communicate()

    # Run jit-format

    returncode = 0
    jitutilsBin = os.path.join(coreclr, "jitutils", "bin")
    my_env["PATH"] += os.pathsep + jitutilsBin
    current_dir = os.getcwd()

    if not os.path.isdir(jitutilsBin):
        print("Jitutils not built!")
        return -1

    jitformat = jitutilsBin

    if platform == 'Linux' or platform == 'OSX':
        jitformat = os.path.join(jitformat, "jit-format")
    elif platform == 'Windows_NT':
        jitformat = os.path.join(jitformat,"jit-format.bat")
    errorMessage = ""

    builds = ["Checked", "Debug", "Release"]
    projects = ["dll", "standalone", "crossgen"]

    for build in builds:
        for project in projects:
            proc = subprocess.Popen([jitformat, "-a", arch, "-b", build, "-o", platform, "-c", coreclr, "--verbose", "--projects", project], env=my_env)
            output,error = proc.communicate()
            errorcode = proc.returncode

            if errorcode != 0:
                errorMessage += "\tjit-format -a " + arch + " -b " + build + " -o " + platform
                errorMessage += " -c <absolute-path-to-coreclr> --verbose --fix --projects " + project +"\n"
                returncode = errorcode

                # Fix mode doesn't return an error, so we have to run the build, then run with
                # --fix to generate the patch. This means that it is likely only the first run
                # of jit-format will return a formatting failure.
                if errorcode == -2:
                    # If errorcode was -2, no need to run clang-tidy again
                    proc = subprocess.Popen([jitformat, "--fix", "--untidy", "-a", arch, "-b", build, "-o", platform, "-c", coreclr, "--verbose", "--projects", project], env=my_env)
                    output,error = proc.communicate()
                else:
                    # Otherwise, must run both
                    proc = subprocess.Popen([jitformat, "--fix", "-a", arch, "-b", build, "-o", platform, "-c", coreclr, "--verbose", "--projects", project], env=my_env)
                    output,error = proc.communicate()

    os.chdir(current_dir)

    if returncode != 0:
        # Create a patch file
        patchFile = open("format.patch", "w")
        proc = subprocess.Popen(["git", "diff", "--patch", "-U20"], env=my_env, stdout=patchFile)
        output,error = proc.communicate()

    if os.path.isdir(jitUtilsPath):
        print("Deleting " + jitUtilsPath)
        shutil.rmtree(jitUtilsPath, onerror=del_rw)

    if os.path.isdir(dotnetcliPath):
        print("Deleting " + dotnetcliPath)
        shutil.rmtree(dotnetcliPath, onerror=del_rw)

    if os.path.isfile(bootstrapPath):
        print("Deleting " + bootstrapPath)
        os.remove(bootstrapPath)

    if returncode != 0:
        buildUrl = my_env["BUILD_URL"]
        print("There were errors in formatting. Please run jit-format locally with: \n")
        print(errorMessage)
        print("\nOr download and apply generated patch:")
        print("wget " + buildUrl + "artifact/format.patch")
        print("git apply format.patch")

    return returncode
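del_rw is used as the onerror handler above but not defined in this excerpt. On Windows, rmtree fails on read-only files, and the conventional handler clears the read-only bit and retries; a typical implementation (assumed, not taken from the original file):

import os
import stat

def del_rw(action, path, exc):
    # Clear the read-only attribute, then retry the operation that failed.
    os.chmod(path, stat.S_IWRITE)
    action(path)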
Code example #59
0
# Build manpages if we're making a source distribution tarball.
if 'sdist' in sys.argv:
    # Go into the docs directory and build the manpage.
    docdir = os.path.join(os.path.dirname(__file__), 'docs')
    curdir = os.getcwd()
    os.chdir(docdir)
    try:
        subprocess.check_call(['make', 'man'])
    finally:
        os.chdir(curdir)

    # Copy resulting manpages.
    mandir = os.path.join(os.path.dirname(__file__), 'man')
    if os.path.exists(mandir):
        shutil.rmtree(mandir)
    shutil.copytree(os.path.join(docdir, '_build', 'man'), mandir)
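    # Note: the rmtree above is needed because copytree refuses to write into
    # an existing directory before Python 3.8; on 3.8+ the pair can collapse
    # into one call (sketch, same paths as above):
    #     shutil.copytree(os.path.join(docdir, '_build', 'man'), mandir,
    #                     dirs_exist_ok=True)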

setup(name='beets',
      version='1.1.0-beta.2',
      description='music tagger and library organizer',
      author='Adrian Sampson',
      author_email='*****@*****.**',
      url='http://beets.radbox.org/',
      license='MIT',
      platforms='ALL',
      long_description=_read('README.rst'),
      test_suite='test.testall.suite',
      include_package_data=True, # Install plugin resources.

      packages=[
Code example #60
-1
 def test_compare_values_basic_function(self):
     """test basic functionality of compare_values function.  Tests
     that the values are being parsed, and that the Numpy mean feature
     is working correctly"""
     tdir = tempfile.mkdtemp(prefix="filetest_",)
     fpath = os.path.join(tdir,"group1_pruned")
     fp = open(fpath, "w")
     fp.write("        E2348_69_all    H10407_all\n")
     fp.write("IpaH3   0.03    0.03\n")
     fp.write("LT      0.00    1.00\n")
     fp.write("ST2     0.00    0.92\n")
     fp.write("bfpB    1.00    0.00\n")
     fp.write("stx2a   0.07    0.08")
     fp.close()
     npath = os.path.join(tdir,"group2_pruned")
     np = open(npath, "w")
     np.write("        SSON_046_all\n")
     np.write("IpaH3   0.03\n")
     np.write("LT      1.00\n")
     np.write("ST2     1.00\n")
     np.write("bfpB    0.00\n")
     np.write("stx2a   0.08")
     np.close()
     self.assertEqual(compare_values(fpath, npath, "0.8", "0.4"), ([1.00, 0.92, 1.00], [1.00, 1.00], [0.03, 0.5, 0.46, 0.5, 0.07500000000000001]))
     shutil.rmtree(tdir)
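If assertEqual fails, the trailing shutil.rmtree(tdir) never runs and the temporary directory leaks. Registering the cleanup when the directory is created avoids that; a sketch using unittest's addCleanup:

import shutil
import tempfile

def setUp(self):
    self.tdir = tempfile.mkdtemp(prefix="filetest_")
    # addCleanup runs even when an assertion fails, unlike a trailing rmtree.
    self.addCleanup(shutil.rmtree, self.tdir, ignore_errors=True)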