Example #1
def test_bundler():
    """ Run bundler integration """
    shell = platform.system() == 'Windows'

    if os.environ.get('TRAVIS', False):
        raise SkipTest('Skip this test on Travis CI :(')

    # Helper methods
    def is_installed(prog):
        dev_null = open(os.devnull, 'wb')
        try:
            return_value = subprocess.call([prog, '--version'], shell=shell,
                                           stdout=dev_null, stderr=dev_null)
            return return_value == 0
        except OSError:
            return False

    def get_output(cmd):
        return str(subprocess.check_output(cmd, shell=shell))

    # Check for ruby and bundler
    if not (is_installed('ruby') and is_installed('gem')
            and 'bundler' in get_output(['gem', 'list'])):

        # Ruby not installed, skip test
        raise SkipTest('Ruby not installed, skipped Bundler integration test')

    os.chdir(repo_path)

    from PyGitUp.gitup import GitUp
    gitup = GitUp(testing=True)
    gitup.run()
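# A lighter-weight installation check (hypothetical, not part of the original
# test): shutil.which only consults PATH instead of invoking "--version", and
# returns None when the program is missing.
import shutil

def is_installed(prog):
    return shutil.which(prog) is not None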
Example #2
    def test_example1(self):
        testdir = path(l2emod.__file__).parent / 'testtex'
        fn = testdir / 'example1.tex'
        print "file %s" % fn
        with make_temp_directory() as tmdir:
            nfn = '%s/%s' % (tmdir, fn.basename())
            os.system('cp %s/* %s' % (testdir, tmdir))
            os.chdir(tmdir)
            l2e = latex2edx(nfn, output_dir=tmdir)
            l2e.convert()
            xbfn = nfn[:-4] + '.xbundle'
            self.assertTrue(os.path.exists(xbfn))
            # xb = open(xbfn).read()

            # self.assertIn('<chapter display_name="Unit 1" start="2013-11-22" url_name="Unit_1">', xb)
            xml = etree.parse(xbfn).getroot()
            chapter = xml.find('.//chapter')
            self.assertTrue(chapter.get('display_name') == 'Unit 1')
            self.assertTrue(chapter.get('start') == '2013-11-22')
            self.assertTrue(chapter.get('url_name') == 'Unit_1')

            cfn = path(tmdir) / 'course/2013_Fall.xml'
            self.assertTrue(os.path.exists(cfn))

            cfn = path(tmdir) / 'chapter/Unit_1.xml'
            self.assertTrue(os.path.exists(cfn))

            # self.assertIn('<sequential display_name="Introduction" due="2013-11-22" url_name="Introduction"', open(cfn).read())
            xml = etree.parse(cfn).getroot()
            seq = xml.find('.//sequential')
            self.assertTrue(seq.get('display_name') == 'Introduction')
            self.assertTrue(seq.get('due') == '2013-11-22')
            self.assertTrue(seq.get('url_name') == 'Introduction')

            self.assertIn('<problem url_name="p1"/>', open(cfn).read())
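            # A minor alternative (not from the original test): unittest's assertEqual
            # reports both values on failure, which makes a broken attribute easier to read.
            self.assertEqual(seq.get('display_name'), 'Introduction')
            self.assertEqual(seq.get('due'), '2013-11-22')
            self.assertEqual(seq.get('url_name'), 'Introduction')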
Example #3
def init_doc_branch():
    msg("Initialising %s branch" % DOC_TARGET_BRANCH)

    dir = os.getcwdu()
    msg_trace('dir = %r' % dir)

    tmp = tempfile.mkdtemp(prefix=TEMP_CHECKOUT_PREFIX)
    msg_trace('tmp = %r' % tmp)

    try:
        msg("Cloning into a temporary directory...")
        sh('git init -q "%s"' % tmp)
        msg_trace('os.chdir(%r)' % tmp)
        os.chdir(tmp)
        sh('git checkout -q --orphan "%s"' % DOC_TARGET_BRANCH)
        sh('git commit -qm "Initial commit." --allow-empty')
        sh('git remote add origin "%s"' % dir)
        sh('git push -q origin gh-pages')

    finally:
        msg('Cleaning up...')
        msg_trace('os.chdir(%r)' % dir)
        os.chdir(dir)
        msg_trace('shutil.rmtree(%r)' % tmp)
        really_rmtree(tmp)

    msg('%s is ready.  Continuing.' % DOC_TARGET_BRANCH)
Example #4
def scoped_cwd(path):
  cwd = os.getcwd()
  os.chdir(path)
  try:
    yield
  finally:
    os.chdir(cwd)
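# Note: scoped_cwd is a generator; to use it in a "with" statement it needs the
# contextlib.contextmanager decorator, which this excerpt presumably omits.
# A minimal self-contained sketch:
import contextlib
import os

@contextlib.contextmanager
def scoped_cwd(path):
  cwd = os.getcwd()
  os.chdir(path)
  try:
    yield
  finally:
    os.chdir(cwd)

# Usage: the block runs inside `path`; the previous directory is restored on exit.
with scoped_cwd('/tmp'):
  print(os.getcwd())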
Example #5
def daca2(foldernum):
    folders = '0123456789abcdefghijklmnopqrstuvwxyz'
    folder = folders[foldernum % len(folders)]

    print('Daca2 folder=' + folder)

    os.chdir(os.path.expanduser('~/cppcheck'))
    subprocess.call(['git', 'pull'])
    p = subprocess.Popen(['git', 'show', '--format=%h'],
                         stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    comm = p.communicate()
    rev = comm[0]
    rev = rev[:rev.find('\n')]

    # compile cppcheck
    subprocess.call(['nice', 'make', 'SRCDIR=build', 'CFGDIR=' + os.path.expanduser('~/cppcheck/cfg'), 'CXXFLAGS=-g -O2', 'CPPFLAGS=-DMAXTIME=600'])
    subprocess.call(['cp', 'cppcheck', os.path.expanduser('~/daca2/cppcheck-O2')])

    # run cppcheck
    subprocess.call(['rm', '-rf', os.path.expanduser('~/daca2/' + folder)])
    subprocess.call(['nice', '--adjustment=19', 'python', os.path.expanduser('~/cppcheck/tools/daca2.py'), folder, '--rev=' + rev])
    upload(os.path.expanduser('~/daca2/' + folder + '/results.txt'), 'evidente/results-' + folder + '.txt')
    subprocess.call(['rm', '-rf', os.path.expanduser('~/daca2/lib' + folder)])
    subprocess.call(['nice', '--adjustment=19', 'python', os.path.expanduser('~/cppcheck/tools/daca2.py'), 'lib' + folder, '--rev=' + rev])
    upload(os.path.expanduser('~/daca2/lib' + folder + '/results.txt'), 'evidente/results-lib' + folder + '.txt')

    # run cppcheck addons
    subprocess.call(['rm', '-rf', os.path.expanduser('~/daca2/' + folder)])
    subprocess.call(['nice', '--adjustment=19', 'python', os.path.expanduser('~/cppcheck/tools/daca2-addons.py'), folder, '--rev=' + rev])
    upload(os.path.expanduser('~/daca2/' + folder + '/results.txt'), 'evidente/addons-' + folder + '.txt')
    subprocess.call(['rm', '-rf', os.path.expanduser('~/daca2/lib' + folder)])
    subprocess.call(['nice', '--adjustment=19', 'python', os.path.expanduser('~/cppcheck/tools/daca2-addons.py'), 'lib' + folder, '--rev=' + rev])
    upload(os.path.expanduser('~/daca2/lib' + folder + '/results.txt'), 'evidente/addons-lib' + folder + '.txt')
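# Note: under Python 3, communicate() returns bytes, so the rev.find('\n')
# slicing above would fail. A minimal sketch of the same revision lookup with
# decoded output (assuming git is available on PATH):
p = subprocess.Popen(['git', 'show', '--format=%h'],
                     stdout=subprocess.PIPE, stderr=subprocess.PIPE,
                     universal_newlines=True)  # decode stdout/stderr to str
out, _ = p.communicate()
rev = out.splitlines()[0] if out else ''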
Example #6
    def setUp(self):
        self._cwd = os.getcwd()
        dirname = os.path.dirname(__file__)
        if dirname:
            os.chdir(dirname)
        self.run_cmd(['make', 'clean'])
        self.run_cmd(['make'])
Example #7
def download_file(url, to_directory='.'):
    cwd = os.getcwd()
    os.chdir(to_directory)

    try:
        file_name = url.split('/')[-1]
        u = urllib2.urlopen(url)
        f = open(file_name, 'wb')
        meta = u.info()
        etag = meta.getheaders("etag")[0]
        file_size = int(meta.getheaders("Content-Length")[0])
        print "Downloading: %s Bytes: %s" % (file_name, file_size)

        file_size_dl = 0
        block_sz = 8192
        while True:
            buffer = u.read(block_sz)
            if not buffer:
                break
            file_size_dl += len(buffer)
            f.write(buffer)
            status = r"%10d  [%3.2f%%]" % (file_size_dl, file_size_dl * 100. / file_size)
            status = status + chr(8)*(len(status)+1)
            print status,
        f.close()
        return etag
    finally:
        os.chdir(cwd)
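# The helper above uses Python 2 APIs (urllib2, meta.getheaders, the print
# statement). A rough Python 3 equivalent of the same download-and-restore-cwd
# pattern, offered as a sketch rather than a drop-in replacement:
import os
import urllib.request

def download_file_py3(url, to_directory='.'):
    cwd = os.getcwd()
    os.chdir(to_directory)
    try:
        file_name = url.split('/')[-1]
        with urllib.request.urlopen(url) as u, open(file_name, 'wb') as f:
            etag = u.headers.get('ETag')
            file_size = int(u.headers.get('Content-Length', 0))
            print("Downloading: %s Bytes: %s" % (file_name, file_size))
            while True:
                buf = u.read(8192)
                if not buf:
                    break
                f.write(buf)
        return etag
    finally:
        os.chdir(cwd)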
Example #8
def test_cycle_namesource1():
    tmp_infile = setup_file()
    tmpd, nme, ext = split_filename(tmp_infile)
    pwd = os.getcwd()
    os.chdir(tmpd)

    class spec3(nib.CommandLineInputSpec):
        moo = nib.File(name_source=['doo'], hash_files=False, argstr="%s",
                       position=1, name_template='%s_mootpl')
        poo = nib.File(name_source=['moo'], hash_files=False,
                       argstr="%s", position=2)
        doo = nib.File(name_source=['poo'], hash_files=False,
                       argstr="%s", position=3)

    class TestCycle(nib.CommandLine):
        _cmd = "mycommand"
        input_spec = spec3

    # Check that an exception is raised
    to0 = TestCycle()
    not_raised = True
    try:
        to0.cmdline
    except nib.NipypeInterfaceError:
        not_raised = False
    yield assert_false, not_raised

    os.chdir(pwd)
    teardown_file(tmpd)
Example #9
def test_TraitedSpec_withNoFileHashing():
    tmp_infile = setup_file()
    tmpd, nme = os.path.split(tmp_infile)
    pwd = os.getcwd()
    os.chdir(tmpd)
    yield assert_true, os.path.exists(tmp_infile)
    class spec2(nib.TraitedSpec):
        moo = nib.File(exists=True, hash_files=False)
        doo = nib.traits.List(nib.File(exists=True))
    infields = spec2(moo=nme, doo=[tmp_infile])
    hashval = infields.get_hashval(hash_method='content')
    yield assert_equal, hashval[1], '8da4669ff5d72f670a46ea3e7a203215'

    class spec3(nib.TraitedSpec):
        moo = nib.File(exists=True, name_source="doo")
        doo = nib.traits.List(nib.File(exists=True))
    infields = spec3(moo=nme, doo=[tmp_infile])
    hashval1 = infields.get_hashval(hash_method='content')

    class spec4(nib.TraitedSpec):
        moo = nib.File(exists=True)
        doo = nib.traits.List(nib.File(exists=True))
    infields = spec4(moo=nme, doo=[tmp_infile])
    hashval2 = infields.get_hashval(hash_method='content')

    yield assert_not_equal, hashval1[1],  hashval2[1]
    os.chdir(pwd)
    teardown_file(tmpd)
Example #10
def main():
    args = prog_options()

    # Iterate through all directories and access their files
    for root, dirs, files in os.walk(args.sample_dir):
        if root != args.sample_dir:
            os.chdir(root)
            print root[24:27]

            # Get relevant file name
            for file in files:
                if file.endswith('.fastq'):
                    trim_out = file

            # Option to get counts of adapters in FLASh output merged file
            most_common_seqs = count_common_seqs(args.base_counts,
                                                 args.top_common_count,
                                                 trim_out)
            with open(args.out_fnh, 'w') as outf:
                outf.write('{}\n'.format(trim_out))
                outf.write('Read Count\tCommon Sequence\t'
                           'Common Sequence Counts\tPercent in reads')
                for d in most_common_seqs:
                    outf.write('{}\t{}\t{}\t{}'
                               .format(d[0], d[1], d[2], d[3]))
            return
Example #11
def test_chained_namesource():
    tmp_infile = setup_file()
    tmpd, nme, ext = split_filename(tmp_infile)
    pwd = os.getcwd()
    os.chdir(tmpd)

    class spec2(nib.CommandLineInputSpec):
        doo = nib.File(exists=True, argstr="%s", position=1)
        moo = nib.File(name_source=['doo'], hash_files=False, argstr="%s",
                       position=2, name_template='%s_mootpl')
        poo = nib.File(name_source=['moo'], hash_files=False,
                       argstr="%s", position=3)

    class TestName(nib.CommandLine):
        _cmd = "mycommand"
        input_spec = spec2

    testobj = TestName()
    testobj.inputs.doo = tmp_infile
    res = testobj.cmdline
    yield assert_true, '%s' % tmp_infile in res
    yield assert_true, '%s_mootpl ' % nme in res
    yield assert_true, '%s_mootpl_generated' % nme in res

    os.chdir(pwd)
    teardown_file(tmpd)
Example #12
def backup_git_by_page(page, logger):
    git = get_gitlab_instance()
    projects = git.projects.all(page=page, per_page=100)
    git_data_path = GIT_SETTINGS.get('git_data_path')
    if 0 == len(projects):
        logger.info("All projects backup completed !")
        exit(0)
    else:
        logger.info("There are %s projects on page %s." % (len(projects), page))
        try:
            for project in projects:
                git_repo_path = os.path.join(git_data_path, project.path_with_namespace + ".git")
                logger.debug("begin to backup git repo %s !" % project.path_with_namespace)

                # If the project has already been cloned, run git fetch; otherwise clone it as a mirror.
                if os.path.exists(git_repo_path):
                    os.chdir(git_repo_path)
                    for output in os.popen("git fetch 2>&1"):
                        record_log_with_level(logger, output)
                else:
                    for output in os.popen("git clone --mirror %s %s 2>&1" % (project.http_url_to_repo, git_repo_path)):
                        record_log_with_level(logger, output)
        except:
            logger.exception('Got exception on logger handler:')
            raise
        logger.info("The projects of page %s backup completed !" % page)
Example #13
def daemonize( errfile ):
    """
    Detach process and become a daemon.
    """
    pid = os.fork()
    if pid:
        os._exit(0)

    os.setsid()
    signal.signal(signal.SIGHUP, signal.SIG_IGN)
    os.umask(0)

    pid = os.fork()
    if pid:
        os._exit(0)

    os.chdir("/")
    for fd in range(0,20):
        try:
            os.close(fd)
        except OSError:
            pass

    sys.stdin = open("/dev/null","r")
    sys.stdout = open("/dev/null","w")
    sys.stderr = ErrorLog( errfile )
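# A common variant (not from the original): redirect the standard streams at
# the file-descriptor level with dup2 instead of rebinding the sys objects.
devnull = os.open(os.devnull, os.O_RDWR)
os.dup2(devnull, 0)   # stdin
os.dup2(devnull, 1)   # stdout
os.dup2(devnull, 2)   # stderr
if devnull > 2:
    os.close(devnull)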
Example #14
    def processAux(self, dFrag):
        self.depth=self.depth+1
        if not self.files.has_key(self.depth):
            self.files[self.depth]=[]
        thisDir=self.compoundDir(self.topDir, dFrag)
        os.chdir(thisDir)
        self.theDict[thisDir]={'xml': [], 'bin': [], 'dir': []}
        # print "Processing",thisDir," Depth",self.depth
        thisDirContents=os.listdir(thisDir)
        for fname in thisDirContents:
            if stat.S_ISDIR(os.stat(fname)[stat.ST_MODE]):
                if not re.match("^(CVS|images|search|photos|htdig|\.)", fname) and self.depth<4:
                    self.processAux(self.compoundDir(dFrag,fname))
                    self.handleDir(thisDir, fname)
                    os.chdir(thisDir)
            else:
                # print "File",fname
                if re.match(".*\.xml$", fname):
                    self.handleXML(thisDir, dFrag, fname)
                elif re.match(".*\.(jpe?g|JPG|gif|png|html)$",
                              fname):
                    self.handleBinary(thisDir, fname)

        self.writeIndex(dFrag)
        self.depth=self.depth-1
Example #15
    def make_new_version_message(self, path):
        """Make a new version message for the repo at the given path."""

        try:
            cwd = os.getcwd()
            os.chdir(path)

            version = self.get_current_tag()

            if version[0] is None:
                return

            messages_path = os.path.join(path, 'messages.json')
            message_path = self.rewrite_messages_json(messages_path, version)

            if os.path.exists(message_path):
                os.remove(message_path)

            with open(message_path, mode='w', encoding='utf-8') as f:
                header = '{} {}'.format(
                    os.path.basename(path),
                    os.path.splitext(os.path.basename(message_path))[0])
                f.write('{}\n{}\n'.format(header, '-' * (len(header) + 1)))
                f.write(self.get_commit_messages_since(version))

            self.window.run_command('open_file', args={'file': message_path})

        except Exception:
            import traceback
            traceback.print_exc()
        finally:
            os.chdir(cwd)
Example #16
def test_CommandLine_output():
    tmp_infile = setup_file()
    tmpd, name = os.path.split(tmp_infile)
    pwd = os.getcwd()
    os.chdir(tmpd)
    yield assert_true, os.path.exists(tmp_infile)
    ci = nib.CommandLine(command='ls -l')
    ci.inputs.terminal_output = 'allatonce'
    res = ci.run()
    yield assert_equal, res.runtime.merged, ''
    yield assert_true, name in res.runtime.stdout
    ci = nib.CommandLine(command='ls -l')
    ci.inputs.terminal_output = 'file'
    res = ci.run()
    yield assert_true, 'stdout.nipype' in res.runtime.stdout
    yield assert_equal, type(res.runtime.stdout), type('hi')
    ci = nib.CommandLine(command='ls -l')
    ci.inputs.terminal_output = 'none'
    res = ci.run()
    yield assert_equal, res.runtime.stdout, ''
    ci = nib.CommandLine(command='ls -l')
    res = ci.run()
    yield assert_true, 'stdout.nipype' in res.runtime.stdout
    os.chdir(pwd)
    teardown_file(tmpd)
Example #17
    def test_no_setup_cfg(self):
        # makes sure easy_install as a command (main)
        # doesn't use a setup.cfg file that is located
        # in the current working directory
        dir = tempfile.mkdtemp()
        setup_cfg = open(os.path.join(dir, 'setup.cfg'), 'w')
        setup_cfg.write('[easy_install]\nfind_links = http://example.com')
        setup_cfg.close()
        setup_py = open(os.path.join(dir, 'setup.py'), 'w')
        setup_py.write(SETUP_PY)
        setup_py.close()

        from setuptools.dist import Distribution

        def _parse_command_line(self):
            msg = 'Error: a local setup.cfg was used'
            opts = self.command_options
            if 'easy_install' in opts:
                assert 'find_links' not in opts['easy_install'], msg
            return self._old_parse_command_line()

        Distribution._old_parse_command_line = Distribution.parse_command_line
        Distribution.parse_command_line = _parse_command_line

        old_wd = os.getcwd()
        try:
            os.chdir(dir)
            reset_setup_stop_context(
                lambda: self.assertRaises(SystemExit, main, [])
            )
        finally:
            os.chdir(old_wd)
            shutil.rmtree(dir)
            Distribution.parse_command_line = Distribution._old_parse_command_line
Example #18
def main():
    os.chdir(os.path.dirname(os.path.realpath(__file__)))
    os.system("pip install certifi")

    print "Copying certifi's cacert.pem"
    import certifi
    shutil.copy2(certifi.where(), 'agkyra/resources/cacert.pem')
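# A hypothetical alternative to os.system: run pip with the interpreter that is
# executing this script and raise if the install fails.
import subprocess
import sys

subprocess.check_call([sys.executable, '-m', 'pip', 'install', 'certifi'])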
Example #19
def build_launcher(out_path, icon_path, file_desc, product_name, product_version,
                   company_name, entry_point, is_gui):

    src_ico = os.path.abspath(icon_path)
    target = os.path.abspath(out_path)

    file_version = product_version

    dir_ = os.getcwd()
    temp = tempfile.mkdtemp()
    try:
        os.chdir(temp)
        with open("launcher.c", "w") as h:
            h.write(get_launcher_code(entry_point))
        shutil.copyfile(src_ico, "launcher.ico")
        with open("launcher.rc", "w") as h:
            h.write(get_resource_code(
                os.path.basename(target), file_version, file_desc,
                "launcher.ico", product_name, product_version, company_name))

        build_resource("launcher.rc", "launcher.res")
        build_exe("launcher.c", "launcher.res", is_gui, target)
    finally:
        os.chdir(dir_)
        shutil.rmtree(temp)
Example #20
    def do_deploy():
        def _run_ansible():
            subprocess.check_call(['ansible-playbook', 'masters.yml'],
                                  cwd='result/{}'.format(grid_name))
        parent_deployment._status = 'masters_provision_deploying'
        provision_deployment._status = 'masters_deploying'
        provision_deployment.save()
        parent_deployment.save()
        provision_generator = provision_generators[grid.type][
            grid.provider](grid_name, **kwargs)
        provision_generator.generate_all(
            grid_name, infrastructure_deployment._accessip)
        try:
            _run_ansible()
        except:
            provision_deployment._status = 'masters_deploy_failed'
            parent_deployment._status = 'masters_provision_deploy_failed'
        else:
            provision_deployment._status = 'masters_deployed'
            parent_deployment._status = 'masters_provision_finished'
        finally:
            provision_deployment.save()
            self.unlock(parent_deployment)
            parent_deployment.save()
            os.chdir(cwd)
            try:
                del os.environ['AWS_ACCESS_KEY_ID']
            except:
                print('no such env')
            try:
                del os.environ['AWS_SECRET_ACCESS_KEY']
            except:
                print('no such env')
            shutil.rmtree('result/{}'.format(grid_name))
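# Minor alternative (not from the original): os.environ.pop with a default
# removes the variable if present and silently does nothing otherwise, so the
# try/except del blocks above are not needed.
os.environ.pop('AWS_ACCESS_KEY_ID', None)
os.environ.pop('AWS_SECRET_ACCESS_KEY', None)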
Example #21
def upgradeMaster(config, _noMonkey=False):
    if not _noMonkey:  # pragma: no cover
        monkeypatches.patch_all()

    if not base.checkBasedir(config):
        defer.returnValue(1)
        return

    os.chdir(config['basedir'])

    try:
        configFile = base.getConfigFileFromTac(config['basedir'])
    except (SyntaxError, ImportError) as e:
        print("Unable to load 'buildbot.tac' from '%s':" % config['basedir'])
        print(e)

        defer.returnValue(1)
        return
    master_cfg = base.loadConfig(config, configFile)
    if not master_cfg:
        defer.returnValue(1)
        return

    upgradeFiles(config)
    yield upgradeDatabase(config, master_cfg)

    if not config['quiet']:
        print("upgrade complete")

    defer.returnValue(0)
Example #22
def test_run_multiproc_nondaemon_with_flag(nondaemon_flag):
    '''
    Start a pipe with two nodes using the multiproc plugin and passing the nondaemon_flag.
    '''
    
    cur_dir = os.getcwd()
    temp_dir = mkdtemp(prefix='test_engine_')
    os.chdir(temp_dir)
    
    pipe = pe.Workflow(name='pipe')
    
    f1 = pe.Node(interface=Function(function=TestInterface.testFunction, input_names=['sum'], output_names=['sum_out']), name='f1')
    f2 = pe.Node(interface=Function(function=TestInterface.testFunction, input_names=['sum'], output_names=['sum_out']), name='f2')

    pipe.connect([(f1,f2,[('sum_out','sum')])])
    pipe.base_dir = os.getcwd()
    f1.inputs.sum = 0
    
    # execute the pipe using the MultiProc plugin with 2 processes and the non_daemon flag
    # to enable child processes which start other multiprocessing jobs
    execgraph = pipe.run(plugin="MultiProc", plugin_args={'n_procs':2, 'non_daemon':nondaemon_flag})
    
    names = ['.'.join((node._hierarchy,node.name)) for node in execgraph.nodes()]
    node = execgraph.nodes()[names.index('pipe.f2')]
    result = node.get_output('sum_out')
    yield assert_equal, result, 180 # n_procs (2) * numberOfThreads (2) * 45 == 180
    os.chdir(cur_dir)
    rmtree(temp_dir)
Example #23
def buildComponent(component, componentDir=None):
    """ Build the component. Return a pair of paths
        (pathToBinary, pathToXPTFile)"""
    logger = build_util.getLogger('build_components')
    # Save current working directory to set it back later
    prevDir = os.getcwd()
    # Component Directory
    if componentDir is None:
        componentDir = os.path.join(Settings.prefs.src_dir, "components", component)

    os.chdir(componentDir)
    logger.info("Making build and bin dirs for component %s"%component)
    buildDir = os.path.join(componentDir, "build")
    binDir = os.path.join(componentDir, "bin")
    
    for dir in [buildDir, binDir]:
        try:
            os.mkdir(dir)
        except OSError, err:
            if err.errno == errno.EEXIST:
                logger.warning("Couldn't make %s because it exists."%dir)
                logger.warning("Deleting %s"%dir)
                shutil.rmtree(dir)
                logger.warning("Trying to make %s again"%dir)
                os.mkdir(dir)
            else:
                raise
Example #24
def working_directory(path):
    old = os.getcwd()
    try:
        os.chdir(path)
        yield
    finally:
        os.chdir(old)
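# Like Example #4, working_directory needs the contextlib.contextmanager
# decorator to be usable in a "with" block. Since Python 3.11 the standard
# library also ships an equivalent helper (sketch assumes Python 3.11+):
import contextlib

with contextlib.chdir('/tmp'):
    ...  # runs with /tmp as the working directory; the old one is restored on exit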
Example #25
File: Git.py Project: njqaaa/deploy
def mergeToBranch(branchName,branchType,branchDesc):
    '''
    Merge the branch into master.
    '''
    shellPath = settings.SHELL_PATH
    os.chdir(shellPath)
    try:
        if_merge = VersionList.objects.filter(branch_name = branchName)[0]
        message = "该分支已被合并到master了,请去进行中项目里生成新分支"
    except:
        if_merge = 'no_merge'
        opResult = os.popen(shellPath+"git-merge-branch.sh %s %s %s %s" % (branchName,branchType,branchType,branchDesc))
        outputs = "<br/>".join(opResult.readlines())  # command output for the log
        preg = outputs.find('have conflict')
        no_branch = outputs.find('no this branch')
        error_repo = outputs.find('repo is error')
        if_push_ok = outputs.find('FAIL')
        if_pass = outputs.find('PASS')
        if error_repo >= 0:
            message = str(branchType) + " repository error, please contact the administrator"
        elif no_branch >= 0:
            message = "This branch does not exist, please double-check the branch name"
        elif preg >= 0:
            message = "Conflict while rebasing onto master, please resolve it; branch: " + str(branchName)
        elif if_push_ok >= 0:
            message = "Git shows no merge record for this branch and no conflict; please verify the branch name"
        elif if_pass >= 0:
            message = "Merge succeeded"
        else:
            message = "no status"

    return message
Example #26
    def configure(self):

        # configure for 64-bit build
        self.updatecfg('configopts', "-b 64")

        if self.getcfg('ignorethrottling'):
            # ignore CPU throttling check
            # this is not recommended, it will disturb the measurements done by ATLAS
            # used for the EasyBuild demo, to avoid requiring root privileges
            self.updatecfg('configopts', '-Si cputhrchk 0')

        # if LAPACK is found, instruct ATLAS to provide a full LAPACK library
        # ATLAS only provides a few LAPACK routines natively
        if self.getcfg('full_lapack'):
            lapack = get_software_root('LAPACK')
            if lapack:
                self.updatecfg('configopts', ' --with-netlib-lapack=%s/lib/liblapack.a' % lapack)
            else:
                self.log.error("netlib's LAPACK library not available,"\
                               " required to build ATLAS with a full LAPACK library.")

        # enable building of shared libraries (requires -fPIC)
        if self.getcfg('sharedlibs') or self.toolkit().opts['pic']:
            self.log.debug("Enabling -fPIC because we're building shared ATLAS libs, or just because.")
            self.updatecfg('configopts', '-Fa alg -fPIC')

        # ATLAS only wants to be configured/built in a separate dir
        try:
            objdir = "obj"
            os.makedirs(objdir)
            os.chdir(objdir)
        except OSError, err:
            self.log.error("Failed to create obj directory to build in: %s" % err)
Example #27
    def test_merge(self):
        testdir = path(l2emod.__file__).parent / 'testtex'
        with make_temp_directory() as tmdir:
            fn = testdir / 'example1.tex'
            print "file %s" % fn
            nfn = '%s/%s' % (tmdir, fn.basename())
            os.system('cp %s/* %s' % (testdir, tmdir))
            os.chdir(tmdir)
            l2e = latex2edx(nfn, output_dir=tmdir)
            l2e.convert()

            fn = testdir / 'example2.tex'
            print "file %s" % fn
            nfn = '%s/%s' % (tmdir, fn.basename())
            l2e = latex2edx(nfn, output_dir=tmdir, do_merge=True)
            l2e.convert()

            cfn = path(tmdir) / 'course/2013_Fall.xml'
            self.assertTrue(os.path.exists(cfn))

            self.assertIn('<chapter url_name="Unit_1"', open(cfn).read())
            self.assertIn('<chapter url_name="Unit_2"', open(cfn).read())

            cfn = path(tmdir) / 'chapter/Unit_1.xml'
            self.assertTrue(os.path.exists(cfn))

            cfn = path(tmdir) / 'chapter/Unit_2.xml'
            self.assertTrue(os.path.exists(cfn))
Example #28
def initEnKFrun(callback=None,basedir=None,site=None,siteparam=None):
    if not site:
        raise ValueError("site is a required parameter")
    if siteparam:
        param = ast.literal_eval(siteparam)
    else:
        raise ValueError("Parameters are required; please submit parameters with the task submission")
    if not basedir:
        raise ValueError("basedir is required")
    #Create working directory
    newDir = basedir + "celery_data/" + str(initEnKFrun.request.id)
    check_call(["mkdir",newDir])
    os.chdir(newDir)
    #copy matlab code to working directory
    codeDir =basedir + 'enfk_matlab/'
    for file in glob.glob(codeDir + '*'): 
        shutil.copy(file, newDir) 
    #check_call(["cp","-r",codeDir + '*',newDir])
    # set initial parameter files
    setup_param(newDir,param)
    if callback:
        result=subtask(callback).delay(task_id=str(initEnKFrun.request.id),wkdir=newDir)
        return {'task_id':result.task_id,'task_name':result.task_name}
    else:
        return newDir
Example #29
def main(name):
  try:
    from python_qt_binding.QtGui import QApplication
  except:
    print >> sys.stderr, "please install 'python_qt_binding' package!!"
    sys.exit(-1)

  masteruri = init_cfg_path()
  parser = init_arg_parser()
  args = rospy.myargv(argv=sys.argv)
  parsed_args = parser.parse_args(args[1:])
  # Initialize Qt
  global app
  app = QApplication(sys.argv)

  # decide to show main or echo dialog
  global main_form
  if parsed_args.echo:
    main_form = init_echo_dialog(name, masteruri, parsed_args.echo[0], parsed_args.echo[1], parsed_args.hz)
  else:
    main_form = init_main_window(name, masteruri, parsed_args.file)

  # resize and show the qt window
  if not rospy.is_shutdown():
    os.chdir(PACKAGE_DIR)  # change directory so images referenced in descriptions can be found
    main_form.resize(1024, 720)
    screen_size = QApplication.desktop().availableGeometry()
    if main_form.size().width() >= screen_size.width() or main_form.size().height() >= screen_size.height()-24:
      main_form.showMaximized()
    else:
      main_form.show()
    exit_code = -1
    rospy.on_shutdown(finish)
    exit_code = app.exec_()
Example #30
def main():
    cur_dir = os.path.dirname(__file__)
    os.chdir(os.path.join(cur_dir, ".."))
    modules = sys.argv[1:]

    if not modules:
        modules = ['django_evolution']

    p = subprocess.Popen(['pyflakes'] + modules,
                         stderr=subprocess.PIPE,
                         stdout=subprocess.PIPE,
                         close_fds=True)

    contents = p.stdout.readlines()
    # Read in the exclusions file
    exclusions = {}
    fp = open(os.path.join(cur_dir, "pyflakes.exclude"), "r")

    for line in fp.readlines():
        if not line.startswith("#"):
            exclusions[line.rstrip()] = 1

    fp.close()

    # Now filter the output against the exclusions
    for line in contents:
        line = line.rstrip()
        test_line = re.sub(r':[0-9]+:', r':*:', line, 1)
        test_line = re.sub(r'line [0-9]+', r'line *', test_line)

        if test_line not in exclusions:
            print line